src/hotspot/share/runtime/sharedRuntime.cpp

  28 #include "classfile/javaClasses.inline.hpp"
  29 #include "classfile/stringTable.hpp"
  30 #include "classfile/vmClasses.hpp"
  31 #include "classfile/vmSymbols.hpp"
  32 #include "code/aotCodeCache.hpp"
  33 #include "code/codeCache.hpp"
  34 #include "code/compiledIC.hpp"
  35 #include "code/nmethod.inline.hpp"
  36 #include "code/scopeDesc.hpp"
  37 #include "code/vtableStubs.hpp"
  38 #include "compiler/abstractCompiler.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "gc/shared/barrierSet.hpp"
  42 #include "gc/shared/collectedHeap.hpp"
  43 #include "interpreter/interpreter.hpp"
  44 #include "interpreter/interpreterRuntime.hpp"
  45 #include "jfr/jfrEvents.hpp"
  46 #include "jvm.h"
  47 #include "logging/log.hpp"

  48 #include "memory/resourceArea.hpp"
  49 #include "memory/universe.hpp"
  50 #include "metaprogramming/primitiveConversions.hpp"



  51 #include "oops/klass.hpp"
  52 #include "oops/method.inline.hpp"
  53 #include "oops/objArrayKlass.hpp"

  54 #include "oops/oop.inline.hpp"
  55 #include "prims/forte.hpp"
  56 #include "prims/jvmtiExport.hpp"
  57 #include "prims/jvmtiThreadState.hpp"
  58 #include "prims/methodHandles.hpp"
  59 #include "prims/nativeLookup.hpp"
  60 #include "runtime/arguments.hpp"
  61 #include "runtime/atomicAccess.hpp"
  62 #include "runtime/basicLock.inline.hpp"
  63 #include "runtime/frame.inline.hpp"
  64 #include "runtime/handles.inline.hpp"
  65 #include "runtime/init.hpp"
  66 #include "runtime/interfaceSupport.inline.hpp"
  67 #include "runtime/java.hpp"
  68 #include "runtime/javaCalls.hpp"
  69 #include "runtime/jniHandles.inline.hpp"
  70 #include "runtime/osThread.hpp"
  71 #include "runtime/perfData.hpp"
  72 #include "runtime/sharedRuntime.hpp"

  73 #include "runtime/stackWatermarkSet.hpp"
  74 #include "runtime/stubRoutines.hpp"
  75 #include "runtime/synchronizer.hpp"
  76 #include "runtime/timerTrace.hpp"
  77 #include "runtime/vframe.inline.hpp"
  78 #include "runtime/vframeArray.hpp"
  79 #include "runtime/vm_version.hpp"
  80 #include "utilities/copy.hpp"
  81 #include "utilities/dtrace.hpp"
  82 #include "utilities/events.hpp"
  83 #include "utilities/globalDefinitions.hpp"
  84 #include "utilities/hashTable.hpp"
  85 #include "utilities/macros.hpp"
  86 #include "utilities/xmlstream.hpp"
  87 #ifdef COMPILER1
  88 #include "c1/c1_Runtime1.hpp"
  89 #endif
  90 #ifdef COMPILER2
  91 #include "opto/runtime.hpp"
  92 #endif

1210 // for a call currently in progress, i.e., arguments have been pushed on the stack
1211 // but the callee has not been invoked yet.  Caller frame must be compiled.
1212 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1213                                               CallInfo& callinfo, TRAPS) {
1214   Handle receiver;
1215   Handle nullHandle;  // create a handy null handle for exception returns
1216   JavaThread* current = THREAD;
1217 
1218   assert(!vfst.at_end(), "Java frame must exist");
1219 
1220   // Find caller and bci from vframe
1221   methodHandle caller(current, vfst.method());
1222   int          bci   = vfst.bci();
1223 
1224   if (caller->is_continuation_enter_intrinsic()) {
1225     bc = Bytecodes::_invokestatic;
1226     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1227     return receiver;
1228   }
1229 
1230   Bytecode_invoke bytecode(caller, bci);
1231   int bytecode_index = bytecode.index();
1232   bc = bytecode.invoke_code();
1233 
1234   methodHandle attached_method(current, extract_attached_method(vfst));
1235   if (attached_method.not_null()) {
1236     Method* callee = bytecode.static_target(CHECK_NH);
1237     vmIntrinsics::ID id = callee->intrinsic_id();
1238     // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual call,
1239     // it attaches the statically resolved method to the call site.
1240     if (MethodHandles::is_signature_polymorphic(id) &&
1241         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1242       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1243 
1244       // Adjust invocation mode according to the attached method.
1245       switch (bc) {
1246         case Bytecodes::_invokevirtual:
1247           if (attached_method->method_holder()->is_interface()) {
1248             bc = Bytecodes::_invokeinterface;
1249           }
1250           break;
1251         case Bytecodes::_invokeinterface:
1252           if (!attached_method->method_holder()->is_interface()) {
1253             bc = Bytecodes::_invokevirtual;
1254           }
1255           break;
1256         case Bytecodes::_invokehandle:
1257           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1258             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1259                                               : Bytecodes::_invokevirtual;
1260           }
1261           break;
1262         default:
1263           break;
1264       }
1265     }
1266   }
1267 
1268   assert(bc != Bytecodes::_illegal, "not initialized");
1269 
1270   bool has_receiver = bc != Bytecodes::_invokestatic &&
1271                       bc != Bytecodes::_invokedynamic &&
1272                       bc != Bytecodes::_invokehandle;

1273 
1274   // Find receiver for non-static call
1275   if (has_receiver) {
1276     // This register map must be updated since we need to find the receiver for
1277     // compiled frames. The receiver might be in a register.
1278     RegisterMap reg_map2(current,
1279                          RegisterMap::UpdateMap::include,
1280                          RegisterMap::ProcessFrames::include,
1281                          RegisterMap::WalkContinuation::skip);
1282     frame stubFrame   = current->last_frame();
1283     // Caller-frame is a compiled frame
1284     frame callerFrame = stubFrame.sender(&reg_map2);
1285 
1286     if (attached_method.is_null()) {
1287       Method* callee = bytecode.static_target(CHECK_NH);

1288       if (callee == nullptr) {
1289         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1290       }
1291     }
1292 
1293     // Retrieve from a compiled argument list
1294     receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1295     assert(oopDesc::is_oop_or_null(receiver()), "");
1296 
1297     if (receiver.is_null()) {
1298       THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1299     }
1300   }
1301 
1302   // Resolve method
1303   if (attached_method.not_null()) {
1304     // Parameterized by attached method.
1305     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH);
1306   } else {
1307     // Parameterized by bytecode.
1308     constantPoolHandle constants(current, caller->constants());
1309     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1310   }
1311 
1312 #ifdef ASSERT
1313   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1314   if (has_receiver) {
1315     assert(receiver.not_null(), "should have thrown exception");
1316     Klass* receiver_klass = receiver->klass();
1317     Klass* rk = nullptr;
1318     if (attached_method.not_null()) {
1319       // In case there's a resolved method attached, use its holder during the check.
1320       rk = attached_method->method_holder();
1321     } else {
1322       // Klass is already loaded.
1323       constantPoolHandle constants(current, caller->constants());
1324       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1325     }
1326     Klass* static_receiver_klass = rk;
1327     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1328            "actual receiver must be subclass of static receiver klass");
1329     if (receiver_klass->is_instance_klass()) {
1330       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1331         tty->print_cr("ERROR: Klass not yet initialized!!");
1332         receiver_klass->print();
1333       }
1334       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1335     }
1336   }
1337 #endif
1338 
1339   return receiver;
1340 }
1341 
1342 methodHandle SharedRuntime::find_callee_method(TRAPS) {
1343   JavaThread* current = THREAD;
1344   ResourceMark rm(current);
1345   // We first need to check whether any Java activations (compiled or interpreted)
1346   // exist on the stack since the last JavaCall.  If not, we need
1347   // to get the target method from the JavaCall wrapper.
1348   vframeStream vfst(current, true);  // Do not skip any javaCalls
1349   methodHandle callee_method;
1350   if (vfst.at_end()) {
1351     // No Java frames were found on the stack since we did the JavaCall.
1352     // Hence the stack can only contain an entry_frame.  We need to
1353     // find the target method from the stub frame.
1354     RegisterMap reg_map(current,
1355                         RegisterMap::UpdateMap::skip,
1356                         RegisterMap::ProcessFrames::include,
1357                         RegisterMap::WalkContinuation::skip);
1358     frame fr = current->last_frame();
1359     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1360     fr = fr.sender(&reg_map);
1361     assert(fr.is_entry_frame(), "must be");
1362     // fr is now pointing to the entry frame.
1363     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1364   } else {
1365     Bytecodes::Code bc;
1366     CallInfo callinfo;
1367     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1368     callee_method = methodHandle(current, callinfo.selected_method());
1369   }
1370   assert(callee_method()->is_method(), "must be");
1371   return callee_method;
1372 }
1373 
1374 // Resolves a call.
1375 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, TRAPS) {
1376   JavaThread* current = THREAD;
1377   ResourceMark rm(current);
1378   RegisterMap cbl_map(current,
1379                       RegisterMap::UpdateMap::skip,
1380                       RegisterMap::ProcessFrames::include,
1381                       RegisterMap::WalkContinuation::skip);
1382   frame caller_frame = current->last_frame().sender(&cbl_map);
1383 
1384   CodeBlob* caller_cb = caller_frame.cb();
1385   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1386   nmethod* caller_nm = caller_cb->as_nmethod();
1387 
1388   // determine call info & receiver
1389   // note: a) receiver is null for static calls
1390   //       b) an exception is thrown if receiver is null for non-static calls
1391   CallInfo call_info;
1392   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1393   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1394 
1395   NoSafepointVerifier nsv;
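       // From here on, no safepoint may occur: a safepoint could deoptimize the
       // caller or invalidate the resolved callee while we patch the call site below.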
1396 
1397   methodHandle callee_method(current, call_info.selected_method());
1398 
1399   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1400          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1401          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1402          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1403          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1404 
1405   assert(!caller_nm->is_unloading(), "It should not be unloading");
1406 
1407 #ifndef PRODUCT
1408   // tracing/debugging/statistics
1409   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1410                  (is_virtual) ? (&_resolve_virtual_ctr) :
1411                                 (&_resolve_static_ctr);
1412   AtomicAccess::inc(addr);
1413 
1414   if (TraceCallFixup) {
1415     ResourceMark rm(current);
1416     tty->print("resolving %s%s (%s) call to",
1417                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1418                Bytecodes::name(invoke_code));
1419     callee_method->print_short_name(tty);
1420     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1421                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1422   }
1423 #endif
1424 
1425   if (invoke_code == Bytecodes::_invokestatic) {
1426     assert(callee_method->method_holder()->is_initialized() ||
1427            callee_method->method_holder()->is_reentrant_initialization(current),
1428            "invalid class initialization state for invoke_static");
1429     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1430       // In order to keep the class initialization check, do not patch the call
1431       // site for a static call when the class is not fully initialized.
1432       // A proper check is enforced by call-site re-resolution on every invocation.
1433       //
1434       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1435       // an explicit class initialization check is put at the nmethod entry (VEP).
1436       assert(callee_method->method_holder()->is_linked(), "must be");
1437       return callee_method;
1438     }
1439   }
1440 
1441 
1442   // JSR 292 key invariant:
1443   // If the resolved method is a MethodHandle invoke target, the call
1444   // site must be a MethodHandle call site, because the lambda form might tail-call
1445   // leaving the stack in a state unknown to either caller or callee
1446 
1447   // Compute entry points. The computation of the entry points is independent of
1448   // patching the call.
1449 
1450   // Make sure the callee nmethod does not get deoptimized and removed before
1451   // we are done patching the code.
1452 
1453 
1454   CompiledICLocker ml(caller_nm);
1455   if (is_virtual && !is_optimized) {
1456     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1457     inline_cache->update(&call_info, receiver->klass());
1458   } else {
1459     // Callsite is a direct call - set it to the destination method
1460     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1461     callsite->set(callee_method);
1462   }
1463 
1464   return callee_method;
1465 }
1466 
1467 // Inline caches exist only in compiled code
1468 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1469 #ifdef ASSERT
1470   RegisterMap reg_map(current,
1471                       RegisterMap::UpdateMap::skip,
1472                       RegisterMap::ProcessFrames::include,
1473                       RegisterMap::WalkContinuation::skip);
1474   frame stub_frame = current->last_frame();
1475   assert(stub_frame.is_runtime_frame(), "sanity check");
1476   frame caller_frame = stub_frame.sender(&reg_map);
1477   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1478 #endif /* ASSERT */
1479 
1480   methodHandle callee_method;

1481   JRT_BLOCK
1482     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1483     // Return Method* through TLS
1484     current->set_vm_result_metadata(callee_method());
1485   JRT_BLOCK_END
1486   // return compiled code entry point after potential safepoints
1487   return get_resolved_entry(current, callee_method);
1488 JRT_END
1489 
1490 
1491 // Handle call site that has been made non-entrant
1492 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1493   // 6243940 We might end up in here if the callee is deoptimized
1494   // as we race to call it.  We don't want to take a safepoint if
1495   // the caller was interpreted because the caller frame will look
1496   // interpreted to the stack walkers and arguments are now
1497   // "compiled" so it is much better to make this transition
1498   // invisible to the stack walking code. The i2c path will
1499   // place the callee method in the callee_target. It is stashed
1500   // there because if we try to find the callee by normal means a
1501   // safepoint is possible and we would have trouble GC'ing the compiled args.
1502   RegisterMap reg_map(current,
1503                       RegisterMap::UpdateMap::skip,
1504                       RegisterMap::ProcessFrames::include,
1505                       RegisterMap::WalkContinuation::skip);
1506   frame stub_frame = current->last_frame();
1507   assert(stub_frame.is_runtime_frame(), "sanity check");
1508   frame caller_frame = stub_frame.sender(&reg_map);
1509 
1510   if (caller_frame.is_interpreted_frame() ||
1511       caller_frame.is_entry_frame() ||
1512       caller_frame.is_upcall_stub_frame()) {
1513     Method* callee = current->callee_target();
1514     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1515     current->set_vm_result_metadata(callee);
1516     current->set_callee_target(nullptr);
1517     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1518       // Bypass class initialization checks in c2i when caller is in native.
1519       // JNI calls to static methods don't have class initialization checks.
1520       // Fast class initialization checks are present in c2i adapters and call into
1521       // SharedRuntime::handle_wrong_method() on the slow path.
1522       //
1523       // JVM upcalls may land here as well, but there's a proper check present in
1524       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1525       // so bypassing it in c2i adapter is benign.
1526       return callee->get_c2i_no_clinit_check_entry();
1527     } else {
1528       return callee->get_c2i_entry();
1529     }
1530   }
1531 
1532   // Must be compiled to compiled path which is safe to stackwalk
1533   methodHandle callee_method;



1534   JRT_BLOCK
1535     // Force resolving of caller (if we called from compiled frame)
1536     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1537     current->set_vm_result_metadata(callee_method());
1538   JRT_BLOCK_END
1539   // return compiled code entry point after potential safepoints
1540   return get_resolved_entry(current, callee_method);
1541 JRT_END
1542 
1543 // Handle abstract method call
1544 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1545   // Verbose error message for AbstractMethodError.
1546   // Get the called method from the invoke bytecode.
1547   vframeStream vfst(current, true);
1548   assert(!vfst.at_end(), "Java frame must exist");
1549   methodHandle caller(current, vfst.method());
1550   Bytecode_invoke invoke(caller, vfst.bci());
1551   DEBUG_ONLY( invoke.verify(); )
1552 
1553   // Find the compiled caller frame.
1554   RegisterMap reg_map(current,
1555                       RegisterMap::UpdateMap::include,
1556                       RegisterMap::ProcessFrames::include,
1557                       RegisterMap::WalkContinuation::skip);
1558   frame stubFrame = current->last_frame();
1559   assert(stubFrame.is_runtime_frame(), "must be");
1560   frame callerFrame = stubFrame.sender(&reg_map);
1561   assert(callerFrame.is_compiled_frame(), "must be");
1562 
1563   // Install exception and return forward entry.
1564   address res = SharedRuntime::throw_AbstractMethodError_entry();
1565   JRT_BLOCK
1566     methodHandle callee(current, invoke.static_target(current));
1567     if (!callee.is_null()) {
1568       oop recv = callerFrame.retrieve_receiver(&reg_map);
1569       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1570       res = StubRoutines::forward_exception_entry();
1571       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1572     }
1573   JRT_BLOCK_END
1574   return res;
1575 JRT_END
1576 
1577 // return verified_code_entry if interp_only_mode is not set for the current thread;
1578 // otherwise return c2i entry.
1579 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {

1580   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1581     // In interp_only_mode we need to go to the interpreted entry
1582     // The c2i won't patch in this mode -- see fixup_callers_callsite
1583     return callee_method->get_c2i_entry();
1584   }
1585   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1586   return callee_method->verified_code_entry();
1587 }
1588 
1589 // resolve a static call and patch code
1590 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1591   methodHandle callee_method;

1592   bool enter_special = false;
1593   JRT_BLOCK
1594     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1595     current->set_vm_result_metadata(callee_method());
1596   JRT_BLOCK_END
1597   // return compiled code entry point after potential safepoints
1598   return get_resolved_entry(current, callee_method);
1599 JRT_END
1600 
1601 // resolve virtual call and update inline cache to monomorphic
1602 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1603   methodHandle callee_method;

1604   JRT_BLOCK
1605     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1606     current->set_vm_result_metadata(callee_method());
1607   JRT_BLOCK_END
1608   // return compiled code entry point after potential safepoints
1609   return get_resolved_entry(current, callee_method);
1610 JRT_END
1611 
1612 
1613 // Resolve a virtual call that can be statically bound (e.g., always
1614 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1615 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1616   methodHandle callee_method;

1617   JRT_BLOCK
1618     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1619     current->set_vm_result_metadata(callee_method());
1620   JRT_BLOCK_END
1621   // return compiled code entry point after potential safepoints
1622   return get_resolved_entry(current, callee_method);
1623 JRT_END
1624 
1625 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {


1626   JavaThread* current = THREAD;
1627   ResourceMark rm(current);
1628   CallInfo call_info;
1629   Bytecodes::Code bc;
1630 
1631   // receiver is null for static calls. An exception is thrown for null
1632   // receivers for non-static calls
1633   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1634 
1635   methodHandle callee_method(current, call_info.selected_method());
1636 
1637 #ifndef PRODUCT
1638   AtomicAccess::inc(&_ic_miss_ctr);
1639 
1640   // Statistics & Tracing
1641   if (TraceCallFixup) {
1642     ResourceMark rm(current);
1643     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1644     callee_method->print_short_name(tty);
1645     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1646   }
1647 
1648   if (ICMissHistogram) {
1649     MutexLocker m(VMStatistic_lock);
1650     RegisterMap reg_map(current,
1651                         RegisterMap::UpdateMap::skip,
1652                         RegisterMap::ProcessFrames::include,
1653                         RegisterMap::WalkContinuation::skip);
1654     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1655     // produce statistics under the lock
1656     trace_ic_miss(f.pc());
1657   }
1658 #endif
1659 
1660   // install an event collector so that when a vtable stub is created the
1661   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1662   // event can't be posted when the stub is created as locks are held
1663   // - instead the event will be deferred until the event collector goes
1664   // out of scope.
1665   JvmtiDynamicCodeEventCollector event_collector;
1666 
1667   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1668   RegisterMap reg_map(current,
1669                       RegisterMap::UpdateMap::skip,
1670                       RegisterMap::ProcessFrames::include,
1671                       RegisterMap::WalkContinuation::skip);
1672   frame caller_frame = current->last_frame().sender(&reg_map);
1673   CodeBlob* cb = caller_frame.cb();
1674   nmethod* caller_nm = cb->as_nmethod();
1675 
1676   CompiledICLocker ml(caller_nm);
1677   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1678   inline_cache->update(&call_info, receiver()->klass());
1679 
1680   return callee_method;
1681 }
1682 
1683 //
1684 // Resets a call-site in compiled code so it will get resolved again.
1685 // This routine handles virtual call sites, optimized virtual call
1686 // sites, and static call sites. Typically used to change a call site's
1687 // destination from compiled to interpreted.
1688 //
1689 methodHandle SharedRuntime::reresolve_call_site(TRAPS) {
1690   JavaThread* current = THREAD;
1691   ResourceMark rm(current);
1692   RegisterMap reg_map(current,
1693                       RegisterMap::UpdateMap::skip,
1694                       RegisterMap::ProcessFrames::include,
1695                       RegisterMap::WalkContinuation::skip);
1696   frame stub_frame = current->last_frame();
1697   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1698   frame caller = stub_frame.sender(&reg_map);
1699 
1700   // Do nothing if the frame isn't a live compiled frame.
1701   // nmethod could be deoptimized by the time we get here
1702   // so no update to the caller is needed.
1703 
1704   if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1705       (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1706 
1707     address pc = caller.pc();
1708 
1709     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1710     assert(caller_nm != nullptr, "did not find caller nmethod");
1711 
1712     // Default call_addr is the location of the "basic" call.
1713     // Determine the address of the call we are re-resolving. With
1714     // Inline Caches we will always find a recognizable call.
1715     // With Inline Caches disabled we may or may not find a
1716     // recognizable call. We will always find a call for static
1717     // calls and for optimized virtual calls. For vanilla virtual
1718     // calls it depends on the state of the UseInlineCaches switch.
1719     //
1720     // With Inline Caches disabled we can get here for a virtual call
1721     // for two reasons:
1722     //   1 - calling an abstract method. The vtable for abstract methods
1723     //       will run us thru handle_wrong_method and we will eventually
1724     //       end up in the interpreter to throw the AME.
1725     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1726     //       call and between the time we fetch the entry address and
1727     //       we jump to it the target gets deoptimized. Similar to 1
1728     //       we will wind up in the interpreter (thru a c2i with c2).
1729     //
1730     CompiledICLocker ml(caller_nm);
1731     address call_addr = caller_nm->call_instruction_address(pc);
1732 
1733     if (call_addr != nullptr) {
1734       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1735       // bytes back in the instruction stream so we must also check for reloc info.
1736       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1737       bool ret = iter.next(); // Get item
1738       if (ret) {

1739         switch (iter.type()) {
1740           case relocInfo::static_call_type:

1741           case relocInfo::opt_virtual_call_type: {
1742             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1743             cdc->set_to_clean();



1744             break;
1745           }
1746 
1747           case relocInfo::virtual_call_type: {
1748             // compiled, dispatched call (which used to call an interpreted method)
1749             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1750             inline_cache->set_to_clean();


1751             break;
1752           }
1753           default:
1754             break;
1755         }
1756       }
1757     }
1758   }
1759 
1760   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1761 
1762 
1763 #ifndef PRODUCT
1764   AtomicAccess::inc(&_wrong_method_ctr);
1765 
1766   if (TraceCallFixup) {
1767     ResourceMark rm(current);
1768     tty->print("handle_wrong_method reresolving call to");
1769     callee_method->print_short_name(tty);
1770     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1771   }
1772 #endif
1773 
1774   return callee_method;
1775 }
1776 
1777 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1778   // The faulting unsafe accesses should be changed to throw the error
1779   // synchronously instead. Meanwhile the faulting instruction will be
1780   // skipped over (effectively turning it into a no-op) and an
1781   // asynchronous exception will be raised which the thread will
1782   // handle at a later point. If the instruction is a load it will
1783   // return garbage.
1784 
1785   // Request an async exception.
1786   thread->set_pending_unsafe_access_error();
1787 
1788   // Return address of next instruction to execute.

1954   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
1955 
1956   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
1957   if (message == nullptr) {
1958     // Shouldn't happen, but don't cause even more problems if it does
1959     message = const_cast<char*>(caster_klass->external_name());
1960   } else {
1961     jio_snprintf(message,
1962                  msglen,
1963                  "class %s cannot be cast to class %s (%s%s%s)",
1964                  caster_name,
1965                  target_name,
1966                  caster_klass_description,
1967                  klass_separator,
1968                  target_klass_description
1969                  );
1970   }
1971   return message;
1972 }
1973 
1974 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
1975   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
1976 JRT_END
1977 
1978 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
1979   if (!SafepointSynchronize::is_synchronizing()) {
1980     // Only try quick_enter() if we're not trying to reach a safepoint
1981     // so that the calling thread reaches the safepoint more quickly.
1982     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
1983       return;
1984     }
1985   }
1986   // NO_ASYNC required because an async exception on the state transition destructor
1987   // would leave you with the lock held and it would never be released.
1988   // The normal monitorenter NullPointerException is thrown without acquiring a lock
1989   // and the model is that an exception implies the method failed.
1990   JRT_BLOCK_NO_ASYNC
1991   Handle h_obj(THREAD, obj);
1992   ObjectSynchronizer::enter(h_obj, lock, current);
1993   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2187   tty->print_cr("Note 1: counter updates are not MT-safe.");
2188   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2189   tty->print_cr("        %% in nested categories are relative to their category");
2190   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2191   tty->cr();
2192 
2193   MethodArityHistogram h;
2194 }
2195 #endif
2196 
2197 #ifndef PRODUCT
2198 static int _lookups; // number of calls to lookup
2199 static int _equals;  // number of buckets checked with matching hash
2200 static int _archived_hits; // number of successful lookups in archived table
2201 static int _runtime_hits;  // number of successful lookups in runtime table
2202 #endif
2203 
2204 // A simple wrapper class around the calling convention information
2205 // that allows sharing of adapters for the same calling convention.
2206 class AdapterFingerPrint : public MetaspaceObj {
2207  private:
2208   enum {
2209     _basic_type_bits = 4,
2210     _basic_type_mask = right_n_bits(_basic_type_bits),
2211     _basic_types_per_int = BitsPerInt / _basic_type_bits,
2212   };
2213   // TO DO:  Consider integrating this with a more global scheme for compressing signatures.
2214   // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
2215 
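       // A worked example (assuming HotSpot's usual BasicType numbering, e.g.
       // T_LONG = 0xB, T_VOID = 0xE): on an LP64 build an instance method with
       // signature (J)V has sig_bt = { T_OBJECT, T_LONG, T_VOID }, which
       // adapter_encoding() maps to { T_LONG, T_LONG, T_VOID } and packs into
       // the single int 0xBBE.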
2216   int _length;


2217 
2218   static int data_offset() { return sizeof(AdapterFingerPrint); }
2219   int* data_pointer() {
2220     return (int*)((address)this + data_offset());
2221   }
2222 
2223   // Private constructor. Use allocate() to get an instance.
2224   AdapterFingerPrint(int total_args_passed, BasicType* sig_bt, int len) {
2225     int* data = data_pointer();
2226     // Pack the BasicTypes with 8 per int
2227     assert(len == length(total_args_passed), "sanity");
2228     _length = len;
2229     int sig_index = 0;
2230     for (int index = 0; index < _length; index++) {
2231       int value = 0;
2232       for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2233         int bt = adapter_encoding(sig_bt[sig_index++]);
2234         assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2235         value = (value << _basic_type_bits) | bt;
2236       }
2237       data[index] = value;


2238     }

2239   }
2240 
2241   // Call deallocate instead
2242   ~AdapterFingerPrint() {
2243     ShouldNotCallThis();
2244   }
2245 
2246   static int length(int total_args) {
2247     return (total_args + (_basic_types_per_int-1)) / _basic_types_per_int;
2248   }
2249 
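       // Size of the fixed part (sizeof(AdapterFingerPrint)) plus the packed
       // data array, rounded up to whole heap words; used by allocate() and size().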
2250   static int compute_size_in_words(int len) {
2251     return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(int)));
2252   }
2253 
2254   // Remap BasicTypes that are handled equivalently by the adapters.
2255   // These are correct for the current system but someday it might be
2256   // necessary to make this mapping platform dependent.
2257   static int adapter_encoding(BasicType in) {
2258     switch (in) {
2259       case T_BOOLEAN:
2260       case T_BYTE:
2261       case T_SHORT:
2262       case T_CHAR:
2263         // These are all promoted to T_INT in the calling convention
2264         return T_INT;
2265 
2266       case T_OBJECT:
2267       case T_ARRAY:
2268         // In other words, we assume that any register good enough for
2269         // an int or long is good enough for a managed pointer.
2270 #ifdef _LP64
2271         return T_LONG;
2272 #else
2273         return T_INT;
2274 #endif
2275 
2276       case T_INT:
2277       case T_LONG:
2278       case T_FLOAT:
2279       case T_DOUBLE:
2280       case T_VOID:
2281         return in;
2282 
2283       default:
2284         ShouldNotReachHere();
2285         return T_CONFLICT;
2286     }
2287   }
2288 
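       // Placement allocation for the variable-length fingerprint: fp_size
       // (computed by compute_size_in_words) also covers the trailing int data[]
       // payload, and the whole block is zero-initialized up front.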
2289   void* operator new(size_t size, size_t fp_size) throw() {
2290     assert(fp_size >= size, "sanity check");
2291     void* p = AllocateHeap(fp_size, mtCode);
2292     memset(p, 0, fp_size);
2293     return p;
2294   }
2295 

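       // Applies 'function' to each packed argument encoding in order; zero
       // nibbles are unused padding in the last word and are skipped.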
2296   template<typename Function>
2297   void iterate_args(Function function) {
2298     for (int i = 0; i < length(); i++) {
2299       unsigned val = (unsigned)value(i);
2300       // args are packed so that first/lower arguments are in the highest
2301       // bits of each int value, so iterate from highest to the lowest
2302       for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2303         unsigned v = (val >> j) & _basic_type_mask;
2304         if (v == 0) {
2305           continue;
2306         }
2307         function(v);
2308       }
2309     }
2310   }
2311 
2312  public:
2313   static AdapterFingerPrint* allocate(int total_args_passed, BasicType* sig_bt) {
2314     int len = length(total_args_passed);
2315     int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2316     AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(total_args_passed, sig_bt, len);
2317     assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2318     return afp;
2319   }
2320 
2321   static void deallocate(AdapterFingerPrint* fp) {
2322     FreeHeap(fp);
2323   }
2324 
2325   int value(int index) {
2326     int* data = data_pointer();
2327     return data[index];
2328   }
2329 
2330   int length() {
2331     return _length;
2332   }
2333 
2334   unsigned int compute_hash() {
2335     int hash = 0;
2336     for (int i = 0; i < length(); i++) {
2337       int v = value(i);
2338       // Add an arithmetic operation to the hash, like +3, to improve hashing
2339       hash = ((hash << 8) ^ v ^ (hash >> 5)) + 3;
2340     }
2341     return (unsigned int)hash;
2342   }
2343 
2344   const char* as_string() {
2345     stringStream st;
2346     st.print("0x");
2347     for (int i = 0; i < length(); i++) {
2348       st.print("%x", value(i));


2349     }

2350     return st.as_string();
2351   }
2352 
2353   const char* as_basic_args_string() {
2354     stringStream st;
2355     bool long_prev = false;
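         // On LP64 both real longs and managed pointers were encoded as T_LONG
         // (see adapter_encoding), so disambiguate here: a T_LONG followed by its
         // T_VOID slot is a genuine long ("J"); a T_LONG without one was an
         // object/array pointer ("L").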
2356     iterate_args([&] (int arg) {
2357       if (long_prev) {
2358         long_prev = false;
2359         if (arg == T_VOID) {
2360           st.print("J");
2361         } else {
2362           st.print("L");
2363         }
2364       }
2365       switch (arg) {
2366         case T_INT:    st.print("I");    break;
2367         case T_LONG:   long_prev = true; break;
2368         case T_FLOAT:  st.print("F");    break;
2369         case T_DOUBLE: st.print("D");    break;
2370         case T_VOID:   break;
2371         default: ShouldNotReachHere();
2372       }
2373     });
2374     if (long_prev) {
2375       st.print("L");
2376     }
2377     return st.as_string();
2378   }
2379 
2380   BasicType* as_basic_type(int& nargs) {
2381     nargs = 0;
2382     GrowableArray<BasicType> btarray;
2383     bool long_prev = false;
2384 
2385     iterate_args([&] (int arg) {
2386       if (long_prev) {
2387         long_prev = false;
2388         if (arg == T_VOID) {
2389           btarray.append(T_LONG);
2390         } else {
2391           btarray.append(T_OBJECT); // it could be T_ARRAY; it shouldn't matter
2392         }
2393       }
2394       switch (arg) {
2395         case T_INT: // fallthrough
2396         case T_FLOAT: // fallthrough
2397         case T_DOUBLE:
2398         case T_VOID:
2399           btarray.append((BasicType)arg);
2400           break;
2401         case T_LONG:
2402           long_prev = true;
2403           break;
2404         default: ShouldNotReachHere();
2405       }
2406     });
2407 
2408     if (long_prev) {
2409       btarray.append(T_OBJECT);
2410     }
2411 
2412     nargs = btarray.length();
2413     BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, nargs);
2414     int index = 0;
2415     GrowableArrayIterator<BasicType> iter = btarray.begin();
2416     while (iter != btarray.end()) {
2417       sig_bt[index++] = *iter;
2418       ++iter;
2419     }
2420     assert(index == btarray.length(), "sanity check");
2421 #ifdef ASSERT
2422     {
2423       AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(nargs, sig_bt);
2424       assert(this->equals(compare_fp), "sanity check");
2425       AdapterFingerPrint::deallocate(compare_fp);
2426     }
2427 #endif
2428     return sig_bt;
2429   }
2430 
2431   bool equals(AdapterFingerPrint* other) {
2432     if (other->_length != _length) {


2433       return false;
2434     } else {
2435       for (int i = 0; i < _length; i++) {
2436         if (value(i) != other->value(i)) {
2437           return false;
2438         }
2439       }
2440     }
2441     return true;
2442   }
2443 
2444   // methods required by virtue of being a MetaspaceObj
2445   void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2446   int size() const { return compute_size_in_words(_length); }
2447   MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2448 
2449   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2450     NOT_PRODUCT(_equals++);
2451     return fp1->equals(fp2);
2452   }
2453 
2454   static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2455     return fp->compute_hash();
2456   }

2459 #if INCLUDE_CDS
2460 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2461   return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2462 }
2463 
2464 class ArchivedAdapterTable : public OffsetCompactHashtable<
2465   AdapterFingerPrint*,
2466   AdapterHandlerEntry*,
2467   adapter_fp_equals_compact_hashtable_entry> {};
2468 #endif // INCLUDE_CDS
2469 
2470 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2471 using AdapterHandlerTable = HashTable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2472                   AnyObj::C_HEAP, mtCode,
2473                   AdapterFingerPrint::compute_hash,
2474                   AdapterFingerPrint::equals>;
2475 static AdapterHandlerTable* _adapter_handler_table;
2476 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2477 
2478 // Find an entry with the same fingerprint if it exists
2479 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicType* sig_bt) {
2480   NOT_PRODUCT(_lookups++);
2481   assert_lock_strong(AdapterHandlerLibrary_lock);
2482   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2483   AdapterHandlerEntry* entry = nullptr;
2484 #if INCLUDE_CDS
2485   // If we are building the archive, then the archived adapter table is
2486   // not valid and we need to use the ones added to the runtime table.
2487   if (AOTCodeCache::is_using_adapter()) {
2488     // Search the archived table first. It is a read-only table, so it can be searched without a lock.
2489     entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2490 #ifndef PRODUCT
2491     if (entry != nullptr) {
2492       _archived_hits++;
2493     }
2494 #endif
2495   }
2496 #endif // INCLUDE_CDS
2497   if (entry == nullptr) {
2498     assert_lock_strong(AdapterHandlerLibrary_lock);
2499     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2500     if (entry_p != nullptr) {
2501       entry = *entry_p;
2502       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",

2519   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2520   ts.print(tty, "AdapterHandlerTable");
2521   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2522                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2523   int total_hits = _archived_hits + _runtime_hits;
2524   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2525                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2526 }
2527 #endif
2528 
2529 // ---------------------------------------------------------------------------
2530 // Implementation of AdapterHandlerLibrary
2531 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2532 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2533 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2534 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2535 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2536 #if INCLUDE_CDS
2537 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2538 #endif // INCLUDE_CDS
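     // Size in bytes of the temporary BufferBlob that adapter code is generated
     // into (see buffer_blob() and generate_adapter_code()).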
2539 static const int AdapterHandlerLibrary_size = 16*K;
2540 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2541 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2542 
2543 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2544   assert(_buffer != nullptr, "should be initialized");
2545   return _buffer;
2546 }
2547 
2548 static void post_adapter_creation(const AdapterHandlerEntry* entry) {
2549   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2550     AdapterBlob* adapter_blob = entry->adapter_blob();
2551     char blob_id[256];
2552     jio_snprintf(blob_id,
2553                  sizeof(blob_id),
2554                  "%s(%s)",
2555                  adapter_blob->name(),
2556                  entry->fingerprint()->as_string());
2557     if (Forte::is_enabled()) {
2558       Forte::register_stub(blob_id, adapter_blob->content_begin(), adapter_blob->content_end());
2559     }

2567 void AdapterHandlerLibrary::initialize() {
2568   {
2569     ResourceMark rm;
2570     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2571     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2572   }
2573 
2574 #if INCLUDE_CDS
2575   // Link adapters in AOT Cache to their code in AOT Code Cache
2576   if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2577     link_aot_adapters();
2578     lookup_simple_adapters();
2579     return;
2580   }
2581 #endif // INCLUDE_CDS
2582 
2583   ResourceMark rm;
2584   {
2585     MutexLocker mu(AdapterHandlerLibrary_lock);
2586 
2587     _no_arg_handler = create_adapter(0, nullptr);


2588 
2589     BasicType obj_args[] = { T_OBJECT };
2590     _obj_arg_handler = create_adapter(1, obj_args);


2591 
2592     BasicType int_args[] = { T_INT };
2593     _int_arg_handler = create_adapter(1, int_args);


2594 
2595     BasicType obj_int_args[] = { T_OBJECT, T_INT };
2596     _obj_int_arg_handler = create_adapter(2, obj_int_args);



2597 
2598     BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2599     _obj_obj_arg_handler = create_adapter(2, obj_obj_args);



2600 
2601     // we should always get an entry back but we don't have any
2602     // associated blob on Zero
2603     assert(_no_arg_handler != nullptr &&
2604            _obj_arg_handler != nullptr &&
2605            _int_arg_handler != nullptr &&
2606            _obj_int_arg_handler != nullptr &&
2607            _obj_obj_arg_handler != nullptr, "Initial adapter handlers must be properly created");
2608   }
2609 
2610   // Outside of the lock
2611 #ifndef ZERO
2612   // no blobs to register when we are on Zero
2613   post_adapter_creation(_no_arg_handler);
2614   post_adapter_creation(_obj_arg_handler);
2615   post_adapter_creation(_int_arg_handler);
2616   post_adapter_creation(_obj_int_arg_handler);
2617   post_adapter_creation(_obj_obj_arg_handler);
2618 #endif // ZERO
2619 }
2620 
2621 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2622   uint id = (uint)AtomicAccess::add((int*)&_id_counter, 1);
2623   assert(id > 0, "we can never overflow because AOT cache cannot contain more than 2^32 methods");
2624   return AdapterHandlerEntry::allocate(id, fingerprint);
2625 }
2626 
2627 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2628   int total_args_passed = method->size_of_parameters(); // All args on stack
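       // Note: size_of_parameters() counts the receiver slot of an instance
       // method, so total_args_passed == 1 for a non-static method means the
       // method takes only 'this'.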
2629   if (total_args_passed == 0) {
2630     return _no_arg_handler;
2631   } else if (total_args_passed == 1) {
2632     if (!method->is_static()) {



2633       return _obj_arg_handler;
2634     }
2635     switch (method->signature()->char_at(1)) {
2636       case JVM_SIGNATURE_CLASS:
2637       case JVM_SIGNATURE_ARRAY:
2638         return _obj_arg_handler;
2639       case JVM_SIGNATURE_INT:
2640       case JVM_SIGNATURE_BOOLEAN:
2641       case JVM_SIGNATURE_CHAR:
2642       case JVM_SIGNATURE_BYTE:
2643       case JVM_SIGNATURE_SHORT:
2644         return _int_arg_handler;
2645     }
2646   } else if (total_args_passed == 2 &&
2647              !method->is_static()) {
2648     switch (method->signature()->char_at(1)) {
2649       case JVM_SIGNATURE_CLASS:
2650       case JVM_SIGNATURE_ARRAY:
2651         return _obj_obj_arg_handler;
2652       case JVM_SIGNATURE_INT:
2653       case JVM_SIGNATURE_BOOLEAN:
2654       case JVM_SIGNATURE_CHAR:
2655       case JVM_SIGNATURE_BYTE:
2656       case JVM_SIGNATURE_SHORT:
2657         return _obj_int_arg_handler;
2658     }
2659   }
2660   return nullptr;
2661 }
2662 
2663 class AdapterSignatureIterator : public SignatureIterator {
2664  private:
2665   BasicType stack_sig_bt[16];
2666   BasicType* sig_bt;
2667   int index;
2668 
2669  public:
2670   AdapterSignatureIterator(Symbol* signature,
2671                            fingerprint_t fingerprint,
2672                            bool is_static,
2673                            int total_args_passed) :
2674     SignatureIterator(signature, fingerprint),
2675     index(0)
2676   {
2677     sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2678     if (!is_static) { // Pass in receiver first
2679       sig_bt[index++] = T_OBJECT;
2680     }
2681     do_parameters_on(this);
2682   }
2683 
2684   BasicType* basic_types() {
2685     return sig_bt;
2686   }

2687 
2688 #ifdef ASSERT
2689   int slots() {
2690     return index;
2691   }
2692 #endif
2693 
2694  private:


2695 
2696   friend class SignatureIterator;  // so do_parameters_on can call do_type
2697   void do_type(BasicType type) {
2698     sig_bt[index++] = type;
2699     if (type == T_LONG || type == T_DOUBLE) {
2700       sig_bt[index++] = T_VOID; // Longs & doubles take 2 Java slots
2701     }
2702   }
2703 };
2704 
2705 
2706 const char* AdapterHandlerEntry::_entry_names[] = {
2707   "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
2708 };
2709 
2710 #ifdef ASSERT
2711 void AdapterHandlerLibrary::verify_adapter_sharing(int total_args_passed, BasicType* sig_bt, AdapterHandlerEntry* cached_entry) {
2712   // we can only check for the same code if there is any
2713 #ifndef ZERO
2714   AdapterHandlerEntry* comparison_entry = create_adapter(total_args_passed, sig_bt, true);
2715   assert(comparison_entry->adapter_blob() == nullptr, "no blob should be created when creating an adapter for comparison");
2716   assert(comparison_entry->compare_code(cached_entry), "code must match");
2717   // Release the one just created
2718   AdapterHandlerEntry::deallocate(comparison_entry);
2719 #endif // ZERO
2720 }
2721 #endif /* ASSERT */
2722 
2723 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
2724   assert(!method->is_abstract(), "abstract methods do not have adapters");
2725   // Use a customized signature handler.  Need to lock around updates to
2726   // the _adapter_handler_table (it is not safe for concurrent readers
2727   // and a single writer: this could be fixed if it becomes a
2728   // problem).
2729 
2730   // Fast-path for trivial adapters
2731   AdapterHandlerEntry* entry = get_simple_adapter(method);
2732   if (entry != nullptr) {
2733     return entry;
2734   }
2735 
2736   ResourceMark rm;
2737   bool new_entry = false;
2738 
2739   // Fill in the signature array, for the calling-convention call.
2740   int total_args_passed = method->size_of_parameters(); // All args on stack
2741 
2742   AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2743                               method->is_static(), total_args_passed);
2744   assert(si.slots() == total_args_passed, "");
2745   BasicType* sig_bt = si.basic_types();
2746   {
2747     MutexLocker mu(AdapterHandlerLibrary_lock);
2748 
2749     // Lookup method signature's fingerprint
2750     entry = lookup(total_args_passed, sig_bt);
2751 
2752     if (entry != nullptr) {
2753 #ifndef ZERO
2754       assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
2755 #endif
2756 #ifdef ASSERT
2757       if (!entry->in_aot_cache() && VerifyAdapterSharing) {
2758         verify_adapter_sharing(total_args_passed, sig_bt, entry);
2759       }
2760 #endif
2761     } else {
2762       entry = create_adapter(total_args_passed, sig_bt);
2763       if (entry != nullptr) {
2764         new_entry = true;
2765       }
2766     }
2767   }
2768 
2769   // Outside of the lock
2770   if (new_entry) {
2771     post_adapter_creation(entry);
2772   }
2773   return entry;
2774 }
2775 
2776 void AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
2777   ResourceMark rm;
2778   const char* name = AdapterHandlerLibrary::name(handler);
2779   const uint32_t id = AdapterHandlerLibrary::id(handler);
2780 
2781   CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name);
2782   if (blob != nullptr) {

2797   }
2798   insts_size = adapter_blob->code_size();
2799   st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
2800                 handler->fingerprint()->as_basic_args_string(),
2801                 handler->fingerprint()->as_string(), insts_size);
2802   st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
2803   if (Verbose || PrintStubCode) {
2804     address first_pc = adapter_blob->content_begin();
2805     if (first_pc != nullptr) {
2806       Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
2807       st->cr();
2808     }
2809   }
2810 }
2811 #endif // PRODUCT
2812 
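     // Convert absolute entry addresses into offsets relative to the I2C entry.
     // Offsets stay valid across blob relocation, e.g. when an adapter blob is
     // stored to and later loaded from the AOT code cache.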
2813 void AdapterHandlerLibrary::address_to_offset(address entry_address[AdapterBlob::ENTRY_COUNT],
2814                                               int entry_offset[AdapterBlob::ENTRY_COUNT]) {
2815   entry_offset[AdapterBlob::I2C] = 0;
2816   entry_offset[AdapterBlob::C2I] = entry_address[AdapterBlob::C2I] - entry_address[AdapterBlob::I2C];


2817   entry_offset[AdapterBlob::C2I_Unverified] = entry_address[AdapterBlob::C2I_Unverified] - entry_address[AdapterBlob::I2C];

2818   if (entry_address[AdapterBlob::C2I_No_Clinit_Check] == nullptr) {
2819     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = -1;
2820   } else {
2821     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = entry_address[AdapterBlob::C2I_No_Clinit_Check] - entry_address[AdapterBlob::I2C];
2822   }
2823 }
2824 
2825 bool AdapterHandlerLibrary::generate_adapter_code(AdapterHandlerEntry* handler,
2826                                                   int total_args_passed,
2827                                                   BasicType* sig_bt,
2828                                                   bool is_transient) {
2829   if (log_is_enabled(Info, perf, class, link)) {
2830     ClassLoader::perf_method_adapters_count()->inc();
2831   }
2832 
2833 #ifndef ZERO

2834   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2835   CodeBuffer buffer(buf);
2836   short buffer_locs[20];
2837   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2838                                          sizeof(buffer_locs)/sizeof(relocInfo));
2839   MacroAssembler masm(&buffer);
2840   VMRegPair stack_regs[16];
2841   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2842 
2843   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2844   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2845   address entry_address[AdapterBlob::ENTRY_COUNT];
2846   SharedRuntime::generate_i2c2i_adapters(&masm,
2847                                          total_args_passed,
2848                                          comp_args_on_stack,
2849                                          sig_bt,
2850                                          regs,
2851                                          entry_address);
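       // entry_address[] now holds the absolute addresses of the generated entry
       // points (I2C, C2I, C2I_Unverified, C2I_No_Clinit_Check).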
2852   // On Zero there is no code to save and no need to create a blob
2853   // or relocate the handler.
2854   int entry_offset[AdapterBlob::ENTRY_COUNT];
2855   address_to_offset(entry_address, entry_offset);
2856 #ifdef ASSERT
2857   if (VerifyAdapterSharing) {
2858     handler->save_code(buf->code_begin(), buffer.insts_size());
2859     if (is_transient) {
2860       return true;
2861     }
2862   }
2863 #endif
2864   AdapterBlob* adapter_blob = AdapterBlob::create(&buffer, entry_offset);
2865   if (adapter_blob == nullptr) {
2866     // CodeCache is full, disable compilation
2867     // Ought to log this, but the compile log is only per compile thread
2868     // and we're some nondescript Java thread.
2869     return false;
2870   }
2871   handler->set_adapter_blob(adapter_blob);
2872   if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
2873     // try to save generated code
2874     const char* name = AdapterHandlerLibrary::name(handler);
2875     const uint32_t id = AdapterHandlerLibrary::id(handler);
2876     bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name);
2877     assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
2878   }
2879 #endif // ZERO
2880 
2881 #ifndef PRODUCT
2882   // debugging support
2883   if (PrintAdapterHandlers || PrintStubCode) {
2884     print_adapter_handler_info(tty, handler);
2885   }
2886 #endif
2887 
2888   return true;
2889 }
2890 
2891 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(int total_args_passed,
2892                                                            BasicType* sig_bt,
2893                                                            bool is_transient) {
2894   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2895   AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
2896   if (!generate_adapter_code(handler, total_args_passed, sig_bt, is_transient)) {
2897     AdapterHandlerEntry::deallocate(handler);
2898     return nullptr;
2899   }
2900   if (!is_transient) {
2901     assert_lock_strong(AdapterHandlerLibrary_lock);
2902     _adapter_handler_table->put(fp, handler);
2903   }
2904   return handler;
2905 }
2906 
2907 #if INCLUDE_CDS
2908 void AdapterHandlerEntry::remove_unshareable_info() {
2909 #ifdef ASSERT
2910    _saved_code = nullptr;
2911    _saved_code_length = 0;
2912 #endif // ASSERT
2913    _adapter_blob = nullptr;
2914    _linked = false;

2915 }
2916 
2917 class CopyAdapterTableToArchive : StackObj {
2918 private:
2919   CompactHashtableWriter* _writer;
2920   ArchiveBuilder* _builder;
2921 public:
2922   CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
2923                                                              _builder(ArchiveBuilder::current())
2924   {}
2925 
2926   bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
2927     LogStreamHandle(Trace, aot) lsh;
2928     if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
2929       assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
2930       AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
2931       assert(buffered_fp != nullptr,"sanity check");
2932       AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
2933       assert(buffered_entry != nullptr,"sanity check");
2934 

2979 // This method is used during a production run to link archived adapters (stored in the AOT cache)
2980 // to their code in the AOT code cache
2981 void AdapterHandlerEntry::link() {
2982   ResourceMark rm;
2983   assert(_fingerprint != nullptr, "_fingerprint must not be null");
2984   bool generate_code = false;
2985   // Generate code only if the AOTCodeCache is not available, or
2986   // caching of adapters is disabled, or we fail to link
2987   // the AdapterHandlerEntry to its code in the AOTCodeCache
2988   if (AOTCodeCache::is_using_adapter()) {
2989     AdapterHandlerLibrary::link_aot_adapter_handler(this);
2990     // If link_aot_adapter_handler() succeeds, _adapter_blob will be non-null
2991     if (_adapter_blob == nullptr) {
2992       log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
2993       generate_code = true;
2994     }
2995   } else {
2996     generate_code = true;
2997   }
2998   if (generate_code) {
2999     int nargs;
3000     BasicType* bt = _fingerprint->as_basic_type(nargs);
3001     if (!AdapterHandlerLibrary::generate_adapter_code(this, nargs, bt, /* is_transient */ false)) {
3002       // Don't throw exceptions during VM initialization because java.lang.* classes
3003       // might not have been initialized, causing problems when constructing the
3004       // Java exception object.
3005       vm_exit_during_initialization("Out of space in CodeCache for adapters");
3006     }
3007   }
3008   if (_adapter_blob != nullptr) {
3009     post_adapter_creation(this);
3010   }
3011   assert(_linked, "AdapterHandlerEntry must now be linked");
3012 }
3013 
3014 void AdapterHandlerLibrary::link_aot_adapters() {
3015   uint max_id = 0;
3016   assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3017   /* It is possible that some adapters generated during the assembly phase are not stored in the cache.
3018    * That implies the adapter ids of the cached adapters may not be contiguous.
3019    * If the size of the _aot_adapter_handler_table were used to initialize _id_counter, it could
3020    * result in a collision between the ids of AOT-stored handlers and runtime-generated handlers.
3021    * To avoid that, initialize _id_counter with the largest adapter id among the AOT-stored handlers.
3022    */
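   // For example (illustrative): if the cached handlers have ids {1, 3, 7}, the loop
   // below leaves _id_counter at 7, so a handler created later at runtime gets an id
   // greater than 7 (assuming ids are handed out by incrementing _id_counter).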
3023   _aot_adapter_handler_table.iterate_all([&](AdapterHandlerEntry* entry) {
3024     assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3025     entry->link();
3026     max_id = MAX2(max_id, entry->id());
3027   });
3028   // Set the adapter id counter to the maximum id found in the AOT cache
3029   assert(_id_counter == 0, "Did not expect new AdapterHandlerEntry to be created at this stage");
3030   _id_counter = max_id;
3031 }
3032 
3033 // This method is called during a production run to look up simple adapters
3034 // in the archived adapter handler table
3035 void AdapterHandlerLibrary::lookup_simple_adapters() {
3036   assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3037 
3038   MutexLocker mu(AdapterHandlerLibrary_lock);
3039   _no_arg_handler = lookup(0, nullptr);
3040 
3041   BasicType obj_args[] = { T_OBJECT };
3042   _obj_arg_handler = lookup(1, obj_args);
3043 
3044   BasicType int_args[] = { T_INT };
3045   _int_arg_handler = lookup(1, int_args);
3046 
3047   BasicType obj_int_args[] = { T_OBJECT, T_INT };
3048   _obj_int_arg_handler = lookup(2, obj_int_args);
3049 
3050   BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
3051   _obj_obj_arg_handler = lookup(2, obj_obj_args);

3052 
3053   assert(_no_arg_handler != nullptr &&
3054          _obj_arg_handler != nullptr &&
3055          _int_arg_handler != nullptr &&
3056          _obj_int_arg_handler != nullptr &&
3057          _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3058   assert(_no_arg_handler->is_linked() &&
3059          _obj_arg_handler->is_linked() &&
3060          _int_arg_handler->is_linked() &&
3061          _obj_int_arg_handler->is_linked() &&
3062          _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3063 }
3064 #endif // INCLUDE_CDS
3065 
3066 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3067   LogStreamHandle(Trace, aot) lsh;
3068   if (lsh.is_enabled()) {
3069     lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3070     lsh.cr();
3071   }
3072   it->push(&_fingerprint);
3073 }
3074 
3075 AdapterHandlerEntry::~AdapterHandlerEntry() {
3076   if (_fingerprint != nullptr) {
3077     AdapterFingerPrint::deallocate(_fingerprint);
3078     _fingerprint = nullptr;
3079   }
3080 #ifdef ASSERT
3081   FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3082 #endif
3083   FreeHeap(this);
3084 }
3085 
3086 
3087 #ifdef ASSERT
3088 // Capture the code before relocation so that it can be compared
3089 // against other versions.  If the code is captured after relocation
3090 // then relative instructions won't be equivalent.
3091 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3092   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3093   _saved_code_length = length;
3094   memcpy(_saved_code, buffer, length);
3095 }
3096 
3097 
3098 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3099   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");

3147 
3148       struct { double data[20]; } locs_buf;
3149       struct { double data[20]; } stubs_locs_buf;
3150       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3151 #if defined(AARCH64) || defined(PPC64)
3152       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3153       // in the constant pool to ensure ordering between the barrier and oops
3154       // accesses. For native_wrappers we need a constant.
3155       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3156       // static java call that is resolved in the runtime.
3157       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3158         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3159       }
3160 #endif
3161       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3162       MacroAssembler _masm(&buffer);
3163 
3164       // Fill in the signature array, for the calling-convention call.
3165       const int total_args_passed = method->size_of_parameters();
3166 

3167       VMRegPair stack_regs[16];

3168       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3169 
3170       AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
3171                               method->is_static(), total_args_passed);
3172       BasicType* sig_bt = si.basic_types();
3173       assert(si.slots() == total_args_passed, "");
3174       BasicType ret_type = si.return_type();
3175 
3176       // Now get the compiled-Java arguments layout.
3177       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3178 
3179       // Generate the compiled-to-native wrapper code
3180       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3181 
3182       if (nm != nullptr) {
3183         {
3184           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3185           if (nm->make_in_use()) {
3186             method->set_code(method, nm);
3187           }
3188         }
3189 
3190         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3191         if (directive->PrintAssemblyOption) {
3192           nm->print_code();
3193         }
3194         DirectivesStack::release(directive);

3402       if (b == handler->adapter_blob()) {
3403         found = true;
3404         st->print("Adapter for signature: ");
3405         handler->print_adapter_on(st);
3406         return false; // abort iteration
3407       } else {
3408         return true; // keep looking
3409       }
3410     };
3411     assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3412     _adapter_handler_table->iterate(findblob_runtime_table);
3413   }
3414   assert(found, "Should have found handler");
3415 }
3416 
3417 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3418   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3419   if (adapter_blob() != nullptr) {
3420     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3421     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3422     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3423     if (get_c2i_no_clinit_check_entry() != nullptr) {
3424       st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3425     }
3426   }
3427   st->cr();
3428 }
3429 
3430 #ifndef PRODUCT
3431 
3432 void AdapterHandlerLibrary::print_statistics() {
3433   print_table_statistics();
3434 }
3435 
3436 #endif /* PRODUCT */
3437 
3438 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3439   assert(current == JavaThread::current(), "pre-condition");
3440   StackOverflow* overflow_state = current->stack_overflow_state();
3441   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3442   overflow_state->set_reserved_stack_activation(current->stack_base());

3489         event.set_method(method);
3490         event.commit();
3491       }
3492     }
3493   }
3494   return activation;
3495 }
3496 
3497 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3498   // After any safepoint, just before going back to compiled code,
3499   // we inform the GC that we will be doing initializing writes to
3500   // this object in the future without emitting card-marks, so
3501   // the GC may take any compensating steps.
3502 
3503   oop new_obj = current->vm_result_oop();
3504   if (new_obj == nullptr) return;
3505 
3506   BarrierSet *bs = BarrierSet::barrier_set();
3507   bs->on_slowpath_allocation_exit(current, new_obj);
3508 }

  28 #include "classfile/javaClasses.inline.hpp"
  29 #include "classfile/stringTable.hpp"
  30 #include "classfile/vmClasses.hpp"
  31 #include "classfile/vmSymbols.hpp"
  32 #include "code/aotCodeCache.hpp"
  33 #include "code/codeCache.hpp"
  34 #include "code/compiledIC.hpp"
  35 #include "code/nmethod.inline.hpp"
  36 #include "code/scopeDesc.hpp"
  37 #include "code/vtableStubs.hpp"
  38 #include "compiler/abstractCompiler.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "gc/shared/barrierSet.hpp"
  42 #include "gc/shared/collectedHeap.hpp"
  43 #include "interpreter/interpreter.hpp"
  44 #include "interpreter/interpreterRuntime.hpp"
  45 #include "jfr/jfrEvents.hpp"
  46 #include "jvm.h"
  47 #include "logging/log.hpp"
  48 #include "memory/oopFactory.hpp"
  49 #include "memory/resourceArea.hpp"
  50 #include "memory/universe.hpp"
  51 #include "metaprogramming/primitiveConversions.hpp"
  52 #include "oops/access.hpp"
  53 #include "oops/fieldStreams.inline.hpp"
  54 #include "oops/inlineKlass.inline.hpp"
  55 #include "oops/klass.hpp"
  56 #include "oops/method.inline.hpp"
  57 #include "oops/objArrayKlass.hpp"
  58 #include "oops/objArrayOop.inline.hpp"
  59 #include "oops/oop.inline.hpp"
  60 #include "prims/forte.hpp"
  61 #include "prims/jvmtiExport.hpp"
  62 #include "prims/jvmtiThreadState.hpp"
  63 #include "prims/methodHandles.hpp"
  64 #include "prims/nativeLookup.hpp"
  65 #include "runtime/arguments.hpp"
  66 #include "runtime/atomicAccess.hpp"
  67 #include "runtime/basicLock.inline.hpp"
  68 #include "runtime/frame.inline.hpp"
  69 #include "runtime/handles.inline.hpp"
  70 #include "runtime/init.hpp"
  71 #include "runtime/interfaceSupport.inline.hpp"
  72 #include "runtime/java.hpp"
  73 #include "runtime/javaCalls.hpp"
  74 #include "runtime/jniHandles.inline.hpp"
  75 #include "runtime/osThread.hpp"
  76 #include "runtime/perfData.hpp"
  77 #include "runtime/sharedRuntime.hpp"
  78 #include "runtime/signature.hpp"
  79 #include "runtime/stackWatermarkSet.hpp"
  80 #include "runtime/stubRoutines.hpp"
  81 #include "runtime/synchronizer.hpp"
  82 #include "runtime/timerTrace.hpp"
  83 #include "runtime/vframe.inline.hpp"
  84 #include "runtime/vframeArray.hpp"
  85 #include "runtime/vm_version.hpp"
  86 #include "utilities/copy.hpp"
  87 #include "utilities/dtrace.hpp"
  88 #include "utilities/events.hpp"
  89 #include "utilities/globalDefinitions.hpp"
  90 #include "utilities/hashTable.hpp"
  91 #include "utilities/macros.hpp"
  92 #include "utilities/xmlstream.hpp"
  93 #ifdef COMPILER1
  94 #include "c1/c1_Runtime1.hpp"
  95 #endif
  96 #ifdef COMPILER2
  97 #include "opto/runtime.hpp"
  98 #endif

1216 // for a call currently in progress, i.e., arguments have been pushed on the stack
1217 // but the callee has not been invoked yet.  The caller frame must be compiled.
1218 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1219                                               CallInfo& callinfo, TRAPS) {
1220   Handle receiver;
1221   Handle nullHandle;  // create a handy null handle for exception returns
1222   JavaThread* current = THREAD;
1223 
1224   assert(!vfst.at_end(), "Java frame must exist");
1225 
1226   // Find caller and bci from vframe
1227   methodHandle caller(current, vfst.method());
1228   int          bci   = vfst.bci();
1229 
1230   if (caller->is_continuation_enter_intrinsic()) {
1231     bc = Bytecodes::_invokestatic;
1232     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1233     return receiver;
1234   }
1235 
1236   // The substitutability test implementation piggybacks on static call resolution
1237   Bytecodes::Code code = caller->java_code_at(bci);
1238   if (code == Bytecodes::_if_acmpeq || code == Bytecodes::_if_acmpne) {
1239     bc = Bytecodes::_invokestatic;
1240     methodHandle attached_method(THREAD, extract_attached_method(vfst));
1241     assert(attached_method.not_null(), "must have attached method");
1242     vmClasses::ValueObjectMethods_klass()->initialize(CHECK_NH);
1243     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, false, CHECK_NH);
1244 #ifdef ASSERT
1245     Symbol* subst_method_name = UseAltSubstitutabilityMethod ? vmSymbols::isSubstitutableAlt_name() : vmSymbols::isSubstitutable_name();
1246     Method* is_subst = vmClasses::ValueObjectMethods_klass()->find_method(subst_method_name, vmSymbols::object_object_boolean_signature());
1247     assert(callinfo.selected_method() == is_subst, "must be isSubstitutable method");
1248 #endif
1249     return receiver;
1250   }
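  // Note (descriptive): a compiled if_acmpeq/if_acmpne on value objects is implemented
  // as a call to the attached (Object, Object) -> boolean substitutability method, so
  // the comparison resolves here just like an ordinary invokestatic.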
1251 
1252   Bytecode_invoke bytecode(caller, bci);
1253   int bytecode_index = bytecode.index();
1254   bc = bytecode.invoke_code();
1255 
1256   methodHandle attached_method(current, extract_attached_method(vfst));
1257   if (attached_method.not_null()) {
1258     Method* callee = bytecode.static_target(CHECK_NH);
1259     vmIntrinsics::ID id = callee->intrinsic_id();
1260     // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual call,
1261     // it attaches the statically resolved method to the call site.
1262     if (MethodHandles::is_signature_polymorphic(id) &&
1263         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1264       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1265 
1266       // Adjust invocation mode according to the attached method.
1267       switch (bc) {
1268         case Bytecodes::_invokevirtual:
1269           if (attached_method->method_holder()->is_interface()) {
1270             bc = Bytecodes::_invokeinterface;
1271           }
1272           break;
1273         case Bytecodes::_invokeinterface:
1274           if (!attached_method->method_holder()->is_interface()) {
1275             bc = Bytecodes::_invokevirtual;
1276           }
1277           break;
1278         case Bytecodes::_invokehandle:
1279           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1280             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1281                                               : Bytecodes::_invokevirtual;
1282           }
1283           break;
1284         default:
1285           break;
1286       }
1287     } else {
1288       assert(attached_method->has_scalarized_args(), "invalid use of attached method");
1289       if (!attached_method->method_holder()->is_inline_klass()) {
1290         // Ignore the attached method in this case so as not to confuse the code below
1291         attached_method = methodHandle(current, nullptr);
1292       }
1293     }
1294   }
1295 
1296   assert(bc != Bytecodes::_illegal, "not initialized");
1297 
1298   bool has_receiver = bc != Bytecodes::_invokestatic &&
1299                       bc != Bytecodes::_invokedynamic &&
1300                       bc != Bytecodes::_invokehandle;
1301   bool check_null_and_abstract = true;
1302 
1303   // Find receiver for non-static call
1304   if (has_receiver) {
1305     // This register map must be updated since we need to find the receiver for
1306     // compiled frames. The receiver might be in a register.
1307     RegisterMap reg_map2(current,
1308                          RegisterMap::UpdateMap::include,
1309                          RegisterMap::ProcessFrames::include,
1310                          RegisterMap::WalkContinuation::skip);
1311     frame stubFrame   = current->last_frame();
1312     // Caller-frame is a compiled frame
1313     frame callerFrame = stubFrame.sender(&reg_map2);
1314 
1315     Method* callee = attached_method();
1316     if (callee == nullptr) {
1317       callee = bytecode.static_target(CHECK_NH);
1318       if (callee == nullptr) {
1319         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1320       }
1321     }
1322     bool caller_is_c1 = callerFrame.is_compiled_frame() && callerFrame.cb()->as_nmethod()->is_compiled_by_c1();
1323     if (!caller_is_c1 && callee->is_scalarized_arg(0)) {
1324       // If the receiver is an inline type that is passed as fields, no oop is available.
1325       // Resolve the call without receiver null checking.
1326       assert(!callee->mismatch(), "calls with inline type receivers should never mismatch");
1327       assert(attached_method.not_null() && !attached_method->is_abstract(), "must have non-abstract attached method");
1328       if (bc == Bytecodes::_invokeinterface) {
1329         bc = Bytecodes::_invokevirtual; // C2 optimistically replaces interface calls by virtual calls
1330       }
1331       check_null_and_abstract = false;
1332     } else {
1333       // Retrieve from a compiled argument list
1334       receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1335       assert(oopDesc::is_oop_or_null(receiver()), "");
1336       if (receiver.is_null()) {
1337         THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1338       }
1339     }
1340   }
1341 
1342   // Resolve method
1343   if (attached_method.not_null()) {
1344     // Parameterized by attached method.
1345     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, check_null_and_abstract, CHECK_NH);
1346   } else {
1347     // Parameterized by bytecode.
1348     constantPoolHandle constants(current, caller->constants());
1349     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1350   }
1351 
1352 #ifdef ASSERT
1353   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1354   if (has_receiver && check_null_and_abstract) {
1355     assert(receiver.not_null(), "should have thrown exception");
1356     Klass* receiver_klass = receiver->klass();
1357     Klass* rk = nullptr;
1358     if (attached_method.not_null()) {
1359       // In case there's resolved method attached, use its holder during the check.
1360       rk = attached_method->method_holder();
1361     } else {
1362       // Klass is already loaded.
1363       constantPoolHandle constants(current, caller->constants());
1364       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1365     }
1366     Klass* static_receiver_klass = rk;
1367     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1368            "actual receiver must be subclass of static receiver klass");
1369     if (receiver_klass->is_instance_klass()) {
1370       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1371         tty->print_cr("ERROR: Klass not yet initialized!!");
1372         receiver_klass->print();
1373       }
1374       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1375     }
1376   }
1377 #endif
1378 
1379   return receiver;
1380 }
1381 
1382 methodHandle SharedRuntime::find_callee_method(bool& caller_does_not_scalarize, TRAPS) {
1383   JavaThread* current = THREAD;
1384   ResourceMark rm(current);
1385   // We first need to check whether any Java activations (compiled or interpreted)
1386   // exist on the stack since the last JavaCall.  If not, we need
1387   // to get the target method from the JavaCall wrapper.
1388   vframeStream vfst(current, true);  // Do not skip any javaCalls
1389   methodHandle callee_method;
1390   if (vfst.at_end()) {
1391     // No Java frames were found on the stack since we did the JavaCall.
1392     // Hence the stack can only contain an entry_frame.  We need to
1393     // find the target method from the stub frame.
1394     RegisterMap reg_map(current,
1395                         RegisterMap::UpdateMap::skip,
1396                         RegisterMap::ProcessFrames::include,
1397                         RegisterMap::WalkContinuation::skip);
1398     frame fr = current->last_frame();
1399     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1400     fr = fr.sender(&reg_map);
1401     assert(fr.is_entry_frame(), "must be");
1402     // fr is now pointing to the entry frame.
1403     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1404   } else {
1405     Bytecodes::Code bc;
1406     CallInfo callinfo;
1407     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1408     // Calls via mismatching methods are always non-scalarized
1409     if (callinfo.resolved_method()->mismatch()) {
1410       caller_does_not_scalarize = true;
1411     }
1412     callee_method = methodHandle(current, callinfo.selected_method());
1413   }
1414   assert(callee_method()->is_method(), "must be");
1415   return callee_method;
1416 }
1417 
1418 // Resolves a call.
1419 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, bool& caller_does_not_scalarize, TRAPS) {
1420   JavaThread* current = THREAD;
1421   ResourceMark rm(current);
1422   RegisterMap cbl_map(current,
1423                       RegisterMap::UpdateMap::skip,
1424                       RegisterMap::ProcessFrames::include,
1425                       RegisterMap::WalkContinuation::skip);
1426   frame caller_frame = current->last_frame().sender(&cbl_map);
1427 
1428   CodeBlob* caller_cb = caller_frame.cb();
1429   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1430   nmethod* caller_nm = caller_cb->as_nmethod();
1431 
1432   // determine call info & receiver
1433   // note: a) receiver is null for static calls
1434   //       b) an exception is thrown if receiver is null for non-static calls
1435   CallInfo call_info;
1436   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1437   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1438 
1439   NoSafepointVerifier nsv;
1440 
1441   methodHandle callee_method(current, call_info.selected_method());
1442   // Calls via mismatching methods are always non-scalarized
1443   if (caller_nm->is_compiled_by_c1() || call_info.resolved_method()->mismatch()) {
1444     caller_does_not_scalarize = true;
1445   }
1446 
1447   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1448          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1449          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1450          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1451          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1452 
1453   assert(!caller_nm->is_unloading(), "It should not be unloading");
1454 
1455 #ifndef PRODUCT
1456   // tracing/debugging/statistics
1457   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1458                  (is_virtual) ? (&_resolve_virtual_ctr) :
1459                                 (&_resolve_static_ctr);
1460   AtomicAccess::inc(addr);
1461 
1462   if (TraceCallFixup) {
1463     ResourceMark rm(current);
1464     tty->print("resolving %s%s (%s) %s call to",
1465                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1466                Bytecodes::name(invoke_code), (caller_does_not_scalarize) ? "non-scalar" : "");
1467     callee_method->print_short_name(tty);
1468     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1469                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1470   }
1471 #endif
1472 
1473   if (invoke_code == Bytecodes::_invokestatic) {
1474     assert(callee_method->method_holder()->is_initialized() ||
1475            callee_method->method_holder()->is_reentrant_initialization(current),
1476            "invalid class initialization state for invoke_static");
1477     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1478       // In order to keep the class initialization check, do not patch the call
1479       // site for a static call when the class is not fully initialized.
1480       // The proper check is enforced by call site re-resolution on every invocation.
1481       //
1482       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1483       // explicit class initialization check is put in nmethod entry (VEP).
1484       assert(callee_method->method_holder()->is_linked(), "must be");
1485       return callee_method;
1486     }
1487   }
1488 
1489 
1490   // JSR 292 key invariant:
1491   // If the resolved method is a MethodHandle invoke target, the call
1492   // site must be a MethodHandle call site, because the lambda form might tail-call,
1493   // leaving the stack in a state unknown to either caller or callee.
1494 
1495   // Compute entry points. The computation of the entry points is independent of
1496   // patching the call.
1497 
1498   // Make sure the callee nmethod does not get deoptimized and removed before
1499   // we are done patching the code.
1500 
1501 
1502   CompiledICLocker ml(caller_nm);
1503   if (is_virtual && !is_optimized) {
1504     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1505     inline_cache->update(&call_info, receiver->klass(), caller_does_not_scalarize);
1506   } else {
1507     // Callsite is a direct call - set it to the destination method
1508     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1509     callsite->set(callee_method, caller_does_not_scalarize);
1510   }
1511 
1512   return callee_method;
1513 }
1514 
1515 // Inline caches exist only in compiled code
1516 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1517 #ifdef ASSERT
1518   RegisterMap reg_map(current,
1519                       RegisterMap::UpdateMap::skip,
1520                       RegisterMap::ProcessFrames::include,
1521                       RegisterMap::WalkContinuation::skip);
1522   frame stub_frame = current->last_frame();
1523   assert(stub_frame.is_runtime_frame(), "sanity check");
1524   frame caller_frame = stub_frame.sender(&reg_map);
1525   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1526 #endif /* ASSERT */
1527 
1528   methodHandle callee_method;
1529   bool caller_does_not_scalarize = false;
1530   JRT_BLOCK
1531     callee_method = SharedRuntime::handle_ic_miss_helper(caller_does_not_scalarize, CHECK_NULL);
1532     // Return Method* through TLS
1533     current->set_vm_result_metadata(callee_method());
1534   JRT_BLOCK_END
1535   // return compiled code entry point after potential safepoints
1536   return get_resolved_entry(current, callee_method, false, false, caller_does_not_scalarize);
1537 JRT_END
1538 
1539 
1540 // Handle call site that has been made non-entrant
1541 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1542   // 6243940 We might end up in here if the callee is deoptimized
1543   // as we race to call it.  We don't want to take a safepoint if
1544   // the caller was interpreted because the caller frame will look
1545   // interpreted to the stack walkers and arguments are now
1546   // "compiled" so it is much better to make this transition
1547   // invisible to the stack walking code. The i2c path will
1548   // place the callee method in the callee_target. It is stashed
1549   // there because if we try to find the callee by normal means a
1550   // safepoint is possible and we would have trouble GC'ing the compiled args.
1551   RegisterMap reg_map(current,
1552                       RegisterMap::UpdateMap::skip,
1553                       RegisterMap::ProcessFrames::include,
1554                       RegisterMap::WalkContinuation::skip);
1555   frame stub_frame = current->last_frame();
1556   assert(stub_frame.is_runtime_frame(), "sanity check");
1557   frame caller_frame = stub_frame.sender(&reg_map);
1558 
1559   if (caller_frame.is_interpreted_frame() ||
1560       caller_frame.is_entry_frame() ||
1561       caller_frame.is_upcall_stub_frame()) {
1562     Method* callee = current->callee_target();
1563     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1564     current->set_vm_result_metadata(callee);
1565     current->set_callee_target(nullptr);
1566     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1567       // Bypass class initialization checks in c2i when caller is in native.
1568       // JNI calls to static methods don't have class initialization checks.
1569       // Fast class initialization checks are present in c2i adapters and call into
1570       // SharedRuntime::handle_wrong_method() on the slow path.
1571       //
1572       // JVM upcalls may land here as well, but there's a proper check present in
1573       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1574       // so bypassing it in c2i adapter is benign.
1575       return callee->get_c2i_no_clinit_check_entry();
1576     } else {
1577       if (caller_frame.is_interpreted_frame()) {
1578         return callee->get_c2i_inline_entry();
1579       } else {
1580         return callee->get_c2i_entry();
1581       }
1582     }
1583   }
1584 
1585   // Must be compiled to compiled path which is safe to stackwalk
1586   methodHandle callee_method;
1587   bool is_static_call = false;
1588   bool is_optimized = false;
1589   bool caller_does_not_scalarize = false;
1590   JRT_BLOCK
1591     // Force resolving of caller (if we called from compiled frame)
1592     callee_method = SharedRuntime::reresolve_call_site(is_optimized, caller_does_not_scalarize, CHECK_NULL);
1593     current->set_vm_result_metadata(callee_method());
1594   JRT_BLOCK_END
1595   // return compiled code entry point after potential safepoints
1596   return get_resolved_entry(current, callee_method, callee_method->is_static(), is_optimized, caller_does_not_scalarize);
1597 JRT_END
1598 
1599 // Handle abstract method call
1600 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1601   // Verbose error message for AbstractMethodError.
1602   // Get the called method from the invoke bytecode.
1603   vframeStream vfst(current, true);
1604   assert(!vfst.at_end(), "Java frame must exist");
1605   methodHandle caller(current, vfst.method());
1606   Bytecode_invoke invoke(caller, vfst.bci());
1607   DEBUG_ONLY( invoke.verify(); )
1608 
1609   // Find the compiled caller frame.
1610   RegisterMap reg_map(current,
1611                       RegisterMap::UpdateMap::include,
1612                       RegisterMap::ProcessFrames::include,
1613                       RegisterMap::WalkContinuation::skip);
1614   frame stubFrame = current->last_frame();
1615   assert(stubFrame.is_runtime_frame(), "must be");
1616   frame callerFrame = stubFrame.sender(&reg_map);
1617   assert(callerFrame.is_compiled_frame(), "must be");
1618 
1619   // Install exception and return forward entry.
1620   address res = SharedRuntime::throw_AbstractMethodError_entry();
1621   JRT_BLOCK
1622     methodHandle callee(current, invoke.static_target(current));
1623     if (!callee.is_null()) {
1624       oop recv = callerFrame.retrieve_receiver(&reg_map);
1625       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1626       res = StubRoutines::forward_exception_entry();
1627       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1628     }
1629   JRT_BLOCK_END
1630   return res;
1631 JRT_END
1632 
1633 // return the appropriate verified code entry if interp_only_mode is not set for the current thread;
1634 // otherwise return the c2i entry.
1635 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method,
1636                                           bool is_static_call, bool is_optimized, bool caller_does_not_scalarize) {
1637   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1638     // In interp_only_mode we need to go to the interpreted entry
1639     // The c2i won't patch in this mode -- see fixup_callers_callsite
1640     return callee_method->get_c2i_entry();
1641   }
1642 
1643   if (caller_does_not_scalarize) {
1644     assert(callee_method->verified_inline_code_entry() != nullptr, "Jump to zero!");
1645     return callee_method->verified_inline_code_entry();
1646   } else if (is_static_call || is_optimized) {
1647     assert(callee_method->verified_code_entry() != nullptr, "Jump to zero!");
1648     return callee_method->verified_code_entry();
1649   } else {
1650     assert(callee_method->verified_inline_ro_code_entry() != nullptr, "Jump to zero!");
1651     return callee_method->verified_inline_ro_code_entry();
1652   }
1653 }
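// Entry selection summary for get_resolved_entry() above (editor's note; restates the logic):
//   interp_only_mode and not a special native intrinsic -> c2i entry
//   caller does not scalarize                           -> verified inline code entry
//   static call or optimized virtual call               -> verified code entry
//   otherwise                                           -> verified inline ro code entry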
1654 
1655 // resolve a static call and patch code
1656 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
1657   methodHandle callee_method;
1658   bool caller_does_not_scalarize = false;
1659   bool enter_special = false;
1660   JRT_BLOCK
1661     callee_method = SharedRuntime::resolve_helper(false, false, caller_does_not_scalarize, CHECK_NULL);
1662     current->set_vm_result_metadata(callee_method());
1663   JRT_BLOCK_END
1664   // return compiled code entry point after potential safepoints
1665   return get_resolved_entry(current, callee_method, true, false, caller_does_not_scalarize);
1666 JRT_END
1667 
1668 // resolve virtual call and update inline cache to monomorphic
1669 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1670   methodHandle callee_method;
1671   bool caller_does_not_scalarize = false;
1672   JRT_BLOCK
1673     callee_method = SharedRuntime::resolve_helper(true, false, caller_does_not_scalarize, CHECK_NULL);
1674     current->set_vm_result_metadata(callee_method());
1675   JRT_BLOCK_END
1676   // return compiled code entry point after potential safepoints
1677   return get_resolved_entry(current, callee_method, false, false, caller_does_not_scalarize);
1678 JRT_END
1679 
1680 
1681 // Resolve a virtual call that can be statically bound (e.g., always
1682 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1683 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1684   methodHandle callee_method;
1685   bool caller_does_not_scalarize = false;
1686   JRT_BLOCK
1687     callee_method = SharedRuntime::resolve_helper(true, true, caller_does_not_scalarize, CHECK_NULL);
1688     current->set_vm_result_metadata(callee_method());
1689   JRT_BLOCK_END
1690   // return compiled code entry point after potential safepoints
1691   return get_resolved_entry(current, callee_method, false, true, caller_does_not_scalarize);
1692 JRT_END
1693 
1694 
1695 
1696 methodHandle SharedRuntime::handle_ic_miss_helper(bool& caller_does_not_scalarize, TRAPS) {
1697   JavaThread* current = THREAD;
1698   ResourceMark rm(current);
1699   CallInfo call_info;
1700   Bytecodes::Code bc;
1701 
1702   // receiver is null for static calls. An exception is thrown for null
1703   // receivers for non-static calls
1704   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1705 
1706   methodHandle callee_method(current, call_info.selected_method());
1707 
1708 #ifndef PRODUCT
1709   AtomicAccess::inc(&_ic_miss_ctr);
1710 
1711   // Statistics & Tracing
1712   if (TraceCallFixup) {
1713     ResourceMark rm(current);
1714     tty->print("IC miss (%s) %s call to", Bytecodes::name(bc), (caller_does_not_scalarize) ? "non-scalar" : "");
1715     callee_method->print_short_name(tty);
1716     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1717   }
1718 
1719   if (ICMissHistogram) {
1720     MutexLocker m(VMStatistic_lock);
1721     RegisterMap reg_map(current,
1722                         RegisterMap::UpdateMap::skip,
1723                         RegisterMap::ProcessFrames::include,
1724                         RegisterMap::WalkContinuation::skip);
1725     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1726     // produce statistics under the lock
1727     trace_ic_miss(f.pc());
1728   }
1729 #endif
1730 
1731   // install an event collector so that when a vtable stub is created the
1732   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1733   // event can't be posted when the stub is created as locks are held
1734   // - instead the event will be deferred until the event collector goes
1735   // out of scope.
1736   JvmtiDynamicCodeEventCollector event_collector;
1737 
1738   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1739   RegisterMap reg_map(current,
1740                       RegisterMap::UpdateMap::skip,
1741                       RegisterMap::ProcessFrames::include,
1742                       RegisterMap::WalkContinuation::skip);
1743   frame caller_frame = current->last_frame().sender(&reg_map);
1744   CodeBlob* cb = caller_frame.cb();
1745   nmethod* caller_nm = cb->as_nmethod();
1746   // Calls via mismatching methods are always non-scalarized
1747   if (caller_nm->is_compiled_by_c1() || call_info.resolved_method()->mismatch()) {
1748     caller_does_not_scalarize = true;
1749   }
1750 
1751   CompiledICLocker ml(caller_nm);
1752   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1753   inline_cache->update(&call_info, receiver()->klass(), caller_does_not_scalarize);
1754 
1755   return callee_method;
1756 }
1757 
1758 //
1759 // Resets a call-site in compiled code so it will get resolved again.
1760 // This routine handles virtual call sites, optimized virtual call
1761 // sites, and static call sites. Typically used to change a call site's
1762 // destination from compiled to interpreted.
1763 //
1764 methodHandle SharedRuntime::reresolve_call_site(bool& is_optimized, bool& caller_does_not_scalarize, TRAPS) {
1765   JavaThread* current = THREAD;
1766   ResourceMark rm(current);
1767   RegisterMap reg_map(current,
1768                       RegisterMap::UpdateMap::skip,
1769                       RegisterMap::ProcessFrames::include,
1770                       RegisterMap::WalkContinuation::skip);
1771   frame stub_frame = current->last_frame();
1772   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1773   frame caller = stub_frame.sender(&reg_map);
1774   if (caller.is_compiled_frame()) {
1775     caller_does_not_scalarize = caller.cb()->as_nmethod()->is_compiled_by_c1();
1776   }
1777   assert(!caller.is_interpreted_frame(), "must be compiled");
1778 
1779   // If the frame isn't a live compiled frame (i.e. deoptimized by the time we get here), no IC clearing needs to be done
1780   // for the caller. However, when the caller is C2-compiled and the callee is a C1- or C2-compiled method, we still
1781   // need to figure out whether it was an optimized virtual call with an inline type receiver. Otherwise, we end up
1782   // using the wrong method entry point and accidentally skip the buffering of the receiver.
1783   methodHandle callee_method = find_callee_method(caller_does_not_scalarize, CHECK_(methodHandle()));
1784   const bool caller_is_compiled_and_not_deoptimized = caller.is_compiled_frame() && !caller.is_deoptimized_frame();
1785   const bool caller_is_continuation_enter_intrinsic =
1786     caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic();
1787   const bool do_IC_clearing = caller_is_compiled_and_not_deoptimized || caller_is_continuation_enter_intrinsic;
1788 
1789   const bool callee_compiled_with_scalarized_receiver = callee_method->has_compiled_code() &&
1790                                                         !callee_method()->is_static() &&
1791                                                         callee_method()->is_scalarized_arg(0);
1792   const bool compute_is_optimized = !caller_does_not_scalarize && callee_compiled_with_scalarized_receiver;
1793 
1794   if (do_IC_clearing || compute_is_optimized) {
1795     address pc = caller.pc();
1796 
1797     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1798     assert(caller_nm != nullptr, "did not find caller nmethod");
1799 
1800     // Default call_addr is the location of the "basic" call.
1801     // Determine the address of the call we are re-resolving. With
1802     // Inline Caches we will always find a recognizable call.
1803     // With Inline Caches disabled we may or may not find a
1804     // recognizable call. We will always find a call for static
1805     // calls and for optimized virtual calls. For vanilla virtual
1806     // calls it depends on the state of the UseInlineCaches switch.
1807     //
1808     // With Inline Caches disabled we can get here for a virtual call
1809     // for two reasons:
1810     //   1 - calling an abstract method. The vtable for abstract methods
1811     //       will run us thru handle_wrong_method and we will eventually
1812     //       end up in the interpreter to throw the ame.
1813     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1814     //       call and between the time we fetch the entry address and
1815     //       we jump to it the target gets deoptimized. Similar to 1
1816     //       we will wind up in the interpreter (thru a c2i with c2).
1817     //
1818     CompiledICLocker ml(caller_nm);
1819     address call_addr = caller_nm->call_instruction_address(pc);
1820 
1821     if (call_addr != nullptr) {
1822       // On x86 the logic for finding a call instruction blindly checks for a call opcode 5
1823       // bytes back in the instruction stream, so we must also check for reloc info.
1824       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1825       bool ret = iter.next(); // Get item
1826       if (ret) {
1827         is_optimized = false;
1828         switch (iter.type()) {
1829           case relocInfo::static_call_type:
1830             assert(callee_method->is_static(), "must be");
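            // intentional fall-through: a static call is re-resolved the same way as an
            // optimized virtual call; only the resulting is_optimized value differs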
1831           case relocInfo::opt_virtual_call_type: {
1832             is_optimized = (iter.type() == relocInfo::opt_virtual_call_type);
1833             if (do_IC_clearing) {
1834               CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1835               cdc->set_to_clean();
1836             }
1837             break;
1838           }

1839           case relocInfo::virtual_call_type: {
1840             if (do_IC_clearing) {
1841               // compiled, dispatched call (which used to call an interpreted method)
1842               CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1843               inline_cache->set_to_clean();
1844             }
1845             break;
1846           }
1847           default:
1848             break;
1849         }
1850       }
1851     }
1852   }
1853 
1854 #ifndef PRODUCT
1855   AtomicAccess::inc(&_wrong_method_ctr);
1856 
1857   if (TraceCallFixup) {
1858     ResourceMark rm(current);
1859     tty->print("handle_wrong_method reresolving %s call to", (caller_does_not_scalarize) ? "non-scalar" : "");
1860     callee_method->print_short_name(tty);
1861     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1862   }
1863 #endif
1864 
1865   return callee_method;
1866 }
1867 
1868 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1869   // The faulting unsafe accesses should be changed to throw the error
1870   // synchronously instead. Meanwhile the faulting instruction will be
1871   // skipped over (effectively turning it into a no-op) and an
1872   // asynchronous exception will be raised which the thread will
1873   // handle at a later point. If the instruction is a load it will
1874   // return garbage.
1875 
1876   // Request an async exception.
1877   thread->set_pending_unsafe_access_error();
1878 
1879   // Return address of next instruction to execute.

2045   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
2046 
2047   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
2048   if (message == nullptr) {
2049     // Shouldn't happen, but don't cause even more problems if it does
2050     message = const_cast<char*>(caster_klass->external_name());
2051   } else {
2052     jio_snprintf(message,
2053                  msglen,
2054                  "class %s cannot be cast to class %s (%s%s%s)",
2055                  caster_name,
2056                  target_name,
2057                  caster_klass_description,
2058                  klass_separator,
2059                  target_klass_description
2060                  );
2061   }
2062   return message;
2063 }
2064 
2065 char* SharedRuntime::generate_identity_exception_message(JavaThread* current, Klass* klass) {
2066   assert(klass->is_inline_klass(), "Must be a concrete value class");
2067   const char* desc = "Cannot synchronize on an instance of value class ";
2068   const char* className = klass->external_name();
2069   size_t msglen = strlen(desc) + strlen(className) + 1;
2070   char* message = NEW_RESOURCE_ARRAY(char, msglen);
2071   if (nullptr == message) {
2072     // Out of memory: can't create detailed error message
2073     message = const_cast<char*>(klass->external_name());
2074   } else {
2075     jio_snprintf(message, msglen, "%s%s", desc, className);
2076   }
2077   return message;
2078 }
2079 
2080 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
2081   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
2082 JRT_END
2083 
2084 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2085   if (!SafepointSynchronize::is_synchronizing()) {
2086     // Only try quick_enter() if we're not trying to reach a safepoint
2087     // so that the calling thread reaches the safepoint more quickly.
2088     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
2089       return;
2090     }
2091   }
2092   // NO_ASYNC required because an async exception on the state transition destructor
2093   // would leave you with the lock held and it would never be released.
2094   // The normal monitorenter NullPointerException is thrown without acquiring a lock
2095   // and the model is that an exception implies the method failed.
2096   JRT_BLOCK_NO_ASYNC
2097   Handle h_obj(THREAD, obj);
2098   ObjectSynchronizer::enter(h_obj, lock, current);
2099   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2293   tty->print_cr("Note 1: counter updates are not MT-safe.");
2294   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2295   tty->print_cr("        %% in nested categories are relative to their category");
2296   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2297   tty->cr();
2298 
2299   MethodArityHistogram h;
2300 }
2301 #endif
2302 
2303 #ifndef PRODUCT
2304 static int _lookups; // number of calls to lookup
2305 static int _equals;  // number of buckets checked with matching hash
2306 static int _archived_hits; // number of successful lookups in archived table
2307 static int _runtime_hits;  // number of successful lookups in runtime table
2308 #endif
2309 
2310 // A simple wrapper class around the calling convention information
2311 // that allows sharing of adapters for the same calling convention.
2312 class AdapterFingerPrint : public MetaspaceObj {
2313 public:
2314   class Element {
2315   private:
2316     // The highest byte is the type of the argument. The remaining bytes contain the offset of the
2317     // field if it is flattened in the calling convention, or -1 otherwise.
2318     juint _payload;
2319 
2320     static constexpr int offset_bit_width = 24;
2321     static constexpr juint offset_bit_mask = (1 << offset_bit_width) - 1;
2322   public:
2323     Element(BasicType bt, int offset) : _payload((static_cast<juint>(bt) << offset_bit_width) | (juint(offset) & offset_bit_mask)) {
2324       assert(offset >= -1 && offset < jint(offset_bit_mask), "invalid offset %d", offset);
2325     }
2326 
2327     BasicType bt() const {
2328       return static_cast<BasicType>(_payload >> offset_bit_width);
2329     }
2330 
2331     int offset() const {
2332       juint res = _payload & offset_bit_mask;
2333       return res == offset_bit_mask ? -1 : res;
2334     }
2335 
2336     juint hash() const {
2337       return _payload;
2338     }
2339 
2340     bool operator!=(const Element& other) const {
2341       return _payload != other._payload;
2342     }
2343   };
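  // Packing example (illustrative, assuming HotSpot's usual BasicType encoding where
  // T_INT == 10): Element(T_INT, -1) stores 0x0AFFFFFF: type 0x0A in the high byte,
  // and the sentinel offset -1 truncated to 0xFFFFFF in the low 24 bits, which
  // offset() maps back to -1.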
2344 
2345 private:
2346   const bool _has_ro_adapter;
2347   const int _length;
2348 
2349   static int data_offset() { return sizeof(AdapterFingerPrint); }
2350   Element* data_pointer() {
2351     return reinterpret_cast<Element*>(reinterpret_cast<address>(this) + data_offset());
2352   }
2353 
2354   const Element& element_at(int index) {
2355     assert(index < length(), "index %d out of bounds for length %d", index, length());
2356     Element* data = data_pointer();
2357     return data[index];
2358   }
2359 
2360   // Private constructor. Use allocate() to get an instance.
2361   AdapterFingerPrint(const GrowableArray<SigEntry>* sig, bool has_ro_adapter)
2362     : _has_ro_adapter(has_ro_adapter), _length(total_args_passed_in_sig(sig)) {
2363     Element* data = data_pointer();
2364     BasicType prev_bt = T_ILLEGAL;
2365     int vt_count = 0;

2366     for (int index = 0; index < _length; index++) {
2367       const SigEntry& sig_entry = sig->at(index);
2368       BasicType bt = sig_entry._bt;
2369       if (bt == T_METADATA) {
2370         // Found start of inline type in signature
2371         assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
2372         vt_count++;
2373       } else if (bt == T_VOID && prev_bt != T_LONG && prev_bt != T_DOUBLE) {
2374         // Found end of inline type in signature
2375         assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
2376         vt_count--;
2377         assert(vt_count >= 0, "invalid vt_count");
2378       } else if (vt_count == 0) {
2379         // Widen fields that are not part of a scalarized inline type argument
2380         assert(sig_entry._offset == -1, "invalid offset for argument that is not a flattened field %d", sig_entry._offset);
2381         bt = adapter_encoding(bt);
2382       }
2383 
2384       ::new(&data[index]) Element(bt, sig_entry._offset);
2385       prev_bt = bt;
2386     }
2387     assert(vt_count == 0, "invalid vt_count");
2388   }
2389 
2390   // Call deallocate instead
2391   ~AdapterFingerPrint() {
2392     ShouldNotCallThis();
2393   }
2394 
2395   static int total_args_passed_in_sig(const GrowableArray<SigEntry>* sig) {
2396     return (sig != nullptr) ? sig->length() : 0;
2397   }
2398 
2399   static int compute_size_in_words(int len) {
2400     return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(Element)));
2401   }
2402 
2403   // Remap BasicTypes that are handled equivalently by the adapters.
2404   // These are correct for the current system but someday it might be
2405   // necessary to make this mapping platform dependent.
2406   static BasicType adapter_encoding(BasicType in) {
2407     switch (in) {
2408       case T_BOOLEAN:
2409       case T_BYTE:
2410       case T_SHORT:
2411       case T_CHAR:
2412         // They are all promoted to T_INT in the calling convention
2413         return T_INT;
2414 
2415       case T_OBJECT:
2416       case T_ARRAY:
2417         // In other words, we assume that any register good enough for
2418         // an int or long is good enough for a managed pointer.
2419 #ifdef _LP64
2420         return T_LONG;
2421 #else
2422         return T_INT;
2423 #endif
2424 
2425       case T_INT:
2426       case T_LONG:
2427       case T_FLOAT:
2428       case T_DOUBLE:
2429       case T_VOID:
2430         return in;
2431 
2432       default:
2433         ShouldNotReachHere();
2434         return T_CONFLICT;
2435     }
2436   }
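  // For example, on a 64-bit build the descriptors (Ljava/lang/Object;)V and ([I)V produce
  // the same fingerprint element: both T_OBJECT and T_ARRAY widen to T_LONG, so the two
  // methods can share one adapter.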
2437 
2438   void* operator new(size_t size, size_t fp_size) throw() {
2439     assert(fp_size >= size, "sanity check");
2440     void* p = AllocateHeap(fp_size, mtCode);
2441     memset(p, 0, fp_size);
2442     return p;
2443   }
2444 
2445 public:
2446   template<typename Function>
2447   void iterate_args(Function function) {
2448     for (int i = 0; i < length(); i++) {
2449       function(element_at(i));









2450     }
2451   }
2452 
2453   static AdapterFingerPrint* allocate(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2454     int len = total_args_passed_in_sig(sig);

2455     int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2456     AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(sig, has_ro_adapter);
2457     assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2458     return afp;
2459   }
2460 
2461   static void deallocate(AdapterFingerPrint* fp) {
2462     FreeHeap(fp);
2463   }
2464 
2465   bool has_ro_adapter() const {
2466     return _has_ro_adapter;

2467   }
2468 
2469   int length() const {
2470     return _length;
2471   }
2472 
2473   unsigned int compute_hash() {
2474     int hash = 0;
2475     for (int i = 0; i < length(); i++) {
2476       const Element& v = element_at(i);
2477       // Mix shifts and XOR with an arithmetic add (+3) to improve hash distribution
2478       hash = ((hash << 8) ^ v.hash() ^ (hash >> 5)) + 3;
2479     }
2480     return (unsigned int)hash;
2481   }
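  // The hash is order sensitive: each step computes
  //   hash = ((hash << 8) ^ element ^ (hash >> 5)) + 3
  // so swapping two argument types yields a different fingerprint hash.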
2482 
2483   const char* as_string() {
2484     stringStream st;
2485     st.print("{");
2486     if (_has_ro_adapter) {
2487       st.print("has_ro_adapter");
2488     } else {
2489       st.print("no_ro_adapter");
2490     }
2491     for (int i = 0; i < length(); i++) {
2492       st.print(", ");
2493       const Element& elem = element_at(i);
2494       st.print("{%s, %d}", type2name(elem.bt()), elem.offset());
2495     }
2496     st.print("}");
2497     return st.as_string();
2498   }
2499 
2500   const char* as_basic_args_string() {
2501     stringStream st;
2502     bool long_prev = false;
2503     iterate_args([&] (const Element& arg) {
2504       if (long_prev) {
2505         long_prev = false;
2506         if (arg.bt() == T_VOID) {
2507           st.print("J");
2508         } else {
2509           st.print("L");
2510         }
2511       }
2512       if (arg.bt() == T_LONG) {
2513         long_prev = true;
2514       } else if (arg.bt() != T_VOID) {
2515         st.print("%c", type2char(arg.bt()));



2516       }
2517     });
2518     if (long_prev) {
2519       st.print("L");
2520     }
2521     return st.as_string();
2522   }
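  // For example (a sketch, assuming a 64-bit build): a signature taking an object, an int,
  // and a long is fingerprinted as T_LONG (the widened object), T_INT, T_LONG, T_VOID and
  // prints as "LIJ" -- a T_LONG element counts as 'J' only when a T_VOID follows it.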
2523 



















































2524   bool equals(AdapterFingerPrint* other) {
2525     if (other->_has_ro_adapter != _has_ro_adapter) {
2526       return false;
2527     } else if (other->_length != _length) {
2528       return false;
2529     } else {
2530       for (int i = 0; i < _length; i++) {
2531         if (element_at(i) != other->element_at(i)) {
2532           return false;
2533         }
2534       }
2535     }
2536     return true;
2537   }
2538 
2539   // methods required by virtue of being a MetaspaceObj
2540   void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2541   int size() const { return compute_size_in_words(_length); }
2542   MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2543 
2544   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2545     NOT_PRODUCT(_equals++);
2546     return fp1->equals(fp2);
2547   }
2548 
2549   static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2550     return fp->compute_hash();
2551   }

2554 #if INCLUDE_CDS
2555 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2556   return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2557 }
2558 
2559 class ArchivedAdapterTable : public OffsetCompactHashtable<
2560   AdapterFingerPrint*,
2561   AdapterHandlerEntry*,
2562   adapter_fp_equals_compact_hashtable_entry> {};
2563 #endif // INCLUDE_CDS
2564 
2565 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2566 using AdapterHandlerTable = HashTable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2567                   AnyObj::C_HEAP, mtCode,
2568                   AdapterFingerPrint::compute_hash,
2569                   AdapterFingerPrint::equals>;
2570 static AdapterHandlerTable* _adapter_handler_table;
2571 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2572 
2573 // Find an entry with the same fingerprint, if one exists
2574 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(const GrowableArray<SigEntry>* sig, bool has_ro_adapter) {
2575   NOT_PRODUCT(_lookups++);
2576   assert_lock_strong(AdapterHandlerLibrary_lock);
2577   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(sig, has_ro_adapter);
2578   AdapterHandlerEntry* entry = nullptr;
2579 #if INCLUDE_CDS
2580   // If we are building the archive, the archived adapter table is not
2581   // valid yet, so we use only the entries added to the runtime table.
2582   if (AOTCodeCache::is_using_adapter()) {
2583     // Search the archived table first. It is read-only, so it can be searched without a lock.
2584     entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2585 #ifndef PRODUCT
2586     if (entry != nullptr) {
2587       _archived_hits++;
2588     }
2589 #endif
2590   }
2591 #endif // INCLUDE_CDS
2592   if (entry == nullptr) {
2593     assert_lock_strong(AdapterHandlerLibrary_lock);
2594     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2595     if (entry_p != nullptr) {
2596       entry = *entry_p;
2597       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",

2614   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2615   ts.print(tty, "AdapterHandlerTable");
2616   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2617                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2618   int total_hits = _archived_hits + _runtime_hits;
2619   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2620                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2621 }
2622 #endif
2623 
2624 // ---------------------------------------------------------------------------
2625 // Implementation of AdapterHandlerLibrary
2626 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2627 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2628 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2629 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2630 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2631 #if INCLUDE_CDS
2632 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2633 #endif // INCLUDE_CDS
2634 static const int AdapterHandlerLibrary_size = 48*K;
2635 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2636 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2637 
2638 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2639   assert(_buffer != nullptr, "should be initialized");
2640   return _buffer;
2641 }
2642 
2643 static void post_adapter_creation(const AdapterHandlerEntry* entry) {
2644   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2645     AdapterBlob* adapter_blob = entry->adapter_blob();
2646     char blob_id[256];
2647     jio_snprintf(blob_id,
2648                  sizeof(blob_id),
2649                  "%s(%s)",
2650                  adapter_blob->name(),
2651                  entry->fingerprint()->as_string());
2652     if (Forte::is_enabled()) {
2653       Forte::register_stub(blob_id, adapter_blob->content_begin(), adapter_blob->content_end());
2654     }

2662 void AdapterHandlerLibrary::initialize() {
2663   {
2664     ResourceMark rm;
2665     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2666     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2667   }
2668 
2669 #if INCLUDE_CDS
2670   // Link adapters in AOT Cache to their code in AOT Code Cache
2671   if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2672     link_aot_adapters();
2673     lookup_simple_adapters();
2674     return;
2675   }
2676 #endif // INCLUDE_CDS
2677 
2678   ResourceMark rm;
2679   {
2680     MutexLocker mu(AdapterHandlerLibrary_lock);
2681 
2682     CompiledEntrySignature no_args;
2683     no_args.compute_calling_conventions();
2684     _no_arg_handler = create_adapter(no_args, true);
2685 
2686     CompiledEntrySignature obj_args;
2687     SigEntry::add_entry(obj_args.sig(), T_OBJECT);
2688     obj_args.compute_calling_conventions();
2689     _obj_arg_handler = create_adapter(obj_args, true);
2690 
2691     CompiledEntrySignature int_args;
2692     SigEntry::add_entry(int_args.sig(), T_INT);
2693     int_args.compute_calling_conventions();
2694     _int_arg_handler = create_adapter(int_args, true);
2695 
2696     CompiledEntrySignature obj_int_args;
2697     SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
2698     SigEntry::add_entry(obj_int_args.sig(), T_INT);
2699     obj_int_args.compute_calling_conventions();
2700     _obj_int_arg_handler = create_adapter(obj_int_args, true);
2701 
2702     CompiledEntrySignature obj_obj_args;
2703     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2704     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2705     obj_obj_args.compute_calling_conventions();
2706     _obj_obj_arg_handler = create_adapter(obj_obj_args, true);
2707 
2708     // We should always get an entry back, but on Zero there is
2709     // no associated blob.
2710     assert(_no_arg_handler != nullptr &&
2711            _obj_arg_handler != nullptr &&
2712            _int_arg_handler != nullptr &&
2713            _obj_int_arg_handler != nullptr &&
2714            _obj_obj_arg_handler != nullptr, "Initial adapter handlers must be properly created");
2715   }
2716 
2717   // Outside of the lock
2718 #ifndef ZERO
2719   // no blobs to register when we are on Zero
2720   post_adapter_creation(_no_arg_handler);
2721   post_adapter_creation(_obj_arg_handler);
2722   post_adapter_creation(_int_arg_handler);
2723   post_adapter_creation(_obj_int_arg_handler);
2724   post_adapter_creation(_obj_obj_arg_handler);
2725 #endif // ZERO
2726 }
2727 
2728 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2729   uint id = (uint)AtomicAccess::add((int*)&_id_counter, 1);
2730   assert(id > 0, "we can never overflow because AOT cache cannot contain more than 2^32 methods");
2731   return AdapterHandlerEntry::allocate(id, fingerprint);
2732 }
2733 
2734 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2735   int total_args_passed = method->size_of_parameters(); // All args on stack
2736   if (total_args_passed == 0) {
2737     return _no_arg_handler;
2738   } else if (total_args_passed == 1) {
2739     if (!method->is_static()) {
2740       if (InlineTypePassFieldsAsArgs && method->method_holder()->is_inline_klass()) {
2741         return nullptr;
2742       }
2743       return _obj_arg_handler;
2744     }
2745     switch (method->signature()->char_at(1)) {
2746       case JVM_SIGNATURE_CLASS: {
2747         if (InlineTypePassFieldsAsArgs) {
2748           SignatureStream ss(method->signature());
2749           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2750           if (vk != nullptr) {
2751             return nullptr;
2752           }
2753         }
2754         return _obj_arg_handler;
2755       }
2756       case JVM_SIGNATURE_ARRAY:
2757         return _obj_arg_handler;
2758       case JVM_SIGNATURE_INT:
2759       case JVM_SIGNATURE_BOOLEAN:
2760       case JVM_SIGNATURE_CHAR:
2761       case JVM_SIGNATURE_BYTE:
2762       case JVM_SIGNATURE_SHORT:
2763         return _int_arg_handler;
2764     }
2765   } else if (total_args_passed == 2 &&
2766              !method->is_static() && (!InlineTypePassFieldsAsArgs || !method->method_holder()->is_inline_klass())) {
2767     switch (method->signature()->char_at(1)) {
2768       case JVM_SIGNATURE_CLASS: {
2769         if (InlineTypePassFieldsAsArgs) {
2770           SignatureStream ss(method->signature());
2771           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2772           if (vk != nullptr) {
2773             return nullptr;
2774           }
2775         }
2776         return _obj_obj_arg_handler;
2777       }
2778       case JVM_SIGNATURE_ARRAY:
2779         return _obj_obj_arg_handler;
2780       case JVM_SIGNATURE_INT:
2781       case JVM_SIGNATURE_BOOLEAN:
2782       case JVM_SIGNATURE_CHAR:
2783       case JVM_SIGNATURE_BYTE:
2784       case JVM_SIGNATURE_SHORT:
2785         return _obj_int_arg_handler;
2786     }
2787   }
2788   return nullptr;
2789 }
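// For example (hypothetical methods with ordinary, non-inline holders): a static method with
// descriptor (I)V maps to _int_arg_handler; a virtual method with descriptor ()V has one
// argument (the receiver) and maps to _obj_arg_handler; a virtual (I)V maps to
// _obj_int_arg_handler. Anything else falls through to the generic lookup.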
2790 
2791 CompiledEntrySignature::CompiledEntrySignature(Method* method) :
2792   _method(method), _num_inline_args(0), _has_inline_recv(false),
2793   _regs(nullptr), _regs_cc(nullptr), _regs_cc_ro(nullptr),
2794   _args_on_stack(0), _args_on_stack_cc(0), _args_on_stack_cc_ro(0),
2795   _c1_needs_stack_repair(false), _c2_needs_stack_repair(false), _supers(nullptr) {
2796   _sig = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2797   _sig_cc = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2798   _sig_cc_ro = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2799 }
2800 
2801 // See if we can save space by sharing the same entry for VIEP and VIEP(RO),
2802 // or the same entry for VEP and VIEP(RO).
2803 CodeOffsets::Entries CompiledEntrySignature::c1_inline_ro_entry_type() const {
2804   if (!has_scalarized_args()) {
2805     // VEP/VIEP/VIEP(RO) all share the same entry. There's no packing.
2806     return CodeOffsets::Verified_Entry;
2807   }
2808   if (_method->is_static()) {
2809     // Static methods don't need VIEP(RO)
2810     return CodeOffsets::Verified_Entry;
2811   }
2812 
2813   if (has_inline_recv()) {
2814     if (num_inline_args() == 1) {
2815       // Share same entry for VIEP and VIEP(RO).
2816       // This is quite common: we have an instance method in an InlineKlass that has
2817       // no inline type args other than <this>.
2818       return CodeOffsets::Verified_Inline_Entry;
2819     } else {
2820       assert(num_inline_args() > 1, "must be");
2821       // No sharing:
2822       //   VIEP(RO) -- <this> is passed as object
2823       //   VEP      -- <this> is passed as fields
2824       return CodeOffsets::Verified_Inline_Entry_RO;
2825     }

2826   }
2827 
2828   // <this> is not an inline type (the static method case returned above)
2829   if (args_on_stack_cc() != args_on_stack_cc_ro()) {
2830     // No sharing:
2831     // Some arguments are passed on the stack, and we have inserted reserved entries
2832     // into the VEP, but we never insert reserved entries into the VIEP(RO).
2833     return CodeOffsets::Verified_Inline_Entry_RO;
2834   } else {
2835     // Share same entry for VEP and VIEP(RO).
2836     return CodeOffsets::Verified_Entry;
2837   }
2838 }
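// Summary of c1_inline_ro_entry_type():
//   no scalarized args, or static method         -> VEP == VIEP == VIEP(RO)
//   inline receiver, and it is the only one      -> VIEP == VIEP(RO)
//   inline receiver, more inline args            -> separate VIEP(RO)
//   no inline receiver, stack layouts differ     -> separate VIEP(RO)
//   no inline receiver, same stack layout        -> VEP == VIEP(RO)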
2839 
2840 // Returns all super methods (transitive) in classes and interfaces that are overridden by the current method.
2841 GrowableArray<Method*>* CompiledEntrySignature::get_supers() {
2842   if (_supers != nullptr) {
2843     return _supers;
2844   }
2845   _supers = new GrowableArray<Method*>();
2846   // Skip private, static, and <init> methods
2847   if (_method->is_private() || _method->is_static() || _method->is_object_constructor()) {
2848     return _supers;
2849   }
2850   Symbol* name = _method->name();
2851   Symbol* signature = _method->signature();
2852   const Klass* holder = _method->method_holder()->super();
2853   Symbol* holder_name = holder->name();
2854   ThreadInVMfromUnknown tiv;
2855   JavaThread* current = JavaThread::current();
2856   HandleMark hm(current);
2857   Handle loader(current, _method->method_holder()->class_loader());
2858 
2859   // Walk up the class hierarchy and search for super methods
2860   while (holder != nullptr) {
2861     Method* super_method = holder->lookup_method(name, signature);
2862     if (super_method == nullptr) {
2863       break;
2864     }
2865     if (!super_method->is_static() && !super_method->is_private() &&
2866         (!super_method->is_package_private() ||
2867          super_method->method_holder()->is_same_class_package(loader(), holder_name))) {
2868       _supers->push(super_method);
2869     }
2870     holder = super_method->method_holder()->super();
2871   }
2872   // Search interfaces for super methods
2873   Array<InstanceKlass*>* interfaces = _method->method_holder()->transitive_interfaces();
2874   for (int i = 0; i < interfaces->length(); ++i) {
2875     Method* m = interfaces->at(i)->lookup_method(name, signature);
2876     if (m != nullptr && !m->is_static() && m->is_public()) {
2877       _supers->push(m);
2878     }
2879   }
2880   return _supers;
2881 }
2882 
2883 // Iterate over arguments and compute scalarized and non-scalarized signatures
2884 void CompiledEntrySignature::compute_calling_conventions(bool init) {
2885   bool has_scalarized = false;
2886   if (_method != nullptr) {
2887     InstanceKlass* holder = _method->method_holder();
2888     int arg_num = 0;
2889     if (!_method->is_static()) {
2890       // We shouldn't scalarize 'this' in a value class constructor
2891       if (holder->is_inline_klass() && InlineKlass::cast(holder)->can_be_passed_as_fields() && !_method->is_object_constructor() &&
2892           (init || _method->is_scalarized_arg(arg_num))) {
2893         _sig_cc->appendAll(InlineKlass::cast(holder)->extended_sig());
2894         has_scalarized = true;
2895         _has_inline_recv = true;
2896         _num_inline_args++;
2897       } else {
2898         SigEntry::add_entry(_sig_cc, T_OBJECT, holder->name());
2899       }
2900       SigEntry::add_entry(_sig, T_OBJECT, holder->name());
2901       SigEntry::add_entry(_sig_cc_ro, T_OBJECT, holder->name());
2902       arg_num++;
2903     }
2904     for (SignatureStream ss(_method->signature()); !ss.at_return_type(); ss.next()) {
2905       BasicType bt = ss.type();
2906       if (bt == T_OBJECT) {
2907         InlineKlass* vk = ss.as_inline_klass(holder);
2908         if (vk != nullptr && vk->can_be_passed_as_fields() && (init || _method->is_scalarized_arg(arg_num))) {
2909           // Check for a calling convention mismatch with super method(s)
2910           bool scalar_super = false;
2911           bool non_scalar_super = false;
2912           GrowableArray<Method*>* supers = get_supers();
2913           for (int i = 0; i < supers->length(); ++i) {
2914             Method* super_method = supers->at(i);
2915             if (AOTCodeCache::is_using_adapter() && super_method->adapter()->get_sig_cc() == nullptr) {
2916               // Scalarized calling conventions are accessed through the method's adapter. Adapters
2917               // loaded from the AOT code cache are not regenerated at runtime, so the scalarized
2918               // signature they would normally carry must be recomputed and attached here.
2919               CompiledEntrySignature ces(super_method);
2920               ces.compute_calling_conventions();
2921               if (ces.has_scalarized_args()) {
2922                 // Save a C heap allocated version of the scalarized signature and store it in the adapter
2923                 GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
2924                 heap_sig->appendAll(ces.sig_cc());
2925                 super_method->adapter()->set_sig_cc(heap_sig);
2926               }
2927             }
2928             if (super_method->is_scalarized_arg(arg_num)) {
2929               scalar_super = true;
2930             } else {
2931               non_scalar_super = true;
2932             }
2933           }
2934 #ifdef ASSERT
2935           // Randomly enable below code paths for stress testing
2936           bool stress = init && StressCallingConvention;
2937           if (stress && (os::random() & 1) == 1) {
2938             non_scalar_super = true;
2939             if ((os::random() & 1) == 1) {
2940               scalar_super = true;
2941             }
2942           }
2943 #endif
2944           if (non_scalar_super) {
2945             // Found a super method with a non-scalarized argument. Fall back to the non-scalarized calling convention.
2946             if (scalar_super) {
2947               // Found non-scalar *and* scalar super methods. We can't handle both.
2948               // Mark the scalar method as mismatch and re-compile call sites to use non-scalarized calling convention.
2949               for (int i = 0; i < supers->length(); ++i) {
2950                 Method* super_method = supers->at(i);
2951                 if (super_method->is_scalarized_arg(arg_num) DEBUG_ONLY(|| (stress && (os::random() & 1) == 1))) {
2952                   super_method->set_mismatch();
2953                   MutexLocker ml(Compile_lock, Mutex::_safepoint_check_flag);
2954                   JavaThread* thread = JavaThread::current();
2955                   HandleMark hm(thread);
2956                   methodHandle mh(thread, super_method);
2957                   DeoptimizationScope deopt_scope;
2958                   CodeCache::mark_for_deoptimization(&deopt_scope, mh());
2959                   deopt_scope.deoptimize_marked();
2960                 }
2961               }
2962             }
2963             // Fall back to non-scalarized calling convention
2964             SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2965             SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2966           } else {
2967             _num_inline_args++;
2968             has_scalarized = true;
2969             int last = _sig_cc->length();
2970             int last_ro = _sig_cc_ro->length();
2971             _sig_cc->appendAll(vk->extended_sig());
2972             _sig_cc_ro->appendAll(vk->extended_sig());
2973             if (bt == T_OBJECT) {
2974               // Nullable inline type argument, insert InlineTypeNode::NullMarker field right after T_METADATA delimiter
2975               _sig_cc->insert_before(last+1, SigEntry(T_BOOLEAN, -1, nullptr, true));
2976               _sig_cc_ro->insert_before(last_ro+1, SigEntry(T_BOOLEAN, -1, nullptr, true));
2977             }
2978           }
2979         } else {
2980           SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2981           SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2982         }
2983         bt = T_OBJECT;
2984       } else {
2985         SigEntry::add_entry(_sig_cc, ss.type(), ss.as_symbol());
2986         SigEntry::add_entry(_sig_cc_ro, ss.type(), ss.as_symbol());
2987       }
2988       SigEntry::add_entry(_sig, bt, ss.as_symbol());
2989       if (bt != T_VOID) {
2990         arg_num++;
2991       }
2992     }
2993   }
2994 
2995   // Compute the non-scalarized calling convention
2996   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
2997   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
2998 
2999   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
3000   if (has_scalarized && !_method->is_native()) {
3001     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
3002     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
3003 
3004     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
3005     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
3006 
3007     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
3008     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
3009 
3010     // Upper bound on stack arguments to avoid hitting the argument limit and
3011     // bailing out of compilation ("unsupported incoming calling sequence").
3012     // TODO we need a reasonable limit (flag?) here
3013     if (MAX2(_args_on_stack_cc, _args_on_stack_cc_ro) <= 60) {
3014       return; // Success
3015     }
3016   }

3017 
3018   // No scalarized args
3019   _sig_cc = _sig;
3020   _regs_cc = _regs;
3021   _args_on_stack_cc = _args_on_stack;
3022 
3023   _sig_cc_ro = _sig;
3024   _regs_cc_ro = _regs;
3025   _args_on_stack_cc_ro = _args_on_stack;
3026 }
3027 
3028 void CompiledEntrySignature::initialize_from_fingerprint(AdapterFingerPrint* fingerprint) {
3029   _has_inline_recv = fingerprint->has_ro_adapter();
3030 
3031   int value_object_count = 0;
3032   BasicType prev_bt = T_ILLEGAL;
3033   bool has_scalarized_arguments = false;
3034   bool long_prev = false;
3035   int long_prev_offset = -1;
3036 
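  // A T_LONG element in the fingerprint is ambiguous: adapter_encoding() widens
  // T_OBJECT/T_ARRAY to T_LONG on 64-bit builds. Only a trailing T_VOID marks a genuine
  // long slot pair, so decoding is deferred by one element via long_prev.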
3037   fingerprint->iterate_args([&] (const AdapterFingerPrint::Element& arg) {
3038     BasicType bt = arg.bt();
3039     int offset = arg.offset();
3040 
3041     if (long_prev) {
3042       long_prev = false;
3043       BasicType bt_to_add;
3044       if (bt == T_VOID) {
3045         bt_to_add = T_LONG;
3046       } else {
3047         bt_to_add = T_OBJECT;
3048       }
3049       if (value_object_count == 0) {
3050         SigEntry::add_entry(_sig, bt_to_add);
3051       }
3052       SigEntry::add_entry(_sig_cc, bt_to_add, nullptr, long_prev_offset);
3053       SigEntry::add_entry(_sig_cc_ro, bt_to_add, nullptr, long_prev_offset);
3054     }
3055 
3056     switch (bt) {
3057       case T_VOID:
3058         if (prev_bt != T_LONG && prev_bt != T_DOUBLE) {
3059           assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
3060           value_object_count--;
3061           SigEntry::add_entry(_sig_cc, T_VOID, nullptr, offset);
3062           SigEntry::add_entry(_sig_cc_ro, T_VOID, nullptr, offset);
3063           assert(value_object_count >= 0, "invalid value object count");
3064         } else {
3065           // Nothing to add for _sig: we already added an additional T_VOID in add_entry() when adding T_LONG or T_DOUBLE.
3066         }
3067         break;
3068       case T_INT:
3069       case T_FLOAT:
3070       case T_DOUBLE:
3071         if (value_object_count == 0) {
3072           SigEntry::add_entry(_sig, bt);
3073         }
3074         SigEntry::add_entry(_sig_cc, bt, nullptr, offset);
3075         SigEntry::add_entry(_sig_cc_ro, bt, nullptr, offset);
3076         break;
3077       case T_LONG:
3078         long_prev = true;
3079         long_prev_offset = offset;
3080         break;
3081       case T_BOOLEAN:
3082       case T_CHAR:
3083       case T_BYTE:
3084       case T_SHORT:
3085       case T_OBJECT:
3086       case T_ARRAY:
3087         assert(value_object_count > 0, "must be value object field");
3088         SigEntry::add_entry(_sig_cc, bt, nullptr, offset);
3089         SigEntry::add_entry(_sig_cc_ro, bt, nullptr, offset);
3090         break;
3091       case T_METADATA:
3092         assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
3093         if (value_object_count == 0) {
3094           SigEntry::add_entry(_sig, T_OBJECT);
3095         }
3096         SigEntry::add_entry(_sig_cc, T_METADATA, nullptr, offset);
3097         SigEntry::add_entry(_sig_cc_ro, T_METADATA, nullptr, offset);
3098         value_object_count++;
3099         has_scalarized_arguments = true;
3100         break;
3101       default: {
3102         fatal("Unexpected BasicType: %s", basictype_to_str(bt));
3103       }
3104     }
3105     prev_bt = bt;
3106   });
3107 
3108   if (long_prev) {
3109     // If previous bt was T_LONG and we reached the end of the signature, we know that it must be a T_OBJECT.
3110     SigEntry::add_entry(_sig, T_OBJECT);
3111     SigEntry::add_entry(_sig_cc, T_OBJECT);
3112     SigEntry::add_entry(_sig_cc_ro, T_OBJECT);
3113   }
3114   assert(value_object_count == 0, "invalid value object count");
3115 
3116   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
3117   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
3118 
3119   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
3120   if (has_scalarized_arguments) {
3121     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
3122     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
3123 
3124     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
3125     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
3126 
3127     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
3128     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
3129   } else {
3130     // No scalarized args
3131     _sig_cc = _sig;
3132     _regs_cc = _regs;
3133     _args_on_stack_cc = _args_on_stack;
3134 
3135     _sig_cc_ro = _sig;
3136     _regs_cc_ro = _regs;
3137     _args_on_stack_cc_ro = _args_on_stack;
3138   }
3139 
3140 #ifdef ASSERT
3141   {
3142     AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(_sig_cc, _has_inline_recv);
3143     assert(fingerprint->equals(compare_fp), "%s - %s", fingerprint->as_string(), compare_fp->as_string());
3144     AdapterFingerPrint::deallocate(compare_fp);
3145   }
3146 #endif
3147 }
3148 
3149 const char* AdapterHandlerEntry::_entry_names[] = {
3150   "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
3151 };
3152 
3153 #ifdef ASSERT
3154 void AdapterHandlerLibrary::verify_adapter_sharing(CompiledEntrySignature& ces, AdapterHandlerEntry* cached_entry) {
3155   // we can only check for the same code if there is any
3156 #ifndef ZERO
3157   AdapterHandlerEntry* comparison_entry = create_adapter(ces, false, true);
3158   assert(comparison_entry->adapter_blob() == nullptr, "no blob should be created when creating an adapter for comparison");
3159   assert(comparison_entry->compare_code(cached_entry), "code must match");
3160   // Release the one just created
3161   AdapterHandlerEntry::deallocate(comparison_entry);
3162 #endif // ZERO
3163 }
3164 #endif // ASSERT
3165 
3166 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
3167   assert(!method->is_abstract(), "abstract methods do not have adapters");
3168   // Use customized signature handler.  Need to lock around updates to
3169   // the _adapter_handler_table (it is not safe for concurrent readers
3170   // and a single writer: this could be fixed if it becomes a
3171   // problem).
3172 
3173   // Fast-path for trivial adapters
3174   AdapterHandlerEntry* entry = get_simple_adapter(method);
3175   if (entry != nullptr) {
3176     return entry;
3177   }
3178 
3179   ResourceMark rm;
3180   bool new_entry = false;
3181 
3182   CompiledEntrySignature ces(method());
3183   ces.compute_calling_conventions();
3184   if (ces.has_scalarized_args()) {
3185     if (!method->has_scalarized_args()) {
3186       method->set_has_scalarized_args();
3187     }
3188     if (ces.c1_needs_stack_repair()) {
3189       method->set_c1_needs_stack_repair();
3190     }
3191     if (ces.c2_needs_stack_repair() && !method->c2_needs_stack_repair()) {
3192       method->set_c2_needs_stack_repair();
3193     }
3194   }
3195 




3196   {
3197     MutexLocker mu(AdapterHandlerLibrary_lock);
3198 
3199     // Lookup method signature's fingerprint
3200     entry = lookup(ces.sig_cc(), ces.has_inline_recv());
3201 
3202     if (entry != nullptr) {
3203 #ifndef ZERO
3204       assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
3205 #endif
3206 #ifdef ASSERT
3207       if (!entry->in_aot_cache() && VerifyAdapterSharing) {
3208         verify_adapter_sharing(ces, entry);
3209       }
3210 #endif
3211     } else {
3212       entry = create_adapter(ces, /* allocate_code_blob */ true);
3213       if (entry != nullptr) {
3214         new_entry = true;
3215       }
3216     }
3217   }
3218 
3219   // Outside of the lock
3220   if (new_entry) {
3221     post_adapter_creation(entry);
3222   }
3223   return entry;
3224 }
3225 
3226 void AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
3227   ResourceMark rm;
3228   const char* name = AdapterHandlerLibrary::name(handler);
3229   const uint32_t id = AdapterHandlerLibrary::id(handler);
3230 
3231   CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name);
3232   if (blob != nullptr) {

3247   }
3248   insts_size = adapter_blob->code_size();
3249   st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
3250                 handler->fingerprint()->as_basic_args_string(),
3251                 handler->fingerprint()->as_string(), insts_size);
3252   st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
3253   if (Verbose || PrintStubCode) {
3254     address first_pc = adapter_blob->content_begin();
3255     if (first_pc != nullptr) {
3256       Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
3257       st->cr();
3258     }
3259   }
3260 }
3261 #endif // PRODUCT
3262 
3263 void AdapterHandlerLibrary::address_to_offset(address entry_address[AdapterBlob::ENTRY_COUNT],
3264                                               int entry_offset[AdapterBlob::ENTRY_COUNT]) {
3265   entry_offset[AdapterBlob::I2C] = 0;
3266   entry_offset[AdapterBlob::C2I] = entry_address[AdapterBlob::C2I] - entry_address[AdapterBlob::I2C];
3267   entry_offset[AdapterBlob::C2I_Inline] = entry_address[AdapterBlob::C2I_Inline] - entry_address[AdapterBlob::I2C];
3268   entry_offset[AdapterBlob::C2I_Inline_RO] = entry_address[AdapterBlob::C2I_Inline_RO] - entry_address[AdapterBlob::I2C];
3269   entry_offset[AdapterBlob::C2I_Unverified] = entry_address[AdapterBlob::C2I_Unverified] - entry_address[AdapterBlob::I2C];
3270   entry_offset[AdapterBlob::C2I_Unverified_Inline] = entry_address[AdapterBlob::C2I_Unverified_Inline] - entry_address[AdapterBlob::I2C];
3271   if (entry_address[AdapterBlob::C2I_No_Clinit_Check] == nullptr) {
3272     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = -1;
3273   } else {
3274     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = entry_address[AdapterBlob::C2I_No_Clinit_Check] - entry_address[AdapterBlob::I2C];
3275   }
3276 }
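// Note: all offsets are relative to the I2C entry address (offset 0); a missing
// C2I_No_Clinit_Check entry is recorded as the sentinel -1 rather than an offset, so it
// can be distinguished when addresses are reconstructed from offsets.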
3277 
3278 bool AdapterHandlerLibrary::generate_adapter_code(AdapterHandlerEntry* handler,
3279                                                   CompiledEntrySignature& ces,
3280                                                   bool allocate_code_blob,
3281                                                   bool is_transient) {
3282   if (log_is_enabled(Info, perf, class, link)) {
3283     ClassLoader::perf_method_adapters_count()->inc();
3284   }
3285 
3286 #ifndef ZERO
3287   AdapterBlob* adapter_blob = nullptr;
3288   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
3289   CodeBuffer buffer(buf);
3290   short buffer_locs[20];
3291   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
3292                                          sizeof(buffer_locs)/sizeof(relocInfo));
3293   MacroAssembler masm(&buffer);
3294   address entry_address[AdapterBlob::ENTRY_COUNT];

3295 
3296   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage


3297   SharedRuntime::generate_i2c2i_adapters(&masm,
3298                                          ces.args_on_stack(),
3299                                          ces.sig(),
3300                                          ces.regs(),
3301                                          ces.sig_cc(),
3302                                          ces.regs_cc(),
3303                                          ces.sig_cc_ro(),
3304                                          ces.regs_cc_ro(),
3305                                          entry_address,
3306                                          adapter_blob,
3307                                          allocate_code_blob);
3308 
3309   if (ces.has_scalarized_args()) {
3310     // Save a C heap allocated version of the scalarized signature and store it in the adapter
3311     GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
3312     heap_sig->appendAll(ces.sig_cc());
3313     handler->set_sig_cc(heap_sig);
3314   }
3315   // On Zero there is no code to save and no need to create a blob or
3316   // relocate the handler.
3317   int entry_offset[AdapterBlob::ENTRY_COUNT];
3318   address_to_offset(entry_address, entry_offset);
3319 #ifdef ASSERT
3320   if (VerifyAdapterSharing) {
3321     handler->save_code(buf->code_begin(), buffer.insts_size());
3322     if (is_transient) {
3323       return true;
3324     }
3325   }
3326 #endif

3327   if (adapter_blob == nullptr) {
3328     // CodeCache is full, disable compilation
3329     // Ought to log this, but the compile log is per compile thread
3330     // and we're some nondescript Java thread.
3331     return false;
3332   }
3333   handler->set_adapter_blob(adapter_blob);
3334   if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
3335     // try to save generated code
3336     const char* name = AdapterHandlerLibrary::name(handler);
3337     const uint32_t id = AdapterHandlerLibrary::id(handler);
3338     bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name);
3339     assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
3340   }
3341 #endif // ZERO
3342 
3343 #ifndef PRODUCT
3344   // debugging support
3345   if (PrintAdapterHandlers || PrintStubCode) {
3346     print_adapter_handler_info(tty, handler);
3347   }
3348 #endif
3349 
3350   return true;
3351 }
3352 
3353 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(CompiledEntrySignature& ces,
3354                                                            bool allocate_code_blob,
3355                                                            bool is_transient) {
3356   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(ces.sig_cc(), ces.has_inline_recv());
3357 #ifdef ASSERT
3358   // Verify that we can successfully restore the compiled entry signature object.
3359   CompiledEntrySignature ces_verify;
3360   ces_verify.initialize_from_fingerprint(fp);
3361 #endif
3362   AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
3363   if (!generate_adapter_code(handler, ces, allocate_code_blob, is_transient)) {
3364     AdapterHandlerEntry::deallocate(handler);
3365     return nullptr;
3366   }
3367   if (!is_transient) {
3368     assert_lock_strong(AdapterHandlerLibrary_lock);
3369     _adapter_handler_table->put(fp, handler);
3370   }
3371   return handler;
3372 }
3373 
3374 #if INCLUDE_CDS
3375 void AdapterHandlerEntry::remove_unshareable_info() {
3376 #ifdef ASSERT
3377    _saved_code = nullptr;
3378    _saved_code_length = 0;
3379 #endif // ASSERT
3380    _adapter_blob = nullptr;
3381    _linked = false;
3382    _sig_cc = nullptr;
3383 }
3384 
3385 class CopyAdapterTableToArchive : StackObj {
3386 private:
3387   CompactHashtableWriter* _writer;
3388   ArchiveBuilder* _builder;
3389 public:
3390   CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
3391                                                              _builder(ArchiveBuilder::current())
3392   {}
3393 
3394   bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
3395     LogStreamHandle(Trace, aot) lsh;
3396     if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
3397       assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
3398       AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
3399       assert(buffered_fp != nullptr,"sanity check");
3400       AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
3401       assert(buffered_entry != nullptr,"sanity check");
3402 

3447 // This method is used during a production run to link archived adapters (stored in the AOT cache)
3448 // to their code in the AOT code cache.
3449 void AdapterHandlerEntry::link() {
3450   ResourceMark rm;
3451   assert(_fingerprint != nullptr, "_fingerprint must not be null");
3452   bool generate_code = false;
3453   // Generate code only if AOTCodeCache is not available, or
3454   // caching adapters is disabled, or we fail to link
3455   // the AdapterHandlerEntry to its code in the AOTCodeCache
3456   if (AOTCodeCache::is_using_adapter()) {
3457     AdapterHandlerLibrary::link_aot_adapter_handler(this);
3458     // If link_aot_adapter_handler() succeeds, _adapter_blob will be non-null
3459     if (_adapter_blob == nullptr) {
3460       log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3461       generate_code = true;
3462     }
3463   } else {
3464     generate_code = true;
3465   }
3466   if (generate_code) {
3467     CompiledEntrySignature ces;
3468     ces.initialize_from_fingerprint(_fingerprint);
3469     if (!AdapterHandlerLibrary::generate_adapter_code(this, ces, true, false)) {
3470       // Don't throw exceptions during VM initialization because java.lang.* classes
3471       // might not have been initialized, causing problems when constructing the
3472       // Java exception object.
3473       vm_exit_during_initialization("Out of space in CodeCache for adapters");
3474     }
3475   }
3476   if (_adapter_blob != nullptr) {
3477     post_adapter_creation(this);
3478   }
3479   assert(_linked, "AdapterHandlerEntry must now be linked");
3480 }
3481 
3482 void AdapterHandlerLibrary::link_aot_adapters() {
3483   uint max_id = 0;
3484   assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3485   /* It is possible that some adapters generated in the assembly phase are not stored in the cache,
3486    * which implies that the adapter ids of the cached adapters may not be contiguous.
3487    * If the size of _aot_adapter_handler_table were used to initialize _id_counter, adapter ids of
3488    * AOT-stored handlers could collide with those of runtime-generated handlers. To avoid this,
3489    * initialize _id_counter with the largest adapter id among the AOT-stored handlers.
3490    */
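  // For example (hypothetical ids): if the cache holds adapters with ids {1, 3, 7},
  // _id_counter starts at 7 and the first runtime-generated adapter receives id 8.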
3491   _aot_adapter_handler_table.iterate_all([&](AdapterHandlerEntry* entry) {
3492     assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3493     entry->link();
3494     max_id = MAX2(max_id, entry->id());
3495   });
3496   // Set adapter id to the maximum id found in the AOTCache
3497   assert(_id_counter == 0, "Did not expect new AdapterHandlerEntry to be created at this stage");
3498   _id_counter = max_id;
3499 }
3500 
3501 // This method is called during a production run to look up simple adapters
3502 // in the archived adapter handler table.
3503 void AdapterHandlerLibrary::lookup_simple_adapters() {
3504   assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3505 
3506   MutexLocker mu(AdapterHandlerLibrary_lock);
3507   ResourceMark rm;
3508   CompiledEntrySignature no_args;
3509   no_args.compute_calling_conventions();
3510   _no_arg_handler = lookup(no_args.sig_cc(), no_args.has_inline_recv());
3511 
3512   CompiledEntrySignature obj_args;
3513   SigEntry::add_entry(obj_args.sig(), T_OBJECT);
3514   obj_args.compute_calling_conventions();
3515   _obj_arg_handler = lookup(obj_args.sig_cc(), obj_args.has_inline_recv());
3516 
3517   CompiledEntrySignature int_args;
3518   SigEntry::add_entry(int_args.sig(), T_INT);
3519   int_args.compute_calling_conventions();
3520   _int_arg_handler = lookup(int_args.sig_cc(), int_args.has_inline_recv());
3521 
3522   CompiledEntrySignature obj_int_args;
3523   SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
3524   SigEntry::add_entry(obj_int_args.sig(), T_INT);
3525   obj_int_args.compute_calling_conventions();
3526   _obj_int_arg_handler = lookup(obj_int_args.sig_cc(), obj_int_args.has_inline_recv());
3527 
3528   CompiledEntrySignature obj_obj_args;
3529   SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3530   SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3531   obj_obj_args.compute_calling_conventions();
3532   _obj_obj_arg_handler = lookup(obj_obj_args.sig_cc(), obj_obj_args.has_inline_recv());
3533 
3534   assert(_no_arg_handler != nullptr &&
3535          _obj_arg_handler != nullptr &&
3536          _int_arg_handler != nullptr &&
3537          _obj_int_arg_handler != nullptr &&
3538          _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3539   assert(_no_arg_handler->is_linked() &&
3540          _obj_arg_handler->is_linked() &&
3541          _int_arg_handler->is_linked() &&
3542          _obj_int_arg_handler->is_linked() &&
3543          _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3544 }
3545 #endif // INCLUDE_CDS
3546 
3547 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3548   LogStreamHandle(Trace, aot) lsh;
3549   if (lsh.is_enabled()) {
3550     lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3551     lsh.cr();
3552   }
3553   it->push(&_fingerprint);
3554 }
3555 
3556 AdapterHandlerEntry::~AdapterHandlerEntry() {
3557   if (_fingerprint != nullptr) {
3558     AdapterFingerPrint::deallocate(_fingerprint);
3559     _fingerprint = nullptr;
3560   }
3561   if (_sig_cc != nullptr) {
3562     delete _sig_cc;
3563   }
3564 #ifdef ASSERT
3565   FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3566 #endif
3567   FreeHeap(this);
3568 }
3569 
3570 
3571 #ifdef ASSERT
3572 // Capture the code before relocation so that it can be compared
3573 // against other versions.  If the code is captured after relocation
3574 // then relative instructions won't be equivalent.
3575 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3576   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3577   _saved_code_length = length;
3578   memcpy(_saved_code, buffer, length);
3579 }
3580 
3581 
3582 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3583   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");

3631 
3632       struct { double data[20]; } locs_buf;
3633       struct { double data[20]; } stubs_locs_buf;
3634       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3635 #if defined(AARCH64) || defined(PPC64)
3636       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3637       // in the constant pool to ensure ordering between the barrier and oops
3638       // accesses. For native_wrappers we need a constant.
3639       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3640       // static java call that is resolved in the runtime.
3641       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3642         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3643       }
3644 #endif
3645       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3646       MacroAssembler _masm(&buffer);
3647 
3648       // Fill in the signature array, for the calling-convention call.
3649       const int total_args_passed = method->size_of_parameters();
3650 
3651       BasicType stack_sig_bt[16];
3652       VMRegPair stack_regs[16];
3653       BasicType* sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
3654       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3655 
3656       int i = 0;
3657       if (!method->is_static()) {  // Pass in receiver first
3658         sig_bt[i++] = T_OBJECT;
3659       }
3660       SignatureStream ss(method->signature());
3661       for (; !ss.at_return_type(); ss.next()) {
3662         sig_bt[i++] = ss.type();  // Collect remaining bits of signature
3663         if (ss.type() == T_LONG || ss.type() == T_DOUBLE) {
3664           sig_bt[i++] = T_VOID;   // Longs & doubles take 2 Java slots
3665         }
3666       }
3667       assert(i == total_args_passed, "");
3668       BasicType ret_type = ss.type();
3669 
3670       // Now get the compiled-Java arguments layout.
3671       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3672 
3673       // Generate the compiled-to-native wrapper code
3674       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3675 
3676       if (nm != nullptr) {
3677         {
3678           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3679           if (nm->make_in_use()) {
3680             method->set_code(method, nm);
3681           }
3682         }
3683 
3684         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3685         if (directive->PrintAssemblyOption) {
3686           nm->print_code();
3687         }
3688         DirectivesStack::release(directive);

3896       if (b == handler->adapter_blob()) {
3897         found = true;
3898         st->print("Adapter for signature: ");
3899         handler->print_adapter_on(st);
3900         return false; // abort iteration
3901       } else {
3902         return true; // keep looking
3903       }
3904     };
3905     assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3906     _adapter_handler_table->iterate(findblob_runtime_table);
3907   }
3908   assert(found, "Should have found handler");
3909 }
3910 
3911 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3912   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3913   if (adapter_blob() != nullptr) {
3914     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3915     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3916     st->print(" c2iVE: " INTPTR_FORMAT, p2i(get_c2i_inline_entry()));
3917     st->print(" c2iVROE: " INTPTR_FORMAT, p2i(get_c2i_inline_ro_entry()));
3918     st->print(" c2iUE: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3919     st->print(" c2iUVE: " INTPTR_FORMAT, p2i(get_c2i_unverified_inline_entry()));
3920     if (get_c2i_no_clinit_check_entry() != nullptr) {
3921       st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3922     }
3923   }
3924   st->cr();
3925 }
3926 
3927 #ifndef PRODUCT
3928 
3929 void AdapterHandlerLibrary::print_statistics() {
3930   print_table_statistics();
3931 }
3932 
3933 #endif /* PRODUCT */
3934 
3935 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3936   assert(current == JavaThread::current(), "pre-condition");
3937   StackOverflow* overflow_state = current->stack_overflow_state();
3938   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3939   overflow_state->set_reserved_stack_activation(current->stack_base());

3986         event.set_method(method);
3987         event.commit();
3988       }
3989     }
3990   }
3991   return activation;
3992 }
3993 
3994 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3995   // After any safepoint, just before going back to compiled code,
3996   // we inform the GC that we will be doing initializing writes to
3997   // this object in the future without emitting card-marks, so
3998   // GC may take any compensating steps.
3999 
4000   oop new_obj = current->vm_result_oop();
4001   if (new_obj == nullptr) return;
4002 
4003   BarrierSet *bs = BarrierSet::barrier_set();
4004   bs->on_slowpath_allocation_exit(current, new_obj);
4005 }
4006 
4007 // We are at a compiled code to interpreter call. We need backing
4008 // buffers for all inline type arguments. Allocate an object array to
4009 // hold them (convenient because once we're done with it we don't have
4010 // to worry about freeing it).
4011 oop SharedRuntime::allocate_inline_types_impl(JavaThread* current, methodHandle callee, bool allocate_receiver, TRAPS) {
4012   assert(InlineTypePassFieldsAsArgs, "no reason to call this");
4013   ResourceMark rm;
4014 
4015   int nb_slots = 0;
4016   InstanceKlass* holder = callee->method_holder();
4017   allocate_receiver &= !callee->is_static() && holder->is_inline_klass() && callee->is_scalarized_arg(0);
4018   if (allocate_receiver) {
4019     nb_slots++;
4020   }
4021   int arg_num = callee->is_static() ? 0 : 1;
4022   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4023     BasicType bt = ss.type();
4024     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4025       nb_slots++;
4026     }
4027     if (bt != T_VOID) {
4028       arg_num++;
4029     }
4030   }
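  // nb_slots now counts one backing buffer per scalarized argument. For example
  // (hypothetical): a non-static method with a scalarized receiver, one scalarized
  // value argument, and one int argument needs nb_slots == 2.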
4031   objArrayOop array_oop = oopFactory::new_objectArray(nb_slots, CHECK_NULL);
4032   objArrayHandle array(THREAD, array_oop);
4033   arg_num = callee->is_static() ? 0 : 1;
4034   int i = 0;
4035   if (allocate_receiver) {
4036     InlineKlass* vk = InlineKlass::cast(holder);
4037     oop res = vk->allocate_instance(CHECK_NULL);
4038     array->obj_at_put(i++, res);
4039   }
4040   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4041     BasicType bt = ss.type();
4042     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4043       InlineKlass* vk = ss.as_inline_klass(holder);
4044       assert(vk != nullptr, "Unexpected klass");
4045       oop res = vk->allocate_instance(CHECK_NULL);
4046       array->obj_at_put(i++, res);
4047     }
4048     if (bt != T_VOID) {
4049       arg_num++;
4050     }
4051   }
4052   return array();
4053 }
4054 
4055 JRT_ENTRY(void, SharedRuntime::allocate_inline_types(JavaThread* current, Method* callee_method, bool allocate_receiver))
4056   methodHandle callee(current, callee_method);
4057   oop array = SharedRuntime::allocate_inline_types_impl(current, callee, allocate_receiver, CHECK);
4058   current->set_vm_result_oop(array);
4059   current->set_vm_result_metadata(callee()); // TODO: required to keep callee live?
4060 JRT_END
4061 
4062 // We're returning from an interpreted method: load each field into a
4063 // register following the calling convention
4064 JRT_LEAF(void, SharedRuntime::load_inline_type_fields_in_regs(JavaThread* current, oopDesc* res))
4065 {
4066   assert(res->klass()->is_inline_klass(), "only inline types here");
4067   ResourceMark rm;
4068   RegisterMap reg_map(current,
4069                       RegisterMap::UpdateMap::include,
4070                       RegisterMap::ProcessFrames::include,
4071                       RegisterMap::WalkContinuation::skip);
4072   frame stubFrame = current->last_frame();
4073   frame callerFrame = stubFrame.sender(&reg_map);
4074   assert(callerFrame.is_interpreted_frame(), "should be coming from interpreter");
4075 
4076   InlineKlass* vk = InlineKlass::cast(res->klass());
4077 
4078   const Array<SigEntry>* sig_vk = vk->extended_sig();
4079   const Array<VMRegPair>* regs = vk->return_regs();
4080 
4081   if (regs == nullptr) {
4082     // The fields of the inline klass don't fit in registers, bail out
4083     return;
4084   }
4085 
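  // regs->at(0) holds the oop of the inline type itself (verified by the ASSERT
  // block at the end of this function), so the field values start at index 1.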
4086   int j = 1;
4087   for (int i = 0; i < sig_vk->length(); i++) {
4088     BasicType bt = sig_vk->at(i)._bt;
4089     if (bt == T_METADATA) {
4090       continue;
4091     }
4092     if (bt == T_VOID) {
4093       if (sig_vk->at(i-1)._bt == T_LONG ||
4094           sig_vk->at(i-1)._bt == T_DOUBLE) {
4095         j++;
4096       }
4097       continue;
4098     }
4099     int off = sig_vk->at(i)._offset;
4100     assert(off > 0, "offset in object should be positive");
4101     VMRegPair pair = regs->at(j);
4102     address loc = reg_map.location(pair.first(), nullptr);
4103     switch(bt) {
4104     case T_BOOLEAN:
4105       *(jboolean*)loc = res->bool_field(off);
4106       break;
4107     case T_CHAR:
4108       *(jchar*)loc = res->char_field(off);
4109       break;
4110     case T_BYTE:
4111       *(jbyte*)loc = res->byte_field(off);
4112       break;
4113     case T_SHORT:
4114       *(jshort*)loc = res->short_field(off);
4115       break;
4116     case T_INT: {
4117       *(jint*)loc = res->int_field(off);
4118       break;
4119     }
4120     case T_LONG:
4121 #ifdef _LP64
4122       *(intptr_t*)loc = res->long_field(off);
4123 #else
4124       Unimplemented();
4125 #endif
4126       break;
4127     case T_OBJECT:
4128     case T_ARRAY: {
4129       *(oop*)loc = res->obj_field(off);
4130       break;
4131     }
4132     case T_FLOAT:
4133       *(jfloat*)loc = res->float_field(off);
4134       break;
4135     case T_DOUBLE:
4136       *(jdouble*)loc = res->double_field(off);
4137       break;
4138     default:
4139       ShouldNotReachHere();
4140     }
4141     j++;
4142   }
4143   assert(j == regs->length(), "missed a field?");
4144 
4145 #ifdef ASSERT
4146   VMRegPair pair = regs->at(0);
4147   address loc = reg_map.location(pair.first(), nullptr);
4148   assert(*(oopDesc**)loc == res, "overwritten object");
4149 #endif
4150 
4151   current->set_vm_result_oop(res);
4152 }
4153 JRT_END
4154 
4155 // We've returned to an interpreted method; the interpreter needs a
4156 // reference to an inline type instance. Allocate it and initialize it
4157 // from the field values in registers.
4158 JRT_BLOCK_ENTRY(void, SharedRuntime::store_inline_type_fields_to_buf(JavaThread* current, intptr_t res))
4159 {
4160   ResourceMark rm;
4161   RegisterMap reg_map(current,
4162                       RegisterMap::UpdateMap::include,
4163                       RegisterMap::ProcessFrames::include,
4164                       RegisterMap::WalkContinuation::skip);
4165   frame stubFrame = current->last_frame();
4166   frame callerFrame = stubFrame.sender(&reg_map);
4167 
4168 #ifdef ASSERT
4169   InlineKlass* verif_vk = InlineKlass::returned_inline_klass(reg_map);
4170 #endif
4171 
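  // The return value uses a tagged convention: if bit 0 of res is clear, res is an
  // ordinary oop (or a pointer into the buffer area); if bit 0 is set, the fields came
  // back in registers and res (with the tag cleared) is the InlineKlass* describing them.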
4172   if (!is_set_nth_bit(res, 0)) {
4173     // We're not returning with inline type fields in registers (the
4174     // calling convention didn't allow it for this inline klass)
4175     assert(!Metaspace::contains((void*)res), "should be oop or pointer in buffer area");
4176     current->set_vm_result_oop((oopDesc*)res);
4177     assert(verif_vk == nullptr, "broken calling convention");
4178     return;
4179   }
4180 
4181   clear_nth_bit(res, 0);
4182   InlineKlass* vk = (InlineKlass*)res;
4183   assert(verif_vk == vk, "broken calling convention");
4184   assert(Metaspace::contains((void*)res), "should be klass");
4185 
4186   // Allocate handles for every oop field so they are safe in case of
4187   // a safepoint when allocating
4188   GrowableArray<Handle> handles;
4189   vk->save_oop_fields(reg_map, handles);
4190 
4191   // It was unsafe to safepoint until here; the oop fields are now protected by handles
4192   JRT_BLOCK;
4193   {
4194     JavaThread* THREAD = current;
4195     oop vt = vk->realloc_result(reg_map, handles, CHECK);
4196     current->set_vm_result_oop(vt);
4197   }
4198   JRT_BLOCK_END;
4199 }
4200 JRT_END
< prev index next >