src/hotspot/share/runtime/sharedRuntime.cpp

  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "classfile/stringTable.hpp"
  32 #include "classfile/vmClasses.hpp"
  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeCache.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/nmethod.inline.hpp"
  38 #include "code/scopeDesc.hpp"
  39 #include "code/vtableStubs.hpp"
  40 #include "compiler/abstractCompiler.hpp"
  41 #include "compiler/compileBroker.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/interpreter.hpp"
  46 #include "interpreter/interpreterRuntime.hpp"
  47 #include "jfr/jfrEvents.hpp"
  48 #include "jvm.h"
  49 #include "logging/log.hpp"

  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "metaprogramming/primitiveConversions.hpp"



  53 #include "oops/klass.hpp"
  54 #include "oops/method.inline.hpp"
  55 #include "oops/objArrayKlass.hpp"

  56 #include "oops/oop.inline.hpp"
  57 #include "prims/forte.hpp"
  58 #include "prims/jvmtiExport.hpp"
  59 #include "prims/jvmtiThreadState.hpp"
  60 #include "prims/methodHandles.hpp"
  61 #include "prims/nativeLookup.hpp"
  62 #include "runtime/arguments.hpp"
  63 #include "runtime/atomicAccess.hpp"
  64 #include "runtime/basicLock.inline.hpp"
  65 #include "runtime/frame.inline.hpp"
  66 #include "runtime/handles.inline.hpp"
  67 #include "runtime/init.hpp"
  68 #include "runtime/interfaceSupport.inline.hpp"
  69 #include "runtime/java.hpp"
  70 #include "runtime/javaCalls.hpp"
  71 #include "runtime/jniHandles.inline.hpp"
  72 #include "runtime/osThread.hpp"
  73 #include "runtime/perfData.hpp"
  74 #include "runtime/sharedRuntime.hpp"

  75 #include "runtime/stackWatermarkSet.hpp"
  76 #include "runtime/stubRoutines.hpp"
  77 #include "runtime/synchronizer.hpp"
  78 #include "runtime/timerTrace.hpp"
  79 #include "runtime/vframe.inline.hpp"
  80 #include "runtime/vframeArray.hpp"
  81 #include "runtime/vm_version.hpp"
  82 #include "utilities/copy.hpp"
  83 #include "utilities/dtrace.hpp"
  84 #include "utilities/events.hpp"
  85 #include "utilities/exceptions.hpp"
  86 #include "utilities/globalDefinitions.hpp"
  87 #include "utilities/hashTable.hpp"
  88 #include "utilities/macros.hpp"
  89 #include "utilities/xmlstream.hpp"
  90 #ifdef COMPILER1
  91 #include "c1/c1_Runtime1.hpp"
  92 #endif
  93 #ifdef COMPILER2
  94 #include "opto/runtime.hpp"

1218 // for a call currently in progress, i.e., arguments have been pushed on the stack
1219 // but the callee has not been invoked yet.  The caller frame must be compiled.
1220 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1221                                               CallInfo& callinfo, TRAPS) {
1222   Handle receiver;
1223   Handle nullHandle;  // create a handy null handle for exception returns
1224   JavaThread* current = THREAD;
1225 
1226   assert(!vfst.at_end(), "Java frame must exist");
1227 
1228   // Find caller and bci from vframe
1229   methodHandle caller(current, vfst.method());
1230   int          bci   = vfst.bci();
1231 
1232   if (caller->is_continuation_enter_intrinsic()) {
1233     bc = Bytecodes::_invokestatic;
1234     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1235     return receiver;
1236   }
1237 
1238   Bytecode_invoke bytecode(caller, bci);
1239   int bytecode_index = bytecode.index();
1240   bc = bytecode.invoke_code();
1241 
1242   methodHandle attached_method(current, extract_attached_method(vfst));
1243   if (attached_method.not_null()) {
1244     Method* callee = bytecode.static_target(CHECK_NH);
1245     vmIntrinsics::ID id = callee->intrinsic_id();
1246     // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual call,
1247     // it attaches the statically resolved method to the call site.
1248     if (MethodHandles::is_signature_polymorphic(id) &&
1249         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1250       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1251 
1252       // Adjust invocation mode according to the attached method.
1253       switch (bc) {
1254         case Bytecodes::_invokevirtual:
1255           if (attached_method->method_holder()->is_interface()) {
1256             bc = Bytecodes::_invokeinterface;
1257           }
1258           break;
1259         case Bytecodes::_invokeinterface:
1260           if (!attached_method->method_holder()->is_interface()) {
1261             bc = Bytecodes::_invokevirtual;
1262           }
1263           break;
1264         case Bytecodes::_invokehandle:
1265           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1266             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1267                                               : Bytecodes::_invokevirtual;
1268           }
1269           break;
1270         default:
1271           break;
1272       }






1273     }
1274   }
1275 
1276   assert(bc != Bytecodes::_illegal, "not initialized");
1277 
1278   bool has_receiver = bc != Bytecodes::_invokestatic &&
1279                       bc != Bytecodes::_invokedynamic &&
1280                       bc != Bytecodes::_invokehandle;

1281 
1282   // Find receiver for non-static call
1283   if (has_receiver) {
1284     // This register map must be updated since we need to find the receiver for
1285     // compiled frames. The receiver might be in a register.
1286     RegisterMap reg_map2(current,
1287                          RegisterMap::UpdateMap::include,
1288                          RegisterMap::ProcessFrames::include,
1289                          RegisterMap::WalkContinuation::skip);
1290     frame stubFrame   = current->last_frame();
1291     // Caller-frame is a compiled frame
1292     frame callerFrame = stubFrame.sender(&reg_map2);
1293 
1294     if (attached_method.is_null()) {
1295       Method* callee = bytecode.static_target(CHECK_NH);

1296       if (callee == nullptr) {
1297         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1298       }
1299     }
1300 
1301     // Retrieve from a compiled argument list
1302     receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1303     assert(oopDesc::is_oop_or_null(receiver()), "");
1304 
1305     if (receiver.is_null()) {
1306       THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1307     }
1308   }
1309 
1310   // Resolve method
1311   if (attached_method.not_null()) {
1312     // Parameterized by attached method.
1313     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH);
1314   } else {
1315     // Parameterized by bytecode.
1316     constantPoolHandle constants(current, caller->constants());
1317     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1318   }
1319 
1320 #ifdef ASSERT
1321   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1322   if (has_receiver) {
1323     assert(receiver.not_null(), "should have thrown exception");
1324     Klass* receiver_klass = receiver->klass();
1325     Klass* rk = nullptr;
1326     if (attached_method.not_null()) {
1327       // In case there's resolved method attached, use its holder during the check.
1328       rk = attached_method->method_holder();
1329     } else {
1330       // Klass is already loaded.
1331       constantPoolHandle constants(current, caller->constants());
1332       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1333     }
1334     Klass* static_receiver_klass = rk;
1335     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1336            "actual receiver must be subclass of static receiver klass");
1337     if (receiver_klass->is_instance_klass()) {
1338       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1339         tty->print_cr("ERROR: Klass not yet initialized!!");
1340         receiver_klass->print();
1341       }
1342       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1343     }
1344   }
1345 #endif
1346 
1347   return receiver;
1348 }
1349 
1350 methodHandle SharedRuntime::find_callee_method(TRAPS) {
1351   JavaThread* current = THREAD;
1352   ResourceMark rm(current);
1353   // We first need to check whether any Java activations (compiled or interpreted)
1354   // exist on the stack since the last JavaCall.  If not, we need
1355   // to get the target method from the JavaCall wrapper.
1356   vframeStream vfst(current, true);  // Do not skip any javaCalls
1357   methodHandle callee_method;
1358   if (vfst.at_end()) {
1359     // No Java frames were found on the stack since we did the JavaCall.
1360     // Hence the stack can only contain an entry_frame.  We need to
1361     // find the target method from the stub frame.
1362     RegisterMap reg_map(current,
1363                         RegisterMap::UpdateMap::skip,
1364                         RegisterMap::ProcessFrames::include,
1365                         RegisterMap::WalkContinuation::skip);
1366     frame fr = current->last_frame();
1367     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1368     fr = fr.sender(&reg_map);
1369     assert(fr.is_entry_frame(), "must be");
1370     // fr is now pointing to the entry frame.
1371     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1372   } else {
1373     Bytecodes::Code bc;
1374     CallInfo callinfo;
1375     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));




1376     callee_method = methodHandle(current, callinfo.selected_method());
1377   }
1378   assert(callee_method()->is_method(), "must be");
1379   return callee_method;
1380 }
1381 
1382 // Resolves a call.
1383 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, TRAPS) {
1384   JavaThread* current = THREAD;
1385   ResourceMark rm(current);
1386   RegisterMap cbl_map(current,
1387                       RegisterMap::UpdateMap::skip,
1388                       RegisterMap::ProcessFrames::include,
1389                       RegisterMap::WalkContinuation::skip);
1390   frame caller_frame = current->last_frame().sender(&cbl_map);
1391 
1392   CodeBlob* caller_cb = caller_frame.cb();
1393   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1394   nmethod* caller_nm = caller_cb->as_nmethod();
1395 
1396   // determine call info & receiver
1397   // note: a) receiver is null for static calls
1398   //       b) an exception is thrown if receiver is null for non-static calls
1399   CallInfo call_info;
1400   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1401   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1402 
1403   NoSafepointVerifier nsv;
1404 
1405   methodHandle callee_method(current, call_info.selected_method());





1406 
1407   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1408          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1409          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1410          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1411          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1412 
1413   assert(!caller_nm->is_unloading(), "It should not be unloading");
1414 
1415 #ifndef PRODUCT
1416   // tracing/debugging/statistics
1417   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1418                  (is_virtual) ? (&_resolve_virtual_ctr) :
1419                                 (&_resolve_static_ctr);
1420   AtomicAccess::inc(addr);
1421 
1422   if (TraceCallFixup) {
1423     ResourceMark rm(current);
1424     tty->print("resolving %s%s (%s) call to",
1425                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1426                Bytecodes::name(invoke_code));
1427     callee_method->print_short_name(tty);
1428     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1429                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1430   }
1431 #endif
1432 
1433   if (invoke_code == Bytecodes::_invokestatic) {
1434     assert(callee_method->method_holder()->is_initialized() ||
1435            callee_method->method_holder()->is_reentrant_initialization(current),
1436            "invalid class initialization state for invoke_static");
1437     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1438       // In order to keep class initialization check, do not patch call
1439       // site for static call when the class is not fully initialized.
1440       // Proper check is enforced by call site re-resolution on every invocation.
1441       //
1442       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1443       // explicit class initialization check is put in nmethod entry (VEP).
1444       assert(callee_method->method_holder()->is_linked(), "must be");
1445       return callee_method;
1446     }
1447   }
1448 
1449 
1450   // JSR 292 key invariant:
1451   // If the resolved method is a MethodHandle invoke target, the call
1452   // site must be a MethodHandle call site, because the lambda form might tail-call
1453   // leaving the stack in a state unknown to either caller or callee
1454 
1455   // Compute entry points. The computation of the entry points is independent of
1456   // patching the call.
1457 
1458   // Make sure the callee nmethod does not get deoptimized and removed before
1459   // we are done patching the code.
1460 
1461 
1462   CompiledICLocker ml(caller_nm);
1463   if (is_virtual && !is_optimized) {
1464     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1465     inline_cache->update(&call_info, receiver->klass());
1466   } else {
1467     // Callsite is a direct call - set it to the destination method
1468     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1469     callsite->set(callee_method);
1470   }
1471 
1472   return callee_method;
1473 }
1474 
1475 // Inline caches exist only in compiled code
1476 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1477 #ifdef ASSERT
1478   RegisterMap reg_map(current,
1479                       RegisterMap::UpdateMap::skip,
1480                       RegisterMap::ProcessFrames::include,
1481                       RegisterMap::WalkContinuation::skip);
1482   frame stub_frame = current->last_frame();
1483   assert(stub_frame.is_runtime_frame(), "sanity check");
1484   frame caller_frame = stub_frame.sender(&reg_map);
1485   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1486 #endif /* ASSERT */
1487 
1488   methodHandle callee_method;

1489   JRT_BLOCK
1490     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1491     // Return Method* through TLS
1492     current->set_vm_result_metadata(callee_method());
1493   JRT_BLOCK_END
1494   // return compiled code entry point after potential safepoints
1495   return get_resolved_entry(current, callee_method);
1496 JRT_END
1497 
1498 
1499 // Handle call site that has been made non-entrant
1500 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1501   // 6243940 We might end up in here if the callee is deoptimized
1502   // as we race to call it.  We don't want to take a safepoint if
1503   // the caller was interpreted because the caller frame will look
1504   // interpreted to the stack walkers and arguments are now
1505   // "compiled" so it is much better to make this transition
1506   // invisible to the stack walking code. The i2c path will
1507   // place the callee method in the callee_target. It is stashed
1508   // there because if we try to find the callee by normal means a
1509   // safepoint is possible and we would have trouble GC'ing the compiled args.
1510   RegisterMap reg_map(current,
1511                       RegisterMap::UpdateMap::skip,
1512                       RegisterMap::ProcessFrames::include,
1513                       RegisterMap::WalkContinuation::skip);
1514   frame stub_frame = current->last_frame();
1515   assert(stub_frame.is_runtime_frame(), "sanity check");
1516   frame caller_frame = stub_frame.sender(&reg_map);
1517 
1518   if (caller_frame.is_interpreted_frame() ||
1519       caller_frame.is_entry_frame() ||
1520       caller_frame.is_upcall_stub_frame()) {
1521     Method* callee = current->callee_target();
1522     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1523     current->set_vm_result_metadata(callee);
1524     current->set_callee_target(nullptr);
1525     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1526       // Bypass class initialization checks in c2i when caller is in native.
1527       // JNI calls to static methods don't have class initialization checks.
1528       // Fast class initialization checks are present in c2i adapters and call into
1529       // SharedRuntime::handle_wrong_method() on the slow path.
1530       //
1531       // JVM upcalls may land here as well, but there's a proper check present in
1532       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1533       // so bypassing it in c2i adapter is benign.
1534       return callee->get_c2i_no_clinit_check_entry();
1535     } else {
1536       return callee->get_c2i_entry();




1537     }
1538   }
1539 
1540   // Must be compiled to compiled path which is safe to stackwalk
1541   methodHandle callee_method;



1542   JRT_BLOCK
1543     // Force resolving of caller (if we called from compiled frame)
1544     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1545     current->set_vm_result_metadata(callee_method());
1546   JRT_BLOCK_END
1547   // return compiled code entry point after potential safepoints
1548   return get_resolved_entry(current, callee_method);
1549 JRT_END
1550 
1551 // Handle abstract method call
1552 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1553   // Verbose error message for AbstractMethodError.
1554   // Get the called method from the invoke bytecode.
1555   vframeStream vfst(current, true);
1556   assert(!vfst.at_end(), "Java frame must exist");
1557   methodHandle caller(current, vfst.method());
1558   Bytecode_invoke invoke(caller, vfst.bci());
1559   DEBUG_ONLY( invoke.verify(); )
1560 
1561   // Find the compiled caller frame.
1562   RegisterMap reg_map(current,
1563                       RegisterMap::UpdateMap::include,
1564                       RegisterMap::ProcessFrames::include,
1565                       RegisterMap::WalkContinuation::skip);
1566   frame stubFrame = current->last_frame();
1567   assert(stubFrame.is_runtime_frame(), "must be");
1568   frame callerFrame = stubFrame.sender(&reg_map);
1569   assert(callerFrame.is_compiled_frame(), "must be");
1570 
1571   // Install exception and return forward entry.
1572   address res = SharedRuntime::throw_AbstractMethodError_entry();
1573   JRT_BLOCK
1574     methodHandle callee(current, invoke.static_target(current));
1575     if (!callee.is_null()) {
1576       oop recv = callerFrame.retrieve_receiver(&reg_map);
1577       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1578       res = StubRoutines::forward_exception_entry();
1579       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1580     }
1581   JRT_BLOCK_END
1582   return res;
1583 JRT_END
1584 
1585 // return verified_code_entry if interp_only_mode is not set for the current thread;
1586 // otherwise return c2i entry.
1587 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1588   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1589     // In interp_only_mode we need to go to the interpreted entry
1590     // The c2i won't patch in this mode -- see fixup_callers_callsite
1591     return callee_method->get_c2i_entry();
1592   }
1593   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1594   return callee_method->verified_code_entry();
1595 }
1596 
1597 // resolve a static call and patch code
1598 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1599   methodHandle callee_method;

1600   bool enter_special = false;
1601   JRT_BLOCK
1602     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1603     current->set_vm_result_metadata(callee_method());
1604   JRT_BLOCK_END
1605   // return compiled code entry point after potential safepoints
1606   return get_resolved_entry(current, callee_method);
1607 JRT_END
1608 
1609 // resolve virtual call and update inline cache to monomorphic
1610 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1611   methodHandle callee_method;

1612   JRT_BLOCK
1613     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1614     current->set_vm_result_metadata(callee_method());
1615   JRT_BLOCK_END
1616   // return compiled code entry point after potential safepoints
1617   return get_resolved_entry(current, callee_method);
1618 JRT_END
1619 
1620 
1621 // Resolve a virtual call that can be statically bound (e.g., always
1622 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1623 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1624   methodHandle callee_method;

1625   JRT_BLOCK
1626     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1627     current->set_vm_result_metadata(callee_method());
1628   JRT_BLOCK_END
1629   // return compiled code entry point after potential safepoints
1630   return get_resolved_entry(current, callee_method);
1631 JRT_END
1632 
1633 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1634   JavaThread* current = THREAD;
1635   ResourceMark rm(current);
1636   CallInfo call_info;
1637   Bytecodes::Code bc;
1638 
1639   // receiver is null for static calls. An exception is thrown for null
1640   // receivers for non-static calls
1641   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1642 
1643   methodHandle callee_method(current, call_info.selected_method());
1644 
1645 #ifndef PRODUCT
1646   AtomicAccess::inc(&_ic_miss_ctr);
1647 
1648   // Statistics & Tracing
1649   if (TraceCallFixup) {
1650     ResourceMark rm(current);
1651     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1652     callee_method->print_short_name(tty);
1653     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1654   }
1655 
1656   if (ICMissHistogram) {
1657     MutexLocker m(VMStatistic_lock);
1658     RegisterMap reg_map(current,
1659                         RegisterMap::UpdateMap::skip,
1660                         RegisterMap::ProcessFrames::include,
1661                         RegisterMap::WalkContinuation::skip);
1662     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1663     // produce statistics under the lock
1664     trace_ic_miss(f.pc());
1665   }
1666 #endif
1667 
1668   // install an event collector so that when a vtable stub is created the
1669   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1670   // event can't be posted when the stub is created as locks are held
1671   // - instead the event will be deferred until the event collector goes
1672   // out of scope.
1673   JvmtiDynamicCodeEventCollector event_collector;
1674 
1675   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1676   RegisterMap reg_map(current,
1677                       RegisterMap::UpdateMap::skip,
1678                       RegisterMap::ProcessFrames::include,
1679                       RegisterMap::WalkContinuation::skip);
1680   frame caller_frame = current->last_frame().sender(&reg_map);
1681   CodeBlob* cb = caller_frame.cb();
1682   nmethod* caller_nm = cb->as_nmethod();




1683 
1684   CompiledICLocker ml(caller_nm);
1685   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1686   inline_cache->update(&call_info, receiver()->klass());
1687 
1688   return callee_method;
1689 }
1690 
1691 //
1692 // Resets a call-site in compiled code so it will get resolved again.
1693 // This routine handles virtual call sites, optimized virtual call
1694 // sites, and static call sites. Typically used to change a call site's
1695 // destination from compiled to interpreted.
1696 //
1697 methodHandle SharedRuntime::reresolve_call_site(TRAPS) {
1698   JavaThread* current = THREAD;
1699   ResourceMark rm(current);
1700   RegisterMap reg_map(current,
1701                       RegisterMap::UpdateMap::skip,
1702                       RegisterMap::ProcessFrames::include,
1703                       RegisterMap::WalkContinuation::skip);
1704   frame stub_frame = current->last_frame();
1705   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1706   frame caller = stub_frame.sender(&reg_map);
1707 
1708   // Do nothing if the frame isn't a live compiled frame.
1709   // nmethod could be deoptimized by the time we get here
1710   // so no update to the caller is needed.
1711 
1712   if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1713       (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1714 
1715     address pc = caller.pc();
1716 
1717     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1718     assert(caller_nm != nullptr, "did not find caller nmethod");
1719 
1720     // Default call_addr is the location of the "basic" call.
1721     // Determine the address of the call we are re-resolving. With
1722     // Inline Caches we will always find a recognizable call.
1723     // With Inline Caches disabled we may or may not find a
1724     // recognizable call. We will always find a call for static
1725     // calls and for optimized virtual calls. For vanilla virtual
1726     // calls it depends on the state of the UseInlineCaches switch.
1727     //
1728     // With Inline Caches disabled we can get here for a virtual call
1729     // for two reasons:
1730     //   1 - calling an abstract method. The vtable for abstract methods
1731     //       will run us thru handle_wrong_method and we will eventually
1732     //       end up in the interpreter to throw the AME.
1733     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1734     //       call and between the time we fetch the entry address and
1735     //       we jump to it the target gets deoptimized. Similar to 1
1736     //       we will wind up in the interpreter (thru a c2i with c2).
1737     //
1738     CompiledICLocker ml(caller_nm);
1739     address call_addr = caller_nm->call_instruction_address(pc);
1740 
1741     if (call_addr != nullptr) {
1742       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1743       // bytes back in the instruction stream so we must also check for reloc info.
1744       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1745       bool ret = iter.next(); // Get item
1746       if (ret) {

1747         switch (iter.type()) {
1748           case relocInfo::static_call_type:

1749           case relocInfo::opt_virtual_call_type: {
1750             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1751             cdc->set_to_clean();



1752             break;
1753           }
1754 
1755           case relocInfo::virtual_call_type: {
1756             // compiled, dispatched call (which used to call an interpreted method)
1757             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1758             inline_cache->set_to_clean();


1759             break;
1760           }
1761           default:
1762             break;
1763         }
1764       }
1765     }
1766   }
1767 
1768   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1769 
1770 
1771 #ifndef PRODUCT
1772   AtomicAccess::inc(&_wrong_method_ctr);
1773 
1774   if (TraceCallFixup) {
1775     ResourceMark rm(current);
1776     tty->print("handle_wrong_method reresolving call to");
1777     callee_method->print_short_name(tty);
1778     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1779   }
1780 #endif
1781 
1782   return callee_method;
1783 }
1784 
1785 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1786   // The faulting unsafe accesses should be changed to throw the error
1787   // synchronously instead. Meanwhile the faulting instruction will be
1788   // skipped over (effectively turning it into a no-op) and an
1789   // asynchronous exception will be raised which the thread will
1790   // handle at a later point. If the instruction is a load it will
1791   // return garbage.
1792 
1793   // Request an async exception.
1794   thread->set_pending_unsafe_access_error();
1795 
1796   // Return address of next instruction to execute.

1962   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
1963 
1964   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
1965   if (message == nullptr) {
1966     // Shouldn't happen, but don't cause even more problems if it does
1967     message = const_cast<char*>(caster_klass->external_name());
1968   } else {
1969     jio_snprintf(message,
1970                  msglen,
1971                  "class %s cannot be cast to class %s (%s%s%s)",
1972                  caster_name,
1973                  target_name,
1974                  caster_klass_description,
1975                  klass_separator,
1976                  target_klass_description
1977                  );
1978   }
1979   return message;
1980 }
1981 
1982 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
1983   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
1984 JRT_END
1985 
1986 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
1987   if (!SafepointSynchronize::is_synchronizing()) {
1988     // Only try quick_enter() if we're not trying to reach a safepoint
1989     // so that the calling thread reaches the safepoint more quickly.
1990     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
1991       return;
1992     }
1993   }
1994   // NO_ASYNC required because an async exception on the state transition destructor
1995   // would leave you with the lock held and it would never be released.
1996   // The normal monitorenter NullPointerException is thrown without acquiring a lock
1997   // and the model is that an exception implies the method failed.
1998   JRT_BLOCK_NO_ASYNC
1999   Handle h_obj(THREAD, obj);
2000   ObjectSynchronizer::enter(h_obj, lock, current);
2001   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2195   tty->print_cr("Note 1: counter updates are not MT-safe.");
2196   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2197   tty->print_cr("        %% in nested categories are relative to their category");
2198   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2199   tty->cr();
2200 
2201   MethodArityHistogram h;
2202 }
2203 #endif
2204 
2205 #ifndef PRODUCT
2206 static int _lookups; // number of calls to lookup
2207 static int _equals;  // number of buckets checked with matching hash
2208 static int _archived_hits; // number of successful lookups in archived table
2209 static int _runtime_hits;  // number of successful lookups in runtime table
2210 #endif
2211 
2212 // A simple wrapper class around the calling convention information
2213 // that allows sharing of adapters for the same calling convention.
2214 class AdapterFingerPrint : public MetaspaceObj {
2215  private:
2216   enum {
2217     _basic_type_bits = 4,
2218     _basic_type_mask = right_n_bits(_basic_type_bits),
2219     _basic_types_per_int = BitsPerInt / _basic_type_bits,
2220   };
2221   // TO DO:  Consider integrating this with a more global scheme for compressing signatures.
2222   // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
2223 
2224   int _length;


2225 
2226   static int data_offset() { return sizeof(AdapterFingerPrint); }
2227   int* data_pointer() {
2228     return (int*)((address)this + data_offset());






2229   }
2230 
2231   // Private constructor. Use allocate() to get an instance.
2232   AdapterFingerPrint(int total_args_passed, BasicType* sig_bt, int len) {
2233     int* data = data_pointer();
2234     // Pack the BasicTypes with 8 per int
2235     assert(len == length(total_args_passed), "sanity");
2236     _length = len;
2237     int sig_index = 0;
2238     for (int index = 0; index < _length; index++) {
2239       int value = 0;
2240       for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2241         int bt = adapter_encoding(sig_bt[sig_index++]);
2242         assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2243         value = (value << _basic_type_bits) | bt;
2244       }
2245       data[index] = value;


2246     }

2247   }
2248 
2249   // Call deallocate instead
2250   ~AdapterFingerPrint() {
2251     ShouldNotCallThis();
2252   }
2253 
2254   static int length(int total_args) {
2255     return (total_args + (_basic_types_per_int-1)) / _basic_types_per_int;
2256   }
2257 
2258   static int compute_size_in_words(int len) {
2259     return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(int)));
2260   }
2261 
2262   // Remap BasicTypes that are handled equivalently by the adapters.
2263   // These are correct for the current system but someday it might be
2264   // necessary to make this mapping platform dependent.
2265   static int adapter_encoding(BasicType in) {
2266     switch (in) {
2267       case T_BOOLEAN:
2268       case T_BYTE:
2269       case T_SHORT:
2270       case T_CHAR:
2271         // These are all promoted to T_INT in the calling convention
2272         return T_INT;
2273 
2274       case T_OBJECT:
2275       case T_ARRAY:
2276         // In other words, we assume that any register good enough for
2277         // an int or long is good enough for a managed pointer.
2278 #ifdef _LP64
2279         return T_LONG;
2280 #else
2281         return T_INT;
2282 #endif
2283 
2284       case T_INT:
2285       case T_LONG:
2286       case T_FLOAT:
2287       case T_DOUBLE:
2288       case T_VOID:
2289         return in;
2290 
2291       default:
2292         ShouldNotReachHere();
2293         return T_CONFLICT;
2294     }
2295   }
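  // Editorial note (illustrative example, not part of the original source):
  // for an instance method with Java signature (IJ)V on an LP64 platform the
  // flattened argument array is [T_OBJECT, T_INT, T_LONG, T_VOID].  After
  // remapping by adapter_encoding() (T_OBJECT -> T_LONG), the nibbles are
  // 0xb, 0xa, 0xb, 0xe (standard HotSpot BasicType codes), which pack into
  // the single int 0xbabe, so as_string() prints "0xbabe".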
2296 
2297   void* operator new(size_t size, size_t fp_size) throw() {
2298     assert(fp_size >= size, "sanity check");
2299     void* p = AllocateHeap(fp_size, mtCode);
2300     memset(p, 0, fp_size);
2301     return p;
2302   }
2303 

2304   template<typename Function>
2305   void iterate_args(Function function) {
2306     for (int i = 0; i < length(); i++) {
2307       unsigned val = (unsigned)value(i);
2308       // args are packed so that first/lower arguments are in the highest
2309       // bits of each int value, so iterate from highest to the lowest
2310       for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2311         unsigned v = (val >> j) & _basic_type_mask;
2312         if (v == 0) {
2313           continue;
2314         }
2315         function(v);
2316       }
2317     }
2318   }
2319 
2320  public:
2321   static AdapterFingerPrint* allocate(int total_args_passed, BasicType* sig_bt) {
2322     int len = length(total_args_passed);
2323     int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2324     AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(total_args_passed, sig_bt, len);
2325     assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2326     return afp;
2327   }
2328 
2329   static void deallocate(AdapterFingerPrint* fp) {
2330     FreeHeap(fp);
2331   }
2332 
2333   int value(int index) {
2334     int* data = data_pointer();
2335     return data[index];
2336   }
2337 
2338   int length() {
2339     return _length;
2340   }
2341 
2342   unsigned int compute_hash() {
2343     int hash = 0;
2344     for (int i = 0; i < length(); i++) {
2345       int v = value(i);
2346       // Add an arithmetic operation to the hash, like +3, to improve hashing
2347       hash = ((hash << 8) ^ v ^ (hash >> 5)) + 3;
2348     }
2349     return (unsigned int)hash;
2350   }
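  // Editorial note (illustrative example, not part of the original source):
  // continuing the (IJ)V example above, the fingerprint holds the single
  // packed int 0xbabe, so compute_hash() returns
  // ((0 << 8) ^ 0xbabe ^ (0 >> 5)) + 3 = 0xbac1.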
2351 
2352   const char* as_string() {
2353     stringStream st;
2354     st.print("0x");





2355     for (int i = 0; i < length(); i++) {
2356       st.print("%x", value(i));


2357     }

2358     return st.as_string();
2359   }
2360 
2361   const char* as_basic_args_string() {
2362     stringStream st;
2363     bool long_prev = false;
2364     iterate_args([&] (int arg) {
2365       if (long_prev) {
2366         long_prev = false;
2367         if (arg == T_VOID) {
2368           st.print("J");
2369         } else {
2370           st.print("L");
2371         }
2372       }
2373       switch (arg) {
2374         case T_INT:    st.print("I");    break;
2375         case T_LONG:   long_prev = true; break;
2376         case T_FLOAT:  st.print("F");    break;
2377         case T_DOUBLE: st.print("D");    break;
2378         case T_VOID:   break;
2379         default: ShouldNotReachHere();
2380       }
2381     });
2382     if (long_prev) {
2383       st.print("L");
2384     }
2385     return st.as_string();
2386   }
2387 
2388   BasicType* as_basic_type(int& nargs) {
2389     nargs = 0;
2390     GrowableArray<BasicType> btarray;
2391     bool long_prev = false;
2392 
2393     iterate_args([&] (int arg) {
2394       if (long_prev) {
2395         long_prev = false;
2396         if (arg == T_VOID) {
2397           btarray.append(T_LONG);
2398         } else {
2399           btarray.append(T_OBJECT); // it could be T_ARRAY; it shouldn't matter
2400         }
2401       }
2402       switch (arg) {
2403         case T_INT: // fallthrough
2404         case T_FLOAT: // fallthrough
2405         case T_DOUBLE:
2406         case T_VOID:
2407           btarray.append((BasicType)arg);
2408           break;
2409         case T_LONG:
2410           long_prev = true;
2411           break;
2412         default: ShouldNotReachHere();
2413       }
2414     });
2415 
2416     if (long_prev) {
2417       btarray.append(T_OBJECT);
2418     }
2419 
2420     nargs = btarray.length();
2421     BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, nargs);
2422     int index = 0;
2423     GrowableArrayIterator<BasicType> iter = btarray.begin();
2424     while (iter != btarray.end()) {
2425       sig_bt[index++] = *iter;
2426       ++iter;
2427     }
2428     assert(index == btarray.length(), "sanity check");
2429 #ifdef ASSERT
2430     {
2431       AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(nargs, sig_bt);
2432       assert(this->equals(compare_fp), "sanity check");
2433       AdapterFingerPrint::deallocate(compare_fp);
2434     }
2435 #endif
2436     return sig_bt;
2437   }
2438 
2439   bool equals(AdapterFingerPrint* other) {
2440     if (other->_length != _length) {


2441       return false;
2442     } else {
2443       for (int i = 0; i < _length; i++) {
2444         if (value(i) != other->value(i)) {
2445           return false;
2446         }
2447       }
2448     }
2449     return true;
2450   }
2451 
2452   // methods required by virtue of being a MetaspaceObj
2453   void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2454   int size() const { return compute_size_in_words(_length); }
2455   MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2456 
2457   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2458     NOT_PRODUCT(_equals++);
2459     return fp1->equals(fp2);
2460   }
2461 
2462   static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2463     return fp->compute_hash();
2464   }

2467 #if INCLUDE_CDS
2468 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2469   return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2470 }
2471 
2472 class ArchivedAdapterTable : public OffsetCompactHashtable<
2473   AdapterFingerPrint*,
2474   AdapterHandlerEntry*,
2475   adapter_fp_equals_compact_hashtable_entry> {};
2476 #endif // INCLUDE_CDS
2477 
2478 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2479 using AdapterHandlerTable = HashTable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2480                   AnyObj::C_HEAP, mtCode,
2481                   AdapterFingerPrint::compute_hash,
2482                   AdapterFingerPrint::equals>;
2483 static AdapterHandlerTable* _adapter_handler_table;
2484 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2485 
2486 // Find an entry with the same fingerprint if it exists
2487 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicType* sig_bt) {
2488   NOT_PRODUCT(_lookups++);
2489   assert_lock_strong(AdapterHandlerLibrary_lock);
2490   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2491   AdapterHandlerEntry* entry = nullptr;
2492 #if INCLUDE_CDS
2493   // if we are building the archive then the archived adapter table is
2494   // not valid and we need to use the ones added to the runtime table
2495   if (AOTCodeCache::is_using_adapter()) {
2496     // Search the archived table first. It is a read-only table so it can be searched without a lock
2497     entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2498 #ifndef PRODUCT
2499     if (entry != nullptr) {
2500       _archived_hits++;
2501     }
2502 #endif
2503   }
2504 #endif // INCLUDE_CDS
2505   if (entry == nullptr) {
2506     assert_lock_strong(AdapterHandlerLibrary_lock);
2507     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2508     if (entry_p != nullptr) {
2509       entry = *entry_p;
2510       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",

2527   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2528   ts.print(tty, "AdapterHandlerTable");
2529   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2530                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2531   int total_hits = _archived_hits + _runtime_hits;
2532   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2533                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2534 }
2535 #endif
2536 
2537 // ---------------------------------------------------------------------------
2538 // Implementation of AdapterHandlerLibrary
2539 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2540 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2541 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2542 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2543 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2544 #if INCLUDE_CDS
2545 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2546 #endif // INCLUDE_CDS
2547 static const int AdapterHandlerLibrary_size = 16*K;
2548 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2549 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2550 
2551 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2552   assert(_buffer != nullptr, "should be initialized");
2553   return _buffer;
2554 }
2555 
2556 static void post_adapter_creation(const AdapterHandlerEntry* entry) {
2557   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2558     AdapterBlob* adapter_blob = entry->adapter_blob();
2559     char blob_id[256];
2560     jio_snprintf(blob_id,
2561                  sizeof(blob_id),
2562                  "%s(%s)",
2563                  adapter_blob->name(),
2564                  entry->fingerprint()->as_string());
2565     if (Forte::is_enabled()) {
2566       Forte::register_stub(blob_id, adapter_blob->content_begin(), adapter_blob->content_end());
2567     }

2575 void AdapterHandlerLibrary::initialize() {
2576   {
2577     ResourceMark rm;
2578     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2579     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2580   }
2581 
2582 #if INCLUDE_CDS
2583   // Link adapters in AOT Cache to their code in AOT Code Cache
2584   if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2585     link_aot_adapters();
2586     lookup_simple_adapters();
2587     return;
2588   }
2589 #endif // INCLUDE_CDS
2590 
2591   ResourceMark rm;
2592   {
2593     MutexLocker mu(AdapterHandlerLibrary_lock);
2594 
2595     _no_arg_handler = create_adapter(0, nullptr);


2596 
2597     BasicType obj_args[] = { T_OBJECT };
2598     _obj_arg_handler = create_adapter(1, obj_args);


2599 
2600     BasicType int_args[] = { T_INT };
2601     _int_arg_handler = create_adapter(1, int_args);


2602 
2603     BasicType obj_int_args[] = { T_OBJECT, T_INT };
2604     _obj_int_arg_handler = create_adapter(2, obj_int_args);



2605 
2606     BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2607     _obj_obj_arg_handler = create_adapter(2, obj_obj_args);



2608 
2609     // we should always get an entry back but we don't have any
2610     // associated blob on Zero
2611     assert(_no_arg_handler != nullptr &&
2612            _obj_arg_handler != nullptr &&
2613            _int_arg_handler != nullptr &&
2614            _obj_int_arg_handler != nullptr &&
2615            _obj_obj_arg_handler != nullptr, "Initial adapter handlers must be properly created");
2616   }
2617 
2618   // Outside of the lock
2619 #ifndef ZERO
2620   // no blobs to register when we are on Zero
2621   post_adapter_creation(_no_arg_handler);
2622   post_adapter_creation(_obj_arg_handler);
2623   post_adapter_creation(_int_arg_handler);
2624   post_adapter_creation(_obj_int_arg_handler);
2625   post_adapter_creation(_obj_obj_arg_handler);
2626 #endif // ZERO
2627 }
2628 
2629 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2630   uint id = (uint)AtomicAccess::add((int*)&_id_counter, 1);
2631   assert(id > 0, "we can never overflow because AOT cache cannot contain more than 2^32 methods");
2632   return AdapterHandlerEntry::allocate(id, fingerprint);
2633 }
2634 
2635 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2636   int total_args_passed = method->size_of_parameters(); // All args on stack
2637   if (total_args_passed == 0) {
2638     return _no_arg_handler;
2639   } else if (total_args_passed == 1) {
2640     if (!method->is_static()) {



2641       return _obj_arg_handler;
2642     }
2643     switch (method->signature()->char_at(1)) {
2644       case JVM_SIGNATURE_CLASS:
2645       case JVM_SIGNATURE_ARRAY:
2646         return _obj_arg_handler;
2647       case JVM_SIGNATURE_INT:
2648       case JVM_SIGNATURE_BOOLEAN:
2649       case JVM_SIGNATURE_CHAR:
2650       case JVM_SIGNATURE_BYTE:
2651       case JVM_SIGNATURE_SHORT:
2652         return _int_arg_handler;
2653     }
2654   } else if (total_args_passed == 2 &&
2655              !method->is_static()) {
2656     switch (method->signature()->char_at(1)) {
2657       case JVM_SIGNATURE_CLASS:
2658       case JVM_SIGNATURE_ARRAY:
2659         return _obj_obj_arg_handler;
2660       case JVM_SIGNATURE_INT:
2661       case JVM_SIGNATURE_BOOLEAN:
2662       case JVM_SIGNATURE_CHAR:
2663       case JVM_SIGNATURE_BYTE:
2664       case JVM_SIGNATURE_SHORT:
2665         return _obj_int_arg_handler;
2666     }
2667   }
2668   return nullptr;
2669 }
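// Editorial note (illustrative mappings, not part of the original source):
// a static (I)V method (one int slot) maps to _int_arg_handler, a static
// (Ljava/lang/String;)V method to _obj_arg_handler, an instance ()V method
// (receiver only) to _obj_arg_handler, and an instance (I)V method to
// _obj_int_arg_handler.  A static (J)V method occupies two slots but is
// static, so it falls through and returns nullptr, forcing the full
// fingerprint lookup in get_adapter().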
2670 
2671 class AdapterSignatureIterator : public SignatureIterator {
2672  private:
2673   BasicType stack_sig_bt[16];
2674   BasicType* sig_bt;
2675   int index;




2676 
2677  public:
2678   AdapterSignatureIterator(Symbol* signature,
2679                            fingerprint_t fingerprint,
2680                            bool is_static,
2681                            int total_args_passed) :
2682     SignatureIterator(signature, fingerprint),
2683     index(0)
2684   {
2685     sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2686     if (!is_static) { // Pass in receiver first
2687       sig_bt[index++] = T_OBJECT;
2688     }
2689     do_parameters_on(this);
2690   }
2691 
2692   BasicType* basic_types() {
2693     return sig_bt;
2694   }

2695 
2696 #ifdef ASSERT
2697   int slots() {
2698     return index;
2699   }
2700 #endif
2701 
2702  private:
2703 
2704   friend class SignatureIterator;  // so do_parameters_on can call do_type
2705   void do_type(BasicType type) {
2706     sig_bt[index++] = type;
2707     if (type == T_LONG || type == T_DOUBLE) {
2708       sig_bt[index++] = T_VOID; // Longs & doubles take 2 Java slots



2709     }
2710   }
2711 };
2712 
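// Editorial note (illustrative example, not part of the original source):
// for an instance method with signature (JLjava/lang/Object;D)V the iterator
// above prepends the receiver and appends a T_VOID slot after each long or
// double, yielding sig_bt = [T_OBJECT, T_LONG, T_VOID, T_OBJECT, T_DOUBLE,
// T_VOID] and slots() == 6, matching Method::size_of_parameters().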
2713 
2714 const char* AdapterHandlerEntry::_entry_names[] = {
2715   "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
2716 };
2717 
2718 #ifdef ASSERT
2719 void AdapterHandlerLibrary::verify_adapter_sharing(int total_args_passed, BasicType* sig_bt, AdapterHandlerEntry* cached_entry) {
2720   // we can only check for the same code if there is any
2721 #ifndef ZERO
2722   AdapterHandlerEntry* comparison_entry = create_adapter(total_args_passed, sig_bt, true);
2723   assert(comparison_entry->adapter_blob() == nullptr, "no blob should be created when creating an adapter for comparison");
2724   assert(comparison_entry->compare_code(cached_entry), "code must match");
2725   // Release the one just created
2726   AdapterHandlerEntry::deallocate(comparison_entry);
2727 # endif // ZERO
2728 }
2729 #endif /* ASSERT*/
2730 
2731 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
2732   assert(!method->is_abstract(), "abstract methods do not have adapters");
2733   // Use customized signature handler.  Need to lock around updates to
2734   // the _adapter_handler_table (it is not safe for concurrent readers
2735   // and a single writer: this could be fixed if it becomes a
2736   // problem).
2737 
2738   // Fast-path for trivial adapters
2739   AdapterHandlerEntry* entry = get_simple_adapter(method);
2740   if (entry != nullptr) {
2741     return entry;
2742   }
2743 
2744   ResourceMark rm;
2745   bool new_entry = false;
2746 
2747   // Fill in the signature array, for the calling-convention call.
2748   int total_args_passed = method->size_of_parameters(); // All args on stack
2749 
2750   AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2751                               method->is_static(), total_args_passed);
2752   assert(si.slots() == total_args_passed, "");
2753   BasicType* sig_bt = si.basic_types();
2754   {
2755     MutexLocker mu(AdapterHandlerLibrary_lock);
2756 
2757     // Lookup method signature's fingerprint
2758     entry = lookup(total_args_passed, sig_bt);
2759 
2760     if (entry != nullptr) {
2761 #ifndef ZERO
2762       assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
2763 #endif
2764 #ifdef ASSERT
2765       if (!entry->in_aot_cache() && VerifyAdapterSharing) {
2766         verify_adapter_sharing(total_args_passed, sig_bt, entry);
2767       }
2768 #endif
2769     } else {
2770       entry = create_adapter(total_args_passed, sig_bt);
2771       if (entry != nullptr) {
2772         new_entry = true;
2773       }
2774     }
2775   }
2776 
2777   // Outside of the lock
2778   if (new_entry) {
2779     post_adapter_creation(entry);
2780   }
2781   return entry;
2782 }
2783 
2784 void AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
2785   ResourceMark rm;
2786   const char* name = AdapterHandlerLibrary::name(handler);
2787   const uint32_t id = AdapterHandlerLibrary::id(handler);
2788 
2789   CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name);
2790   if (blob != nullptr) {

2805   }
2806   insts_size = adapter_blob->code_size();
2807   st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
2808                 handler->fingerprint()->as_basic_args_string(),
2809                 handler->fingerprint()->as_string(), insts_size);
2810   st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
2811   if (Verbose || PrintStubCode) {
2812     address first_pc = adapter_blob->content_begin();
2813     if (first_pc != nullptr) {
2814       Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
2815       st->cr();
2816     }
2817   }
2818 }
2819 #endif // PRODUCT
2820 
2821 void AdapterHandlerLibrary::address_to_offset(address entry_address[AdapterBlob::ENTRY_COUNT],
2822                                               int entry_offset[AdapterBlob::ENTRY_COUNT]) {
2823   entry_offset[AdapterBlob::I2C] = 0;
2824   entry_offset[AdapterBlob::C2I] = entry_address[AdapterBlob::C2I] - entry_address[AdapterBlob::I2C];


2825   entry_offset[AdapterBlob::C2I_Unverified] = entry_address[AdapterBlob::C2I_Unverified] - entry_address[AdapterBlob::I2C];

2826   if (entry_address[AdapterBlob::C2I_No_Clinit_Check] == nullptr) {
2827     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = -1;
2828   } else {
2829     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = entry_address[AdapterBlob::C2I_No_Clinit_Check] - entry_address[AdapterBlob::I2C];
2830   }
2831 }
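// Editorial note (illustrative example with hypothetical addresses, not part
// of the original source): given entry_address = { I2C = 0x1000, C2I = 0x1040,
// C2I_Unverified = 0x1020, C2I_No_Clinit_Check = nullptr }, the conversion
// above yields entry_offset = { 0, 0x40, 0x20, -1 } relative to the I2C entry.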
2832 
2833 bool AdapterHandlerLibrary::generate_adapter_code(AdapterHandlerEntry* handler,
2834                                                   int total_args_passed,
2835                                                   BasicType* sig_bt,
2836                                                   bool is_transient) {
2837   if (log_is_enabled(Info, perf, class, link)) {
2838     ClassLoader::perf_method_adapters_count()->inc();
2839   }
2840 
2841 #ifndef ZERO

2842   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2843   CodeBuffer buffer(buf);
2844   short buffer_locs[20];
2845   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2846                                          sizeof(buffer_locs)/sizeof(relocInfo));
2847   MacroAssembler masm(&buffer);
2848   VMRegPair stack_regs[16];
2849   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2850 
2851   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2852   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2853   address entry_address[AdapterBlob::ENTRY_COUNT];
2854   SharedRuntime::generate_i2c2i_adapters(&masm,
2855                                          total_args_passed,
2856                                          comp_args_on_stack,
2857                                          sig_bt,
2858                                          regs,
2859                                          entry_address);
2860   // On Zero there is no code to save and no need to create a blob
2861   // or to relocate the handler.
2862   int entry_offset[AdapterBlob::ENTRY_COUNT];
2863   address_to_offset(entry_address, entry_offset);
2864 #ifdef ASSERT
2865   if (VerifyAdapterSharing) {
2866     handler->save_code(buf->code_begin(), buffer.insts_size());
2867     if (is_transient) {
2868       return true;
2869     }
2870   }
2871 #endif
2872   AdapterBlob* adapter_blob = AdapterBlob::create(&buffer, entry_offset);
2873   if (adapter_blob == nullptr) {
2874     // CodeCache is full, disable compilation
2875     // Ought to log this but compile log is only per compile thread
2876     // and we're some nondescript Java thread.
2877     return false;
2878   }
2879   handler->set_adapter_blob(adapter_blob);
2880   if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
2881     // try to save generated code
2882     const char* name = AdapterHandlerLibrary::name(handler);
2883     const uint32_t id = AdapterHandlerLibrary::id(handler);
2884     bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name);
2885     assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
2886   }
2887 #endif // ZERO
2888 
2889 #ifndef PRODUCT
2890   // debugging support
2891   if (PrintAdapterHandlers || PrintStubCode) {
2892     print_adapter_handler_info(tty, handler);
2893   }
2894 #endif
2895 
2896   return true;
2897 }
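// Note on the flow above (non-Zero builds): under VerifyAdapterSharing a transient handler only
// records its generated code for later comparison and returns before any blob is created;
// otherwise the code is wrapped in an AdapterBlob and, if the handler is not transient and AOT
// adapter dumping is enabled, also stored in the AOT code cache under the handler's id and name.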
2898 
2899 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(int total_args_passed,
2900                                                            BasicType* sig_bt,
2901                                                            bool is_transient) {
2902   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);





2903   AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
2904   if (!generate_adapter_code(handler, total_args_passed, sig_bt, is_transient)) {
2905     AdapterHandlerEntry::deallocate(handler);
2906     return nullptr;
2907   }
2908   if (!is_transient) {
2909     assert_lock_strong(AdapterHandlerLibrary_lock);
2910     _adapter_handler_table->put(fp, handler);
2911   }
2912   return handler;
2913 }
2914 
2915 #if INCLUDE_CDS
2916 void AdapterHandlerEntry::remove_unshareable_info() {
2917 #ifdef ASSERT
2918    _saved_code = nullptr;
2919    _saved_code_length = 0;
2920 #endif // ASSERT
2921    _adapter_blob = nullptr;
2922    _linked = false;


2923 }
2924 
2925 class CopyAdapterTableToArchive : StackObj {
2926 private:
2927   CompactHashtableWriter* _writer;
2928   ArchiveBuilder* _builder;
2929 public:
2930   CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
2931                                                              _builder(ArchiveBuilder::current())
2932   {}
2933 
2934   bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
2935     LogStreamHandle(Trace, aot) lsh;
2936     if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
2937       assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
2938       AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
2939       assert(buffered_fp != nullptr,"sanity check");
2940       AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
2941       assert(buffered_entry != nullptr,"sanity check");
2942 

2982   }
2983 #endif
2984 }
2985 
2986 // This method is used during a production run to link archived adapters (stored in the AOT Cache)
2987 // to their code in the AOT Code Cache
2988 void AdapterHandlerEntry::link() {
2989   ResourceMark rm;
2990   assert(_fingerprint != nullptr, "_fingerprint must not be null");
2991   bool generate_code = false;
2992   // Generate code only if AOTCodeCache is not available, or
2993   // caching adapters is disabled, or we fail to link
2994   // the AdapterHandlerEntry to its code in the AOTCodeCache
2995   if (AOTCodeCache::is_using_adapter()) {
2996     AdapterHandlerLibrary::link_aot_adapter_handler(this);
2997     // If link_aot_adapter_handler() succeeds, _adapter_blob will be non-null
2998     if (_adapter_blob == nullptr) {
2999       log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3000       generate_code = true;
3001     }

















3002   } else {
3003     generate_code = true;
3004   }
3005   if (generate_code) {
3006     int nargs;
3007     BasicType* bt = _fingerprint->as_basic_type(nargs);
3008     if (!AdapterHandlerLibrary::generate_adapter_code(this, nargs, bt, /* is_transient */ false)) {
3009       // Don't throw exceptions during VM initialization because java.lang.* classes
3010       // might not have been initialized, causing problems when constructing the
3011       // Java exception object.
3012       vm_exit_during_initialization("Out of space in CodeCache for adapters");
3013     }
3014   }
3015   if (_adapter_blob != nullptr) {
3016     post_adapter_creation(this);
3017   }
3018   assert(_linked, "AdapterHandlerEntry must now be linked");
3019 }
3020 
3021 void AdapterHandlerLibrary::link_aot_adapters() {
3022   uint max_id = 0;
3023   assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3024   /* It is possible that some adapters generated in assembly phase are not stored in the cache.
3025    * That implies the adapter ids in the cache may not be contiguous.
3026    * If the size of the _aot_adapter_handler_table were used to initialize _id_counter, it could
3027    * result in collisions between the ids of AOT-stored handlers and runtime-generated handlers.
3028    * To avoid such a situation, initialize _id_counter with the largest adapter id among the AOT-stored handlers.
3029    */
3030   _aot_adapter_handler_table.iterate_all([&](AdapterHandlerEntry* entry) {
3031     assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3032     entry->link();
3033     max_id = MAX2(max_id, entry->id());
3034   });
3035   // Set the adapter id counter to the maximum id found in the AOTCache
3036   assert(_id_counter == 0, "Did not expect new AdapterHandlerEntry to be created at this stage");
3037   _id_counter = max_id;
3038 }
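// Illustrative example (hypothetical ids): if the AOT cache only stored adapters with ids
// {2, 5, 9}, the loop above leaves max_id == 9 and sets _id_counter to 9, so any adapter
// created later at runtime (assuming ids are handed out by incrementing _id_counter, as the
// comment above implies) gets an id above 9 and cannot collide with an archived handler,
// even though the archived ids are not contiguous.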
3039 
3040 // This method is called during a production run to look up simple adapters
3041 // in the archived adapter handler table
3042 void AdapterHandlerLibrary::lookup_simple_adapters() {
3043   assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3044 
3045   MutexLocker mu(AdapterHandlerLibrary_lock);
3046   _no_arg_handler = lookup(0, nullptr);
3047 
3048   BasicType obj_args[] = { T_OBJECT };
3049   _obj_arg_handler = lookup(1, obj_args);
3050 
3051   BasicType int_args[] = { T_INT };
3052   _int_arg_handler = lookup(1, int_args);
3053 
3054   BasicType obj_int_args[] = { T_OBJECT, T_INT };
3055   _obj_int_arg_handler = lookup(2, obj_int_args);
3056 
3057   BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
3058   _obj_obj_arg_handler = lookup(2, obj_obj_args);













3059 
3060   assert(_no_arg_handler != nullptr &&
3061          _obj_arg_handler != nullptr &&
3062          _int_arg_handler != nullptr &&
3063          _obj_int_arg_handler != nullptr &&
3064          _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3065   assert(_no_arg_handler->is_linked() &&
3066          _obj_arg_handler->is_linked() &&
3067          _int_arg_handler->is_linked() &&
3068          _obj_int_arg_handler->is_linked() &&
3069          _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3070 }
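// Illustrative note (assuming the lookups above build fingerprints with the same widening as
// AdapterFingerPrint::adapter_encoding() further down): a hypothetical method taking a single
// boolean, byte, short, char or int argument would presumably resolve to the same
// _int_arg_handler that is looked up here with { T_INT }.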
3071 #endif // INCLUDE_CDS
3072 
3073 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3074   LogStreamHandle(Trace, aot) lsh;
3075   if (lsh.is_enabled()) {
3076     lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3077     lsh.cr();
3078   }
3079   it->push(&_fingerprint);
3080 }
3081 
3082 AdapterHandlerEntry::~AdapterHandlerEntry() {
3083   if (_fingerprint != nullptr) {
3084     AdapterFingerPrint::deallocate(_fingerprint);
3085     _fingerprint = nullptr;
3086   }






3087 #ifdef ASSERT
3088   FREE_C_HEAP_ARRAY(_saved_code);
3089 #endif
3090   FreeHeap(this);
3091 }
3092 
3093 
3094 #ifdef ASSERT
3095 // Capture the code before relocation so that it can be compared
3096 // against other versions.  If the code is captured after relocation
3097 // then relative instructions won't be equivalent.
3098 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3099   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3100   _saved_code_length = length;
3101   memcpy(_saved_code, buffer, length);
3102 }
3103 
3104 
3105 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3106   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");

3154 
3155       struct { double data[20]; } locs_buf;
3156       struct { double data[20]; } stubs_locs_buf;
3157       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3158 #if defined(AARCH64) || defined(PPC64)
3159       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3160       // in the constant pool to ensure ordering between the barrier and oops
3161       // accesses. For native_wrappers we need a constant.
3162       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3163       // static java call that is resolved in the runtime.
3164       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3165         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3166       }
3167 #endif
3168       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3169       MacroAssembler _masm(&buffer);
3170 
3171       // Fill in the signature array, for the calling-convention call.
3172       const int total_args_passed = method->size_of_parameters();
3173 

3174       VMRegPair stack_regs[16];

3175       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3176 
3177       AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
3178                               method->is_static(), total_args_passed);
3179       BasicType* sig_bt = si.basic_types();
3180       assert(si.slots() == total_args_passed, "");
3181       BasicType ret_type = si.return_type();








3182 
3183       // Now get the compiled-Java arguments layout.
3184       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3185 
3186       // Generate the compiled-to-native wrapper code
3187       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3188 
3189       if (nm != nullptr) {
3190         {
3191           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3192           if (nm->make_in_use()) {
3193             method->set_code(method, nm);
3194           }
3195         }
3196 
3197         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3198         if (directive->PrintAssemblyOption) {
3199           nm->print_code();
3200         }
3201         DirectivesStack::release(directive);

3409       if (b == handler->adapter_blob()) {
3410         found = true;
3411         st->print("Adapter for signature: ");
3412         handler->print_adapter_on(st);
3413         return false; // abort iteration
3414       } else {
3415         return true; // keep looking
3416       }
3417     };
3418     assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3419     _adapter_handler_table->iterate(findblob_runtime_table);
3420   }
3421   assert(found, "Should have found handler");
3422 }
3423 
3424 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3425   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3426   if (adapter_blob() != nullptr) {
3427     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3428     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3429     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));



3430     if (get_c2i_no_clinit_check_entry() != nullptr) {
3431       st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3432     }
3433   }
3434   st->cr();
3435 }
3436 
3437 #ifndef PRODUCT
3438 
3439 void AdapterHandlerLibrary::print_statistics() {
3440   print_table_statistics();
3441 }
3442 
3443 #endif /* PRODUCT */
3444 
3445 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3446   assert(current == JavaThread::current(), "pre-condition");
3447   StackOverflow* overflow_state = current->stack_overflow_state();
3448   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3449   overflow_state->set_reserved_stack_activation(current->stack_base());

3496         event.set_method(method);
3497         event.commit();
3498       }
3499     }
3500   }
3501   return activation;
3502 }
3503 
3504 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3505   // After any safepoint, just before going back to compiled code,
3506   // we inform the GC that we will be doing initializing writes to
3507   // this object in the future without emitting card-marks, so
3508   // GC may take any compensating steps.
3509 
3510   oop new_obj = current->vm_result_oop();
3511   if (new_obj == nullptr) return;
3512 
3513   BarrierSet *bs = BarrierSet::barrier_set();
3514   bs->on_slowpath_allocation_exit(current, new_obj);
3515 }










































































































































































































































































  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "classfile/stringTable.hpp"
  32 #include "classfile/vmClasses.hpp"
  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeCache.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/nmethod.inline.hpp"
  38 #include "code/scopeDesc.hpp"
  39 #include "code/vtableStubs.hpp"
  40 #include "compiler/abstractCompiler.hpp"
  41 #include "compiler/compileBroker.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/interpreter.hpp"
  46 #include "interpreter/interpreterRuntime.hpp"
  47 #include "jfr/jfrEvents.hpp"
  48 #include "jvm.h"
  49 #include "logging/log.hpp"
  50 #include "memory/oopFactory.hpp"
  51 #include "memory/resourceArea.hpp"
  52 #include "memory/universe.hpp"
  53 #include "metaprogramming/primitiveConversions.hpp"
  54 #include "oops/access.hpp"
  55 #include "oops/fieldStreams.inline.hpp"
  56 #include "oops/inlineKlass.inline.hpp"
  57 #include "oops/klass.hpp"
  58 #include "oops/method.inline.hpp"
  59 #include "oops/objArrayKlass.hpp"
  60 #include "oops/objArrayOop.inline.hpp"
  61 #include "oops/oop.inline.hpp"
  62 #include "prims/forte.hpp"
  63 #include "prims/jvmtiExport.hpp"
  64 #include "prims/jvmtiThreadState.hpp"
  65 #include "prims/methodHandles.hpp"
  66 #include "prims/nativeLookup.hpp"
  67 #include "runtime/arguments.hpp"
  68 #include "runtime/atomicAccess.hpp"
  69 #include "runtime/basicLock.inline.hpp"
  70 #include "runtime/frame.inline.hpp"
  71 #include "runtime/handles.inline.hpp"
  72 #include "runtime/init.hpp"
  73 #include "runtime/interfaceSupport.inline.hpp"
  74 #include "runtime/java.hpp"
  75 #include "runtime/javaCalls.hpp"
  76 #include "runtime/jniHandles.inline.hpp"
  77 #include "runtime/osThread.hpp"
  78 #include "runtime/perfData.hpp"
  79 #include "runtime/sharedRuntime.hpp"
  80 #include "runtime/signature.hpp"
  81 #include "runtime/stackWatermarkSet.hpp"
  82 #include "runtime/stubRoutines.hpp"
  83 #include "runtime/synchronizer.hpp"
  84 #include "runtime/timerTrace.hpp"
  85 #include "runtime/vframe.inline.hpp"
  86 #include "runtime/vframeArray.hpp"
  87 #include "runtime/vm_version.hpp"
  88 #include "utilities/copy.hpp"
  89 #include "utilities/dtrace.hpp"
  90 #include "utilities/events.hpp"
  91 #include "utilities/exceptions.hpp"
  92 #include "utilities/globalDefinitions.hpp"
  93 #include "utilities/hashTable.hpp"
  94 #include "utilities/macros.hpp"
  95 #include "utilities/xmlstream.hpp"
  96 #ifdef COMPILER1
  97 #include "c1/c1_Runtime1.hpp"
  98 #endif
  99 #ifdef COMPILER2
 100 #include "opto/runtime.hpp"

1224 // for a call currently in progress, i.e., arguments have been pushed on the stack
1225 // but callee has not been invoked yet.  Caller frame must be compiled.
1226 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1227                                               CallInfo& callinfo, TRAPS) {
1228   Handle receiver;
1229   Handle nullHandle;  // create a handy null handle for exception returns
1230   JavaThread* current = THREAD;
1231 
1232   assert(!vfst.at_end(), "Java frame must exist");
1233 
1234   // Find caller and bci from vframe
1235   methodHandle caller(current, vfst.method());
1236   int          bci   = vfst.bci();
1237 
1238   if (caller->is_continuation_enter_intrinsic()) {
1239     bc = Bytecodes::_invokestatic;
1240     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1241     return receiver;
1242   }
1243 
1244   // Substitutability test implementation piggy backs on static call resolution
1245   Bytecodes::Code code = caller->java_code_at(bci);
1246   if (code == Bytecodes::_if_acmpeq || code == Bytecodes::_if_acmpne) {
1247     bc = Bytecodes::_invokestatic;
1248     methodHandle attached_method(THREAD, extract_attached_method(vfst));
1249     assert(attached_method.not_null(), "must have attached method");
1250     vmClasses::ValueObjectMethods_klass()->initialize(CHECK_NH);
1251     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, false, CHECK_NH);
1252 #ifdef ASSERT
1253     Symbol* subst_method_name = vmSymbols::isSubstitutable_name();
1254     Method* is_subst = vmClasses::ValueObjectMethods_klass()->find_method(subst_method_name, vmSymbols::object_object_boolean_signature());
1255     assert(callinfo.selected_method() == is_subst, "must be isSubstitutable method");
1256 #endif
1257     return receiver;
1258   }
1259 
1260   Bytecode_invoke bytecode(caller, bci);
1261   int bytecode_index = bytecode.index();
1262   bc = bytecode.invoke_code();
1263 
1264   methodHandle attached_method(current, extract_attached_method(vfst));
1265   if (attached_method.not_null()) {
1266     Method* callee = bytecode.static_target(CHECK_NH);
1267     vmIntrinsics::ID id = callee->intrinsic_id();
1268     // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call,
1269     // it attaches statically resolved method to the call site.
1270     if (MethodHandles::is_signature_polymorphic(id) &&
1271         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1272       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1273 
1274       // Adjust invocation mode according to the attached method.
1275       switch (bc) {
1276         case Bytecodes::_invokevirtual:
1277           if (attached_method->method_holder()->is_interface()) {
1278             bc = Bytecodes::_invokeinterface;
1279           }
1280           break;
1281         case Bytecodes::_invokeinterface:
1282           if (!attached_method->method_holder()->is_interface()) {
1283             bc = Bytecodes::_invokevirtual;
1284           }
1285           break;
1286         case Bytecodes::_invokehandle:
1287           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1288             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1289                                               : Bytecodes::_invokevirtual;
1290           }
1291           break;
1292         default:
1293           break;
1294       }
1295     } else {
1296       assert(attached_method->has_scalarized_args(), "invalid use of attached method");
1297       if (!attached_method->method_holder()->is_inline_klass()) {
1298         // Ignore the attached method in this case to not confuse below code
1299         attached_method = methodHandle(current, nullptr);
1300       }
1301     }
1302   }
1303 
1304   assert(bc != Bytecodes::_illegal, "not initialized");
1305 
1306   bool has_receiver = bc != Bytecodes::_invokestatic &&
1307                       bc != Bytecodes::_invokedynamic &&
1308                       bc != Bytecodes::_invokehandle;
1309   bool check_null_and_abstract = true;
1310 
1311   // Find receiver for non-static call
1312   if (has_receiver) {
1313     // This register map must be updated since we need to find the receiver for
1314     // compiled frames. The receiver might be in a register.
1315     RegisterMap reg_map2(current,
1316                          RegisterMap::UpdateMap::include,
1317                          RegisterMap::ProcessFrames::include,
1318                          RegisterMap::WalkContinuation::skip);
1319     frame stubFrame   = current->last_frame();
1320     // Caller-frame is a compiled frame
1321     frame callerFrame = stubFrame.sender(&reg_map2);
1322 
1323     Method* callee = attached_method();
1324     if (callee == nullptr) {
1325       callee = bytecode.static_target(CHECK_NH);
1326       if (callee == nullptr) {
1327         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1328       }
1329     }
1330     bool caller_is_c1 = callerFrame.is_compiled_frame() && callerFrame.cb()->as_nmethod()->is_compiled_by_c1();
1331     if (!caller_is_c1 && callee->is_scalarized_arg(0)) {
1332       // If the receiver is an inline type that is passed as fields, no oop is available
1333       // Resolve the call without receiver null checking.
1334       assert(!callee->mismatch(), "calls with inline type receivers should never mismatch");
1335       assert(attached_method.not_null() && !attached_method->is_abstract(), "must have non-abstract attached method");
1336       if (bc == Bytecodes::_invokeinterface) {
1337         bc = Bytecodes::_invokevirtual; // C2 optimistically replaces interface calls by virtual calls
1338       }
1339       check_null_and_abstract = false;
1340     } else {
1341       // Retrieve from a compiled argument list
1342       receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1343       assert(oopDesc::is_oop_or_null(receiver()), "");
1344       if (receiver.is_null()) {
1345         THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1346       }
1347     }
1348   }
1349 
1350   // Resolve method
1351   if (attached_method.not_null()) {
1352     // Parameterized by attached method.
1353     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, check_null_and_abstract, CHECK_NH);
1354   } else {
1355     // Parameterized by bytecode.
1356     constantPoolHandle constants(current, caller->constants());
1357     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1358   }
1359 
1360 #ifdef ASSERT
1361   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1362   if (has_receiver && check_null_and_abstract) {
1363     assert(receiver.not_null(), "should have thrown exception");
1364     Klass* receiver_klass = receiver->klass();
1365     Klass* rk = nullptr;
1366     if (attached_method.not_null()) {
1367       // In case there's resolved method attached, use its holder during the check.
1368       rk = attached_method->method_holder();
1369     } else {
1370       // Klass is already loaded.
1371       constantPoolHandle constants(current, caller->constants());
1372       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1373     }
1374     Klass* static_receiver_klass = rk;
1375     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1376            "actual receiver must be subclass of static receiver klass");
1377     if (receiver_klass->is_instance_klass()) {
1378       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1379         tty->print_cr("ERROR: Klass not yet initialized!!");
1380         receiver_klass->print();
1381       }
1382       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1383     }
1384   }
1385 #endif
1386 
1387   return receiver;
1388 }
1389 
1390 methodHandle SharedRuntime::find_callee_method(bool& caller_does_not_scalarize, TRAPS) {
1391   JavaThread* current = THREAD;
1392   ResourceMark rm(current);
1393   // We need first to check if any Java activations (compiled, interpreted)
1394   // exist on the stack since last JavaCall.  If not, we need
1395   // to get the target method from the JavaCall wrapper.
1396   vframeStream vfst(current, true);  // Do not skip any javaCalls
1397   methodHandle callee_method;
1398   if (vfst.at_end()) {
1399     // No Java frames were found on stack since we did the JavaCall.
1400     // Hence the stack can only contain an entry_frame.  We need to
1401     // find the target method from the stub frame.
1402     RegisterMap reg_map(current,
1403                         RegisterMap::UpdateMap::skip,
1404                         RegisterMap::ProcessFrames::include,
1405                         RegisterMap::WalkContinuation::skip);
1406     frame fr = current->last_frame();
1407     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1408     fr = fr.sender(&reg_map);
1409     assert(fr.is_entry_frame(), "must be");
1410     // fr is now pointing to the entry frame.
1411     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1412   } else {
1413     Bytecodes::Code bc;
1414     CallInfo callinfo;
1415     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1416     // Calls via mismatching methods are always non-scalarized
1417     if (callinfo.resolved_method()->mismatch()) {
1418       caller_does_not_scalarize = true;
1419     }
1420     callee_method = methodHandle(current, callinfo.selected_method());
1421   }
1422   assert(callee_method()->is_method(), "must be");
1423   return callee_method;
1424 }
1425 
1426 // Resolves a call.
1427 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, bool& caller_does_not_scalarize, TRAPS) {
1428   JavaThread* current = THREAD;
1429   ResourceMark rm(current);
1430   RegisterMap cbl_map(current,
1431                       RegisterMap::UpdateMap::skip,
1432                       RegisterMap::ProcessFrames::include,
1433                       RegisterMap::WalkContinuation::skip);
1434   frame caller_frame = current->last_frame().sender(&cbl_map);
1435 
1436   CodeBlob* caller_cb = caller_frame.cb();
1437   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1438   nmethod* caller_nm = caller_cb->as_nmethod();
1439 
1440   // determine call info & receiver
1441   // note: a) receiver is null for static calls
1442   //       b) an exception is thrown if receiver is null for non-static calls
1443   CallInfo call_info;
1444   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1445   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1446 
1447   NoSafepointVerifier nsv;
1448 
1449   methodHandle callee_method(current, call_info.selected_method());
1450   // Calls via mismatching methods are always non-scalarized
1451   bool mismatch = is_optimized ? call_info.selected_method()->mismatch() : call_info.resolved_method()->mismatch();
1452   if (caller_nm->is_compiled_by_c1() || mismatch) {
1453     caller_does_not_scalarize = true;
1454   }
1455 
1456   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1457          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1458          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1459          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1460          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1461 
1462   assert(!caller_nm->is_unloading(), "It should not be unloading");
1463 
1464 #ifndef PRODUCT
1465   // tracing/debugging/statistics
1466   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1467                  (is_virtual) ? (&_resolve_virtual_ctr) :
1468                                 (&_resolve_static_ctr);
1469   AtomicAccess::inc(addr);
1470 
1471   if (TraceCallFixup) {
1472     ResourceMark rm(current);
1473     tty->print("resolving %s%s (%s) %s call to",
1474                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1475                Bytecodes::name(invoke_code), (caller_does_not_scalarize) ? "non-scalar" : "");
1476     callee_method->print_short_name(tty);
1477     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1478                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1479   }
1480 #endif
1481 
1482   if (invoke_code == Bytecodes::_invokestatic) {
1483     assert(callee_method->method_holder()->is_initialized() ||
1484            callee_method->method_holder()->is_reentrant_initialization(current),
1485            "invalid class initialization state for invoke_static");
1486     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1487       // In order to keep the class initialization check, do not patch the call
1488       // site for a static call when the class is not fully initialized.
1489       // Proper check is enforced by call site re-resolution on every invocation.
1490       //
1491       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1492       // explicit class initialization check is put in nmethod entry (VEP).
1493       assert(callee_method->method_holder()->is_linked(), "must be");
1494       return callee_method;
1495     }
1496   }
1497 
1498 
1499   // JSR 292 key invariant:
1500   // If the resolved method is a MethodHandle invoke target, the call
1501   // site must be a MethodHandle call site, because the lambda form might tail-call
1502   // leaving the stack in a state unknown to either caller or callee
1503 
1504   // Compute entry points. The computation of the entry points is independent of
1505   // patching the call.
1506 
1507   // Make sure the callee nmethod does not get deoptimized and removed before
1508   // we are done patching the code.
1509 
1510 
1511   CompiledICLocker ml(caller_nm);
1512   if (is_virtual && !is_optimized) {
1513     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1514     inline_cache->update(&call_info, receiver->klass(), caller_does_not_scalarize);
1515   } else {
1516     // Callsite is a direct call - set it to the destination method
1517     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1518     callsite->set(callee_method, caller_does_not_scalarize);
1519   }
1520 
1521   return callee_method;
1522 }
1523 
1524 // Inline caches exist only in compiled code
1525 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1526 #ifdef ASSERT
1527   RegisterMap reg_map(current,
1528                       RegisterMap::UpdateMap::skip,
1529                       RegisterMap::ProcessFrames::include,
1530                       RegisterMap::WalkContinuation::skip);
1531   frame stub_frame = current->last_frame();
1532   assert(stub_frame.is_runtime_frame(), "sanity check");
1533   frame caller_frame = stub_frame.sender(&reg_map);
1534   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1535 #endif /* ASSERT */
1536 
1537   methodHandle callee_method;
1538   bool caller_does_not_scalarize = false;
1539   JRT_BLOCK
1540     callee_method = SharedRuntime::handle_ic_miss_helper(caller_does_not_scalarize, CHECK_NULL);
1541     // Return Method* through TLS
1542     current->set_vm_result_metadata(callee_method());
1543   JRT_BLOCK_END
1544   // return compiled code entry point after potential safepoints
1545   return get_resolved_entry(current, callee_method, false, false, caller_does_not_scalarize);
1546 JRT_END
1547 
1548 
1549 // Handle call site that has been made non-entrant
1550 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1551   // 6243940 We might end up in here if the callee is deoptimized
1552   // as we race to call it.  We don't want to take a safepoint if
1553   // the caller was interpreted because the caller frame will look
1554   // interpreted to the stack walkers and arguments are now
1555   // "compiled" so it is much better to make this transition
1556   // invisible to the stack walking code. The i2c path will
1557   // place the callee method in the callee_target. It is stashed
1558   // there because if we try and find the callee by normal means a
1559     // safepoint is possible and we would have trouble gc'ing the compiled args.
1560   RegisterMap reg_map(current,
1561                       RegisterMap::UpdateMap::skip,
1562                       RegisterMap::ProcessFrames::include,
1563                       RegisterMap::WalkContinuation::skip);
1564   frame stub_frame = current->last_frame();
1565   assert(stub_frame.is_runtime_frame(), "sanity check");
1566   frame caller_frame = stub_frame.sender(&reg_map);
1567 
1568   if (caller_frame.is_interpreted_frame() ||
1569       caller_frame.is_entry_frame() ||
1570       caller_frame.is_upcall_stub_frame()) {
1571     Method* callee = current->callee_target();
1572     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1573     current->set_vm_result_metadata(callee);
1574     current->set_callee_target(nullptr);
1575     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1576       // Bypass class initialization checks in c2i when caller is in native.
1577       // JNI calls to static methods don't have class initialization checks.
1578       // Fast class initialization checks are present in c2i adapters and call into
1579       // SharedRuntime::handle_wrong_method() on the slow path.
1580       //
1581       // JVM upcalls may land here as well, but there's a proper check present in
1582       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1583       // so bypassing it in c2i adapter is benign.
1584       return callee->get_c2i_no_clinit_check_entry();
1585     } else {
1586       if (caller_frame.is_interpreted_frame()) {
1587         return callee->get_c2i_inline_entry();
1588       } else {
1589         return callee->get_c2i_entry();
1590       }
1591     }
1592   }
1593 
1594   // Must be compiled to compiled path which is safe to stackwalk
1595   methodHandle callee_method;
1596   bool is_static_call = false;
1597   bool is_optimized = false;
1598   bool caller_does_not_scalarize = false;
1599   JRT_BLOCK
1600     // Force resolving of caller (if we called from compiled frame)
1601     callee_method = SharedRuntime::reresolve_call_site(is_optimized, caller_does_not_scalarize, CHECK_NULL);
1602     current->set_vm_result_metadata(callee_method());
1603   JRT_BLOCK_END
1604   // return compiled code entry point after potential safepoints
1605   return get_resolved_entry(current, callee_method, callee_method->is_static(), is_optimized, caller_does_not_scalarize);
1606 JRT_END
1607 
1608 // Handle abstract method call
1609 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1610   // Verbose error message for AbstractMethodError.
1611   // Get the called method from the invoke bytecode.
1612   vframeStream vfst(current, true);
1613   assert(!vfst.at_end(), "Java frame must exist");
1614   methodHandle caller(current, vfst.method());
1615   Bytecode_invoke invoke(caller, vfst.bci());
1616   DEBUG_ONLY( invoke.verify(); )
1617 
1618   // Find the compiled caller frame.
1619   RegisterMap reg_map(current,
1620                       RegisterMap::UpdateMap::include,
1621                       RegisterMap::ProcessFrames::include,
1622                       RegisterMap::WalkContinuation::skip);
1623   frame stubFrame = current->last_frame();
1624   assert(stubFrame.is_runtime_frame(), "must be");
1625   frame callerFrame = stubFrame.sender(&reg_map);
1626   assert(callerFrame.is_compiled_frame(), "must be");
1627 
1628   // Install exception and return forward entry.
1629   address res = SharedRuntime::throw_AbstractMethodError_entry();
1630   JRT_BLOCK
1631     methodHandle callee(current, invoke.static_target(current));
1632     if (!callee.is_null()) {
1633       oop recv = callerFrame.retrieve_receiver(&reg_map);
1634       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1635       res = StubRoutines::forward_exception_entry();
1636       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1637     }
1638   JRT_BLOCK_END
1639   return res;
1640 JRT_END
1641 
1642 // return verified_code_entry if interp_only_mode is not set for the current thread;
1643 // otherwise return c2i entry.
1644 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method,
1645                                           bool is_static_call, bool is_optimized, bool caller_does_not_scalarize) {
1646   bool is_interp_only_mode = (StressCallingConvention && (os::random() % (1 << 10)) == 0) || current->is_interp_only_mode();
1647   // In interp_only_mode we need to go to the interpreted entry
1648   // The c2i won't patch in this mode -- see fixup_callers_callsite
1649   bool go_to_interpreter = is_interp_only_mode && !callee_method->is_special_native_intrinsic();
1650 
1651   if (caller_does_not_scalarize) {
1652     if (go_to_interpreter) {
1653       return callee_method->get_c2i_inline_entry();
1654     }
1655     assert(callee_method->verified_inline_code_entry() != nullptr, "Jump to zero!");
1656     return callee_method->verified_inline_code_entry();
1657   } else if (is_static_call || is_optimized) {
1658     if (go_to_interpreter) {
1659       return callee_method->get_c2i_entry();
1660     }
1661     assert(callee_method->verified_code_entry() != nullptr, "Jump to zero!");
1662     return callee_method->verified_code_entry();
1663   } else {
1664     if (go_to_interpreter) {
1665       return callee_method->get_c2i_inline_ro_entry();
1666     }
1667     assert(callee_method->verified_inline_ro_code_entry() != nullptr, "Jump to zero!");
1668     return callee_method->verified_inline_ro_code_entry();
1669   }


1670 }
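// In summary: callers that do not scalarize (e.g. C1-compiled or mismatching callers) get the
// inline entry, static and optimized virtual calls get the fully verified entry, and other
// virtual calls get the "ro" entry (presumably so the receiver can still be passed as an oop
// while the remaining arguments are scalarized). In interp-only mode the matching c2i adapter
// entry is returned instead.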
1671 
1672 // resolve a static call and patch code
1673 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1674   methodHandle callee_method;
1675   bool caller_does_not_scalarize = false;
1676   bool enter_special = false;
1677   JRT_BLOCK
1678     callee_method = SharedRuntime::resolve_helper(false, false, caller_does_not_scalarize, CHECK_NULL);
1679     current->set_vm_result_metadata(callee_method());
1680   JRT_BLOCK_END
1681   // return compiled code entry point after potential safepoints
1682   return get_resolved_entry(current, callee_method, true, false, caller_does_not_scalarize);
1683 JRT_END
1684 
1685 // resolve virtual call and update inline cache to monomorphic
1686 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1687   methodHandle callee_method;
1688   bool caller_does_not_scalarize = false;
1689   JRT_BLOCK
1690     callee_method = SharedRuntime::resolve_helper(true, false, caller_does_not_scalarize, CHECK_NULL);
1691     current->set_vm_result_metadata(callee_method());
1692   JRT_BLOCK_END
1693   // return compiled code entry point after potential safepoints
1694   return get_resolved_entry(current, callee_method, false, false, caller_does_not_scalarize);
1695 JRT_END
1696 
1697 
1698 // Resolve a virtual call that can be statically bound (e.g., always
1699 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1700 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1701   methodHandle callee_method;
1702   bool caller_does_not_scalarize = false;
1703   JRT_BLOCK
1704     callee_method = SharedRuntime::resolve_helper(true, true, caller_does_not_scalarize, CHECK_NULL);
1705     current->set_vm_result_metadata(callee_method());
1706   JRT_BLOCK_END
1707   // return compiled code entry point after potential safepoints
1708   return get_resolved_entry(current, callee_method, false, true, caller_does_not_scalarize);
1709 JRT_END
1710 
1711 methodHandle SharedRuntime::handle_ic_miss_helper(bool& caller_does_not_scalarize, TRAPS) {
1712   JavaThread* current = THREAD;
1713   ResourceMark rm(current);
1714   CallInfo call_info;
1715   Bytecodes::Code bc;
1716 
1717   // receiver is null for static calls. An exception is thrown for null
1718   // receivers for non-static calls
1719   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1720 
1721   methodHandle callee_method(current, call_info.selected_method());
1722 
1723 #ifndef PRODUCT
1724   AtomicAccess::inc(&_ic_miss_ctr);
1725 
1726   // Statistics & Tracing
1727   if (TraceCallFixup) {
1728     ResourceMark rm(current);
1729     tty->print("IC miss (%s) %s call to", Bytecodes::name(bc), (caller_does_not_scalarize) ? "non-scalar" : "");
1730     callee_method->print_short_name(tty);
1731     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1732   }
1733 
1734   if (ICMissHistogram) {
1735     MutexLocker m(VMStatistic_lock);
1736     RegisterMap reg_map(current,
1737                         RegisterMap::UpdateMap::skip,
1738                         RegisterMap::ProcessFrames::include,
1739                         RegisterMap::WalkContinuation::skip);
1740     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1741     // produce statistics under the lock
1742     trace_ic_miss(f.pc());
1743   }
1744 #endif
1745 
1746   // install an event collector so that when a vtable stub is created the
1747   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1748   // event can't be posted when the stub is created as locks are held
1749   // - instead the event will be deferred until the event collector goes
1750   // out of scope.
1751   JvmtiDynamicCodeEventCollector event_collector;
1752 
1753   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1754   RegisterMap reg_map(current,
1755                       RegisterMap::UpdateMap::skip,
1756                       RegisterMap::ProcessFrames::include,
1757                       RegisterMap::WalkContinuation::skip);
1758   frame caller_frame = current->last_frame().sender(&reg_map);
1759   CodeBlob* cb = caller_frame.cb();
1760   nmethod* caller_nm = cb->as_nmethod();
1761   // Calls via mismatching methods are always non-scalarized
1762   if (caller_nm->is_compiled_by_c1() || call_info.resolved_method()->mismatch()) {
1763     caller_does_not_scalarize = true;
1764   }
1765 
1766   CompiledICLocker ml(caller_nm);
1767   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1768   inline_cache->update(&call_info, receiver()->klass(), caller_does_not_scalarize);
1769 
1770   return callee_method;
1771 }
1772 
1773 //
1774 // Resets a call-site in compiled code so it will get resolved again.
1775 // This routine handles virtual call sites, optimized virtual call
1776 // sites, and static call sites. Typically used to change a call site's
1777 // destination from compiled to interpreted.
1778 //
1779 methodHandle SharedRuntime::reresolve_call_site(bool& is_optimized, bool& caller_does_not_scalarize, TRAPS) {
1780   JavaThread* current = THREAD;
1781   ResourceMark rm(current);
1782   RegisterMap reg_map(current,
1783                       RegisterMap::UpdateMap::skip,
1784                       RegisterMap::ProcessFrames::include,
1785                       RegisterMap::WalkContinuation::skip);
1786   frame stub_frame = current->last_frame();
1787   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1788   frame caller = stub_frame.sender(&reg_map);
1789   if (caller.is_compiled_frame()) {
1790     caller_does_not_scalarize = caller.cb()->as_nmethod()->is_compiled_by_c1();
1791   }
1792   assert(!caller.is_interpreted_frame(), "must be compiled");
1793 
1794   // If the frame isn't a live compiled frame (i.e. deoptimized by the time we get here), no IC clearing needs to be done
1795   // for the caller. However, when the caller is C2 compiled and the callee is a C1 or C2 compiled method, we still
1796   // need to figure out whether it was an optimized virtual call with an inline type receiver. Otherwise, we end up
1797   // using the wrong method entry point and accidentally skip the buffering of the receiver.
1798   methodHandle callee_method = find_callee_method(caller_does_not_scalarize, CHECK_(methodHandle()));
1799   const bool caller_is_compiled_and_not_deoptimized = caller.is_compiled_frame() && !caller.is_deoptimized_frame();
1800   const bool caller_is_continuation_enter_intrinsic =
1801     caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic();
1802   const bool do_IC_clearing = caller_is_compiled_and_not_deoptimized || caller_is_continuation_enter_intrinsic;
1803 
1804   const bool callee_compiled_with_scalarized_receiver = callee_method->has_compiled_code() &&
1805                                                         !callee_method()->is_static() &&
1806                                                         callee_method()->is_scalarized_arg(0);
1807   const bool compute_is_optimized = !caller_does_not_scalarize && callee_compiled_with_scalarized_receiver;
1808 
1809   if (do_IC_clearing || compute_is_optimized) {
1810     address pc = caller.pc();
1811 
1812     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1813     assert(caller_nm != nullptr, "did not find caller nmethod");
1814 
1815     // Default call_addr is the location of the "basic" call.
1816     // Determine the address of the call we are reresolving. With
1817     // Inline Caches we will always find a recognizable call.
1818     // With Inline Caches disabled we may or may not find a
1819     // recognizable call. We will always find a call for static
1820     // calls and for optimized virtual calls. For vanilla virtual
1821     // calls it depends on the state of the UseInlineCaches switch.
1822     //
1823     // With Inline Caches disabled we can get here for a virtual call
1824     // for two reasons:
1825     //   1 - calling an abstract method. The vtable for abstract methods
1826     //       will run us thru handle_wrong_method and we will eventually
1827     //       end up in the interpreter to throw the ame.
1828     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1829     //       call and between the time we fetch the entry address and
1830     //       we jump to it the target gets deoptimized. Similar to 1
1831     //       we will wind up in the interpreter (thru a c2i with c2).
1832     //
1833     CompiledICLocker ml(caller_nm);
1834     address call_addr = caller_nm->call_instruction_address(pc);
1835 
1836     if (call_addr != nullptr) {
1837       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1838       // bytes back in the instruction stream so we must also check for reloc info.
1839       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1840       bool ret = iter.next(); // Get item
1841       if (ret) {
1842         is_optimized = false;
1843         switch (iter.type()) {
1844           case relocInfo::static_call_type:
1845             assert(callee_method->is_static(), "must be");
1846           case relocInfo::opt_virtual_call_type: {
1847             is_optimized = (iter.type() == relocInfo::opt_virtual_call_type);
1848             if (do_IC_clearing) {
1849               CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1850               cdc->set_to_clean();
1851             }
1852             break;
1853           }
1854 
1855           case relocInfo::virtual_call_type: {
1856             if (do_IC_clearing) {
1857               // compiled, dispatched call (which used to call an interpreted method)
1858               CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1859               inline_cache->set_to_clean();
1860             }
1861             break;
1862           }
1863           default:
1864             break;
1865         }
1866       }
1867     }
1868   }
1869 



1870 #ifndef PRODUCT
1871   AtomicAccess::inc(&_wrong_method_ctr);
1872 
1873   if (TraceCallFixup) {
1874     ResourceMark rm(current);
1875     tty->print("handle_wrong_method reresolving %s call to", (caller_does_not_scalarize) ? "non-scalar" : "");
1876     callee_method->print_short_name(tty);
1877     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1878   }
1879 #endif
1880 
1881   return callee_method;
1882 }
1883 
1884 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1885   // The faulting unsafe accesses should be changed to throw the error
1886   // synchronously instead. Meanwhile the faulting instruction will be
1887   // skipped over (effectively turning it into a no-op) and an
1888   // asynchronous exception will be raised which the thread will
1889   // handle at a later point. If the instruction is a load it will
1890   // return garbage.
1891 
1892   // Request an async exception.
1893   thread->set_pending_unsafe_access_error();
1894 
1895   // Return address of next instruction to execute.

2061   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
2062 
2063   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
2064   if (message == nullptr) {
2065     // Shouldn't happen, but don't cause even more problems if it does
2066     message = const_cast<char*>(caster_klass->external_name());
2067   } else {
2068     jio_snprintf(message,
2069                  msglen,
2070                  "class %s cannot be cast to class %s (%s%s%s)",
2071                  caster_name,
2072                  target_name,
2073                  caster_klass_description,
2074                  klass_separator,
2075                  target_klass_description
2076                  );
2077   }
2078   return message;
2079 }
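// Illustrative output (hypothetical class names and descriptions): the format above yields
// messages such as
//   class p.Foo cannot be cast to class q.Bar (p.Foo and q.Bar are in unnamed module of loader 'app')
// where the parenthesized part is built from the caster/target klass descriptions and the
// separator computed earlier in this function.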
2080 
2081 char* SharedRuntime::generate_identity_exception_message(JavaThread* current, Klass* klass) {
2082   assert(klass->is_inline_klass(), "Must be a concrete value class");
2083   const char* desc = "Cannot synchronize on an instance of value class ";
2084   const char* className = klass->external_name();
2085   size_t msglen = strlen(desc) + strlen(className) + 1;
2086   char* message = NEW_RESOURCE_ARRAY(char, msglen);
2087   if (nullptr == message) {
2088     // Out of memory: can't create detailed error message
2089     message = const_cast<char*>(klass->external_name());
2090   } else {
2091     jio_snprintf(message, msglen, "%s%s", desc, className);
2092   }
2093   return message;
2094 }
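// Illustrative output (hypothetical value class): for a value class p.MyValue the jio_snprintf
// above produces "Cannot synchronize on an instance of value class p.MyValue".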
2095 
2096 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
2097   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
2098 JRT_END
2099 
2100 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2101   if (!SafepointSynchronize::is_synchronizing()) {
2102     // Only try quick_enter() if we're not trying to reach a safepoint
2103     // so that the calling thread reaches the safepoint more quickly.
2104     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
2105       return;
2106     }
2107   }
2108   // NO_ASYNC required because an async exception on the state transition destructor
2109   // would leave you with the lock held and it would never be released.
2110   // The normal monitorenter NullPointerException is thrown without acquiring a lock
2111   // and the model is that an exception implies the method failed.
2112   JRT_BLOCK_NO_ASYNC
2113   Handle h_obj(THREAD, obj);
2114   ObjectSynchronizer::enter(h_obj, lock, current);
2115   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2309   tty->print_cr("Note 1: counter updates are not MT-safe.");
2310   tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2311   tty->print_cr("        %% in nested categories are relative to their category");
2312   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2313   tty->cr();
2314 
2315   MethodArityHistogram h;
2316 }
2317 #endif
2318 
2319 #ifndef PRODUCT
2320 static int _lookups; // number of calls to lookup
2321 static int _equals;  // number of buckets checked with matching hash
2322 static int _archived_hits; // number of successful lookups in archived table
2323 static int _runtime_hits;  // number of successful lookups in runtime table
2324 #endif
2325 
2326 // A simple wrapper class around the calling convention information
2327 // that allows sharing of adapters for the same calling convention.
2328 class AdapterFingerPrint : public MetaspaceObj {
2329 public:
2330   class Element {
2331   private:
2332     // The highest byte is the type of the argument. The remaining bytes contain the offset of the
2333     // field if it is flattened in the calling convention, -1 otherwise.
2334     juint _payload;
2335 
2336     static constexpr int offset_bit_width = 24;
2337     static constexpr juint offset_bit_mask = (1 << offset_bit_width) - 1;
2338   public:
2339     Element(BasicType bt, int offset) : _payload((static_cast<juint>(bt) << offset_bit_width) | (juint(offset) & offset_bit_mask)) {
2340       assert(offset >= -1 && offset < jint(offset_bit_mask), "invalid offset %d", offset);
2341     }
2342 
2343     BasicType bt() const {
2344       return static_cast<BasicType>(_payload >> offset_bit_width);
2345     }
2346 
2347     int offset() const {
2348       juint res = _payload & offset_bit_mask;
2349       return res == offset_bit_mask ? -1 : res;
2350     }
2351 
2352     juint hash() const {
2353       return _payload;
2354     }
2355 
2356     bool operator!=(const Element& other) const {
2357       return _payload != other._payload;
2358     }
2359   };
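  // Illustrative packing (T_INT's numeric value of 10 assumed for illustration): with
  // offset_bit_width == 24, Element(T_INT, -1) stores (10 << 24) | 0xFFFFFF == 0x0AFFFFFF;
  // offset() then sees the low 24 bits equal to offset_bit_mask and reports -1, while bt()
  // recovers T_INT from the top byte.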


2360 
2361 private:
2362   const bool _has_ro_adapter;
2363   const int _length;
2364 
2365   static int data_offset() { return sizeof(AdapterFingerPrint); }
2366   Element* data_pointer() {
2367     return reinterpret_cast<Element*>(reinterpret_cast<address>(this) + data_offset());
2368   }
2369 
2370   const Element& element_at(int index) {
2371     assert(index < length(), "index %d out of bounds for length %d", index, length());
2372     Element* data = data_pointer();
2373     return data[index];
2374   }
2375 
2376   // Private constructor. Use allocate() to get an instance.
2377   AdapterFingerPrint(const GrowableArray<SigEntry>* sig, bool has_ro_adapter)
2378     : _has_ro_adapter(has_ro_adapter), _length(total_args_passed_in_sig(sig)) {
2379     Element* data = data_pointer();
2380     BasicType prev_bt = T_ILLEGAL;
2381     int vt_count = 0;

2382     for (int index = 0; index < _length; index++) {
2383       const SigEntry& sig_entry = sig->at(index);
2384       BasicType bt = sig_entry._bt;
2385       if (bt == T_METADATA) {
2386         // Found start of inline type in signature
2387         assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
2388         vt_count++;
2389       } else if (bt == T_VOID && prev_bt != T_LONG && prev_bt != T_DOUBLE) {
2390         // Found end of inline type in signature
2391         assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
2392         vt_count--;
2393         assert(vt_count >= 0, "invalid vt_count");
2394       } else if (vt_count == 0) {
2395         // Widen fields that are not part of a scalarized inline type argument
2396         assert(sig_entry._offset == -1, "invalid offset for argument that is not a flattened field %d", sig_entry._offset);
2397         bt = adapter_encoding(bt);
2398       }
2399 
2400       ::new(&data[index]) Element(bt, sig_entry._offset);
2401       prev_bt = bt;
2402     }
2403     assert(vt_count == 0, "invalid vt_count");
2404   }
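  // Illustrative walk-through (hypothetical signature): a scalarized inline-type argument with
  // an int field and a float field appears in 'sig' as { T_METADATA, T_INT, T_FLOAT, T_VOID }.
  // vt_count goes to 1 at T_METADATA and back to 0 at the closing T_VOID, so the fields in
  // between keep their exact types and offsets, while arguments outside such a bracket are
  // widened via adapter_encoding().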
2405 
2406   // Call deallocate instead
2407   ~AdapterFingerPrint() {
2408     ShouldNotCallThis();
2409   }
2410 
2411   static int total_args_passed_in_sig(const GrowableArray<SigEntry>* sig) {
2412     return (sig != nullptr) ? sig->length() : 0;
2413   }
2414 
2415   static int compute_size_in_words(int len) {
2416     return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(Element)));
2417   }
2418 
2419   // Remap BasicTypes that are handled equivalently by the adapters.
2420   // These are correct for the current system but someday it might be
2421   // necessary to make this mapping platform dependent.
2422   static BasicType adapter_encoding(BasicType in) {
2423     switch (in) {
2424       case T_BOOLEAN:
2425       case T_BYTE:
2426       case T_SHORT:
2427       case T_CHAR:
2428         // They are all promoted to T_INT in the calling convention
2429         return T_INT;
2430 
2431       case T_OBJECT:
2432       case T_ARRAY:
2433         // We assume that any register good enough for an int or a long
2434         // is good enough for a managed pointer.
2435 #ifdef _LP64
2436         return T_LONG;
2437 #else
2438         return T_INT;
2439 #endif
2440 
2441       case T_INT:
2442       case T_LONG:
2443       case T_FLOAT:
2444       case T_DOUBLE:
2445       case T_VOID:
2446         return in;
2447 
2448       default:
2449         ShouldNotReachHere();
2450         return T_CONFLICT;
2451     }
2452   }
2453 
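       // Sized placement new: allocate fp_size bytes (the header plus the trailing Element array)
       // on the C heap and zero-fill them; the elements are then constructed by the constructor.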
2454   void* operator new(size_t size, size_t fp_size) throw() {
2455     assert(fp_size >= size, "sanity check");
2456     void* p = AllocateHeap(fp_size, mtCode);
2457     memset(p, 0, fp_size);
2458     return p;
2459   }
2460 
2461 public:
2462   template<typename Function>
2463   void iterate_args(Function function) {
2464     for (int i = 0; i < length(); i++) {
2465       function(element_at(i));









2466     }
2467   }
2468 
2469   static AdapterFingerPrint* allocate(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2470     int len = total_args_passed_in_sig(sig);

2471     int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2472     AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(sig, has_ro_adapter);
2473     assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2474     return afp;
2475   }
2476 
2477   static void deallocate(AdapterFingerPrint* fp) {
2478     FreeHeap(fp);
2479   }
2480 
2481   bool has_ro_adapter() const {
2482     return _has_ro_adapter;

2483   }
2484 
2485   int length() const {
2486     return _length;
2487   }
2488 
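       // Order-dependent hash over the element payloads. Note that _has_ro_adapter is checked in
       // equals() but is not mixed into the hash; this only affects hash distribution, not correctness.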
2489   unsigned int compute_hash() {
2490     int hash = 0;
2491     for (int i = 0; i < length(); i++) {
2492       const Element& v = element_at(i);
2493       // Add an arithmetic operation (+3) to improve the hash distribution
2494       hash = ((hash << 8) ^ v.hash() ^ (hash >> 5)) + 3;
2495     }
2496     return (unsigned int)hash;
2497   }
2498 
2499   const char* as_string() {
2500     stringStream st;
2501     st.print("{");
2502     if (_has_ro_adapter) {
2503       st.print("has_ro_adapter");
2504     } else {
2505       st.print("no_ro_adapter");
2506     }
2507     for (int i = 0; i < length(); i++) {
2508       st.print(", ");
2509       const Element& elem = element_at(i);
2510       st.print("{%s, %d}", type2name(elem.bt()), elem.offset());
2511     }
2512     st.print("}");
2513     return st.as_string();
2514   }
2515 
2516   const char* as_basic_args_string() {
2517     stringStream st;
2518     bool long_prev = false;
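         // A T_LONG element may be an oop widened by adapter_encoding(): it denotes a true long only
         // if the next element is the T_VOID slot of a two-slot value, otherwise it is printed as 'L'.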
2519     iterate_args([&] (const Element& arg) {
2520       if (long_prev) {
2521         long_prev = false;
2522         if (arg.bt() == T_VOID) {
2523           st.print("J");
2524         } else {
2525           st.print("L");
2526         }
2527       }
2528       if (arg.bt() == T_LONG) {
2529         long_prev = true;
2530       } else if (arg.bt() != T_VOID) {
2531         st.print("%c", type2char(arg.bt()));



2532       }
2533     });
2534     if (long_prev) {
2535       st.print("L");
2536     }
2537     return st.as_string();
2538   }
2539 



















































2540   bool equals(AdapterFingerPrint* other) {
2541     if (other->_has_ro_adapter != _has_ro_adapter) {
2542       return false;
2543     } else if (other->_length != _length) {
2544       return false;
2545     } else {
2546       for (int i = 0; i < _length; i++) {
2547         if (element_at(i) != other->element_at(i)) {
2548           return false;
2549         }
2550       }
2551     }
2552     return true;
2553   }
2554 
2555   // methods required by virtue of being a MetaspaceObj
2556   void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2557   int size() const { return compute_size_in_words(_length); }
2558   MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2559 
2560   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2561     NOT_PRODUCT(_equals++);
2562     return fp1->equals(fp2);
2563   }
2564 
2565   static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2566     return fp->compute_hash();
2567   }

2570 #if INCLUDE_CDS
2571 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2572   return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2573 }
2574 
2575 class ArchivedAdapterTable : public OffsetCompactHashtable<
2576   AdapterFingerPrint*,
2577   AdapterHandlerEntry*,
2578   adapter_fp_equals_compact_hashtable_entry> {};
2579 #endif // INCLUDE_CDS
2580 
2581 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2582 using AdapterHandlerTable = HashTable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2583                   AnyObj::C_HEAP, mtCode,
2584                   AdapterFingerPrint::compute_hash,
2585                   AdapterFingerPrint::equals>;
2586 static AdapterHandlerTable* _adapter_handler_table;
2587 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2588 
2589 // Find an entry with the same fingerprint if it exists
2590 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(const GrowableArray<SigEntry>* sig, bool has_ro_adapter) {
2591   NOT_PRODUCT(_lookups++);
2592   assert_lock_strong(AdapterHandlerLibrary_lock);
2593   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(sig, has_ro_adapter);
2594   AdapterHandlerEntry* entry = nullptr;
2595 #if INCLUDE_CDS
2596   // If we are building the archive, the archived adapter table is not valid and we need to
2597   // use the entries added to the runtime table
2598   if (AOTCodeCache::is_using_adapter()) {
2599     // Search the archived table first. It is read-only, so it can be searched without a lock
2600     entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2601 #ifndef PRODUCT
2602     if (entry != nullptr) {
2603       _archived_hits++;
2604     }
2605 #endif
2606   }
2607 #endif // INCLUDE_CDS
2608   if (entry == nullptr) {
2609     assert_lock_strong(AdapterHandlerLibrary_lock);
2610     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2611     if (entry_p != nullptr) {
2612       entry = *entry_p;
2613       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",

2630   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2631   ts.print(tty, "AdapterHandlerTable");
2632   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2633                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2634   int total_hits = _archived_hits + _runtime_hits;
2635   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2636                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2637 }
2638 #endif
2639 
2640 // ---------------------------------------------------------------------------
2641 // Implementation of AdapterHandlerLibrary
2642 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2643 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2644 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2645 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2646 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2647 #if INCLUDE_CDS
2648 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2649 #endif // INCLUDE_CDS
2650 static const int AdapterHandlerLibrary_size = 48*K;
2651 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2652 volatile uint AdapterHandlerLibrary::_id_counter = 0;
2653 
2654 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2655   assert(_buffer != nullptr, "should be initialized");
2656   return _buffer;
2657 }
2658 
2659 static void post_adapter_creation(const AdapterHandlerEntry* entry) {
2660   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2661     AdapterBlob* adapter_blob = entry->adapter_blob();
2662     char blob_id[256];
2663     jio_snprintf(blob_id,
2664                  sizeof(blob_id),
2665                  "%s(%s)",
2666                  adapter_blob->name(),
2667                  entry->fingerprint()->as_string());
2668     if (Forte::is_enabled()) {
2669       Forte::register_stub(blob_id, adapter_blob->content_begin(), adapter_blob->content_end());
2670     }

2678 void AdapterHandlerLibrary::initialize() {
2679   {
2680     ResourceMark rm;
2681     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2682     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2683   }
2684 
2685 #if INCLUDE_CDS
2686   // Link adapters in AOT Cache to their code in AOT Code Cache
2687   if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2688     link_aot_adapters();
2689     lookup_simple_adapters();
2690     return;
2691   }
2692 #endif // INCLUDE_CDS
2693 
2694   ResourceMark rm;
2695   {
2696     MutexLocker mu(AdapterHandlerLibrary_lock);
2697 
2698     CompiledEntrySignature no_args;
2699     no_args.compute_calling_conventions();
2700     _no_arg_handler = create_adapter(no_args, true);
2701 
2702     CompiledEntrySignature obj_args;
2703     SigEntry::add_entry(obj_args.sig(), T_OBJECT);
2704     obj_args.compute_calling_conventions();
2705     _obj_arg_handler = create_adapter(obj_args, true);
2706 
2707     CompiledEntrySignature int_args;
2708     SigEntry::add_entry(int_args.sig(), T_INT);
2709     int_args.compute_calling_conventions();
2710     _int_arg_handler = create_adapter(int_args, true);
2711 
2712     CompiledEntrySignature obj_int_args;
2713     SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
2714     SigEntry::add_entry(obj_int_args.sig(), T_INT);
2715     obj_int_args.compute_calling_conventions();
2716     _obj_int_arg_handler = create_adapter(obj_int_args, true);
2717 
2718     CompiledEntrySignature obj_obj_args;
2719     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2720     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2721     obj_obj_args.compute_calling_conventions();
2722     _obj_obj_arg_handler = create_adapter(obj_obj_args, true);
2723 
2724     // We should always get an entry back, but on Zero there is no
2725     // associated blob
2726     assert(_no_arg_handler != nullptr &&
2727            _obj_arg_handler != nullptr &&
2728            _int_arg_handler != nullptr &&
2729            _obj_int_arg_handler != nullptr &&
2730            _obj_obj_arg_handler != nullptr, "Initial adapter handlers must be properly created");
2731   }
2732 
2733   // Outside of the lock
2734 #ifndef ZERO
2735   // no blobs to register when we are on Zero
2736   post_adapter_creation(_no_arg_handler);
2737   post_adapter_creation(_obj_arg_handler);
2738   post_adapter_creation(_int_arg_handler);
2739   post_adapter_creation(_obj_int_arg_handler);
2740   post_adapter_creation(_obj_obj_arg_handler);
2741 #endif // ZERO
2742 }
2743 
2744 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2745   uint id = (uint)AtomicAccess::add((int*)&_id_counter, 1);
2746   assert(id > 0, "we can never overflow because AOT cache cannot contain more than 2^32 methods");
2747   return AdapterHandlerEntry::allocate(id, fingerprint);
2748 }
2749 
2750 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2751   int total_args_passed = method->size_of_parameters(); // All args on stack
2752   if (total_args_passed == 0) {
2753     return _no_arg_handler;
2754   } else if (total_args_passed == 1) {
2755     if (!method->is_static()) {
2756       if (InlineTypePassFieldsAsArgs && method->method_holder()->is_inline_klass()) {
2757         return nullptr;
2758       }
2759       return _obj_arg_handler;
2760     }
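         // signature()->char_at(1) is the first character of the first parameter type (index 0 is '(').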
2761     switch (method->signature()->char_at(1)) {
2762       case JVM_SIGNATURE_CLASS: {
2763         if (InlineTypePassFieldsAsArgs) {
2764           SignatureStream ss(method->signature());
2765           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2766           if (vk != nullptr) {
2767             return nullptr;
2768           }
2769         }
2770         return _obj_arg_handler;
2771       }
2772       case JVM_SIGNATURE_ARRAY:
2773         return _obj_arg_handler;
2774       case JVM_SIGNATURE_INT:
2775       case JVM_SIGNATURE_BOOLEAN:
2776       case JVM_SIGNATURE_CHAR:
2777       case JVM_SIGNATURE_BYTE:
2778       case JVM_SIGNATURE_SHORT:
2779         return _int_arg_handler;
2780     }
2781   } else if (total_args_passed == 2 &&
2782              !method->is_static() && (!InlineTypePassFieldsAsArgs || !method->method_holder()->is_inline_klass())) {
2783     switch (method->signature()->char_at(1)) {
2784       case JVM_SIGNATURE_CLASS: {
2785         if (InlineTypePassFieldsAsArgs) {
2786           SignatureStream ss(method->signature());
2787           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2788           if (vk != nullptr) {
2789             return nullptr;
2790           }
2791         }
2792         return _obj_obj_arg_handler;
2793       }
2794       case JVM_SIGNATURE_ARRAY:
2795         return _obj_obj_arg_handler;
2796       case JVM_SIGNATURE_INT:
2797       case JVM_SIGNATURE_BOOLEAN:
2798       case JVM_SIGNATURE_CHAR:
2799       case JVM_SIGNATURE_BYTE:
2800       case JVM_SIGNATURE_SHORT:
2801         return _obj_int_arg_handler;
2802     }
2803   }
2804   return nullptr;
2805 }
2806 
2807 CompiledEntrySignature::CompiledEntrySignature(Method* method) :
2808   _method(method), _num_inline_args(0), _has_inline_recv(false),
2809   _regs(nullptr), _regs_cc(nullptr), _regs_cc_ro(nullptr),
2810   _args_on_stack(0), _args_on_stack_cc(0), _args_on_stack_cc_ro(0),
2811   _c1_needs_stack_repair(false), _c2_needs_stack_repair(false), _supers(nullptr) {
2812   _sig = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2813   _sig_cc = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2814   _sig_cc_ro = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2815 }
2816 
2817 // See if we can save space by sharing the same entry for VIEP and VIEP(RO),
2818 // or the same entry for VEP and VIEP(RO).
2819 CodeOffsets::Entries CompiledEntrySignature::c1_inline_ro_entry_type() const {
2820   if (!has_scalarized_args()) {
2821     // VEP/VIEP/VIEP(RO) all share the same entry. There's no packing.
2822     return CodeOffsets::Verified_Entry;
2823   }
2824   if (_method->is_static()) {
2825     // Static methods don't need VIEP(RO)
2826     return CodeOffsets::Verified_Entry;
2827   }
2828 
2829   if (has_inline_recv()) {
2830     if (num_inline_args() == 1) {
2831       // Share same entry for VIEP and VIEP(RO).
2832       // This is quite common: we have an instance method in an InlineKlass that has
2833       // no inline type args other than <this>.
2834       return CodeOffsets::Verified_Inline_Entry;
2835     } else {
2836       assert(num_inline_args() > 1, "must be");
2837       // No sharing:
2838       //   VIEP(RO) -- <this> is passed as object
2839       //   VEP      -- <this> is passed as fields
2840       return CodeOffsets::Verified_Inline_Entry_RO;
2841     }

2842   }
2843 
2844   // Either a static method, or <this> is not an inline type
2845   if (args_on_stack_cc() != args_on_stack_cc_ro()) {
2846     // No sharing:
2847     // Some arguments are passed on the stack, and we have inserted reserved entries
2848     // into the VEP, but we never insert reserved entries into the VIEP(RO).
2849     return CodeOffsets::Verified_Inline_Entry_RO;
2850   } else {
2851     // Share same entry for VEP and VIEP(RO).
2852     return CodeOffsets::Verified_Entry;
2853   }
2854 }
2855 
2856 // Returns all super methods (transitive) in classes and interfaces that are overridden by the current method.
2857 GrowableArray<Method*>* CompiledEntrySignature::get_supers() {
2858   if (_supers != nullptr) {
2859     return _supers;
2860   }
2861   _supers = new GrowableArray<Method*>();
2862   // Skip private, static, and <init> methods
2863   if (_method->is_private() || _method->is_static() || _method->is_object_constructor()) {
2864     return _supers;
2865   }
2866   Symbol* name = _method->name();
2867   Symbol* signature = _method->signature();
2868   const Klass* holder = _method->method_holder()->super();
2869   Symbol* holder_name = holder->name();
2870   ThreadInVMfromUnknown tiv;
2871   JavaThread* current = JavaThread::current();
2872   HandleMark hm(current);
2873   Handle loader(current, _method->method_holder()->class_loader());
2874 
2875   // Walk up the class hierarchy and search for super methods
2876   while (holder != nullptr) {
2877     Method* super_method = holder->lookup_method(name, signature);
2878     if (super_method == nullptr) {
2879       break;
2880     }
2881     if (!super_method->is_static() && !super_method->is_private() &&
2882         (!super_method->is_package_private() ||
2883          super_method->method_holder()->is_same_class_package(loader(), holder_name))) {
2884       _supers->push(super_method);
2885     }
2886     holder = super_method->method_holder()->super();
2887   }
2888   // Search interfaces for super methods
2889   Array<InstanceKlass*>* interfaces = _method->method_holder()->transitive_interfaces();
2890   for (int i = 0; i < interfaces->length(); ++i) {
2891     Method* m = interfaces->at(i)->lookup_method(name, signature);
2892     if (m != nullptr && !m->is_static() && m->is_public()) {
2893       _supers->push(m);
2894     }
2895   }
2896   return _supers;
2897 }
2898 
2899 // Iterate over arguments and compute scalarized and non-scalarized signatures
2900 void CompiledEntrySignature::compute_calling_conventions(bool init) {
2901   bool has_scalarized = false;
2902   if (_method != nullptr) {
2903     InstanceKlass* holder = _method->method_holder();
2904     int arg_num = 0;
2905     if (!_method->is_static()) {
2906       // We shouldn't scalarize 'this' in a value class constructor
2907       if (holder->is_inline_klass() && InlineKlass::cast(holder)->can_be_passed_as_fields() &&
2908           !_method->is_object_constructor() && (init || _method->is_scalarized_arg(arg_num))) {
2909         _sig_cc->appendAll(InlineKlass::cast(holder)->extended_sig());
2910         _sig_cc->insert_before(1, SigEntry(T_OBJECT, 0, nullptr, false, true)); // buffer argument
2911         has_scalarized = true;
2912         _has_inline_recv = true;
2913         _num_inline_args++;
2914       } else {
2915         SigEntry::add_entry(_sig_cc, T_OBJECT, holder->name());
2916       }
2917       SigEntry::add_entry(_sig, T_OBJECT, holder->name());
2918       SigEntry::add_entry(_sig_cc_ro, T_OBJECT, holder->name());
2919       arg_num++;
2920     }
2921     for (SignatureStream ss(_method->signature()); !ss.at_return_type(); ss.next()) {
2922       BasicType bt = ss.type();
2923       if (InlineTypePassFieldsAsArgs && bt == T_OBJECT) {
2924         InlineKlass* vk = ss.as_inline_klass(holder);
2925         if (vk != nullptr && vk->can_be_passed_as_fields() && (init || _method->is_scalarized_arg(arg_num))) {
2926           // Check for a calling convention mismatch with super method(s)
2927           bool scalar_super = false;
2928           bool non_scalar_super = false;
2929           GrowableArray<Method*>* supers = get_supers();
2930           for (int i = 0; i < supers->length(); ++i) {
2931             Method* super_method = supers->at(i);
2932             if (super_method->is_scalarized_arg(arg_num)) {
2933               scalar_super = true;
2934             } else {
2935               non_scalar_super = true;
2936             }
2937           }
2938 #ifdef ASSERT
2939           // Randomly enable the code paths below for stress testing
2940           bool stress = init && StressCallingConvention;
2941           if (stress && (os::random() & 1) == 1) {
2942             non_scalar_super = true;
2943             if ((os::random() & 1) == 1) {
2944               scalar_super = true;
2945             }
2946           }
2947 #endif
2948           if (non_scalar_super) {
2949             // Found a super method with a non-scalarized argument. Fall back to the non-scalarized calling convention.
2950             if (scalar_super) {
2951               // Found non-scalar *and* scalar super methods. We can't handle both.
2952               // Mark the scalar method as mismatch and re-compile call sites to use non-scalarized calling convention.
2953               for (int i = 0; i < supers->length(); ++i) {
2954                 Method* super_method = supers->at(i);
2955                 if (super_method->is_scalarized_arg(arg_num) DEBUG_ONLY(|| (stress && (os::random() & 1) == 1))) {
2956                   JavaThread* thread = JavaThread::current();
2957                   HandleMark hm(thread);
2958                   methodHandle mh(thread, super_method);
2959                   DeoptimizationScope deopt_scope;
2960                   {
2961                     // Keep the lock scope minimal. Prevent interference with other
2962                     // dependency checks by setting mismatch and marking within the lock.
2963                     MutexLocker ml(Compile_lock, Mutex::_safepoint_check_flag);
2964                     super_method->set_mismatch();
2965                     CodeCache::mark_for_deoptimization(&deopt_scope, mh());
2966                   }
2967                   deopt_scope.deoptimize_marked();
2968                 }
2969               }
2970             }
2971             // Fall back to non-scalarized calling convention
2972             SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2973             SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2974           } else {
2975             _num_inline_args++;
2976             has_scalarized = true;
2977             int last = _sig_cc->length();
2978             int last_ro = _sig_cc_ro->length();
2979             _sig_cc->appendAll(vk->extended_sig());
2980             _sig_cc_ro->appendAll(vk->extended_sig());
2981             // buffer argument
2982             _sig_cc->insert_before(last + 1, SigEntry(T_OBJECT, 0, nullptr, false, true));
2983             _sig_cc_ro->insert_before(last_ro + 1, SigEntry(T_OBJECT, 0, nullptr, false, true));
2984             // Insert InlineTypeNode::NullMarker field right after T_METADATA delimiter
2985             _sig_cc->insert_before(last + 2, SigEntry(T_BOOLEAN, -1, nullptr, true, false));
2986             _sig_cc_ro->insert_before(last_ro + 2, SigEntry(T_BOOLEAN, -1, nullptr, true, false));
2987           }
2988         } else {
2989           SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2990           SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2991         }
2992         bt = T_OBJECT;
2993       } else {
2994         SigEntry::add_entry(_sig_cc, ss.type(), ss.as_symbol());
2995         SigEntry::add_entry(_sig_cc_ro, ss.type(), ss.as_symbol());
2996       }
2997       SigEntry::add_entry(_sig, bt, ss.as_symbol());
2998       if (bt != T_VOID) {
2999         arg_num++;
3000       }
3001     }
3002   }
3003 
3004   // Compute the non-scalarized calling convention
3005   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
3006   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
3007 
3008   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
3009   if (has_scalarized && !_method->is_native()) {
3010     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
3011     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
3012 
3013     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
3014     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
3015 
3016     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
3017     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
3018 
3019     // Upper bound on stack arguments to avoid hitting the argument limit and
3020     // bailing out of compilation ("unsupported incoming calling sequence").
3021     // TODO 8281260 We need a reasonable limit (flag?) here
3022     if (MAX2(_args_on_stack_cc, _args_on_stack_cc_ro) <= 75) {
3023       return; // Success
3024     }
3025   }

3026 
3027   // No scalarized args
3028   _sig_cc = _sig;
3029   _regs_cc = _regs;
3030   _args_on_stack_cc = _args_on_stack;
3031 
3032   _sig_cc_ro = _sig;
3033   _regs_cc_ro = _regs;
3034   _args_on_stack_cc_ro = _args_on_stack;
3035 }
3036 
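     // Reconstruct the non-scalarized (_sig), scalarized (_sig_cc) and scalarized read-only (_sig_cc_ro)
     // signatures, and their calling conventions, from the fingerprint elements.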
3037 void CompiledEntrySignature::initialize_from_fingerprint(AdapterFingerPrint* fingerprint) {
3038   _has_inline_recv = fingerprint->has_ro_adapter();
3039 
3040   int value_object_count = 0;
3041   BasicType prev_bt = T_ILLEGAL;
3042   bool has_scalarized_arguments = false;
3043   bool long_prev = false;
3044   int long_prev_offset = -1;
3045   bool skipping_inline_recv = false;
3046   bool receiver_handled = false;
3047 
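       // While skipping_inline_recv is set we are inside the scalarized receiver, whose fields are
       // replaced by a single T_OBJECT entry in the read-only signature (_sig_cc_ro).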
3048   fingerprint->iterate_args([&] (const AdapterFingerPrint::Element& arg) {
3049     BasicType bt = arg.bt();
3050     int offset = arg.offset();
3051 
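         // A pending T_LONG is disambiguated by the element that follows it: a T_VOID means it was
         // a true long, anything else means it was an oop widened to T_LONG by adapter_encoding().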
3052     if (long_prev) {
3053       long_prev = false;
3054       BasicType bt_to_add;
3055       if (bt == T_VOID) {
3056         bt_to_add = T_LONG;
3057       } else {
3058         bt_to_add = T_OBJECT;
3059       }
3060       if (value_object_count == 0) {
3061         SigEntry::add_entry(_sig, bt_to_add);
3062       }
3063       assert(long_prev_offset != 0, "no buffer argument here");
3064       SigEntry::add_entry(_sig_cc, bt_to_add, nullptr, long_prev_offset);
3065       if (!skipping_inline_recv) {
3066         SigEntry::add_entry(_sig_cc_ro, bt_to_add, nullptr, long_prev_offset);
3067       }
3068     }
3069 
3070     switch (bt) {
3071       case T_VOID:
3072         if (prev_bt != T_LONG && prev_bt != T_DOUBLE) {
3073           assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
3074           value_object_count--;
3075           SigEntry::add_entry(_sig_cc, T_VOID, nullptr, offset);
3076           if (!skipping_inline_recv) {
3077             SigEntry::add_entry(_sig_cc_ro, T_VOID, nullptr, offset);
3078           } else if (value_object_count == 0) {
3079             skipping_inline_recv = false;
3080           }
3081           assert(value_object_count >= 0, "invalid value object count");
3082         } else {
3083           // Nothing to add for _sig: we already added an additional T_VOID in add_entry() when adding T_LONG or T_DOUBLE.
3084         }
3085         break;
3086       case T_INT:
3087       case T_FLOAT:
3088       case T_DOUBLE:
3089         if (value_object_count == 0) {
3090           SigEntry::add_entry(_sig, bt);
3091         }
3092         SigEntry::add_entry(_sig_cc, bt, nullptr, offset);
3093         if (!skipping_inline_recv) {
3094           SigEntry::add_entry(_sig_cc_ro, bt, nullptr, offset);
3095         }
3096         break;
3097       case T_LONG:
3098         long_prev = true;
3099         long_prev_offset = offset;
3100         break;
3101       case T_BOOLEAN:
3102       case T_CHAR:
3103       case T_BYTE:
3104       case T_SHORT:
3105       case T_OBJECT:
3106       case T_ARRAY:
3107         assert(value_object_count > 0, "must be value object field");
3108         assert(offset != 0 || (bt == T_OBJECT && prev_bt == T_METADATA), "buffer input expected here");
3109         SigEntry::add_entry(_sig_cc, bt, nullptr, offset, offset == -1, offset == 0);
3110         if (!skipping_inline_recv) {
3111           SigEntry::add_entry(_sig_cc_ro, bt, nullptr, offset, offset == -1, offset == 0);
3112         }
3113         break;
3114       case T_METADATA:
3115         assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
3116         if (value_object_count == 0) {
3117           SigEntry::add_entry(_sig, T_OBJECT);
3118         }
3119         SigEntry::add_entry(_sig_cc, T_METADATA, nullptr, offset);
3120         if (!skipping_inline_recv) {
3121           if (!receiver_handled && _has_inline_recv && value_object_count == 0) {
3122             SigEntry::add_entry(_sig_cc_ro, T_OBJECT);
3123             skipping_inline_recv = true;
3124             receiver_handled = true;
3125           } else {
3126             SigEntry::add_entry(_sig_cc_ro, T_METADATA, nullptr, offset);
3127           }
3128         }
3129         value_object_count++;
3130         has_scalarized_arguments = true;
3131         break;
3132       default: {
3133         fatal("Unexpected BasicType: %s", basictype_to_str(bt));
3134       }
3135     }
3136     prev_bt = bt;
3137   });
3138 
3139   if (long_prev) {
3140     // If previous bt was T_LONG and we reached the end of the signature, we know that it must be a T_OBJECT.
3141     SigEntry::add_entry(_sig, T_OBJECT);
3142     SigEntry::add_entry(_sig_cc, T_OBJECT);
3143     SigEntry::add_entry(_sig_cc_ro, T_OBJECT);
3144   }
3145   assert(value_object_count == 0, "invalid value object count");
3146 
3147 #ifdef ASSERT
3148   if (_has_inline_recv) {
3149     // In RO signatures, inline receivers must be represented as a single T_OBJECT
3150     assert(_sig_cc_ro->length() >= 1, "sig_cc_ro must include receiver");
3151     assert(_sig_cc_ro->at(0)._bt == T_OBJECT,
3152            "sig_cc_ro must represent inline receiver as T_OBJECT");
3153     assert(_sig_cc_ro->length() <= _sig_cc->length(),
3154            "sig_cc_ro must not be longer than sig_cc");
3155   }
3156 #endif
3157 
3158   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
3159   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
3160 
3161   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
3162   if (has_scalarized_arguments) {
3163     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
3164     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
3165 
3166     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
3167     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
3168 
3169     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
3170     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
3171   } else {
3172     // No scalarized args
3173     _sig_cc = _sig;
3174     _regs_cc = _regs;
3175     _args_on_stack_cc = _args_on_stack;
3176 
3177     _sig_cc_ro = _sig;
3178     _regs_cc_ro = _regs;
3179     _args_on_stack_cc_ro = _args_on_stack;
3180   }
3181 
3182 #ifdef ASSERT
3183   {
3184     AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(_sig_cc, _has_inline_recv);
3185     assert(fingerprint->equals(compare_fp), "%s - %s", fingerprint->as_string(), compare_fp->as_string());
3186     AdapterFingerPrint::deallocate(compare_fp);
3187   }
3188 #endif
3189 }
3190 
3191 const char* AdapterHandlerEntry::_entry_names[] = {
3192   "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
3193 };
3194 
3195 #ifdef ASSERT
3196 void AdapterHandlerLibrary::verify_adapter_sharing(CompiledEntrySignature& ces, AdapterHandlerEntry* cached_entry) {
3197   // we can only check for the same code if there is any
3198 #ifndef ZERO
3199   AdapterHandlerEntry* comparison_entry = create_adapter(ces, false, true);
3200   assert(comparison_entry->adapter_blob() == nullptr, "no blob should be created when creating an adapter for comparison");
3201   assert(comparison_entry->compare_code(cached_entry), "code must match");
3202   // Release the one just created
3203   AdapterHandlerEntry::deallocate(comparison_entry);
3204 #endif // ZERO
3205 }
3206 #endif /* ASSERT */
3207 
3208 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
3209   assert(!method->is_abstract() || InlineTypePassFieldsAsArgs, "abstract methods do not have adapters");
3210   // Use customized signature handler.  Need to lock around updates to
3211   // the _adapter_handler_table (it is not safe for concurrent readers
3212   // and a single writer: this could be fixed if it becomes a
3213   // problem).
3214 
3215   // Fast-path for trivial adapters
3216   AdapterHandlerEntry* entry = get_simple_adapter(method);
3217   if (entry != nullptr) {
3218     return entry;
3219   }
3220 
3221   ResourceMark rm;
3222   bool new_entry = false;
3223 
3224   CompiledEntrySignature ces(method());
3225   ces.compute_calling_conventions();
3226   if (ces.has_scalarized_args()) {
3227     if (!method->has_scalarized_args()) {
3228       method->set_has_scalarized_args();
3229     }
3230     if (ces.c1_needs_stack_repair()) {
3231       method->set_c1_needs_stack_repair();
3232     }
3233     if (ces.c2_needs_stack_repair() && !method->c2_needs_stack_repair()) {
3234       method->set_c2_needs_stack_repair();
3235     }
3236   }
3237 




3238   {
3239     MutexLocker mu(AdapterHandlerLibrary_lock);
3240 
3241     // Lookup method signature's fingerprint
3242     entry = lookup(ces.sig_cc(), ces.has_inline_recv());
3243 
3244     if (entry != nullptr) {
3245 #ifndef ZERO
3246       assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
3247 #endif
3248 #ifdef ASSERT
3249       if (!entry->in_aot_cache() && VerifyAdapterSharing) {
3250         verify_adapter_sharing(ces, entry);
3251       }
3252 #endif
3253     } else {
3254       entry = create_adapter(ces, /* allocate_code_blob */ true);
3255       if (entry != nullptr) {
3256         new_entry = true;
3257       }
3258     }
3259   }
3260 
3261   // Outside of the lock
3262   if (new_entry) {
3263     post_adapter_creation(entry);
3264   }
3265   return entry;
3266 }
3267 
3268 void AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
3269   ResourceMark rm;
3270   const char* name = AdapterHandlerLibrary::name(handler);
3271   const uint32_t id = AdapterHandlerLibrary::id(handler);
3272 
3273   CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name);
3274   if (blob != nullptr) {

3289   }
3290   insts_size = adapter_blob->code_size();
3291   st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
3292                 handler->fingerprint()->as_basic_args_string(),
3293                 handler->fingerprint()->as_string(), insts_size);
3294   st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
3295   if (Verbose || PrintStubCode) {
3296     address first_pc = adapter_blob->content_begin();
3297     if (first_pc != nullptr) {
3298       Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
3299       st->cr();
3300     }
3301   }
3302 }
3303 #endif // PRODUCT
3304 
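     // Convert the absolute entry addresses into offsets relative to the I2C entry (the first entry);
     // a missing C2I_No_Clinit_Check entry is recorded as -1.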
3305 void AdapterHandlerLibrary::address_to_offset(address entry_address[AdapterBlob::ENTRY_COUNT],
3306                                               int entry_offset[AdapterBlob::ENTRY_COUNT]) {
3307   entry_offset[AdapterBlob::I2C] = 0;
3308   entry_offset[AdapterBlob::C2I] = entry_address[AdapterBlob::C2I] - entry_address[AdapterBlob::I2C];
3309   entry_offset[AdapterBlob::C2I_Inline] = entry_address[AdapterBlob::C2I_Inline] - entry_address[AdapterBlob::I2C];
3310   entry_offset[AdapterBlob::C2I_Inline_RO] = entry_address[AdapterBlob::C2I_Inline_RO] - entry_address[AdapterBlob::I2C];
3311   entry_offset[AdapterBlob::C2I_Unverified] = entry_address[AdapterBlob::C2I_Unverified] - entry_address[AdapterBlob::I2C];
3312   entry_offset[AdapterBlob::C2I_Unverified_Inline] = entry_address[AdapterBlob::C2I_Unverified_Inline] - entry_address[AdapterBlob::I2C];
3313   if (entry_address[AdapterBlob::C2I_No_Clinit_Check] == nullptr) {
3314     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = -1;
3315   } else {
3316     entry_offset[AdapterBlob::C2I_No_Clinit_Check] = entry_address[AdapterBlob::C2I_No_Clinit_Check] - entry_address[AdapterBlob::I2C];
3317   }
3318 }
3319 
3320 bool AdapterHandlerLibrary::generate_adapter_code(AdapterHandlerEntry* handler,
3321                                                   CompiledEntrySignature& ces,
3322                                                   bool allocate_code_blob,
3323                                                   bool is_transient) {
3324   if (log_is_enabled(Info, perf, class, link)) {
3325     ClassLoader::perf_method_adapters_count()->inc();
3326   }
3327 
3328 #ifndef ZERO
3329   AdapterBlob* adapter_blob = nullptr;
3330   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
3331   CodeBuffer buffer(buf);
3332   short buffer_locs[20];
3333   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
3334                                          sizeof(buffer_locs)/sizeof(relocInfo));
3335   MacroAssembler masm(&buffer);
3336   address entry_address[AdapterBlob::ENTRY_COUNT];

3337 
3338   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage


3339   SharedRuntime::generate_i2c2i_adapters(&masm,
3340                                          ces.args_on_stack(),
3341                                          ces.sig(),
3342                                          ces.regs(),
3343                                          ces.sig_cc(),
3344                                          ces.regs_cc(),
3345                                          ces.sig_cc_ro(),
3346                                          ces.regs_cc_ro(),
3347                                          entry_address,
3348                                          adapter_blob,
3349                                          allocate_code_blob);
3350 
3351   if (ces.has_scalarized_args()) {
3352     // Save a C heap allocated version of the scalarized signature and store it in the adapter
3353     GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
3354     heap_sig->appendAll(ces.sig_cc());
3355     handler->set_sig_cc(heap_sig);
3356     heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc_ro()->length(), mtInternal);
3357     heap_sig->appendAll(ces.sig_cc_ro());
3358     handler->set_sig_cc_ro(heap_sig);
3359   }
3360   // On Zero there is no code to save and no need to create a blob
3361   // or relocate the handler.
3362   int entry_offset[AdapterBlob::ENTRY_COUNT];
3363   address_to_offset(entry_address, entry_offset);
3364 #ifdef ASSERT
3365   if (VerifyAdapterSharing) {
3366     handler->save_code(buf->code_begin(), buffer.insts_size());
3367     if (is_transient) {
3368       return true;
3369     }
3370   }
3371 #endif

3372   if (adapter_blob == nullptr) {
3373     // CodeCache is full, disable compilation
3374     // Ought to log this, but the compile log is only per compile thread
3375     // and we're some nondescript Java thread.
3376     return false;
3377   }
3378   handler->set_adapter_blob(adapter_blob);
3379   if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
3380     // try to save generated code
3381     const char* name = AdapterHandlerLibrary::name(handler);
3382     const uint32_t id = AdapterHandlerLibrary::id(handler);
3383     bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name);
3384     assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
3385   }
3386 #endif // ZERO
3387 
3388 #ifndef PRODUCT
3389   // debugging support
3390   if (PrintAdapterHandlers || PrintStubCode) {
3391     print_adapter_handler_info(tty, handler);
3392   }
3393 #endif
3394 
3395   return true;
3396 }
3397 
3398 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(CompiledEntrySignature& ces,
3399                                                            bool allocate_code_blob,
3400                                                            bool is_transient) {
3401   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(ces.sig_cc(), ces.has_inline_recv());
3402 #ifdef ASSERT
3403   // Verify that we can successfully restore the compiled entry signature object.
3404   CompiledEntrySignature ces_verify;
3405   ces_verify.initialize_from_fingerprint(fp);
3406 #endif
3407   AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
3408   if (!generate_adapter_code(handler, ces, allocate_code_blob, is_transient)) {
3409     AdapterHandlerEntry::deallocate(handler);
3410     return nullptr;
3411   }
3412   if (!is_transient) {
3413     assert_lock_strong(AdapterHandlerLibrary_lock);
3414     _adapter_handler_table->put(fp, handler);
3415   }
3416   return handler;
3417 }
3418 
3419 #if INCLUDE_CDS
3420 void AdapterHandlerEntry::remove_unshareable_info() {
3421 #ifdef ASSERT
3422    _saved_code = nullptr;
3423    _saved_code_length = 0;
3424 #endif // ASSERT
3425    _adapter_blob = nullptr;
3426    _linked = false;
3427    _sig_cc = nullptr;
3428    _sig_cc_ro = nullptr;
3429 }
3430 
3431 class CopyAdapterTableToArchive : StackObj {
3432 private:
3433   CompactHashtableWriter* _writer;
3434   ArchiveBuilder* _builder;
3435 public:
3436   CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
3437                                                              _builder(ArchiveBuilder::current())
3438   {}
3439 
3440   bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
3441     LogStreamHandle(Trace, aot) lsh;
3442     if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
3443       assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
3444       AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
3445       assert(buffered_fp != nullptr,"sanity check");
3446       AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
3447       assert(buffered_entry != nullptr,"sanity check");
3448 

3488   }
3489 #endif
3490 }
3491 
3492 // This method is used during a production run to link archived adapters (stored in the AOT cache)
3493 // to their code in the AOT code cache
3494 void AdapterHandlerEntry::link() {
3495   ResourceMark rm;
3496   assert(_fingerprint != nullptr, "_fingerprint must not be null");
3497   bool generate_code = false;
3498   // Generate code only if AOTCodeCache is not available, or
3499   // caching adapters is disabled, or we fail to link
3500   // the AdapterHandlerEntry to its code in the AOTCodeCache
3501   if (AOTCodeCache::is_using_adapter()) {
3502     AdapterHandlerLibrary::link_aot_adapter_handler(this);
3503     // If link_aot_adapter_handler() succeeds, _adapter_blob will be non-null
3504     if (_adapter_blob == nullptr) {
3505       log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3506       generate_code = true;
3507     }
3508 
3509     if (get_sig_cc() == nullptr) {
3510       // Calling conventions have to be regenerated at runtime and are accessed through method adapters,
3511       // which are archived in the AOT code cache. If the adapters are not regenerated, the
3512       // calling conventions should be regenerated here.
3513       CompiledEntrySignature ces;
3514       ces.initialize_from_fingerprint(_fingerprint);
3515       if (ces.has_scalarized_args()) {
3516         // Save a C heap allocated version of the scalarized signature and store it in the adapter
3517         GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
3518         heap_sig->appendAll(ces.sig_cc());
3519         set_sig_cc(heap_sig);
3520         heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc_ro()->length(), mtInternal);
3521         heap_sig->appendAll(ces.sig_cc_ro());
3522         set_sig_cc_ro(heap_sig);
3523       }
3524     }
3525   } else {
3526     generate_code = true;
3527   }
3528   if (generate_code) {
3529     CompiledEntrySignature ces;
3530     ces.initialize_from_fingerprint(_fingerprint);
3531     if (!AdapterHandlerLibrary::generate_adapter_code(this, ces, true, false)) {
3532       // Don't throw exceptions during VM initialization because java.lang.* classes
3533       // might not have been initialized, causing problems when constructing the
3534       // Java exception object.
3535       vm_exit_during_initialization("Out of space in CodeCache for adapters");
3536     }
3537   }
3538   if (_adapter_blob != nullptr) {
3539     post_adapter_creation(this);
3540   }
3541   assert(_linked, "AdapterHandlerEntry must now be linked");
3542 }
3543 
3544 void AdapterHandlerLibrary::link_aot_adapters() {
3545   uint max_id = 0;
3546   assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3547   /* It is possible that some adapters generated in the assembly phase are not stored in the cache,
3548    * which implies that the adapter ids of the adapters in the cache may not be contiguous.
3549    * If the size of the _aot_adapter_handler_table were used to initialize _id_counter, the ids of
3550    * AOT-stored handlers could collide with those of runtime-generated handlers.
3551    * To avoid such a situation, initialize _id_counter with the largest adapter id among the AOT-stored handlers.
3552    */
3553   _aot_adapter_handler_table.iterate_all([&](AdapterHandlerEntry* entry) {
3554     assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3555     entry->link();
3556     max_id = MAX2(max_id, entry->id());
3557   });
3558   // Set adapter id to the maximum id found in the AOTCache
3559   assert(_id_counter == 0, "Did not expect new AdapterHandlerEntry to be created at this stage");
3560   _id_counter = max_id;
3561 }
3562 
3563 // This method is called during a production run to look up the simple adapters
3564 // in the archived adapter handler table
3565 void AdapterHandlerLibrary::lookup_simple_adapters() {
3566   assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3567 
3568   MutexLocker mu(AdapterHandlerLibrary_lock);
3569   ResourceMark rm;
3570   CompiledEntrySignature no_args;
3571   no_args.compute_calling_conventions();
3572   _no_arg_handler = lookup(no_args.sig_cc(), no_args.has_inline_recv());
3573 
3574   CompiledEntrySignature obj_args;
3575   SigEntry::add_entry(obj_args.sig(), T_OBJECT);
3576   obj_args.compute_calling_conventions();
3577   _obj_arg_handler = lookup(obj_args.sig_cc(), obj_args.has_inline_recv());
3578 
3579   CompiledEntrySignature int_args;
3580   SigEntry::add_entry(int_args.sig(), T_INT);
3581   int_args.compute_calling_conventions();
3582   _int_arg_handler = lookup(int_args.sig_cc(), int_args.has_inline_recv());
3583 
3584   CompiledEntrySignature obj_int_args;
3585   SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
3586   SigEntry::add_entry(obj_int_args.sig(), T_INT);
3587   obj_int_args.compute_calling_conventions();
3588   _obj_int_arg_handler = lookup(obj_int_args.sig_cc(), obj_int_args.has_inline_recv());
3589 
3590   CompiledEntrySignature obj_obj_args;
3591   SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3592   SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3593   obj_obj_args.compute_calling_conventions();
3594   _obj_obj_arg_handler = lookup(obj_obj_args.sig_cc(), obj_obj_args.has_inline_recv());
3595 
3596   assert(_no_arg_handler != nullptr &&
3597          _obj_arg_handler != nullptr &&
3598          _int_arg_handler != nullptr &&
3599          _obj_int_arg_handler != nullptr &&
3600          _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3601   assert(_no_arg_handler->is_linked() &&
3602          _obj_arg_handler->is_linked() &&
3603          _int_arg_handler->is_linked() &&
3604          _obj_int_arg_handler->is_linked() &&
3605          _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3606 }
3607 #endif // INCLUDE_CDS
3608 
3609 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3610   LogStreamHandle(Trace, aot) lsh;
3611   if (lsh.is_enabled()) {
3612     lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3613     lsh.cr();
3614   }
3615   it->push(&_fingerprint);
3616 }
3617 
3618 AdapterHandlerEntry::~AdapterHandlerEntry() {
3619   if (_fingerprint != nullptr) {
3620     AdapterFingerPrint::deallocate(_fingerprint);
3621     _fingerprint = nullptr;
3622   }
3623   if (_sig_cc != nullptr) {
3624     delete _sig_cc;
3625   }
3626   if (_sig_cc_ro != nullptr) {
3627     delete _sig_cc_ro;
3628   }
3629 #ifdef ASSERT
3630   FREE_C_HEAP_ARRAY(_saved_code);
3631 #endif
3632   FreeHeap(this);
3633 }
3634 
3635 
3636 #ifdef ASSERT
3637 // Capture the code before relocation so that it can be compared
3638 // against other versions.  If the code is captured after relocation
3639 // then relative instructions won't be equivalent.
3640 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3641   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3642   _saved_code_length = length;
3643   memcpy(_saved_code, buffer, length);
3644 }
3645 
3646 
3647 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3648   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");

3696 
3697       struct { double data[20]; } locs_buf;
3698       struct { double data[20]; } stubs_locs_buf;
3699       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3700 #if defined(AARCH64) || defined(PPC64)
3701       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3702       // in the constant pool to ensure ordering between the barrier and oops
3703       // accesses. For native_wrappers we need a constant.
3704       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3705       // static java call that is resolved in the runtime.
3706       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3707         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3708       }
3709 #endif
3710       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3711       MacroAssembler _masm(&buffer);
3712 
3713       // Fill in the signature array, for the calling-convention call.
3714       const int total_args_passed = method->size_of_parameters();
3715 
3716       BasicType stack_sig_bt[16];
3717       VMRegPair stack_regs[16];
3718       BasicType* sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
3719       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3720 
3721       int i = 0;
3722       if (!method->is_static()) {  // Pass in receiver first
3723         sig_bt[i++] = T_OBJECT;
3724       }
3725       SignatureStream ss(method->signature());
3726       for (; !ss.at_return_type(); ss.next()) {
3727         sig_bt[i++] = ss.type();  // Collect remaining bits of signature
3728         if (ss.type() == T_LONG || ss.type() == T_DOUBLE) {
3729           sig_bt[i++] = T_VOID;   // Longs & doubles take 2 Java slots
3730         }
3731       }
3732       assert(i == total_args_passed, "");
3733       BasicType ret_type = ss.type();
3734 
3735       // Now get the compiled-Java arguments layout.
3736       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3737 
3738       // Generate the compiled-to-native wrapper code
3739       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3740 
3741       if (nm != nullptr) {
3742         {
3743           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3744           if (nm->make_in_use()) {
3745             method->set_code(method, nm);
3746           }
3747         }
3748 
3749         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3750         if (directive->PrintAssemblyOption) {
3751           nm->print_code();
3752         }
3753         DirectivesStack::release(directive);

3961       if (b == handler->adapter_blob()) {
3962         found = true;
3963         st->print("Adapter for signature: ");
3964         handler->print_adapter_on(st);
3965         return false; // abort iteration
3966       } else {
3967         return true; // keep looking
3968       }
3969     };
3970     assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3971     _adapter_handler_table->iterate(findblob_runtime_table);
3972   }
3973   assert(found, "Should have found handler");
3974 }
3975 
3976 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3977   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3978   if (adapter_blob() != nullptr) {
3979     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3980     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3981     st->print(" c2iVE: " INTPTR_FORMAT, p2i(get_c2i_inline_entry()));
3982     st->print(" c2iVROE: " INTPTR_FORMAT, p2i(get_c2i_inline_ro_entry()));
3983     st->print(" c2iUE: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3984     st->print(" c2iUVE: " INTPTR_FORMAT, p2i(get_c2i_unverified_inline_entry()));
3985     if (get_c2i_no_clinit_check_entry() != nullptr) {
3986       st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3987     }
3988   }
3989   st->cr();
3990 }
3991 
3992 #ifndef PRODUCT
3993 
3994 void AdapterHandlerLibrary::print_statistics() {
3995   print_table_statistics();
3996 }
3997 
3998 #endif /* PRODUCT */
3999 
4000 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
4001   assert(current == JavaThread::current(), "pre-condition");
4002   StackOverflow* overflow_state = current->stack_overflow_state();
4003   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
4004   overflow_state->set_reserved_stack_activation(current->stack_base());

4051         event.set_method(method);
4052         event.commit();
4053       }
4054     }
4055   }
4056   return activation;
4057 }
4058 
4059 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
4060   // After any safepoint, just before going back to compiled code,
4061   // we inform the GC that we will be doing initializing writes to
4062   // this object in the future without emitting card-marks, so
4063   // GC may take any compensating steps.
4064 
4065   oop new_obj = current->vm_result_oop();
4066   if (new_obj == nullptr) return;
4067 
4068   BarrierSet *bs = BarrierSet::barrier_set();
4069   bs->on_slowpath_allocation_exit(current, new_obj);
4070 }
4071 
4072 // We are at a compiled-code-to-interpreter call. We need backing
4073 // buffers for all inline type arguments. Allocate an object array to
4074 // hold them (convenient because once we're done with it we don't have
4075 // to worry about freeing it).
4076 oop SharedRuntime::allocate_inline_types_impl(JavaThread* current, methodHandle callee, bool allocate_receiver, bool from_c1, TRAPS) {
4077   assert(InlineTypePassFieldsAsArgs, "no reason to call this");
4078   ResourceMark rm;
4079 
4080   // Retrieve arguments passed at the call
4081   RegisterMap reg_map2(THREAD,
4082                        RegisterMap::UpdateMap::include,
4083                        RegisterMap::ProcessFrames::include,
4084                        RegisterMap::WalkContinuation::skip);
4085   frame stubFrame = THREAD->last_frame();
4086   frame callerFrame = stubFrame.sender(&reg_map2);
4087   if (from_c1) {
4088     callerFrame = callerFrame.sender(&reg_map2);
4089   }
4090   int arg_size;
4091   const GrowableArray<SigEntry>* sig = allocate_receiver ? callee->adapter()->get_sig_cc() : callee->adapter()->get_sig_cc_ro();
4092   assert(sig != nullptr, "sig should never be null");
4093   TempNewSymbol tmp_sig = SigEntry::create_symbol(sig);
4094   VMRegPair* reg_pairs = find_callee_arguments(tmp_sig, false, false, &arg_size);
4095 
4096   int nb_slots = 0;
4097   InstanceKlass* holder = callee->method_holder();
4098   allocate_receiver &= !callee->is_static() && holder->is_inline_klass() && callee->is_scalarized_arg(0);
4099   if (allocate_receiver) {
4100     nb_slots++;
4101   }
4102   int arg_num = callee->is_static() ? 0 : 1;
4103   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4104     BasicType bt = ss.type();
4105     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4106       nb_slots++;
4107     }
4108     if (bt != T_VOID) {
4109       arg_num++;
4110     }
4111   }
4112   objArrayOop array_oop = nullptr;
4113   objArrayHandle array;
4114   arg_num = callee->is_static() ? 0 : 1;
4115   int i = 0;
4116   uint pos = 0;
4117   uint depth = 0;
4118   uint ignored = 0;
4119   if (allocate_receiver) {
4120     assert(sig->at(pos)._bt == T_METADATA, "scalarized value expected");
4121     pos++;
4122     ignored++;
4123     depth++;
4124     assert(sig->at(pos)._bt == T_OBJECT, "buffer argument");
4125     uint reg_pos = 0;
4126     assert(reg_pos < (uint)arg_size, "out of bound register?");
4127     VMRegPair reg_pair = reg_pairs[reg_pos];
4128     oop* buffer = callerFrame.oopmapreg_to_oop_location(reg_pair.first(), &reg_map2);
4129     instanceHandle h_buffer(THREAD, (instanceOop)*buffer);
4130     InlineKlass* vk = InlineKlass::cast(holder);
4131     if (h_buffer.not_null()) {
4132       assert(h_buffer->klass() == vk, "buffer not of expected class");
4133     } else {
4134       // Only allocate if buffer passed at the call is null
4135       if (array_oop == nullptr) {
4136         array_oop = oopFactory::new_objectArray(nb_slots, CHECK_NULL);
4137         array = objArrayHandle(THREAD, array_oop);
4138       }
4139       oop res = vk->allocate_instance(CHECK_NULL);
4140       array->obj_at_put(i, res);
4141     }
4142     i++;
4143   }
4144   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4145     BasicType bt = ss.type();
4146     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4147       while (true) {
4148         BasicType bt = sig->at(pos)._bt;
4149         if (bt == T_METADATA) {
4150           depth++;
4151           ignored++;
4152           if (depth == 1) {
4153             break;
4154           }
4155         } else if (bt == T_VOID && sig->at(pos - 1)._bt != T_LONG && sig->at(pos - 1)._bt != T_DOUBLE) {
4156           ignored++;
4157           depth--;
4158         }
4159         pos++;
4160       }
4161       pos++;
4162       assert(sig->at(pos)._bt == T_OBJECT, "buffer argument expected");
4163       uint reg_pos = pos - ignored;
4164       assert(reg_pos < (uint)arg_size, "out of bound register?");
4165       VMRegPair reg_pair = reg_pairs[reg_pos];
4166       oop* buffer = callerFrame.oopmapreg_to_oop_location(reg_pair.first(), &reg_map2);
4167       instanceHandle h_buffer(THREAD, (instanceOop)*buffer);
4168       InlineKlass* vk = ss.as_inline_klass(holder);
4169       assert(vk != nullptr, "Unexpected klass");
4170       if (h_buffer.not_null()) {
4171         assert(h_buffer->klass() == vk, "buffer not of expected class");
4172       } else {
4173         // Only allocate if buffer passed at the call is null
4174         if (array_oop == nullptr) {
4175           array_oop = oopFactory::new_objectArray(nb_slots, CHECK_NULL);
4176           array = objArrayHandle(THREAD, array_oop);
4177         }
4178         oop res = vk->allocate_instance(CHECK_NULL);
4179         array->obj_at_put(i, res);
4180       }
4181       i++;
4182     }
4183     if (bt != T_VOID) {
4184       arg_num++;
4185     }
4186   }
4187   return array();
4188 }
4189 
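The scans in allocate_inline_types_impl rely on the extended-signature convention visible above: T_METADATA opens the scalarized fields of an inline-type argument, a matching T_VOID closes them, and T_VOID is also used as the second slot of a long or double, which is why the previous entry is checked. A toy, self-contained illustration of that depth and look-behind bookkeeping follows; ToyBt and count_scalarized_args are illustrative, not the real SigEntry layout.

#include <cassert>
#include <vector>

// Illustrative basic-type tags; HotSpot's BasicType has many more members.
enum ToyBt { TB_METADATA, TB_VOID, TB_LONG, TB_DOUBLE, TB_INT, TB_OBJECT };

// Count top-level scalarized arguments by matching TB_METADATA against the
// TB_VOID entries that close them, skipping a TB_VOID that is merely the
// second half of a long or double.
static int count_scalarized_args(const std::vector<ToyBt>& sig) {
  int depth = 0;
  int count = 0;
  for (size_t i = 0; i < sig.size(); i++) {
    if (sig[i] == TB_METADATA) {
      if (depth == 0) count++;   // a new top-level scalarized argument
      depth++;
    } else if (sig[i] == TB_VOID &&
               (i == 0 || (sig[i-1] != TB_LONG && sig[i-1] != TB_DOUBLE))) {
      depth--;                   // closes one scalarized nesting level
    }
  }
  assert(depth == 0 && "unbalanced extended signature");
  return count;
}
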
4190 JRT_ENTRY(void, SharedRuntime::allocate_inline_types(JavaThread* current, Method* callee_method, bool allocate_receiver))
4191   methodHandle callee(current, callee_method);
4192   oop array = SharedRuntime::allocate_inline_types_impl(current, callee, allocate_receiver, false, CHECK);
4193   current->set_vm_result_oop(array);
4194 JRT_END
4195 
4196 // We're returning from an interpreted method: load each field into a
4197 // register following the calling convention
4198 JRT_LEAF(void, SharedRuntime::load_inline_type_fields_in_regs(JavaThread* current, oopDesc* res))
4199 {
4200   assert(res->klass()->is_inline_klass(), "only inline types here");
4201   ResourceMark rm;
4202   RegisterMap reg_map(current,
4203                       RegisterMap::UpdateMap::include,
4204                       RegisterMap::ProcessFrames::include,
4205                       RegisterMap::WalkContinuation::skip);
4206   frame stubFrame = current->last_frame();
4207   frame callerFrame = stubFrame.sender(&reg_map);
4208   assert(callerFrame.is_interpreted_frame(), "should be coming from interpreter");
4209 
4210   InlineKlass* vk = InlineKlass::cast(res->klass());
4211 
4212   const Array<SigEntry>* sig_vk = vk->extended_sig();
4213   const Array<VMRegPair>* regs = vk->return_regs();
4214 
4215   if (regs == nullptr) {
4216     // The fields of the inline klass don't fit in registers; bail out
4217     return;
4218   }
4219 
4220   int j = 1;
4221   for (int i = 0; i < sig_vk->length(); i++) {
4222     BasicType bt = sig_vk->at(i)._bt;
4223     if (bt == T_METADATA) {
4224       continue;
4225     }
4226     if (bt == T_VOID) {
4227       if (sig_vk->at(i-1)._bt == T_LONG ||
4228           sig_vk->at(i-1)._bt == T_DOUBLE) {
4229         j++;
4230       }
4231       continue;
4232     }
4233     int off = sig_vk->at(i)._offset;
4234     assert(off > 0, "offset in object should be positive");
4235     VMRegPair pair = regs->at(j);
4236     address loc = reg_map.location(pair.first(), nullptr);
4237     guarantee(loc != nullptr, "bad register save location");
4238     switch(bt) {
4239     case T_BOOLEAN:
4240       *(jboolean*)loc = res->bool_field(off);
4241       break;
4242     case T_CHAR:
4243       *(jchar*)loc = res->char_field(off);
4244       break;
4245     case T_BYTE:
4246       *(jbyte*)loc = res->byte_field(off);
4247       break;
4248     case T_SHORT:
4249       *(jshort*)loc = res->short_field(off);
4250       break;
4251     case T_INT: {
4252       *(jint*)loc = res->int_field(off);
4253       break;
4254     }
4255     case T_LONG:
4256 #ifdef _LP64
4257       *(intptr_t*)loc = res->long_field(off);
4258 #else
4259       Unimplemented();
4260 #endif
4261       break;
4262     case T_OBJECT:
4263     case T_ARRAY: {
4264       *(oop*)loc = res->obj_field(off);
4265       break;
4266     }
4267     case T_FLOAT:
4268       *(jfloat*)loc = res->float_field(off);
4269       break;
4270     case T_DOUBLE:
4271       *(jdouble*)loc = res->double_field(off);
4272       break;
4273     default:
4274       ShouldNotReachHere();
4275     }
4276     j++;
4277   }
4278   assert(j == regs->length(), "missed a field?");
4279 
4280 #ifdef ASSERT
4281   VMRegPair pair = regs->at(0);
4282   address loc = reg_map.location(pair.first(), nullptr);
4283   assert(*(oopDesc**)loc == res, "overwritten object");
4284 #endif
4285 
4286   current->set_vm_result_oop(res);
4287 }
4288 JRT_END
4289 
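The per-type switch in load_inline_type_fields_in_regs exists because a register save slot is just raw memory: each field value has to be stored through a pointer of the matching width at the location reg_map reports for that register. A minimal sketch of the same idea against a stand-in save slot; ToySaveSlot is hypothetical, and real layouts come from the platform calling convention.

#include <cstdint>
#include <cstring>

// One stand-in register save slot (a machine word of raw bytes).
struct ToySaveSlot {
  unsigned char bytes[8];

  // Store a value of width sizeof(T) into the slot, as the switch above
  // does with *(jint*)loc, *(jfloat*)loc, and so on.
  template <typename T>
  void put(T value) {
    static_assert(sizeof(T) <= sizeof(bytes), "value wider than a slot");
    std::memcpy(bytes, &value, sizeof(T));
  }
};

// Usage sketch: an int and a double land in separate slots.
inline void demo(ToySaveSlot& s0, ToySaveSlot& s1) {
  s0.put<int32_t>(42);
  s1.put<double>(3.5);
}
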
4290 // We've returned to an interpreted method; the interpreter needs a
4291 // reference to an inline type instance. Allocate it and initialize it
4292 // from the fields' values in registers.
4293 JRT_BLOCK_ENTRY(void, SharedRuntime::store_inline_type_fields_to_buf(JavaThread* current, intptr_t res))
4294 {
4295   ResourceMark rm;
4296   RegisterMap reg_map(current,
4297                       RegisterMap::UpdateMap::include,
4298                       RegisterMap::ProcessFrames::include,
4299                       RegisterMap::WalkContinuation::skip);
4300   frame stubFrame = current->last_frame();
4301   frame callerFrame = stubFrame.sender(&reg_map);
4302 
4303 #ifdef ASSERT
4304   InlineKlass* verif_vk = InlineKlass::returned_inline_klass(reg_map);
4305 #endif
4306 
4307   if (!is_set_nth_bit(res, 0)) {
4308     // We're not returning with inline type fields in registers (the
4309     // calling convention didn't allow it for this inline klass)
4310     assert(!Metaspace::contains((void*)res), "should be oop or pointer in buffer area");
4311     current->set_vm_result_oop((oopDesc*)res);
4312     assert(verif_vk == nullptr, "broken calling convention");
4313     return;
4314   }
4315 
4316   clear_nth_bit(res, 0);
4317   InlineKlass* vk = (InlineKlass*)res;
4318   assert(verif_vk == vk, "broken calling convention");
4319   assert(Metaspace::contains((void*)res), "should be klass");
4320 
4321   // Allocate handles for every oop field so they are safe in case of
4322   // a safepoint when allocating
4323   GrowableArray<Handle> handles;
4324   vk->save_oop_fields(reg_map, handles);
4325 
4326   // It was unsafe to safepoint until this point; the oop fields above are now protected by handles
4327   JRT_BLOCK;
4328   {
4329     JavaThread* THREAD = current;
4330     oop vt = vk->realloc_result(reg_map, handles, CHECK);
4331     current->set_vm_result_oop(vt);
4332   }
4333   JRT_BLOCK_END;
4334 }
4335 JRT_END
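
The entry point above decodes a return value that is either an ordinary buffered oop (bit 0 clear) or an InlineKlass pointer tagged by setting bit 0, which works because both are at least word-aligned, so bit 0 of the address is otherwise always zero. A self-contained sketch of that tagging convention; the helper names and FakeKlass are illustrative stand-ins for the real is_set_nth_bit/clear_nth_bit utilities and InlineKlass.

#include <cassert>
#include <cstdint>

// Illustrative helpers, not the HotSpot utilities of similar names.
static inline bool bit0_set(intptr_t x)       { return (x & 1) != 0; }
static inline intptr_t clear_bit0(intptr_t x) { return x & ~intptr_t(1); }

struct FakeKlass { int dummy; };   // stand-in for a klass pointer target

int main() {
  static FakeKlass k;
  // Tag the klass pointer: legal because the object is word-aligned,
  // so bit 0 of its address is known to be zero.
  intptr_t res = reinterpret_cast<intptr_t>(&k) | 1;
  if (bit0_set(res)) {
    // Fields-in-registers case: untag to recover the klass pointer.
    FakeKlass* vk = reinterpret_cast<FakeKlass*>(clear_bit0(res));
    assert(vk == &k);
  }
  return 0;
}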