src/hotspot/share/runtime/sharedRuntime.cpp

  28 #include "classfile/javaClasses.inline.hpp"
  29 #include "classfile/stringTable.hpp"
  30 #include "classfile/vmClasses.hpp"
  31 #include "classfile/vmSymbols.hpp"
  32 #include "code/aotCodeCache.hpp"
  33 #include "code/codeCache.hpp"
  34 #include "code/compiledIC.hpp"
  35 #include "code/nmethod.inline.hpp"
  36 #include "code/scopeDesc.hpp"
  37 #include "code/vtableStubs.hpp"
  38 #include "compiler/abstractCompiler.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "gc/shared/barrierSet.hpp"
  42 #include "gc/shared/collectedHeap.hpp"
  43 #include "interpreter/interpreter.hpp"
  44 #include "interpreter/interpreterRuntime.hpp"
  45 #include "jvm.h"
  46 #include "jfr/jfrEvents.hpp"
  47 #include "logging/log.hpp"
  48 #include "memory/resourceArea.hpp"
  49 #include "memory/universe.hpp"
  50 #include "metaprogramming/primitiveConversions.hpp"
  51 #include "oops/klass.hpp"
  52 #include "oops/method.inline.hpp"
  53 #include "oops/objArrayKlass.hpp"
  54 #include "oops/oop.inline.hpp"
  55 #include "prims/forte.hpp"
  56 #include "prims/jvmtiExport.hpp"
  57 #include "prims/jvmtiThreadState.hpp"
  58 #include "prims/methodHandles.hpp"
  59 #include "prims/nativeLookup.hpp"
  60 #include "runtime/arguments.hpp"
  61 #include "runtime/atomic.hpp"
  62 #include "runtime/basicLock.inline.hpp"
  63 #include "runtime/frame.inline.hpp"
  64 #include "runtime/handles.inline.hpp"
  65 #include "runtime/init.hpp"
  66 #include "runtime/interfaceSupport.inline.hpp"
  67 #include "runtime/java.hpp"
  68 #include "runtime/javaCalls.hpp"
  69 #include "runtime/jniHandles.inline.hpp"
  70 #include "runtime/perfData.hpp"
  71 #include "runtime/sharedRuntime.hpp"
  72 #include "runtime/stackWatermarkSet.hpp"
  73 #include "runtime/stubRoutines.hpp"
  74 #include "runtime/synchronizer.inline.hpp"

1177 // for a call currently in progress, i.e., arguments have been pushed on the stack
1178 // but the callee has not been invoked yet.  Caller frame must be compiled.
1179 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1180                                               CallInfo& callinfo, TRAPS) {
1181   Handle receiver;
1182   Handle nullHandle;  // create a handy null handle for exception returns
1183   JavaThread* current = THREAD;
1184 
1185   assert(!vfst.at_end(), "Java frame must exist");
1186 
1187   // Find caller and bci from vframe
1188   methodHandle caller(current, vfst.method());
1189   int          bci   = vfst.bci();
1190 
1191   if (caller->is_continuation_enter_intrinsic()) {
1192     bc = Bytecodes::_invokestatic;
1193     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1194     return receiver;
1195   }
1196 
1197   Bytecode_invoke bytecode(caller, bci);
1198   int bytecode_index = bytecode.index();
1199   bc = bytecode.invoke_code();
1200 
1201   methodHandle attached_method(current, extract_attached_method(vfst));
1202   if (attached_method.not_null()) {
1203     Method* callee = bytecode.static_target(CHECK_NH);
1204     vmIntrinsics::ID id = callee->intrinsic_id();
1205     // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual call,
1206     // it attaches the statically resolved method to the call site.
1207     if (MethodHandles::is_signature_polymorphic(id) &&
1208         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1209       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1210 
1211       // Adjust invocation mode according to the attached method.
1212       switch (bc) {
1213         case Bytecodes::_invokevirtual:
1214           if (attached_method->method_holder()->is_interface()) {
1215             bc = Bytecodes::_invokeinterface;
1216           }
1217           break;
1218         case Bytecodes::_invokeinterface:
1219           if (!attached_method->method_holder()->is_interface()) {
1220             bc = Bytecodes::_invokevirtual;
1221           }
1222           break;
1223         case Bytecodes::_invokehandle:
1224           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1225             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1226                                               : Bytecodes::_invokevirtual;
1227           }
1228           break;
1229         default:
1230           break;
1231       }
1232     }
1233   }
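  // [Worked example, not in the original source: a linkToVirtual intrinsic
  //  whose attached method is declared by an interface is re-labelled
  //  _invokeinterface by the switch above, while a linkToInterface call that
  //  resolved to a method of a concrete class becomes _invokevirtual, so the
  //  invocation mode always matches the attached method's actual holder.]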
1234 
1235   assert(bc != Bytecodes::_illegal, "not initialized");
1236 
1237   bool has_receiver = bc != Bytecodes::_invokestatic &&
1238                       bc != Bytecodes::_invokedynamic &&
1239                       bc != Bytecodes::_invokehandle;
1240 
1241   // Find receiver for non-static call
1242   if (has_receiver) {
1243     // This register map must be updated since we need to find the receiver for
1244     // compiled frames. The receiver might be in a register.
1245     RegisterMap reg_map2(current,
1246                          RegisterMap::UpdateMap::include,
1247                          RegisterMap::ProcessFrames::include,
1248                          RegisterMap::WalkContinuation::skip);
1249     frame stubFrame   = current->last_frame();
1250     // Caller-frame is a compiled frame
1251     frame callerFrame = stubFrame.sender(&reg_map2);
1252 
1253     if (attached_method.is_null()) {
1254       Method* callee = bytecode.static_target(CHECK_NH);
1255       if (callee == nullptr) {
1256         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1257       }
1258     }
1259 
1260     // Retrieve from a compiled argument list
1261     receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1262     assert(oopDesc::is_oop_or_null(receiver()), "");
1263 
1264     if (receiver.is_null()) {
1265       THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1266     }
1267   }
1268 
1269   // Resolve method
1270   if (attached_method.not_null()) {
1271     // Parameterized by attached method.
1272     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH);
1273   } else {
1274     // Parameterized by bytecode.
1275     constantPoolHandle constants(current, caller->constants());
1276     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1277   }
1278 
1279 #ifdef ASSERT
1280   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1281   if (has_receiver) {
1282     assert(receiver.not_null(), "should have thrown exception");
1283     Klass* receiver_klass = receiver->klass();
1284     Klass* rk = nullptr;
1285     if (attached_method.not_null()) {
1286       // In case there's resolved method attached, use its holder during the check.
1287       rk = attached_method->method_holder();
1288     } else {
1289       // Klass is already loaded.
1290       constantPoolHandle constants(current, caller->constants());
1291       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1292     }
1293     Klass* static_receiver_klass = rk;
1294     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1295            "actual receiver must be subclass of static receiver klass");
1296     if (receiver_klass->is_instance_klass()) {
1297       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1298         tty->print_cr("ERROR: Klass not yet initialized!!");
1299         receiver_klass->print();
1300       }
1301       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1302     }
1303   }
1304 #endif
1305 
1306   return receiver;
1307 }
1308 
1309 methodHandle SharedRuntime::find_callee_method(TRAPS) {
1310   JavaThread* current = THREAD;
1311   ResourceMark rm(current);
1312   // We need first to check if any Java activations (compiled, interpreted)
1313   // exist on the stack since last JavaCall.  If not, we need
1314   // to get the target method from the JavaCall wrapper.
1315   vframeStream vfst(current, true);  // Do not skip any javaCalls
1316   methodHandle callee_method;
1317   if (vfst.at_end()) {
1318     // No Java frames were found on stack since we did the JavaCall.
1319     // Hence the stack can only contain an entry_frame.  We need to
1320     // find the target method from the stub frame.
1321     RegisterMap reg_map(current,
1322                         RegisterMap::UpdateMap::skip,
1323                         RegisterMap::ProcessFrames::include,
1324                         RegisterMap::WalkContinuation::skip);
1325     frame fr = current->last_frame();
1326     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1327     fr = fr.sender(&reg_map);
1328     assert(fr.is_entry_frame(), "must be");
1329     // fr is now pointing to the entry frame.
1330     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1331   } else {
1332     Bytecodes::Code bc;
1333     CallInfo callinfo;
1334     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1335     callee_method = methodHandle(current, callinfo.selected_method());
1336   }
1337   assert(callee_method()->is_method(), "must be");
1338   return callee_method;
1339 }
1340 
1341 // Resolves a call.
1342 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, TRAPS) {
1343   JavaThread* current = THREAD;
1344   ResourceMark rm(current);
1345   RegisterMap cbl_map(current,
1346                       RegisterMap::UpdateMap::skip,
1347                       RegisterMap::ProcessFrames::include,
1348                       RegisterMap::WalkContinuation::skip);
1349   frame caller_frame = current->last_frame().sender(&cbl_map);
1350 
1351   CodeBlob* caller_cb = caller_frame.cb();
1352   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1353   nmethod* caller_nm = caller_cb->as_nmethod();
1354 
1355   // determine call info & receiver
1356   // note: a) receiver is null for static calls
1357   //       b) an exception is thrown if receiver is null for non-static calls
1358   CallInfo call_info;
1359   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1360   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1361 
1362   NoSafepointVerifier nsv;
1363 
1364   methodHandle callee_method(current, call_info.selected_method());
1365 
1366   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1367          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1368          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1369          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1370          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1371 
1372   assert(!caller_nm->is_unloading(), "It should not be unloading");
1373 
1374 #ifndef PRODUCT
1375   // tracing/debugging/statistics
1376   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1377                  (is_virtual) ? (&_resolve_virtual_ctr) :
1378                                 (&_resolve_static_ctr);
1379   Atomic::inc(addr);
1380 
1381   if (TraceCallFixup) {
1382     ResourceMark rm(current);
1383     tty->print("resolving %s%s (%s) call to",
1384                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1385                Bytecodes::name(invoke_code));
1386     callee_method->print_short_name(tty);
1387     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1388                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1389   }
1390 #endif
1391 
1392   if (invoke_code == Bytecodes::_invokestatic) {
1393     assert(callee_method->method_holder()->is_initialized() ||
1394            callee_method->method_holder()->is_reentrant_initialization(current),
1395            "invalid class initialization state for invoke_static");
1396     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1397       // In order to keep class initialization check, do not patch call
1398       // site for static call when the class is not fully initialized.
1399       // Proper check is enforced by call site re-resolution on every invocation.
1400       //
1401       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1402       // explicit class initialization check is put in nmethod entry (VEP).
1403       assert(callee_method->method_holder()->is_linked(), "must be");
1404       return callee_method;
1405     }
1406   }
1407 
1408 
1409   // JSR 292 key invariant:
1410   // If the resolved method is a MethodHandle invoke target, the call
1411 // site must be a MethodHandle call site, because the lambda form might tail-call,
1412 // leaving the stack in a state unknown to either caller or callee.
1413 
1414   // Compute entry points. The computation of the entry points is independent of
1415   // patching the call.
1416 
1417   // Make sure the callee nmethod does not get deoptimized and removed before
1418   // we are done patching the code.
1419 
1420 
1421   CompiledICLocker ml(caller_nm);
1422   if (is_virtual && !is_optimized) {
1423     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1424     inline_cache->update(&call_info, receiver->klass());
1425   } else {
1426     // Callsite is a direct call - set it to the destination method
1427     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1428     callsite->set(callee_method);
1429   }
1430 
1431   return callee_method;
1432 }
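
A hedged, standalone sketch (plain C++, not HotSpot code; InlineCache and DirectCall are stand-in names) of the two patching paths resolve_helper chooses between above: a virtual, non-optimized call site goes through an updatable inline cache, while every other resolved call is bound directly to the callee.

#include <cstdio>

struct Target { const char* name; };

struct InlineCache {                  // stand-in for CompiledIC
  Target* cached = nullptr;
  void update(Target* t) { cached = t; }  // the real code also caches the receiver klass
};

struct DirectCall {                   // stand-in for CompiledDirectCall
  Target* dest = nullptr;
  void set(Target* t) { dest = t; }
};

int main() {
  Target callee{"callee"};
  InlineCache ic; DirectCall dc;
  const bool is_virtual = true, is_optimized = false;
  if (is_virtual && !is_optimized) {
    ic.update(&callee);               // CompiledIC_before(...)->update(...)
  } else {
    dc.set(&callee);                  // CompiledDirectCall::before(...)->set(...)
  }
  std::printf("%s\n", (ic.cached != nullptr ? ic.cached : dc.dest)->name);
  return 0;
}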
1433 
1434 // Inline caches exist only in compiled code
1435 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1436 #ifdef ASSERT
1437   RegisterMap reg_map(current,
1438                       RegisterMap::UpdateMap::skip,
1439                       RegisterMap::ProcessFrames::include,
1440                       RegisterMap::WalkContinuation::skip);
1441   frame stub_frame = current->last_frame();
1442   assert(stub_frame.is_runtime_frame(), "sanity check");
1443   frame caller_frame = stub_frame.sender(&reg_map);
1444   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1445 #endif /* ASSERT */
1446 
1447   methodHandle callee_method;
1448   JRT_BLOCK
1449     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1450     // Return Method* through TLS
1451     current->set_vm_result_metadata(callee_method());
1452   JRT_BLOCK_END
1453   // return compiled code entry point after potential safepoints
1454   return get_resolved_entry(current, callee_method);
1455 JRT_END
1456 
1457 
1458 // Handle call site that has been made non-entrant
1459 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1460   // 6243940 We might end up in here if the callee is deoptimized
1461   // as we race to call it.  We don't want to take a safepoint if
1462   // the caller was interpreted because the caller frame will look
1463   // interpreted to the stack walkers and arguments are now
1464   // "compiled" so it is much better to make this transition
1465   // invisible to the stack walking code. The i2c path will
1466   // place the callee method in the callee_target. It is stashed
1467   // there because if we try to find the callee by normal means a
1468   // safepoint is possible and we would have trouble GC'ing the compiled args.
1469   RegisterMap reg_map(current,
1470                       RegisterMap::UpdateMap::skip,
1471                       RegisterMap::ProcessFrames::include,
1472                       RegisterMap::WalkContinuation::skip);
1473   frame stub_frame = current->last_frame();
1474   assert(stub_frame.is_runtime_frame(), "sanity check");
1475   frame caller_frame = stub_frame.sender(&reg_map);
1476 
1477   if (caller_frame.is_interpreted_frame() ||
1478       caller_frame.is_entry_frame() ||
1479       caller_frame.is_upcall_stub_frame()) {
1480     Method* callee = current->callee_target();
1481     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1482     current->set_vm_result_metadata(callee);
1483     current->set_callee_target(nullptr);
1484     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1485       // Bypass class initialization checks in c2i when caller is in native.
1486       // JNI calls to static methods don't have class initialization checks.
1487       // Fast class initialization checks are present in c2i adapters and call into
1488       // SharedRuntime::handle_wrong_method() on the slow path.
1489       //
1490       // JVM upcalls may land here as well, but there's a proper check present in
1491       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1492       // so bypassing it in c2i adapter is benign.
1493       return callee->get_c2i_no_clinit_check_entry();
1494     } else {
1495       return callee->get_c2i_entry();
1496     }
1497   }
1498 
1499   // Must be compiled to compiled path which is safe to stackwalk
1500   methodHandle callee_method;
1501   JRT_BLOCK
1502     // Force resolving of caller (if we called from compiled frame)
1503     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1504     current->set_vm_result_metadata(callee_method());
1505   JRT_BLOCK_END
1506   // return compiled code entry point after potential safepoints
1507   return get_resolved_entry(current, callee_method);
1508 JRT_END
1509 
1510 // Handle abstract method call
1511 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1512   // Verbose error message for AbstractMethodError.
1513   // Get the called method from the invoke bytecode.
1514   vframeStream vfst(current, true);
1515   assert(!vfst.at_end(), "Java frame must exist");
1516   methodHandle caller(current, vfst.method());
1517   Bytecode_invoke invoke(caller, vfst.bci());
1518   DEBUG_ONLY( invoke.verify(); )
1519 
1520   // Find the compiled caller frame.
1521   RegisterMap reg_map(current,
1522                       RegisterMap::UpdateMap::include,
1523                       RegisterMap::ProcessFrames::include,
1524                       RegisterMap::WalkContinuation::skip);
1525   frame stubFrame = current->last_frame();
1526   assert(stubFrame.is_runtime_frame(), "must be");
1527   frame callerFrame = stubFrame.sender(&reg_map);
1528   assert(callerFrame.is_compiled_frame(), "must be");
1529 
1530   // Install exception and return forward entry.
1531   address res = SharedRuntime::throw_AbstractMethodError_entry();
1532   JRT_BLOCK
1533     methodHandle callee(current, invoke.static_target(current));
1534     if (!callee.is_null()) {
1535       oop recv = callerFrame.retrieve_receiver(&reg_map);
1536       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1537       res = StubRoutines::forward_exception_entry();
1538       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1539     }
1540   JRT_BLOCK_END
1541   return res;
1542 JRT_END
1543 
1544 // return verified_code_entry if interp_only_mode is not set for the current thread;
1545 // otherwise return c2i entry.
1546 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1547   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1548     // In interp_only_mode we need to go to the interpreted entry
1549     // The c2i won't patch in this mode -- see fixup_callers_callsite
1550     return callee_method->get_c2i_entry();
1551   }
1552   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1553   return callee_method->verified_code_entry();
1554 }
1555 
1556 // resolve a static call and patch code
1557 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1558   methodHandle callee_method;
1559   bool enter_special = false;
1560   JRT_BLOCK
1561     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1562     current->set_vm_result_metadata(callee_method());
1563   JRT_BLOCK_END
1564   // return compiled code entry point after potential safepoints
1565   return get_resolved_entry(current, callee_method);
1566 JRT_END
1567 
1568 // resolve virtual call and update inline cache to monomorphic
1569 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1570   methodHandle callee_method;
1571   JRT_BLOCK
1572     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1573     current->set_vm_result_metadata(callee_method());
1574   JRT_BLOCK_END
1575   // return compiled code entry point after potential safepoints
1576   return get_resolved_entry(current, callee_method);
1577 JRT_END
1578 
1579 
1580 // Resolve a virtual call that can be statically bound (e.g., always
1581 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1582 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1583   methodHandle callee_method;
1584   JRT_BLOCK
1585     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1586     current->set_vm_result_metadata(callee_method());
1587   JRT_BLOCK_END
1588   // return compiled code entry point after potential safepoints
1589   return get_resolved_entry(current, callee_method);
1590 JRT_END
1591 
1592 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1593   JavaThread* current = THREAD;
1594   ResourceMark rm(current);
1595   CallInfo call_info;
1596   Bytecodes::Code bc;
1597 
1598   // receiver is null for static calls. An exception is thrown for null
1599   // receivers for non-static calls
1600   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1601 
1602   methodHandle callee_method(current, call_info.selected_method());
1603 
1604 #ifndef PRODUCT
1605   Atomic::inc(&_ic_miss_ctr);
1606 
1607   // Statistics & Tracing
1608   if (TraceCallFixup) {
1609     ResourceMark rm(current);
1610     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1611     callee_method->print_short_name(tty);
1612     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1613   }
1614 
1615   if (ICMissHistogram) {
1616     MutexLocker m(VMStatistic_lock);
1617     RegisterMap reg_map(current,
1618                         RegisterMap::UpdateMap::skip,
1619                         RegisterMap::ProcessFrames::include,
1620                         RegisterMap::WalkContinuation::skip);
1621     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1622     // produce statistics under the lock
1623     trace_ic_miss(f.pc());
1624   }
1625 #endif
1626 
1627   // install an event collector so that when a vtable stub is created the
1628   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1629   // event can't be posted when the stub is created as locks are held
1630   // - instead the event will be deferred until the event collector goes
1631   // out of scope.
1632   JvmtiDynamicCodeEventCollector event_collector;
1633 
1634   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1635   RegisterMap reg_map(current,
1636                       RegisterMap::UpdateMap::skip,
1637                       RegisterMap::ProcessFrames::include,
1638                       RegisterMap::WalkContinuation::skip);
1639   frame caller_frame = current->last_frame().sender(&reg_map);
1640   CodeBlob* cb = caller_frame.cb();
1641   nmethod* caller_nm = cb->as_nmethod();
1642 
1643   CompiledICLocker ml(caller_nm);
1644   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1645   inline_cache->update(&call_info, receiver()->klass());
1646 
1647   return callee_method;
1648 }
1649 
1650 //
1651 // Resets a call-site in compiled code so it will get resolved again.
1652 // This routine handles virtual call sites, optimized virtual call
1653 // sites, and static call sites. Typically used to change a call site's
1654 // destination from compiled to interpreted.
1655 //
1656 methodHandle SharedRuntime::reresolve_call_site(TRAPS) {
1657   JavaThread* current = THREAD;
1658   ResourceMark rm(current);
1659   RegisterMap reg_map(current,
1660                       RegisterMap::UpdateMap::skip,
1661                       RegisterMap::ProcessFrames::include,
1662                       RegisterMap::WalkContinuation::skip);
1663   frame stub_frame = current->last_frame();
1664   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1665   frame caller = stub_frame.sender(&reg_map);
1666 
1667   // Do nothing if the frame isn't a live compiled frame.
1668   // nmethod could be deoptimized by the time we get here
1669   // so no update to the caller is needed.
1670 
1671   if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1672       (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1673 
1674     address pc = caller.pc();
1675 
1676     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1677     assert(caller_nm != nullptr, "did not find caller nmethod");
1678 
1679     // Default call_addr is the location of the "basic" call.
1680     // Determine the address of the call we are reresolving. With
1681     // Inline Caches we will always find a recognizable call.
1682     // With Inline Caches disabled we may or may not find a
1683     // recognizable call. We will always find a call for static
1684     // calls and for optimized virtual calls. For vanilla virtual
1685     // calls it depends on the state of the UseInlineCaches switch.
1686     //
1687     // With Inline Caches disabled we can get here for a virtual call
1688     // for two reasons:
1689     //   1 - calling an abstract method. The vtable for abstract methods
1690     //       will run us through handle_wrong_method and we will eventually
1691     //       end up in the interpreter to throw the AME.
1692     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1693     //       call and between the time we fetch the entry address and
1694     //       we jump to it the target gets deoptimized. Similar to 1
1695     //       we will wind up in the interpreter (through a c2i with c2).
1696     //
1697     CompiledICLocker ml(caller_nm);
1698     address call_addr = caller_nm->call_instruction_address(pc);
1699 
1700     if (call_addr != nullptr) {
1701       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1702       // bytes back in the instruction stream so we must also check for reloc info.
1703       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1704       bool ret = iter.next(); // Get item
1705       if (ret) {
1706         switch (iter.type()) {
1707           case relocInfo::static_call_type:
1708           case relocInfo::opt_virtual_call_type: {
1709             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1710             cdc->set_to_clean();
1711             break;
1712           }
1713 
1714           case relocInfo::virtual_call_type: {
1715             // compiled, dispatched call (which used to call an interpreted method)
1716             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1717             inline_cache->set_to_clean();
1718             break;
1719           }
1720           default:
1721             break;
1722         }
1723       }
1724     }
1725   }
1726 
1727   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1728 
1729 
1730 #ifndef PRODUCT
1731   Atomic::inc(&_wrong_method_ctr);
1732 
1733   if (TraceCallFixup) {
1734     ResourceMark rm(current);
1735     tty->print("handle_wrong_method reresolving call to");
1736     callee_method->print_short_name(tty);
1737     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1738   }
1739 #endif
1740 
1741   return callee_method;
1742 }
1743 
1744 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1745   // The faulting unsafe accesses should be changed to throw the error
1746   // synchronously instead. Meanwhile the faulting instruction will be
1747   // skipped over (effectively turning it into a no-op) and an
1748   // asynchronous exception will be raised which the thread will
1749   // handle at a later point. If the instruction is a load it will
1750   // return garbage.
1751 
1752   // Request an async exception.
1753   thread->set_pending_unsafe_access_error();
1754 
1755   // Return address of next instruction to execute.

1921   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
1922 
1923   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
1924   if (message == nullptr) {
1925     // Shouldn't happen, but don't cause even more problems if it does
1926     message = const_cast<char*>(caster_klass->external_name());
1927   } else {
1928     jio_snprintf(message,
1929                  msglen,
1930                  "class %s cannot be cast to class %s (%s%s%s)",
1931                  caster_name,
1932                  target_name,
1933                  caster_klass_description,
1934                  klass_separator,
1935                  target_klass_description
1936                  );
1937   }
1938   return message;
1939 }
1940 
1941 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
1942   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
1943 JRT_END
1944 
1945 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
1946   if (!SafepointSynchronize::is_synchronizing()) {
1947     // Only try quick_enter() if we're not trying to reach a safepoint
1948     // so that the calling thread reaches the safepoint more quickly.
1949     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
1950       return;
1951     }
1952   }
1953   // NO_ASYNC required because an async exception on the state transition destructor
1954   // would leave you with the lock held and it would never be released.
1955   // The normal monitorenter NullPointerException is thrown without acquiring a lock
1956   // and the model is that an exception implies the method failed.
1957   JRT_BLOCK_NO_ASYNC
1958   Handle h_obj(THREAD, obj);
1959   ObjectSynchronizer::enter(h_obj, lock, current);
1960   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2171   tty->print_cr("        %% in nested categories are relative to their category");
2172   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2173   tty->cr();
2174 
2175   MethodArityHistogram h;
2176 }
2177 #endif
2178 
2179 #ifndef PRODUCT
2180 static int _lookups; // number of calls to lookup
2181 static int _equals;  // number of buckets checked with matching hash
2182 static int _archived_hits; // number of successful lookups in archived table
2183 static int _runtime_hits;  // number of successful lookups in runtime table
2184 #endif
2185 
2186 // A simple wrapper class around the calling convention information
2187 // that allows sharing of adapters for the same calling convention.
2188 class AdapterFingerPrint : public MetaspaceObj {
2189  private:
2190   enum {
2191     _basic_type_bits = 4,
2192     _basic_type_mask = right_n_bits(_basic_type_bits),
2193     _basic_types_per_int = BitsPerInt / _basic_type_bits,
2194   };
2195   // TO DO:  Consider integrating this with a more global scheme for compressing signatures.
2196   // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
2197 
2198   int _length;
2199 
2200   static int data_offset() { return sizeof(AdapterFingerPrint); }
2201   int* data_pointer() {
2202     return (int*)((address)this + data_offset());
2203   }
2204 
2205   // Private constructor. Use allocate() to get an instance.
2206   AdapterFingerPrint(int total_args_passed, BasicType* sig_bt, int len) {
2207     int* data = data_pointer();
2208     // Pack the BasicTypes with 8 per int
2209     assert(len == length(total_args_passed), "sanity");
2210     _length = len;
2211     int sig_index = 0;
2212     for (int index = 0; index < _length; index++) {
2213       int value = 0;
2214       for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2215         int bt = adapter_encoding(sig_bt[sig_index++]);
2216         assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2217         value = (value << _basic_type_bits) | bt;
2218       }
2219       data[index] = value;
2220     }
2221   }
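
A minimal standalone sketch (plain C++; pack is a hypothetical name, the constants mirror the enum above) of the loop in this constructor: each argument's 4-bit adapter encoding is shifted into a 32-bit word, eight codes per int, so the earlier arguments of a full word end up in its highest bits.

#include <cstdio>
#include <vector>

enum { kTypeBits = 4, kTypeMask = (1 << kTypeBits) - 1, kPerInt = 32 / kTypeBits };

// Pack pre-encoded 4-bit type codes, kPerInt per word, first codes shifted highest.
static std::vector<unsigned> pack(const std::vector<unsigned>& codes) {
  std::vector<unsigned> words((codes.size() + kPerInt - 1) / kPerInt, 0u);
  size_t si = 0;
  for (unsigned& w : words) {
    for (int slot = 0; si < codes.size() && slot < kPerInt; slot++) {
      w = (w << kTypeBits) | (codes[si++] & kTypeMask);
    }
  }
  return words;
}

int main() {
  // 11 = T_LONG, 14 = T_VOID, 10 = T_INT (illustrative HotSpot BasicType values)
  for (unsigned w : pack({11, 14, 10})) {
    std::printf("0x%x\n", w);  // prints 0xbea: a partial word stays in the low bits
  }
  return 0;
}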
2222 
2223   // Call deallocate instead
2224   ~AdapterFingerPrint() {
2225     ShouldNotCallThis();
2226   }
2227 
2228   static int length(int total_args) {
2229     return (total_args + (_basic_types_per_int-1)) / _basic_types_per_int;
2230   }
2231 
2232   static int compute_size_in_words(int len) {
2233     return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(int)));
2234   }
2235 
2236   // Remap BasicTypes that are handled equivalently by the adapters.
2237   // These are correct for the current system but someday it might be
2238   // necessary to make this mapping platform dependent.
2239   static int adapter_encoding(BasicType in) {
2240     switch (in) {
2241       case T_BOOLEAN:
2242       case T_BYTE:
2243       case T_SHORT:
2244       case T_CHAR:
2245         // These are all promoted to T_INT in the calling convention
2246         return T_INT;
2247 
2248       case T_OBJECT:
2249       case T_ARRAY:
2250         // In other words, we assume that any register good enough for
2251         // an int or long is good enough for a managed pointer.
2252 #ifdef _LP64
2253         return T_LONG;
2254 #else
2255         return T_INT;
2256 #endif
2257 
2258       case T_INT:
2259       case T_LONG:
2260       case T_FLOAT:
2261       case T_DOUBLE:
2262       case T_VOID:
2263         return in;
2264 
2265       default:
2266         ShouldNotReachHere();
2267         return T_CONFLICT;
2268     }
2269   }
2270 
2271   void* operator new(size_t size, size_t fp_size) throw() {
2272     assert(fp_size >= size, "sanity check");
2273     void* p = AllocateHeap(fp_size, mtCode);
2274     memset(p, 0, fp_size);
2275     return p;
2276   }
2277 
2278   template<typename Function>
2279   void iterate_args(Function function) {
2280     for (int i = 0; i < length(); i++) {
2281       unsigned val = (unsigned)value(i);
2282       // args are packed so that first/lower arguments are in the highest
2283       // bits of each int value, so iterate from highest to lowest
2284       for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2285         unsigned v = (val >> j) & _basic_type_mask;
2286         if (v == 0) {
2287           continue;
2288         }
2289         function(v);
2290       }
2291     }
2292   }
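
The complementary decode, sketched under the same assumptions as the packing example above: iterate_args scans each word from its highest nibble down and skips zero nibbles, which is safe because no BasicType adapter-encodes to 0, so a zero nibble can only be a slot that was never filled.

#include <cstdio>

// Visit each non-zero 4-bit code in a packed word, highest nibble first.
template <typename Fn>
static void for_each_code(unsigned word, Fn fn) {
  for (int shift = 32 - 4; shift >= 0; shift -= 4) {
    unsigned v = (word >> shift) & 0xF;
    if (v != 0) {
      fn(v);  // 0 marks an unused slot, never a real type code
    }
  }
}

int main() {
  for_each_code(0xBEAu, [](unsigned v) { std::printf("%u ", v); });  // 11 14 10
  std::printf("\n");
  return 0;
}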
2293 
2294  public:
2295   static AdapterFingerPrint* allocate(int total_args_passed, BasicType* sig_bt) {
2296     int len = length(total_args_passed);
2297     int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2298     AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(total_args_passed, sig_bt, len);
2299     assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2300     return afp;
2301   }
2302 
2303   static void deallocate(AdapterFingerPrint* fp) {
2304     FreeHeap(fp);
2305   }
2306 
2307   int value(int index) {
2308     int* data = data_pointer();
2309     return data[index];
2310   }
2311 
2312   int length() {
2313     return _length;
2314   }
2315 
2316   unsigned int compute_hash() {
2317     int hash = 0;
2318     for (int i = 0; i < length(); i++) {

2327     stringStream st;
2328     st.print("0x");
2329     for (int i = 0; i < length(); i++) {
2330       st.print("%x", value(i));
2331     }
2332     return st.as_string();
2333   }
2334 
2335   const char* as_basic_args_string() {
2336     stringStream st;
2337     bool long_prev = false;
2338     iterate_args([&] (int arg) {
2339       if (long_prev) {
2340         long_prev = false;
2341         if (arg == T_VOID) {
2342           st.print("J");
2343         } else {
2344           st.print("L");
2345         }
2346       }
2347       switch (arg) {
2348         case T_INT:    st.print("I");    break;
2349         case T_LONG:   long_prev = true; break;
2350         case T_FLOAT:  st.print("F");    break;
2351         case T_DOUBLE: st.print("D");    break;
2352         case T_VOID:   break;
2353         default: ShouldNotReachHere();
2354       }
2355     });
2356     if (long_prev) {
2357       st.print("L");
2358     }
2359     return st.as_string();
2360   }
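
A worked example, not from the source (assuming the LP64 encoding in adapter_encoding above): an instance method with descriptor (IJ)V packs as [T_LONG, T_INT, T_LONG, T_VOID], the receiver oop being encoded as T_LONG. This method prints "LIJ" for it: the receiver's T_LONG is rendered "L" because the following code is not T_VOID, while the true long is rendered "J" because its T_VOID filler slot follows.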
2361 
2362   BasicType* as_basic_type(int& nargs) {
2363     nargs = 0;
2364     GrowableArray<BasicType> btarray;
2365     bool long_prev = false;
2366 
2367     iterate_args([&] (int arg) {
2368       if (long_prev) {
2369         long_prev = false;
2370         if (arg == T_VOID) {
2371           btarray.append(T_LONG);
2372         } else {
2373           btarray.append(T_OBJECT); // it could be T_ARRAY; it shouldn't matter
2374         }
2375       }
2376       switch (arg) {
2377         case T_INT: // fallthrough
2378         case T_FLOAT: // fallthrough
2379         case T_DOUBLE:
2380         case T_VOID:
2381           btarray.append((BasicType)arg);
2382           break;
2383         case T_LONG:
2384           long_prev = true;
2385           break;
2386         default: ShouldNotReachHere();
2387       }
2388     });
2389 
2390     if (long_prev) {
2391       btarray.append(T_OBJECT);
2392     }
2393 
2394     nargs = btarray.length();
2395     BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, nargs);
2396     int index = 0;
2397     GrowableArrayIterator<BasicType> iter = btarray.begin();
2398     while (iter != btarray.end()) {
2399       sig_bt[index++] = *iter;
2400       ++iter;
2401     }
2402     assert(index == btarray.length(), "sanity check");
2403 #ifdef ASSERT
2404     {
2405       AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(nargs, sig_bt);
2406       assert(this->equals(compare_fp), "sanity check");
2407       AdapterFingerPrint::deallocate(compare_fp);
2408     }
2409 #endif
2410     return sig_bt;
2411   }
2412 
2413   bool equals(AdapterFingerPrint* other) {
2414     if (other->_length != _length) {
2415       return false;
2416     } else {
2417       for (int i = 0; i < _length; i++) {
2418         if (value(i) != other->value(i)) {
2419           return false;
2420         }
2421       }
2422     }
2423     return true;
2424   }
2425 
2426   // methods required by virtue of being a MetaspaceObj
2427   void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2428   int size() const { return compute_size_in_words(_length); }
2429   MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2430 
2431   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2432     NOT_PRODUCT(_equals++);

2441 #if INCLUDE_CDS
2442 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2443   return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2444 }
2445 
2446 class ArchivedAdapterTable : public OffsetCompactHashtable<
2447   AdapterFingerPrint*,
2448   AdapterHandlerEntry*,
2449   adapter_fp_equals_compact_hashtable_entry> {};
2450 #endif // INCLUDE_CDS
2451 
2452 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2453 using AdapterHandlerTable = HashTable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2454                   AnyObj::C_HEAP, mtCode,
2455                   AdapterFingerPrint::compute_hash,
2456                   AdapterFingerPrint::equals>;
2457 static AdapterHandlerTable* _adapter_handler_table;
2458 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2459 
2460 // Find an entry with the same fingerprint if it exists
2461 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicType* sig_bt) {
2462   NOT_PRODUCT(_lookups++);
2463   assert_lock_strong(AdapterHandlerLibrary_lock);
2464   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2465   AdapterHandlerEntry* entry = nullptr;
2466 #if INCLUDE_CDS
2467   // if we are building the archive then the archived adapter table is
2468   // not valid and we need to use the ones added to the runtime table
2469   if (AOTCodeCache::is_using_adapter()) {
2470     // Search the archived table first. It is a read-only table, so it can be searched without a lock
2471     entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2472 #ifndef PRODUCT
2473     if (entry != nullptr) {
2474       _archived_hits++;
2475     }
2476 #endif
2477   }
2478 #endif // INCLUDE_CDS
2479   if (entry == nullptr) {
2480     assert_lock_strong(AdapterHandlerLibrary_lock);
2481     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2482     if (entry_p != nullptr) {
2483       entry = *entry_p;
2484       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",

2502   ts.print(tty, "AdapterHandlerTable");
2503   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2504                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2505   int total_hits = _archived_hits + _runtime_hits;
2506   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2507                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2508 }
2509 #endif
2510 
2511 // ---------------------------------------------------------------------------
2512 // Implementation of AdapterHandlerLibrary
2513 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2514 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2515 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2516 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2517 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2518 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2519 #if INCLUDE_CDS
2520 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2521 #endif // INCLUDE_CDS
2522 static const int AdapterHandlerLibrary_size = 16*K;
2523 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2524 
2525 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2526   assert(_buffer != nullptr, "should be initialized");
2527   return _buffer;
2528 }
2529 
2530 static void post_adapter_creation(const AdapterBlob* new_adapter,
2531                                   const AdapterHandlerEntry* entry) {
2532   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2533     char blob_id[256];
2534     jio_snprintf(blob_id,
2535                  sizeof(blob_id),
2536                  "%s(%s)",
2537                  new_adapter->name(),
2538                  entry->fingerprint()->as_string());
2539     if (Forte::is_enabled()) {
2540       Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2541     }
2542 
2543     if (JvmtiExport::should_post_dynamic_code_generated()) {
2544       JvmtiExport::post_dynamic_code_generated(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2545     }
2546   }
2547 }
2548 
2549 void AdapterHandlerLibrary::create_abstract_method_handler() {
2550   assert_lock_strong(AdapterHandlerLibrary_lock);
2551   // Create a special handler for abstract methods.  Abstract methods
2552   // are never compiled so an i2c entry is somewhat meaningless, but
2553   // throw AbstractMethodError just in case.
2554   // Pass wrong_method_abstract for the c2i transitions to return
2555   // AbstractMethodError for invalid invocations.
2556   address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2557   _abstract_method_handler = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(0, nullptr));
2558   _abstract_method_handler->set_entry_points(SharedRuntime::throw_AbstractMethodError_entry(),
2559                                              wrong_method_abstract,
2560                                              wrong_method_abstract,
2561                                              nullptr);
2562 }
2563 
2564 void AdapterHandlerLibrary::initialize() {
2565   {
2566     ResourceMark rm;
2567     MutexLocker mu(AdapterHandlerLibrary_lock);
2568     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2569     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2570     create_abstract_method_handler();
2571   }
2572 
2573 #if INCLUDE_CDS
2574   // Link adapters in AOT Cache to their code in AOT Code Cache
2575   if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2576     link_aot_adapters();
2577     lookup_simple_adapters();
2578     return;
2579   }
2580 #endif // INCLUDE_CDS
2581 
2582   ResourceMark rm;
2583   AdapterBlob* no_arg_blob = nullptr;
2584   AdapterBlob* int_arg_blob = nullptr;
2585   AdapterBlob* obj_arg_blob = nullptr;
2586   AdapterBlob* obj_int_arg_blob = nullptr;
2587   AdapterBlob* obj_obj_arg_blob = nullptr;
2588   {
2589     MutexLocker mu(AdapterHandlerLibrary_lock);
2590 
2591     _no_arg_handler = create_adapter(no_arg_blob, 0, nullptr);
2592 
2593     BasicType obj_args[] = { T_OBJECT };
2594     _obj_arg_handler = create_adapter(obj_arg_blob, 1, obj_args);
2595 
2596     BasicType int_args[] = { T_INT };
2597     _int_arg_handler = create_adapter(int_arg_blob, 1, int_args);
2598 
2599     BasicType obj_int_args[] = { T_OBJECT, T_INT };
2600     _obj_int_arg_handler = create_adapter(obj_int_arg_blob, 2, obj_int_args);
2601 
2602     BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2603     _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, 2, obj_obj_args);
2604 
2605     // we should always get an entry back but we don't have any
2606     // associated blob on Zero
2607     assert(_no_arg_handler != nullptr &&
2608            _obj_arg_handler != nullptr &&
2609            _int_arg_handler != nullptr &&
2610            _obj_int_arg_handler != nullptr &&
2611            _obj_obj_arg_handler != nullptr, "Initial adapter handlers must be properly created");
2612   }
2613 
2614   // Outside of the lock
2615 #ifndef ZERO
2616   // no blobs to register when we are on Zero
2617   post_adapter_creation(no_arg_blob, _no_arg_handler);
2618   post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2619   post_adapter_creation(int_arg_blob, _int_arg_handler);
2620   post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2621   post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2622 #endif // ZERO
2623 }
2624 
2625 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2626   return AdapterHandlerEntry::allocate(fingerprint);
2627 }
2628 
2629 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2630   if (method->is_abstract()) {
2631     return _abstract_method_handler;
2632   }
2633   int total_args_passed = method->size_of_parameters(); // All args on stack
2634   if (total_args_passed == 0) {
2635     return _no_arg_handler;
2636   } else if (total_args_passed == 1) {
2637     if (!method->is_static()) {
2638       return _obj_arg_handler;
2639     }
2640     switch (method->signature()->char_at(1)) {
2641       case JVM_SIGNATURE_CLASS:
2642       case JVM_SIGNATURE_ARRAY:
2643         return _obj_arg_handler;
2644       case JVM_SIGNATURE_INT:
2645       case JVM_SIGNATURE_BOOLEAN:
2646       case JVM_SIGNATURE_CHAR:
2647       case JVM_SIGNATURE_BYTE:
2648       case JVM_SIGNATURE_SHORT:
2649         return _int_arg_handler;
2650     }
2651   } else if (total_args_passed == 2 &&
2652              !method->is_static()) {
2653     switch (method->signature()->char_at(1)) {
2654       case JVM_SIGNATURE_CLASS:
2655       case JVM_SIGNATURE_ARRAY:
2656         return _obj_obj_arg_handler;
2657       case JVM_SIGNATURE_INT:
2658       case JVM_SIGNATURE_BOOLEAN:
2659       case JVM_SIGNATURE_CHAR:
2660       case JVM_SIGNATURE_BYTE:
2661       case JVM_SIGNATURE_SHORT:
2662         return _obj_int_arg_handler;
2663     }
2664   }
2665   return nullptr;
2666 }
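
An illustrative summary (hedged; it simply restates the dispatch above in JVM descriptor syntax) of which trivial signatures reach which pre-built handler:

// descriptor               static?  flattened args      handler
// "()V"                    yes      (none)              _no_arg_handler
// "()V"                    no       receiver            _obj_arg_handler
// "(I)V"                   yes      int                 _int_arg_handler
// "(Ljava/lang/String;)V"  yes      object              _obj_arg_handler
// "(I)V"                   no       receiver, int       _obj_int_arg_handler
// "(Ljava/lang/Object;)V"  no       receiver, object    _obj_obj_arg_handler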
2667 
2668 class AdapterSignatureIterator : public SignatureIterator {
2669  private:
2670   BasicType stack_sig_bt[16];
2671   BasicType* sig_bt;
2672   int index;
2673 
2674  public:
2675   AdapterSignatureIterator(Symbol* signature,
2676                            fingerprint_t fingerprint,
2677                            bool is_static,
2678                            int total_args_passed) :
2679     SignatureIterator(signature, fingerprint),
2680     index(0)
2681   {
2682     sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2683     if (!is_static) { // Pass in receiver first
2684       sig_bt[index++] = T_OBJECT;
2685     }
2686     do_parameters_on(this);
2687   }
2688 
2689   BasicType* basic_types() {
2690     return sig_bt;
2691   }
2692 
2693 #ifdef ASSERT
2694   int slots() {
2695     return index;
2696   }
2697 #endif
2698 
2699  private:
2700 
2701   friend class SignatureIterator;  // so do_parameters_on can call do_type
2702   void do_type(BasicType type) {
2703     sig_bt[index++] = type;
2704     if (type == T_LONG || type == T_DOUBLE) {
2705       sig_bt[index++] = T_VOID; // Longs & doubles take 2 Java slots
2706     }
2707   }
2708 };
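
A minimal standalone sketch (plain C++; flatten is a hypothetical helper, not HotSpot's SignatureIterator) of what the iterator above produces: a leading T_OBJECT for the receiver of instance methods, one BasicType per parameter with sub-int types promoted, and a T_VOID filler slot after each long or double since those occupy two Java slots.

#include <cstdio>
#include <string>
#include <vector>

enum BT { BT_INT, BT_LONG, BT_FLOAT, BT_DOUBLE, BT_OBJECT, BT_VOID };

static std::vector<BT> flatten(const std::string& desc, bool is_static) {
  std::vector<BT> out;
  if (!is_static) out.push_back(BT_OBJECT);             // receiver comes first
  for (size_t i = 1; i < desc.size() && desc[i] != ')'; i++) {
    switch (desc[i]) {
      case 'J': out.push_back(BT_LONG);   out.push_back(BT_VOID); break;  // 2 slots
      case 'D': out.push_back(BT_DOUBLE); out.push_back(BT_VOID); break;  // 2 slots
      case 'F': out.push_back(BT_FLOAT); break;
      case 'L': out.push_back(BT_OBJECT); while (desc[i] != ';') i++; break;
      case '[': out.push_back(BT_OBJECT);               // any array is an oop
                while (desc[i] == '[') i++;
                if (desc[i] == 'L') while (desc[i] != ';') i++;
                break;
      default:  out.push_back(BT_INT); break;           // Z,B,C,S,I promote to int
    }
  }
  return out;
}

int main() {
  static const char* names[] = { "I", "J", "F", "D", "L", "V" };
  for (BT bt : flatten("(IJLjava/lang/String;)V", /*is_static=*/false)) {
    std::printf("%s ", names[bt]);                      // prints: L I J V L
  }
  std::printf("\n");
  return 0;
}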
2709 
2710 
2711 const char* AdapterHandlerEntry::_entry_names[] = {
2712   "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
2713 };
2714 
2715 #ifdef ASSERT
2716 void AdapterHandlerLibrary::verify_adapter_sharing(int total_args_passed, BasicType* sig_bt, AdapterHandlerEntry* cached_entry) {
2717   // we can only check for the same code if there is any
2718 #ifndef ZERO
2719   AdapterBlob* comparison_blob = nullptr;
2720   AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, total_args_passed, sig_bt, true);
2721   assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
2722   assert(comparison_entry->compare_code(cached_entry), "code must match");
2723   // Release the one just created
2724   AdapterHandlerEntry::deallocate(comparison_entry);
2725 #endif // ZERO
2726 }
2727 #endif /* ASSERT */
2728 
2729 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
2730   // Use customized signature handler.  Need to lock around updates to
2731   // the _adapter_handler_table (it is not safe for concurrent readers
2732   // and a single writer: this could be fixed if it becomes a
2733   // problem).
2734 
2735   // Fast-path for trivial adapters
2736   AdapterHandlerEntry* entry = get_simple_adapter(method);
2737   if (entry != nullptr) {
2738     return entry;
2739   }
2740 
2741   ResourceMark rm;
2742   AdapterBlob* adapter_blob = nullptr;
2743 
2744   // Fill in the signature array, for the calling-convention call.
2745   int total_args_passed = method->size_of_parameters(); // All args on stack
2746 
2747   AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2748                               method->is_static(), total_args_passed);
2749   assert(si.slots() == total_args_passed, "");
2750   BasicType* sig_bt = si.basic_types();
2751   {
2752     MutexLocker mu(AdapterHandlerLibrary_lock);
2753 
2754     // Lookup method signature's fingerprint
2755     entry = lookup(total_args_passed, sig_bt);
2756 
2757     if (entry != nullptr) {
2758       assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
2759 #ifdef ASSERT
2760       if (!entry->is_shared() && VerifyAdapterSharing) {
2761         verify_adapter_sharing(total_args_passed, sig_bt, entry);
2762       }
2763 #endif
2764     } else {
2765       entry = create_adapter(adapter_blob, total_args_passed, sig_bt);
2766     }
2767   }
2768 
2769   // Outside of the lock
2770   if (adapter_blob != nullptr) {
2771     post_adapter_creation(adapter_blob, entry);
2772   }
2773   return entry;
2774 }
2775 
2776 AdapterBlob* AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
2777   ResourceMark rm;
2778   const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
2779   const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
2780   int offsets[AdapterBlob::ENTRY_COUNT];
2781 
2782   AdapterBlob* adapter_blob = nullptr;
2783   CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name);
2784   if (blob != nullptr) {
2785     adapter_blob = blob->as_adapter_blob();
2786     adapter_blob->get_offsets(offsets);
2787     address i2c_entry = adapter_blob->content_begin();
2788     assert(offsets[0] == 0, "sanity check");
2789     handler->set_entry_points(
2790       i2c_entry,
2791       (offsets[1] != -1) ? (i2c_entry + offsets[1]) : nullptr,
2792       (offsets[2] != -1) ? (i2c_entry + offsets[2]) : nullptr,
2793       (offsets[3] != -1) ? (i2c_entry + offsets[3]) : nullptr
2794     );
2795   }
2796   return adapter_blob;
2797 }
2798 
2799 #ifndef PRODUCT
2800 void AdapterHandlerLibrary::print_adapter_handler_info(outputStream* st, AdapterHandlerEntry* handler, AdapterBlob* adapter_blob) {
2801   ttyLocker ttyl;
2802   ResourceMark rm;
2803   int insts_size;
2804   // on Zero the blob may be null
2805   handler->print_adapter_on(tty);
2806   if (adapter_blob == nullptr) {
2807     return;
2808   }
2809   insts_size = adapter_blob->code_size();
2810   st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
2811                 handler->fingerprint()->as_basic_args_string(),
2812                 handler->fingerprint()->as_string(), insts_size);
2813   st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
2814   if (Verbose || PrintStubCode) {
2815     address first_pc = handler->base_address();
2816     if (first_pc != nullptr) {
2817       Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
2818       st->cr();
2819     }
2820   }
2821 }
2822 #endif // PRODUCT
2823 
2824 bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob,
2825                                                   AdapterHandlerEntry* handler,
2826                                                   int total_args_passed,
2827                                                   BasicType* sig_bt,
2828                                                   bool is_transient) {
2829   if (log_is_enabled(Info, perf, class, link)) {
2830     ClassLoader::perf_method_adapters_count()->inc();
2831   }
2832 
2833   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2834   CodeBuffer buffer(buf);
2835   short buffer_locs[20];
2836   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2837                                          sizeof(buffer_locs)/sizeof(relocInfo));
2838   MacroAssembler masm(&buffer);
2839   VMRegPair stack_regs[16];
2840   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2841 
2842   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2843   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2844   SharedRuntime::generate_i2c2i_adapters(&masm,
2845                                          total_args_passed,
2846                                          comp_args_on_stack,
2847                                          sig_bt,
2848                                          regs,
2849                                          handler);
2850 #ifdef ZERO
2851   // On Zero there is no code to save and no need to create a blob
2852   // or relocate the handler.
2853   adapter_blob = nullptr;
2854 #else
2855 #ifdef ASSERT
2856   if (VerifyAdapterSharing) {
2857     handler->save_code(buf->code_begin(), buffer.insts_size());
2858     if (is_transient) {
2859       return true;
2860     }
2861   }
2862 #endif
2863 
2864   int entry_offset[AdapterBlob::ENTRY_COUNT];
2865   assert(AdapterBlob::ENTRY_COUNT == 4, "sanity");
2866   address i2c_entry = handler->get_i2c_entry();
2867   entry_offset[0] = 0; // i2c_entry offset
2868   entry_offset[1] = (handler->get_c2i_entry() != nullptr) ?
2869                     (handler->get_c2i_entry() - i2c_entry) : -1;
2870   entry_offset[2] = (handler->get_c2i_unverified_entry() != nullptr) ?
2871                     (handler->get_c2i_unverified_entry() - i2c_entry) : -1;
2872   entry_offset[3] = (handler->get_c2i_no_clinit_check_entry() != nullptr) ?
2873                     (handler->get_c2i_no_clinit_check_entry() - i2c_entry) : -1;
2874 
2875   adapter_blob = AdapterBlob::create(&buffer, entry_offset);
2876   if (adapter_blob == nullptr) {
2877     // CodeCache is full, disable compilation
2878     // Ought to log this, but the compile log is only per compile thread
2879     // and we're some nondescript Java thread.
2880     return false;
2881   }
2882   if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
2883     // try to save generated code
2884     const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
2885     const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
2886     bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name);
2887     assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
2888   }
2889   handler->relocate(adapter_blob->content_begin());
2890 #endif // ZERO
2891 
2892 #ifndef PRODUCT
2893   // debugging support
2894   if (PrintAdapterHandlers || PrintStubCode) {
2895     print_adapter_handler_info(tty, handler, adapter_blob);
2896   }
2897 #endif
2898 
2899   return true;
2900 }
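// [Sketch under the same assumptions as the loader sketch above] The inverse
// encoding used by generate_adapter_code: each optional entry is stored as its
// distance from the i2c entry, with -1 recording "entry not present" so that
// null entry points survive a save/load round trip. encode_entry_offset is a
// hypothetical name.

static int encode_entry_offset(const unsigned char* i2c_entry,
                               const unsigned char* entry) {
  // Non-negative delta for a live entry, -1 sentinel for a missing one.
  return (entry != nullptr) ? (int)(entry - i2c_entry) : -1;
}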
2901 
2902 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
2903                                                            int total_args_passed,
2904                                                            BasicType* sig_bt,
2905                                                            bool is_transient) {
2906   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2907   AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
2908   if (!generate_adapter_code(adapter_blob, handler, total_args_passed, sig_bt, is_transient)) {
2909     AdapterHandlerEntry::deallocate(handler);
2910     return nullptr;
2911   }
2912   if (!is_transient) {
2913     assert_lock_strong(AdapterHandlerLibrary_lock);
2914     _adapter_handler_table->put(fp, handler);
2915   }
2916   return handler;
2917 }
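// [Standalone sketch with hypothetical names] The table insertion above is one
// half of a lookup-or-create pattern keyed by the fingerprint: all methods whose
// signatures canonicalize to the same fingerprint share a single adapter. Here a
// std::map stands in for the HotSpot hashtable.

#include <map>
#include <string>

template <typename Handler, typename MakeFn>
Handler* lookup_or_create(std::map<std::string, Handler*>& table,
                          const std::string& fingerprint, MakeFn make) {
  auto it = table.find(fingerprint);
  if (it != table.end()) return it->second;        // reuse the shared adapter
  Handler* h = make();                             // generate code exactly once
  if (h != nullptr) table.emplace(fingerprint, h); // publish under the key
  return h;
}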
2918 
2919 #if INCLUDE_CDS
2920 void AdapterHandlerEntry::remove_unshareable_info() {
2921 #ifdef ASSERT
2922    _saved_code = nullptr;
2923    _saved_code_length = 0;
2924 #endif // ASSERT
2925   set_entry_points(nullptr, nullptr, nullptr, nullptr, false);
2926 }
2927 
2928 class CopyAdapterTableToArchive : StackObj {
2929 private:
2930   CompactHashtableWriter* _writer;
2931   ArchiveBuilder* _builder;
2932 public:
2933   CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
2934                                                              _builder(ArchiveBuilder::current())
2935   {}
2936 
2937   bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
2938     LogStreamHandle(Trace, aot) lsh;
2939     if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
2940       assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
2941       AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
2942       assert(buffered_fp != nullptr,"sanity check");
2943       AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
2944       assert(buffered_entry != nullptr,"sanity check");
2945 

2991 // This method is used during a production run to link archived adapters (stored in the AOT cache)
2992 // to their code in the AOT code cache.
2993 void AdapterHandlerEntry::link() {
2994   AdapterBlob* adapter_blob = nullptr;
2995   ResourceMark rm;
2996   assert(_fingerprint != nullptr, "_fingerprint must not be null");
2997   bool generate_code = false;
2998   // Generate code only if AOTCodeCache is not available, or
2999   // caching adapters is disabled, or we fail to link
3000   // the AdapterHandlerEntry to its code in the AOTCodeCache
3001   if (AOTCodeCache::is_using_adapter()) {
3002     adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this);
3003     if (adapter_blob == nullptr) {
3004       log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3005       generate_code = true;
3006     }
3007   } else {
3008     generate_code = true;
3009   }
3010   if (generate_code) {
3011     int nargs;
3012     BasicType* bt = _fingerprint->as_basic_type(nargs);
3013     if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, nargs, bt, /* is_transient */ false)) {
3014       // Don't throw exceptions during VM initialization because java.lang.* classes
3015       // might not have been initialized, causing problems when constructing the
3016       // Java exception object.
3017       vm_exit_during_initialization("Out of space in CodeCache for adapters");
3018     }
3019   }
3020   // Outside of the lock
3021   if (adapter_blob != nullptr) {
3022     post_adapter_creation(adapter_blob, this);
3023   }
3024   assert(_linked, "AdapterHandlerEntry must now be linked");
3025 }
3026 
3027 void AdapterHandlerLibrary::link_aot_adapters() {
3028   assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3029   _aot_adapter_handler_table.iterate([](AdapterHandlerEntry* entry) {
3030     assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3031     entry->link();
3032   });
3033 }
3034 
3035 // This method is called during a production run to look up simple adapters
3036 // in the archived adapter handler table.
3037 void AdapterHandlerLibrary::lookup_simple_adapters() {
3038   assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3039 
3040   MutexLocker mu(AdapterHandlerLibrary_lock);
3041   _no_arg_handler = lookup(0, nullptr);
3042 
3043   BasicType obj_args[] = { T_OBJECT };
3044   _obj_arg_handler = lookup(1, obj_args);
3045 
3046   BasicType int_args[] = { T_INT };
3047   _int_arg_handler = lookup(1, int_args);
3048 
3049   BasicType obj_int_args[] = { T_OBJECT, T_INT };
3050   _obj_int_arg_handler = lookup(2, obj_int_args);
3051 
3052   BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
3053   _obj_obj_arg_handler = lookup(2, obj_obj_args);
3054 
3055   assert(_no_arg_handler != nullptr &&
3056          _obj_arg_handler != nullptr &&
3057          _int_arg_handler != nullptr &&
3058          _obj_int_arg_handler != nullptr &&
3059          _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3060   assert(_no_arg_handler->is_linked() &&
3061          _obj_arg_handler->is_linked() &&
3062          _int_arg_handler->is_linked() &&
3063          _obj_int_arg_handler->is_linked() &&
3064          _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3065 }
3066 #endif // INCLUDE_CDS
3067 
3068 address AdapterHandlerEntry::base_address() {
3069   address base = _i2c_entry;
3070   if (base == nullptr)  base = _c2i_entry;
3071   assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
3072   assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
3073   assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
3074   return base;
3075 }
3076 
3077 void AdapterHandlerEntry::relocate(address new_base) {
3078   address old_base = base_address();
3079   assert(old_base != nullptr, "");
3080   ptrdiff_t delta = new_base - old_base;
3081   if (_i2c_entry != nullptr)
3082     _i2c_entry += delta;
3083   if (_c2i_entry != nullptr)
3084     _c2i_entry += delta;
3085   if (_c2i_unverified_entry != nullptr)
3086     _c2i_unverified_entry += delta;
3087   if (_c2i_no_clinit_check_entry != nullptr)
3088     _c2i_no_clinit_check_entry += delta;
3089   assert(base_address() == new_base, "");
3090 }
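// [Minimal standalone sketch] The relocation above shifts every non-null entry
// point by one common delta, so the entries keep their layout relative to each
// other and absent (null) entries stay null. shift_entry is a hypothetical name
// for the per-field step.

#include <cstddef>

static void shift_entry(unsigned char*& entry, std::ptrdiff_t delta) {
  if (entry != nullptr) {
    entry += delta; // one delta for all entries keeps intra-blob offsets intact
  }
}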
3091 
3092 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3093   LogStreamHandle(Trace, aot) lsh;
3094   if (lsh.is_enabled()) {
3095     lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3096     lsh.cr();
3097   }
3098   it->push(&_fingerprint);
3099 }
3100 
3101 AdapterHandlerEntry::~AdapterHandlerEntry() {
3102   if (_fingerprint != nullptr) {
3103     AdapterFingerPrint::deallocate(_fingerprint);
3104     _fingerprint = nullptr;
3105   }
3106 #ifdef ASSERT
3107   FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3108 #endif
3109   FreeHeap(this);
3110 }
3111 
3112 
3113 #ifdef ASSERT
3114 // Capture the code before relocation so that it can be compared
3115 // against other versions.  If the code is captured after relocation
3116 // then relative instructions won't be equivalent.
3117 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3118   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3119   _saved_code_length = length;
3120   memcpy(_saved_code, buffer, length);
3121 }
3122 
3123 
3124 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3125   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");

3173 
3174       struct { double data[20]; } locs_buf;
3175       struct { double data[20]; } stubs_locs_buf;
3176       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3177 #if defined(AARCH64) || defined(PPC64)
3178       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3179       // in the constant pool to ensure ordering between the barrier and oops
3180       // accesses. For native_wrappers we need a constant.
3181       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3182       // static java call that is resolved in the runtime.
3183       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3184         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3185       }
3186 #endif
3187       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3188       MacroAssembler _masm(&buffer);
3189 
3190       // Fill in the signature array, for the calling-convention call.
3191       const int total_args_passed = method->size_of_parameters();
3192 
3193       VMRegPair stack_regs[16];
3194       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3195 
3196       AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
3197                               method->is_static(), total_args_passed);
3198       BasicType* sig_bt = si.basic_types();
3199       assert(si.slots() == total_args_passed, "");
3200       BasicType ret_type = si.return_type();
3201 
3202       // Now get the compiled-Java arguments layout.
3203       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3204 
3205       // Generate the compiled-to-native wrapper code
3206       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3207 
3208       if (nm != nullptr) {
3209         {
3210           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3211           if (nm->make_in_use()) {
3212             method->set_code(method, nm);
3213           }
3214         }
3215 
3216         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3217         if (directive->PrintAssemblyOption) {
3218           nm->print_code();
3219         }
3220         DirectivesStack::release(directive);

3463         a->print_adapter_on(st);
3464         return true;
3465       } else {
3466         return false; // keep looking
3467       }
3468     };
3469     assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3470     _adapter_handler_table->iterate(findblob_runtime_table);
3471   }
3472   assert(found, "Should have found handler");
3473 }
3474 
3475 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3476   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3477   if (get_i2c_entry() != nullptr) {
3478     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3479   }
3480   if (get_c2i_entry() != nullptr) {
3481     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3482   }
3483   if (get_c2i_unverified_entry() != nullptr) {
3484     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3485   }
3486   if (get_c2i_no_clinit_check_entry() != nullptr) {
3487     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3488   }
3489   st->cr();
3490 }
3491 
3492 #ifndef PRODUCT
3493 
3494 void AdapterHandlerLibrary::print_statistics() {
3495   print_table_statistics();
3496 }
3497 
3498 #endif /* PRODUCT */
3499 
3500 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3501   if (entry == _abstract_method_handler) {
3502     return true;
3503   }
3504   return false;

3560         event.set_method(method);
3561         event.commit();
3562       }
3563     }
3564   }
3565   return activation;
3566 }
3567 
3568 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3569   // After any safepoint, just before going back to compiled code,
3570   // we inform the GC that we will be doing initializing writes to
3571   // this object in the future without emitting card-marks, so
3572   // GC may take any compensating steps.
3573 
3574   oop new_obj = current->vm_result_oop();
3575   if (new_obj == nullptr) return;
3576 
3577   BarrierSet *bs = BarrierSet::barrier_set();
3578   bs->on_slowpath_allocation_exit(current, new_obj);
3579 }

  28 #include "classfile/javaClasses.inline.hpp"
  29 #include "classfile/stringTable.hpp"
  30 #include "classfile/vmClasses.hpp"
  31 #include "classfile/vmSymbols.hpp"
  32 #include "code/aotCodeCache.hpp"
  33 #include "code/codeCache.hpp"
  34 #include "code/compiledIC.hpp"
  35 #include "code/nmethod.inline.hpp"
  36 #include "code/scopeDesc.hpp"
  37 #include "code/vtableStubs.hpp"
  38 #include "compiler/abstractCompiler.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "gc/shared/barrierSet.hpp"
  42 #include "gc/shared/collectedHeap.hpp"
  43 #include "interpreter/interpreter.hpp"
  44 #include "interpreter/interpreterRuntime.hpp"
  45 #include "jvm.h"
  46 #include "jfr/jfrEvents.hpp"
  47 #include "logging/log.hpp"
  48 #include "memory/oopFactory.hpp"
  49 #include "memory/resourceArea.hpp"
  50 #include "memory/universe.hpp"
  51 #include "oops/access.hpp"
  52 #include "oops/fieldStreams.inline.hpp"
  53 #include "metaprogramming/primitiveConversions.hpp"
  54 #include "oops/klass.hpp"
  55 #include "oops/method.inline.hpp"
  56 #include "oops/objArrayKlass.hpp"
  57 #include "oops/objArrayOop.inline.hpp"
  58 #include "oops/oop.inline.hpp"
  59 #include "oops/inlineKlass.inline.hpp"
  60 #include "prims/forte.hpp"
  61 #include "prims/jvmtiExport.hpp"
  62 #include "prims/jvmtiThreadState.hpp"
  63 #include "prims/methodHandles.hpp"
  64 #include "prims/nativeLookup.hpp"
  65 #include "runtime/arguments.hpp"
  66 #include "runtime/atomic.hpp"
  67 #include "runtime/basicLock.inline.hpp"
  68 #include "runtime/frame.inline.hpp"
  69 #include "runtime/handles.inline.hpp"
  70 #include "runtime/init.hpp"
  71 #include "runtime/interfaceSupport.inline.hpp"
  72 #include "runtime/java.hpp"
  73 #include "runtime/javaCalls.hpp"
  74 #include "runtime/jniHandles.inline.hpp"
  75 #include "runtime/perfData.hpp"
  76 #include "runtime/sharedRuntime.hpp"
  77 #include "runtime/stackWatermarkSet.hpp"
  78 #include "runtime/stubRoutines.hpp"
  79 #include "runtime/synchronizer.inline.hpp"

1182 // for a call currently in progress, i.e., arguments have been pushed on the stack
1183 // but the callee has not been invoked yet.  The caller frame must be compiled.
1184 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1185                                               CallInfo& callinfo, TRAPS) {
1186   Handle receiver;
1187   Handle nullHandle;  // create a handy null handle for exception returns
1188   JavaThread* current = THREAD;
1189 
1190   assert(!vfst.at_end(), "Java frame must exist");
1191 
1192   // Find caller and bci from vframe
1193   methodHandle caller(current, vfst.method());
1194   int          bci   = vfst.bci();
1195 
1196   if (caller->is_continuation_enter_intrinsic()) {
1197     bc = Bytecodes::_invokestatic;
1198     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1199     return receiver;
1200   }
1201 
1202   // Substitutability test implementation piggybacks on static call resolution
1203   Bytecodes::Code code = caller->java_code_at(bci);
1204   if (code == Bytecodes::_if_acmpeq || code == Bytecodes::_if_acmpne) {
1205     bc = Bytecodes::_invokestatic;
1206     methodHandle attached_method(THREAD, extract_attached_method(vfst));
1207     assert(attached_method.not_null(), "must have attached method");
1208     vmClasses::ValueObjectMethods_klass()->initialize(CHECK_NH);
1209     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, false, CHECK_NH);
1210 #ifdef ASSERT
1211     Method* is_subst = vmClasses::ValueObjectMethods_klass()->find_method(vmSymbols::isSubstitutable_name(), vmSymbols::object_object_boolean_signature());
1212     assert(callinfo.selected_method() == is_subst, "must be isSubstitutable method");
1213 #endif
1214     return receiver;
1215   }
1216 
1217   Bytecode_invoke bytecode(caller, bci);
1218   int bytecode_index = bytecode.index();
1219   bc = bytecode.invoke_code();
1220 
1221   methodHandle attached_method(current, extract_attached_method(vfst));
1222   if (attached_method.not_null()) {
1223     Method* callee = bytecode.static_target(CHECK_NH);
1224     vmIntrinsics::ID id = callee->intrinsic_id();
1225     // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call,
1226     // it attaches statically resolved method to the call site.
1227     if (MethodHandles::is_signature_polymorphic(id) &&
1228         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1229       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1230 
1231       // Adjust invocation mode according to the attached method.
1232       switch (bc) {
1233         case Bytecodes::_invokevirtual:
1234           if (attached_method->method_holder()->is_interface()) {
1235             bc = Bytecodes::_invokeinterface;
1236           }
1237           break;
1238         case Bytecodes::_invokeinterface:
1239           if (!attached_method->method_holder()->is_interface()) {
1240             bc = Bytecodes::_invokevirtual;
1241           }
1242           break;
1243         case Bytecodes::_invokehandle:
1244           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1245             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1246                                               : Bytecodes::_invokevirtual;
1247           }
1248           break;
1249         default:
1250           break;
1251       }
1252     } else {
1253       assert(attached_method->has_scalarized_args(), "invalid use of attached method");
1254       if (!attached_method->method_holder()->is_inline_klass()) {
1255         // Ignore the attached method in this case to not confuse below code
1256         attached_method = methodHandle(current, nullptr);
1257       }
1258     }
1259   }
1260 
1261   assert(bc != Bytecodes::_illegal, "not initialized");
1262 
1263   bool has_receiver = bc != Bytecodes::_invokestatic &&
1264                       bc != Bytecodes::_invokedynamic &&
1265                       bc != Bytecodes::_invokehandle;
1266   bool check_null_and_abstract = true;
1267 
1268   // Find receiver for non-static call
1269   if (has_receiver) {
1270     // This register map must be updated since we need to find the receiver for
1271     // compiled frames. The receiver might be in a register.
1272     RegisterMap reg_map2(current,
1273                          RegisterMap::UpdateMap::include,
1274                          RegisterMap::ProcessFrames::include,
1275                          RegisterMap::WalkContinuation::skip);
1276     frame stubFrame   = current->last_frame();
1277     // Caller-frame is a compiled frame
1278     frame callerFrame = stubFrame.sender(&reg_map2);
1279 
1280     Method* callee = attached_method();
1281     if (callee == nullptr) {
1282       callee = bytecode.static_target(CHECK_NH);
1283       if (callee == nullptr) {
1284         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1285       }
1286     }
1287     bool caller_is_c1 = callerFrame.is_compiled_frame() && callerFrame.cb()->as_nmethod()->is_compiled_by_c1();
1288     if (!caller_is_c1 && callee->is_scalarized_arg(0)) {
1289       // If the receiver is an inline type that is passed as fields, no oop is available
1290       // Resolve the call without receiver null checking.
1291       assert(!callee->mismatch(), "calls with inline type receivers should never mismatch");
1292       assert(attached_method.not_null() && !attached_method->is_abstract(), "must have non-abstract attached method");
1293       if (bc == Bytecodes::_invokeinterface) {
1294         bc = Bytecodes::_invokevirtual; // C2 optimistically replaces interface calls by virtual calls
1295       }
1296       check_null_and_abstract = false;
1297     } else {
1298       // Retrieve from a compiled argument list
1299       receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1300       assert(oopDesc::is_oop_or_null(receiver()), "");
1301       if (receiver.is_null()) {
1302         THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1303       }
1304     }
1305   }
1306 
1307   // Resolve method
1308   if (attached_method.not_null()) {
1309     // Parameterized by attached method.
1310     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, check_null_and_abstract, CHECK_NH);
1311   } else {
1312     // Parameterized by bytecode.
1313     constantPoolHandle constants(current, caller->constants());
1314     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1315   }
1316 
1317 #ifdef ASSERT
1318   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1319   if (has_receiver && check_null_and_abstract) {
1320     assert(receiver.not_null(), "should have thrown exception");
1321     Klass* receiver_klass = receiver->klass();
1322     Klass* rk = nullptr;
1323     if (attached_method.not_null()) {
1324       // In case there's resolved method attached, use its holder during the check.
1325       rk = attached_method->method_holder();
1326     } else {
1327       // Klass is already loaded.
1328       constantPoolHandle constants(current, caller->constants());
1329       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1330     }
1331     Klass* static_receiver_klass = rk;
1332     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1333            "actual receiver must be subclass of static receiver klass");
1334     if (receiver_klass->is_instance_klass()) {
1335       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1336         tty->print_cr("ERROR: Klass not yet initialized!!");
1337         receiver_klass->print();
1338       }
1339       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1340     }
1341   }
1342 #endif
1343 
1344   return receiver;
1345 }
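// [Illustrative model, not VM code] The invocation-mode adjustment performed
// above for attached methods, reduced to its core: a virtual-style call is
// rewritten so that it agrees with whether the resolved method's holder is an
// interface. InvokeMode and adjust_mode are hypothetical stand-ins for the
// Bytecodes values used in the real code.

enum class InvokeMode { Virtual, Interface, Static };

static InvokeMode adjust_mode(InvokeMode bc, bool holder_is_interface) {
  if (bc == InvokeMode::Virtual && holder_is_interface)    return InvokeMode::Interface;
  if (bc == InvokeMode::Interface && !holder_is_interface) return InvokeMode::Virtual;
  return bc; // other modes pass through unchanged
}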
1346 
1347 methodHandle SharedRuntime::find_callee_method(bool is_optimized, bool& caller_is_c1, TRAPS) {
1348   JavaThread* current = THREAD;
1349   ResourceMark rm(current);
1350   // We first need to check whether any Java activations (compiled, interpreted)
1351   // exist on the stack since the last JavaCall.  If not, we need
1352   // to get the target method from the JavaCall wrapper.
1353   vframeStream vfst(current, true);  // Do not skip any javaCalls
1354   methodHandle callee_method;
1355   if (vfst.at_end()) {
1356     // No Java frames were found on stack since we did the JavaCall.
1357     // Hence the stack can only contain an entry_frame.  We need to
1358     // find the target method from the stub frame.
1359     RegisterMap reg_map(current,
1360                         RegisterMap::UpdateMap::skip,
1361                         RegisterMap::ProcessFrames::include,
1362                         RegisterMap::WalkContinuation::skip);
1363     frame fr = current->last_frame();
1364     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1365     fr = fr.sender(&reg_map);
1366     assert(fr.is_entry_frame(), "must be");
1367     // fr is now pointing to the entry frame.
1368     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1369   } else {
1370     Bytecodes::Code bc;
1371     CallInfo callinfo;
1372     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1373     // Calls via mismatching methods are always non-scalarized
1374     if (callinfo.resolved_method()->mismatch() && !is_optimized) {
1375       caller_is_c1 = true;
1376     }
1377     callee_method = methodHandle(current, callinfo.selected_method());
1378   }
1379   assert(callee_method()->is_method(), "must be");
1380   return callee_method;
1381 }
1382 
1383 // Resolves a call.
1384 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, bool& caller_is_c1, TRAPS) {
1385   JavaThread* current = THREAD;
1386   ResourceMark rm(current);
1387   RegisterMap cbl_map(current,
1388                       RegisterMap::UpdateMap::skip,
1389                       RegisterMap::ProcessFrames::include,
1390                       RegisterMap::WalkContinuation::skip);
1391   frame caller_frame = current->last_frame().sender(&cbl_map);
1392 
1393   CodeBlob* caller_cb = caller_frame.cb();
1394   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1395   nmethod* caller_nm = caller_cb->as_nmethod();
1396 
1397   // determine call info & receiver
1398   // note: a) receiver is null for static calls
1399   //       b) an exception is thrown if receiver is null for non-static calls
1400   CallInfo call_info;
1401   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1402   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1403 
1404   NoSafepointVerifier nsv;
1405 
1406   methodHandle callee_method(current, call_info.selected_method());
1407   // Calls via mismatching methods are always non-scalarized
1408   if (caller_nm->is_compiled_by_c1() || (call_info.resolved_method()->mismatch() && !is_optimized)) {
1409     caller_is_c1 = true;
1410   }
1411 
1412   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1413          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1414          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1415          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1416          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1417 
1418   assert(!caller_nm->is_unloading(), "It should not be unloading");
1419 
1420 #ifndef PRODUCT
1421   // tracing/debugging/statistics
1422   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1423                  (is_virtual) ? (&_resolve_virtual_ctr) :
1424                                 (&_resolve_static_ctr);
1425   Atomic::inc(addr);
1426 
1427   if (TraceCallFixup) {
1428     ResourceMark rm(current);
1429     tty->print("resolving %s%s (%s) call%s to",
1430                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1431                Bytecodes::name(invoke_code), (caller_is_c1) ? " from C1" : "");
1432     callee_method->print_short_name(tty);
1433     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1434                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1435   }
1436 #endif
1437 
1438   if (invoke_code == Bytecodes::_invokestatic) {
1439     assert(callee_method->method_holder()->is_initialized() ||
1440            callee_method->method_holder()->is_reentrant_initialization(current),
1441            "invalid class initialization state for invoke_static");
1442     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1443       // In order to keep class initialization check, do not patch call
1444       // site for static call when the class is not fully initialized.
1445       // Proper check is enforced by call site re-resolution on every invocation.
1446       //
1447       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1448       // explicit class initialization check is put in nmethod entry (VEP).
1449       assert(callee_method->method_holder()->is_linked(), "must be");
1450       return callee_method;
1451     }
1452   }
1453 
1454 
1455   // JSR 292 key invariant:
1456   // If the resolved method is a MethodHandle invoke target, the call
1457   // site must be a MethodHandle call site, because the lambda form might tail-call
1458   // leaving the stack in a state unknown to either caller or callee
1459 
1460   // Compute entry points. The computation of the entry points is independent of
1461   // patching the call.
1462 
1463   // Make sure the callee nmethod does not get deoptimized and removed before
1464   // we are done patching the code.
1465 
1466 
1467   CompiledICLocker ml(caller_nm);
1468   if (is_virtual && !is_optimized) {
1469     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1470     inline_cache->update(&call_info, receiver->klass(), caller_is_c1);
1471   } else {
1472     // Callsite is a direct call - set it to the destination method
1473     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1474     callsite->set(callee_method, caller_is_c1);
1475   }
1476 
1477   return callee_method;
1478 }
1479 
1480 // Inline caches exist only in compiled code
1481 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1482 #ifdef ASSERT
1483   RegisterMap reg_map(current,
1484                       RegisterMap::UpdateMap::skip,
1485                       RegisterMap::ProcessFrames::include,
1486                       RegisterMap::WalkContinuation::skip);
1487   frame stub_frame = current->last_frame();
1488   assert(stub_frame.is_runtime_frame(), "sanity check");
1489   frame caller_frame = stub_frame.sender(&reg_map);
1490   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1491 #endif /* ASSERT */
1492 
1493   methodHandle callee_method;
1494   bool is_optimized = false;
1495   bool caller_is_c1 = false;
1496   JRT_BLOCK
1497     callee_method = SharedRuntime::handle_ic_miss_helper(is_optimized, caller_is_c1, CHECK_NULL);
1498     // Return Method* through TLS
1499     current->set_vm_result_metadata(callee_method());
1500   JRT_BLOCK_END
1501   // return compiled code entry point after potential safepoints
1502   return get_resolved_entry(current, callee_method, false, is_optimized, caller_is_c1);
1503 JRT_END
1504 
1505 
1506 // Handle call site that has been made non-entrant
1507 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1508   // 6243940 We might end up in here if the callee is deoptimized
1509   // as we race to call it.  We don't want to take a safepoint if
1510   // the caller was interpreted because the caller frame will look
1511   // interpreted to the stack walkers and arguments are now
1512   // "compiled" so it is much better to make this transition
1513   // invisible to the stack walking code. The i2c path will
1514   // place the callee method in the callee_target. It is stashed
1515   // there because if we try to find the callee by normal means a
1516   // safepoint is possible and we would have trouble gc'ing the compiled args.
1517   RegisterMap reg_map(current,
1518                       RegisterMap::UpdateMap::skip,
1519                       RegisterMap::ProcessFrames::include,
1520                       RegisterMap::WalkContinuation::skip);
1521   frame stub_frame = current->last_frame();
1522   assert(stub_frame.is_runtime_frame(), "sanity check");
1523   frame caller_frame = stub_frame.sender(&reg_map);
1524 
1525   if (caller_frame.is_interpreted_frame() ||
1526       caller_frame.is_entry_frame() ||
1527       caller_frame.is_upcall_stub_frame()) {
1528     Method* callee = current->callee_target();
1529     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1530     current->set_vm_result_metadata(callee);
1531     current->set_callee_target(nullptr);
1532     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1533       // Bypass class initialization checks in c2i when caller is in native.
1534       // JNI calls to static methods don't have class initialization checks.
1535       // Fast class initialization checks are present in c2i adapters and call into
1536       // SharedRuntime::handle_wrong_method() on the slow path.
1537       //
1538       // JVM upcalls may land here as well, but there's a proper check present in
1539       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1540       // so bypassing it in c2i adapter is benign.
1541       return callee->get_c2i_no_clinit_check_entry();
1542     } else {
1543       if (caller_frame.is_interpreted_frame()) {
1544         return callee->get_c2i_inline_entry();
1545       } else {
1546         return callee->get_c2i_entry();
1547       }
1548     }
1549   }
1550 
1551   // Must be compiled to compiled path which is safe to stackwalk
1552   methodHandle callee_method;
1553   bool is_static_call = false;
1554   bool is_optimized = false;
1555   bool caller_is_c1 = false;
1556   JRT_BLOCK
1557     // Force resolving of caller (if we called from compiled frame)
1558     callee_method = SharedRuntime::reresolve_call_site(is_static_call, is_optimized, caller_is_c1, CHECK_NULL);
1559     current->set_vm_result_metadata(callee_method());
1560   JRT_BLOCK_END
1561   // return compiled code entry point after potential safepoints
1562   return get_resolved_entry(current, callee_method, is_static_call, is_optimized, caller_is_c1);
1563 JRT_END
1564 
1565 // Handle abstract method call
1566 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1567   // Verbose error message for AbstractMethodError.
1568   // Get the called method from the invoke bytecode.
1569   vframeStream vfst(current, true);
1570   assert(!vfst.at_end(), "Java frame must exist");
1571   methodHandle caller(current, vfst.method());
1572   Bytecode_invoke invoke(caller, vfst.bci());
1573   DEBUG_ONLY( invoke.verify(); )
1574 
1575   // Find the compiled caller frame.
1576   RegisterMap reg_map(current,
1577                       RegisterMap::UpdateMap::include,
1578                       RegisterMap::ProcessFrames::include,
1579                       RegisterMap::WalkContinuation::skip);
1580   frame stubFrame = current->last_frame();
1581   assert(stubFrame.is_runtime_frame(), "must be");
1582   frame callerFrame = stubFrame.sender(&reg_map);
1583   assert(callerFrame.is_compiled_frame(), "must be");
1584 
1585   // Install exception and return forward entry.
1586   address res = SharedRuntime::throw_AbstractMethodError_entry();
1587   JRT_BLOCK
1588     methodHandle callee(current, invoke.static_target(current));
1589     if (!callee.is_null()) {
1590       oop recv = callerFrame.retrieve_receiver(&reg_map);
1591       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1592       res = StubRoutines::forward_exception_entry();
1593       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1594     }
1595   JRT_BLOCK_END
1596   return res;
1597 JRT_END
1598 
1599 // return verified_code_entry if interp_only_mode is not set for the current thread;
1600 // otherwise return c2i entry.
1601 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method,
1602                                           bool is_static_call, bool is_optimized, bool caller_is_c1) {
1603   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1604     // In interp_only_mode we need to go to the interpreted entry
1605     // The c2i won't patch in this mode -- see fixup_callers_callsite
1606     return callee_method->get_c2i_entry();
1607   }
1608 
1609   if (caller_is_c1) {
1610     assert(callee_method->verified_inline_code_entry() != nullptr, "Jump to zero!");
1611     return callee_method->verified_inline_code_entry();
1612   } else if (is_static_call || is_optimized) {
1613     assert(callee_method->verified_code_entry() != nullptr, "Jump to zero!");
1614     return callee_method->verified_code_entry();
1615   } else {
1616     assert(callee_method->verified_inline_ro_code_entry() != nullptr, "Jump to zero!");
1617     return callee_method->verified_inline_ro_code_entry();
1618   }
1619 }
1620 
1621 // resolve a static call and patch code
1622 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1623   methodHandle callee_method;
1624   bool caller_is_c1 = false;
1625   bool enter_special = false;
1626   JRT_BLOCK
1627     callee_method = SharedRuntime::resolve_helper(false, false, caller_is_c1, CHECK_NULL);
1628     current->set_vm_result_metadata(callee_method());
1629   JRT_BLOCK_END
1630   // return compiled code entry point after potential safepoints
1631   return get_resolved_entry(current, callee_method, true, false, caller_is_c1);
1632 JRT_END
1633 
1634 // resolve virtual call and update inline cache to monomorphic
1635 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1636   methodHandle callee_method;
1637   bool caller_is_c1 = false;
1638   JRT_BLOCK
1639     callee_method = SharedRuntime::resolve_helper(true, false, caller_is_c1, CHECK_NULL);
1640     current->set_vm_result_metadata(callee_method());
1641   JRT_BLOCK_END
1642   // return compiled code entry point after potential safepoints
1643   return get_resolved_entry(current, callee_method, false, false, caller_is_c1);
1644 JRT_END
1645 
1646 
1647 // Resolve a virtual call that can be statically bound (e.g., always
1648 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1649 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1650   methodHandle callee_method;
1651   bool caller_is_c1 = false;
1652   JRT_BLOCK
1653     callee_method = SharedRuntime::resolve_helper(true, true, caller_is_c1, CHECK_NULL);
1654     current->set_vm_result_metadata(callee_method());
1655   JRT_BLOCK_END
1656   // return compiled code entry point after potential safepoints
1657   return get_resolved_entry(current, callee_method, false, true, caller_is_c1);
1658 JRT_END
1659 
1660 
1661 
1662 methodHandle SharedRuntime::handle_ic_miss_helper(bool& is_optimized, bool& caller_is_c1, TRAPS) {
1663   JavaThread* current = THREAD;
1664   ResourceMark rm(current);
1665   CallInfo call_info;
1666   Bytecodes::Code bc;
1667 
1668   // receiver is null for static calls. An exception is thrown for null
1669   // receivers for non-static calls
1670   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1671 
1672   methodHandle callee_method(current, call_info.selected_method());
1673 
1674 #ifndef PRODUCT
1675   Atomic::inc(&_ic_miss_ctr);
1676 
1677   // Statistics & Tracing
1678   if (TraceCallFixup) {
1679     ResourceMark rm(current);
1680     tty->print("IC miss (%s) call%s to", Bytecodes::name(bc), (caller_is_c1) ? " from C1" : "");
1681     callee_method->print_short_name(tty);
1682     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1683   }
1684 
1685   if (ICMissHistogram) {
1686     MutexLocker m(VMStatistic_lock);
1687     RegisterMap reg_map(current,
1688                         RegisterMap::UpdateMap::skip,
1689                         RegisterMap::ProcessFrames::include,
1690                         RegisterMap::WalkContinuation::skip);
1691     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1692     // produce statistics under the lock
1693     trace_ic_miss(f.pc());
1694   }
1695 #endif
1696 
1697   // install an event collector so that when a vtable stub is created the
1698   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1699   // event can't be posted when the stub is created as locks are held
1700   // - instead the event will be deferred until the event collector goes
1701   // out of scope.
1702   JvmtiDynamicCodeEventCollector event_collector;
1703 
1704   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1705   RegisterMap reg_map(current,
1706                       RegisterMap::UpdateMap::skip,
1707                       RegisterMap::ProcessFrames::include,
1708                       RegisterMap::WalkContinuation::skip);
1709   frame caller_frame = current->last_frame().sender(&reg_map);
1710   CodeBlob* cb = caller_frame.cb();
1711   nmethod* caller_nm = cb->as_nmethod();
1712   // Calls via mismatching methods are always non-scalarized
1713   if (caller_nm->is_compiled_by_c1() || call_info.resolved_method()->mismatch()) {
1714     caller_is_c1 = true;
1715   }
1716 
1717   CompiledICLocker ml(caller_nm);
1718   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1719   inline_cache->update(&call_info, receiver()->klass(), caller_is_c1);
1720 
1721   return callee_method;
1722 }
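// [Rough standalone model; the state names are assumptions] The inline-cache
// update above promotes a call site on a miss. Very roughly: a clean site caches
// the first receiver klass (monomorphic), and a miss with a different klass
// pushes the site to a megamorphic stub that dispatches through the
// vtable/itable instead of a cached target.

enum class ICState { Clean, Monomorphic, Megamorphic };

static ICState on_ic_miss(ICState s) {
  switch (s) {
    case ICState::Clean:       return ICState::Monomorphic; // cache first receiver
    case ICState::Monomorphic: return ICState::Megamorphic; // second klass seen
    case ICState::Megamorphic: return ICState::Megamorphic; // already generic
  }
  return s; // unreachable, keeps compilers happy
}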
1723 
1724 //
1725 // Resets a call-site in compiled code so it will get resolved again.
1726 // This routine handles virtual call sites, optimized virtual call
1727 // sites, and static call sites. It is typically used to change a call site's
1728 // destination from compiled to interpreted.
1729 //
1730 methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_optimized, bool& caller_is_c1, TRAPS) {
1731   JavaThread* current = THREAD;
1732   ResourceMark rm(current);
1733   RegisterMap reg_map(current,
1734                       RegisterMap::UpdateMap::skip,
1735                       RegisterMap::ProcessFrames::include,
1736                       RegisterMap::WalkContinuation::skip);
1737   frame stub_frame = current->last_frame();
1738   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1739   frame caller = stub_frame.sender(&reg_map);
1740   if (caller.is_compiled_frame()) {
1741     caller_is_c1 = caller.cb()->as_nmethod()->is_compiled_by_c1();
1742   }
1743 
1744   // Do nothing if the frame isn't a live compiled frame.
1745   // nmethod could be deoptimized by the time we get here
1746   // so no update to the caller is needed.
1747 
1748   if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1749       (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1750 
1751     address pc = caller.pc();
1752 
1753     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1754     assert(caller_nm != nullptr, "did not find caller nmethod");
1755 
1756     // Default call_addr is the location of the "basic" call.
1757     // Determine the address of the call we are reresolving. With
1758     // Inline Caches we will always find a recognizable call.
1759     // With Inline Caches disabled we may or may not find a
1760     // recognizable call. We will always find a call for static
1761     // calls and for optimized virtual calls. For vanilla virtual
1762     // calls it depends on the state of the UseInlineCaches switch.
1763     //
1764     // With Inline Caches disabled we can get here for a virtual call
1765     // for two reasons:
1766     //   1 - calling an abstract method. The vtable for abstract methods
1767     //       will run us thru handle_wrong_method and we will eventually
1768     //       end up in the interpreter to throw the AbstractMethodError.
1769     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1770     //       call and between the time we fetch the entry address and
1771     //       we jump to it the target gets deoptimized. Similar to 1
1772     //       we will wind up in the interpreter (thru a c2i with c2).
1773     //
1774     CompiledICLocker ml(caller_nm);
1775     address call_addr = caller_nm->call_instruction_address(pc);
1776 
1777     if (call_addr != nullptr) {
1778       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1779       // bytes back in the instruction stream so we must also check for reloc info.
1780       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1781       bool ret = iter.next(); // Get item
1782       if (ret) {
1783         is_static_call = false;
1784         is_optimized = false;
1785         switch (iter.type()) {
1786           case relocInfo::static_call_type:
1787             is_static_call = true;
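            // fall through: a static call is also a direct call, cleaned below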
1788           case relocInfo::opt_virtual_call_type: {
1789             is_optimized = (iter.type() == relocInfo::opt_virtual_call_type);
1790             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1791             cdc->set_to_clean();
1792             break;
1793           }

1794           case relocInfo::virtual_call_type: {
1795             // compiled, dispatched call (which used to call an interpreted method)
1796             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1797             inline_cache->set_to_clean();
1798             break;
1799           }
1800           default:
1801             break;
1802         }
1803       }
1804     }
1805   }
1806 
1807   methodHandle callee_method = find_callee_method(is_optimized, caller_is_c1, CHECK_(methodHandle()));
1808 
1809 #ifndef PRODUCT
1810   Atomic::inc(&_wrong_method_ctr);
1811 
1812   if (TraceCallFixup) {
1813     ResourceMark rm(current);
1814     tty->print("handle_wrong_method reresolving call%s to", (caller_is_c1) ? " from C1" : "");
1815     callee_method->print_short_name(tty);
1816     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1817   }
1818 #endif
1819 
1820   return callee_method;
1821 }
1822 
1823 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1824   // The faulting unsafe accesses should be changed to throw the error
1825   // synchronously instead. Meanwhile the faulting instruction will be
1826   // skipped over (effectively turning it into a no-op) and an
1827   // asynchronous exception will be raised which the thread will
1828   // handle at a later point. If the instruction is a load it will
1829   // return garbage.
1830 
1831   // Request an async exception.
1832   thread->set_pending_unsafe_access_error();
1833 
1834   // Return address of next instruction to execute.

2000   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
2001 
2002   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
2003   if (message == nullptr) {
2004     // Shouldn't happen, but don't cause even more problems if it does
2005     message = const_cast<char*>(caster_klass->external_name());
2006   } else {
2007     jio_snprintf(message,
2008                  msglen,
2009                  "class %s cannot be cast to class %s (%s%s%s)",
2010                  caster_name,
2011                  target_name,
2012                  caster_klass_description,
2013                  klass_separator,
2014                  target_klass_description
2015                  );
2016   }
2017   return message;
2018 }
2019 
2020 char* SharedRuntime::generate_identity_exception_message(JavaThread* current, Klass* klass) {
2021   assert(klass->is_inline_klass(), "Must be a concrete value class");
2022   const char* desc = "Cannot synchronize on an instance of value class ";
2023   const char* className = klass->external_name();
2024   size_t msglen = strlen(desc) + strlen(className) + 1;
2025   char* message = NEW_RESOURCE_ARRAY(char, msglen);
2026   if (nullptr == message) {
2027     // Out of memory: can't create detailed error message
2028     message = const_cast<char*>(klass->external_name());
2029   } else {
2030     jio_snprintf(message, msglen, "%s%s", desc, className);
2031   }
2032   return message;
2033 }
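// [Sketch of the sizing arithmetic above, in standard C++] msglen reserves
// strlen(desc) + strlen(className) + 1 bytes, the +1 being the terminating NUL
// written by the formatter. std::snprintf has the same contract as the
// jio_snprintf call used above: it NUL-terminates whenever the capacity is > 0.

#include <cstddef>
#include <cstdio>

static void format_identity_message(char* out, std::size_t cap,
                                    const char* desc, const char* className) {
  std::snprintf(out, cap, "%s%s", desc, className); // truncates instead of overrunning
}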
2034 
2035 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
2036   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
2037 JRT_END
2038 
2039 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2040   if (!SafepointSynchronize::is_synchronizing()) {
2041     // Only try quick_enter() if we're not trying to reach a safepoint
2042     // so that the calling thread reaches the safepoint more quickly.
2043     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
2044       return;
2045     }
2046   }
2047   // NO_ASYNC required because an async exception on the state transition destructor
2048   // would leave you with the lock held and it would never be released.
2049   // The normal monitorenter NullPointerException is thrown without acquiring a lock
2050   // and the model is that an exception implies the method failed.
2051   JRT_BLOCK_NO_ASYNC
2052   Handle h_obj(THREAD, obj);
2053   ObjectSynchronizer::enter(h_obj, lock, current);
2054   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2265   tty->print_cr("        %% in nested categories are relative to their category");
2266   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2267   tty->cr();
2268 
2269   MethodArityHistogram h;
2270 }
2271 #endif
2272 
2273 #ifndef PRODUCT
2274 static int _lookups; // number of calls to lookup
2275 static int _equals;  // number of buckets checked with matching hash
2276 static int _archived_hits; // number of successful lookups in archived table
2277 static int _runtime_hits;  // number of successful lookups in runtime table
2278 #endif
2279 
2280 // A simple wrapper class around the calling convention information
2281 // that allows sharing of adapters for the same calling convention.
2282 class AdapterFingerPrint : public MetaspaceObj {
2283  private:
2284   enum {
2285     _basic_type_bits = 5,
2286     _basic_type_mask = right_n_bits(_basic_type_bits),
2287     _basic_types_per_int = BitsPerInt / _basic_type_bits,
2288   };
2289   // TODO: Consider integrating this with a more global scheme for compressing signatures.
2290   // For now, 5 bits per component (plus T_VOID gaps after double/long) is not excessive.
2291 
2292   int _length;
2293 
2294   static int data_offset() { return sizeof(AdapterFingerPrint); }
2295   int* data_pointer() {
2296     return (int*)((address)this + data_offset());
2297   }
2298 
2299   // Private constructor. Use allocate() to get an instance.
2300   AdapterFingerPrint(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2301     int* data = data_pointer();
2302     // Pack the BasicTypes, _basic_types_per_int per int
2303     int total_args_passed = total_args_passed_in_sig(sig);
2304     _length = length(total_args_passed);
2305     int sig_index = 0;
2306     BasicType prev_bt = T_ILLEGAL;
2307     int vt_count = 0;
2308     for (int index = 0; index < _length; index++) {
2309       int value = 0;
2310       for (int byte = 0; byte < _basic_types_per_int; byte++) {
2311         BasicType bt = T_ILLEGAL;
2312         if (sig_index < total_args_passed) {
2313           bt = sig->at(sig_index++)._bt;
2314           if (bt == T_METADATA) {
2315             // Found start of inline type in signature
2316             assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
2317             if (sig_index == 1 && has_ro_adapter) {
2318               // With a ro_adapter, replace receiver inline type delimiter by T_VOID to prevent matching
2319               // with other adapters that have the same inline type as first argument and no receiver.
2320               bt = T_VOID;
2321             }
2322             vt_count++;
2323           } else if (bt == T_VOID && prev_bt != T_LONG && prev_bt != T_DOUBLE) {
2324             // Found end of inline type in signature
2325             assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
2326             vt_count--;
2327             assert(vt_count >= 0, "invalid vt_count");
2328           } else if (vt_count == 0) {
2329             // Widen fields that are not part of a scalarized inline type argument
2330             bt = adapter_encoding(bt);
2331           }
2332           prev_bt = bt;
2333         }
2334         int bt_val = (bt == T_ILLEGAL) ? 0 : bt;
2335         assert((bt_val & _basic_type_mask) == bt_val, "must fit in _basic_type_bits bits");
2336         value = (value << _basic_type_bits) | bt_val;
2337       }
2338       data[index] = value;
2339     }
2340     assert(vt_count == 0, "invalid vt_count");
2341   }
2342 
2343   // Call deallocate instead
2344   ~AdapterFingerPrint() {
2345     ShouldNotCallThis();
2346   }
2347 
2348   static int total_args_passed_in_sig(const GrowableArray<SigEntry>* sig) {
2349     return (sig != nullptr) ? sig->length() : 0;
2350   }
2351 
2352   static int length(int total_args) {
2353     return (total_args + (_basic_types_per_int-1)) / _basic_types_per_int;
2354   }
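  // Worked example (follows directly from the constants above): with
  // _basic_type_bits == 5, _basic_types_per_int == 32 / 5 == 6, so a
  // 10-argument signature packs into length(10) == (10 + 5) / 6 == 2 ints.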
2355 
2356   static int compute_size_in_words(int len) {
2357     return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(int)));
2358   }
2359 
2360   // Remap BasicTypes that are handled equivalently by the adapters.
2361   // These are correct for the current system but someday it might be
2362   // necessary to make this mapping platform dependent.
2363   static BasicType adapter_encoding(BasicType in) {
2364     switch (in) {
2365       case T_BOOLEAN:
2366       case T_BYTE:
2367       case T_SHORT:
2368       case T_CHAR:
2369         // They are all promoted to T_INT in the calling convention
2370         return T_INT;
2371 
2372       case T_OBJECT:
2373       case T_ARRAY:
2374         // In other words, we assume that any register good enough for
2375         // an int or long is good enough for a managed pointer.
2376 #ifdef _LP64
2377         return T_LONG;
2378 #else
2379         return T_INT;
2380 #endif
2381 
2382       case T_INT:
2383       case T_LONG:
2384       case T_FLOAT:
2385       case T_DOUBLE:
2386       case T_VOID:
2387         return in;
2388 
2389       default:
2390         ShouldNotReachHere();
2391         return T_CONFLICT;
2392     }
2393   }
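  // Worked example: on an LP64 system the signature (boolean, short, Object, long)
  // canonicalizes to (T_INT, T_INT, T_LONG, T_LONG): the sub-int types widen to
  // T_INT and references share the T_LONG register class, so any signature with
  // that shape yields the same fingerprint and shares one adapter.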
2394 
2395   void* operator new(size_t size, size_t fp_size) throw() {
2396     assert(fp_size >= size, "sanity check");
2397     void* p = AllocateHeap(fp_size, mtCode);
2398     memset(p, 0, fp_size);
2399     return p;
2400   }
2401 
2402 public:
2403   template<typename Function>
2404   void iterate_args(Function function) {
2405     for (int i = 0; i < length(); i++) {
2406       unsigned val = (unsigned)value(i);
2407       // args are packed so that first/lower arguments are in the highest
2408       // bits of each int value, so iterate from highest to the lowest
2409       int first_entry = _basic_types_per_int * _basic_type_bits;
2410       for (int j = first_entry; j >= 0; j -= _basic_type_bits) {
2411         unsigned v = (val >> j) & _basic_type_mask;
2412         if (v == 0) {
2413           continue;
2414         }
2415         function(v);
2416       }
2417     }
2418   }
2419 
2420   static AdapterFingerPrint* allocate(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2421     int total_args_passed = total_args_passed_in_sig(sig);
2422     int len = length(total_args_passed);
2423     int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2424     AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(sig, has_ro_adapter);
2425     assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2426     return afp;
2427   }
2428 
2429   static void deallocate(AdapterFingerPrint* fp) {
2430     FreeHeap(fp);
2431   }
2432 
2433   int value(int index) {
2434     int* data = data_pointer();
2435     return data[index];
2436   }
2437 
2438   int length() {
2439     return _length;
2440   }
2441 
2442   unsigned int compute_hash() {
2443     int hash = 0;
2444     for (int i = 0; i < length(); i++) {

2453     stringStream st;
2454     st.print("0x");
2455     for (int i = 0; i < length(); i++) {
2456       st.print("%x", value(i));
2457     }
2458     return st.as_string();
2459   }
2460 
2461   const char* as_basic_args_string() {
2462     stringStream st;
2463     bool long_prev = false;
2464     iterate_args([&] (int arg) {
2465       if (long_prev) {
2466         long_prev = false;
2467         if (arg == T_VOID) {
2468           st.print("J");
2469         } else {
2470           st.print("L");
2471         }
2472       }
2473       if (arg == T_LONG) {
2474         long_prev = true;
2475       } else if (arg != T_VOID) {
2476         st.print("%c", type2char((BasicType)arg));
2477       }
2478     });
2479     if (long_prev) {
2480       st.print("L");
2481     }
2482     return st.as_string();
2483   }
2484 
2485   bool equals(AdapterFingerPrint* other) {
2486     if (other->_length != _length) {
2487       return false;
2488     } else {
2489       for (int i = 0; i < _length; i++) {
2490         if (value(i) != other->value(i)) {
2491           return false;
2492         }
2493       }
2494     }
2495     return true;
2496   }
2497 
2498   // methods required by virtue of being a MetaspaceObj
2499   void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2500   int size() const { return compute_size_in_words(_length); }
2501   MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2502 
2503   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2504     NOT_PRODUCT(_equals++);

2513 #if INCLUDE_CDS
2514 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2515   return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2516 }
2517 
2518 class ArchivedAdapterTable : public OffsetCompactHashtable<
2519   AdapterFingerPrint*,
2520   AdapterHandlerEntry*,
2521   adapter_fp_equals_compact_hashtable_entry> {};
2522 #endif // INCLUDE_CDS
2523 
2524 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2525 using AdapterHandlerTable = HashTable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2526                   AnyObj::C_HEAP, mtCode,
2527                   AdapterFingerPrint::compute_hash,
2528                   AdapterFingerPrint::equals>;
2529 static AdapterHandlerTable* _adapter_handler_table;
2530 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2531 
2532 // Find an entry with the same fingerprint, if one exists
2533 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(const GrowableArray<SigEntry>* sig, bool has_ro_adapter) {
2534   NOT_PRODUCT(_lookups++);
2535   assert_lock_strong(AdapterHandlerLibrary_lock);
2536   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(sig, has_ro_adapter);
2537   AdapterHandlerEntry* entry = nullptr;
2538 #if INCLUDE_CDS
2539   // If we are building the archive, the archived adapter table is not
2540   // yet valid, so we must use the entries added to the runtime table.
2541   if (AOTCodeCache::is_using_adapter()) {
2542     // Search the archived table first. It is read-only, so it can be searched without a lock.
2543     entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2544 #ifndef PRODUCT
2545     if (entry != nullptr) {
2546       _archived_hits++;
2547     }
2548 #endif
2549   }
2550 #endif // INCLUDE_CDS
2551   if (entry == nullptr) {
2552     assert_lock_strong(AdapterHandlerLibrary_lock);
2553     AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2554     if (entry_p != nullptr) {
2555       entry = *entry_p;
2556       assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",

2574   ts.print(tty, "AdapterHandlerTable");
2575   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2576                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2577   int total_hits = _archived_hits + _runtime_hits;
2578   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2579                 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2580 }
2581 #endif
2582 
2583 // ---------------------------------------------------------------------------
2584 // Implementation of AdapterHandlerLibrary
2585 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2586 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2587 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2588 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2589 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2590 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2591 #if INCLUDE_CDS
2592 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2593 #endif // INCLUDE_CDS
2594 static const int AdapterHandlerLibrary_size = 48*K;
2595 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2596 
2597 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2598   assert(_buffer != nullptr, "should be initialized");
2599   return _buffer;
2600 }
2601 
2602 static void post_adapter_creation(const AdapterBlob* new_adapter,
2603                                   const AdapterHandlerEntry* entry) {
2604   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2605     char blob_id[256];
2606     jio_snprintf(blob_id,
2607                  sizeof(blob_id),
2608                  "%s(%s)",
2609                  new_adapter->name(),
2610                  entry->fingerprint()->as_string());
2611     if (Forte::is_enabled()) {
2612       Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2613     }
2614 
2615     if (JvmtiExport::should_post_dynamic_code_generated()) {
2616       JvmtiExport::post_dynamic_code_generated(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2617     }
2618   }
2619 }
2620 
2621 void AdapterHandlerLibrary::create_abstract_method_handler() {
2622   assert_lock_strong(AdapterHandlerLibrary_lock);
2623   // Create a special handler for abstract methods.  Abstract methods
2624   // are never compiled, so an i2c entry is somewhat meaningless; it
2625   // throws AbstractMethodError just in case.
2626   // Pass wrong_method_abstract for the c2i transitions to return
2627   // AbstractMethodError for invalid invocations.
2628   address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2629   _abstract_method_handler = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(nullptr));
2630   _abstract_method_handler->set_entry_points(SharedRuntime::throw_AbstractMethodError_entry(),
2631                                              wrong_method_abstract, wrong_method_abstract, wrong_method_abstract,
2632                                              wrong_method_abstract, wrong_method_abstract);

2633 }
2634 
2635 void AdapterHandlerLibrary::initialize() {
2636   {
2637     ResourceMark rm;
2638     MutexLocker mu(AdapterHandlerLibrary_lock);
2639     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2640     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2641     create_abstract_method_handler();
2642   }
2643 
2644 #if INCLUDE_CDS
2645   // Link adapters in AOT Cache to their code in AOT Code Cache
2646   if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2647     link_aot_adapters();
2648     lookup_simple_adapters();
2649     return;
2650   }
2651 #endif // INCLUDE_CDS
2652 
2653   ResourceMark rm;
2654   AdapterBlob* no_arg_blob = nullptr;
2655   AdapterBlob* int_arg_blob = nullptr;
2656   AdapterBlob* obj_arg_blob = nullptr;
2657   AdapterBlob* obj_int_arg_blob = nullptr;
2658   AdapterBlob* obj_obj_arg_blob = nullptr;
2659   {
2660     MutexLocker mu(AdapterHandlerLibrary_lock);
2661 
2662     CompiledEntrySignature no_args;
2663     no_args.compute_calling_conventions();
2664     _no_arg_handler = create_adapter(no_arg_blob, no_args, true);
2665 
2666     CompiledEntrySignature obj_args;
2667     SigEntry::add_entry(obj_args.sig(), T_OBJECT);
2668     obj_args.compute_calling_conventions();
2669     _obj_arg_handler = create_adapter(obj_arg_blob, obj_args, true);
2670 
2671     CompiledEntrySignature int_args;
2672     SigEntry::add_entry(int_args.sig(), T_INT);
2673     int_args.compute_calling_conventions();
2674     _int_arg_handler = create_adapter(int_arg_blob, int_args, true);
2675 
2676     CompiledEntrySignature obj_int_args;
2677     SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
2678     SigEntry::add_entry(obj_int_args.sig(), T_INT);
2679     obj_int_args.compute_calling_conventions();
2680     _obj_int_arg_handler = create_adapter(obj_int_arg_blob, obj_int_args, true);
2681 
2682     CompiledEntrySignature obj_obj_args;
2683     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2684     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2685     obj_obj_args.compute_calling_conventions();
2686     _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, obj_obj_args, true);
2687 
2688     // We should always get an entry back, but on Zero there is
2689     // no associated blob.
2690     assert(_no_arg_handler != nullptr &&
2691            _obj_arg_handler != nullptr &&
2692            _int_arg_handler != nullptr &&
2693            _obj_int_arg_handler != nullptr &&
2694            _obj_obj_arg_handler != nullptr, "Initial adapter handlers must be properly created");
2695   }
2696 
2697   // Outside of the lock
2698 #ifndef ZERO
2699   // no blobs to register when we are on Zero
2700   post_adapter_creation(no_arg_blob, _no_arg_handler);
2701   post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2702   post_adapter_creation(int_arg_blob, _int_arg_handler);
2703   post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2704   post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2705 #endif // ZERO
2706 }
2707 
2708 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2709   return AdapterHandlerEntry::allocate(fingerprint);
2710 }
2711 
2712 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2713   if (method->is_abstract()) {
2714     return nullptr;
2715   }
2716   int total_args_passed = method->size_of_parameters(); // All args on stack
2717   if (total_args_passed == 0) {
2718     return _no_arg_handler;
2719   } else if (total_args_passed == 1) {
2720     if (!method->is_static()) {
2721       if (InlineTypePassFieldsAsArgs && method->method_holder()->is_inline_klass()) {
2722         return nullptr;
2723       }
2724       return _obj_arg_handler;
2725     }
2726     switch (method->signature()->char_at(1)) {
2727       case JVM_SIGNATURE_CLASS: {
2728         if (InlineTypePassFieldsAsArgs) {
2729           SignatureStream ss(method->signature());
2730           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2731           if (vk != nullptr) {
2732             return nullptr;
2733           }
2734         }
2735         return _obj_arg_handler;
2736       }
2737       case JVM_SIGNATURE_ARRAY:
2738         return _obj_arg_handler;
2739       case JVM_SIGNATURE_INT:
2740       case JVM_SIGNATURE_BOOLEAN:
2741       case JVM_SIGNATURE_CHAR:
2742       case JVM_SIGNATURE_BYTE:
2743       case JVM_SIGNATURE_SHORT:
2744         return _int_arg_handler;
2745     }
2746   } else if (total_args_passed == 2 &&
2747              !method->is_static() && (!InlineTypePassFieldsAsArgs || !method->method_holder()->is_inline_klass())) {
2748     switch (method->signature()->char_at(1)) {
2749       case JVM_SIGNATURE_CLASS: {
2750         if (InlineTypePassFieldsAsArgs) {
2751           SignatureStream ss(method->signature());
2752           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2753           if (vk != nullptr) {
2754             return nullptr;
2755           }
2756         }
2757         return _obj_obj_arg_handler;
2758       }
2759       case JVM_SIGNATURE_ARRAY:
2760         return _obj_obj_arg_handler;
2761       case JVM_SIGNATURE_INT:
2762       case JVM_SIGNATURE_BOOLEAN:
2763       case JVM_SIGNATURE_CHAR:
2764       case JVM_SIGNATURE_BYTE:
2765       case JVM_SIGNATURE_SHORT:
2766         return _obj_int_arg_handler;
2767     }
2768   }
2769   return nullptr;
2770 }
2771 
2772 CompiledEntrySignature::CompiledEntrySignature(Method* method) :
2773   _method(method), _num_inline_args(0), _has_inline_recv(false),
2774   _regs(nullptr), _regs_cc(nullptr), _regs_cc_ro(nullptr),
2775   _args_on_stack(0), _args_on_stack_cc(0), _args_on_stack_cc_ro(0),
2776   _c1_needs_stack_repair(false), _c2_needs_stack_repair(false), _supers(nullptr) {
2777   _sig = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2778   _sig_cc = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2779   _sig_cc_ro = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2780 }
2781 
2782 // See if we can save space by sharing the same entry for VIEP and VIEP(RO),
2783 // or the same entry for VEP and VIEP(RO).
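     // (VEP = Verified_Entry, VIEP = Verified_Inline_Entry, and VIEP(RO) =
     // Verified_Inline_Entry_RO, the variant where <this> is passed as an
     // object rather than as scalarized fields -- see the cases below.)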
2784 CodeOffsets::Entries CompiledEntrySignature::c1_inline_ro_entry_type() const {
2785   if (!has_scalarized_args()) {
2786     // VEP/VIEP/VIEP(RO) all share the same entry. There's no packing.
2787     return CodeOffsets::Verified_Entry;
2788   }
2789   if (_method->is_static()) {
2790     // Static methods don't need VIEP(RO)
2791     return CodeOffsets::Verified_Entry;
2792   }
2793 
2794   if (has_inline_recv()) {
2795     if (num_inline_args() == 1) {
2796       // Share same entry for VIEP and VIEP(RO).
2797       // This is quite common: we have an instance method in an InlineKlass that has
2798       // no inline type args other than <this>.
2799       return CodeOffsets::Verified_Inline_Entry;
2800     } else {
2801       assert(num_inline_args() > 1, "must be");
2802       // No sharing:
2803       //   VIEP(RO) -- <this> is passed as object
2804       //   VEP      -- <this> is passed as fields
2805       return CodeOffsets::Verified_Inline_Entry_RO;
2806     }

2807   }
2808 
2809   // Non-static method whose <this> is not an inline type (the static case returned above)
2810   if (args_on_stack_cc() != args_on_stack_cc_ro()) {
2811     // No sharing:
2812     // Some arguments are passed on the stack, and we have inserted reserved entries
2813     // into the VEP, but we never insert reserved entries into the VIEP(RO).
2814     return CodeOffsets::Verified_Inline_Entry_RO;
2815   } else {
2816     // Share same entry for VEP and VIEP(RO).
2817     return CodeOffsets::Verified_Entry;
2818   }
2819 }
2820 
2821 // Returns all super methods (transitive) in classes and interfaces that are overridden by the current method.
2822 GrowableArray<Method*>* CompiledEntrySignature::get_supers() {
2823   if (_supers != nullptr) {
2824     return _supers;
2825   }
2826   _supers = new GrowableArray<Method*>();
2827   // Skip private, static, and <init> methods
2828   if (_method->is_private() || _method->is_static() || _method->is_object_constructor()) {
2829     return _supers;
2830   }
2831   Symbol* name = _method->name();
2832   Symbol* signature = _method->signature();
2833   const Klass* holder = _method->method_holder()->super();
2834   Symbol* holder_name = holder->name();
2835   ThreadInVMfromUnknown tiv;
2836   JavaThread* current = JavaThread::current();
2837   HandleMark hm(current);
2838   Handle loader(current, _method->method_holder()->class_loader());
2839 
2840   // Walk up the class hierarchy and search for super methods
2841   while (holder != nullptr) {
2842     Method* super_method = holder->lookup_method(name, signature);
2843     if (super_method == nullptr) {
2844       break;
2845     }
2846     if (!super_method->is_static() && !super_method->is_private() &&
2847         (!super_method->is_package_private() ||
2848          super_method->method_holder()->is_same_class_package(loader(), holder_name))) {
2849       _supers->push(super_method);
2850     }
2851     holder = super_method->method_holder()->super();
2852   }
2853   // Search interfaces for super methods
2854   Array<InstanceKlass*>* interfaces = _method->method_holder()->transitive_interfaces();
2855   for (int i = 0; i < interfaces->length(); ++i) {
2856     Method* m = interfaces->at(i)->lookup_method(name, signature);
2857     if (m != nullptr && !m->is_static() && m->is_public()) {
2858       _supers->push(m);
2859     }
2860   }
2861   return _supers;
2862 }
2863 
2864 // Iterate over arguments and compute scalarized and non-scalarized signatures
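     // Roughly (as can be inferred from the code below): _sig is the declared,
     // non-scalarized signature; _sig_cc is the scalarized "compiled calling
     // convention" signature, with inline type arguments expanded into their
     // fields; _sig_cc_ro is like _sig_cc but keeps the receiver as an object.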
2865 void CompiledEntrySignature::compute_calling_conventions(bool init) {
2866   bool has_scalarized = false;
2867   if (_method != nullptr) {
2868     InstanceKlass* holder = _method->method_holder();
2869     int arg_num = 0;
2870     if (!_method->is_static()) {
2871       // We shouldn't scalarize 'this' in a value class constructor
2872       if (holder->is_inline_klass() && InlineKlass::cast(holder)->can_be_passed_as_fields() && !_method->is_object_constructor() &&
2873           (init || _method->is_scalarized_arg(arg_num))) {
2874         _sig_cc->appendAll(InlineKlass::cast(holder)->extended_sig());
2875         has_scalarized = true;
2876         _has_inline_recv = true;
2877         _num_inline_args++;
2878       } else {
2879         SigEntry::add_entry(_sig_cc, T_OBJECT, holder->name());
2880       }
2881       SigEntry::add_entry(_sig, T_OBJECT, holder->name());
2882       SigEntry::add_entry(_sig_cc_ro, T_OBJECT, holder->name());
2883       arg_num++;
2884     }
2885     for (SignatureStream ss(_method->signature()); !ss.at_return_type(); ss.next()) {
2886       BasicType bt = ss.type();
2887       if (bt == T_OBJECT) {
2888         InlineKlass* vk = ss.as_inline_klass(holder);
2889         if (vk != nullptr && vk->can_be_passed_as_fields() && (init || _method->is_scalarized_arg(arg_num))) {
2890           // Check for a calling convention mismatch with super method(s)
2891           bool scalar_super = false;
2892           bool non_scalar_super = false;
2893           GrowableArray<Method*>* supers = get_supers();
2894           for (int i = 0; i < supers->length(); ++i) {
2895             Method* super_method = supers->at(i);
2896             if (super_method->is_scalarized_arg(arg_num)) {
2897               scalar_super = true;
2898             } else {
2899               non_scalar_super = true;
2900             }
2901           }
2902 #ifdef ASSERT
2903           // Randomly enable the code paths below for stress testing
2904           bool stress = init && StressCallingConvention;
2905           if (stress && (os::random() & 1) == 1) {
2906             non_scalar_super = true;
2907             if ((os::random() & 1) == 1) {
2908               scalar_super = true;
2909             }
2910           }
2911 #endif
2912           if (non_scalar_super) {
2913             // Found a super method with a non-scalarized argument. Fall back to the non-scalarized calling convention.
2914             if (scalar_super) {
2915               // Found non-scalar *and* scalar super methods. We can't handle both.
2916               // Mark the scalarized methods as mismatched and recompile call sites to use the non-scalarized calling convention.
2917               for (int i = 0; i < supers->length(); ++i) {
2918                 Method* super_method = supers->at(i);
2919                 if (super_method->is_scalarized_arg(arg_num) DEBUG_ONLY(|| (stress && (os::random() & 1) == 1))) {
2920                   super_method->set_mismatch();
2921                   MutexLocker ml(Compile_lock, Mutex::_safepoint_check_flag);
2922                   JavaThread* thread = JavaThread::current();
2923                   HandleMark hm(thread);
2924                   methodHandle mh(thread, super_method);
2925                   DeoptimizationScope deopt_scope;
2926                   CodeCache::mark_for_deoptimization(&deopt_scope, mh());
2927                   deopt_scope.deoptimize_marked();
2928                 }
2929               }
2930             }
2931             // Fall back to non-scalarized calling convention
2932             SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2933             SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2934           } else {
2935             _num_inline_args++;
2936             has_scalarized = true;
2937             int last = _sig_cc->length();
2938             int last_ro = _sig_cc_ro->length();
2939             _sig_cc->appendAll(vk->extended_sig());
2940             _sig_cc_ro->appendAll(vk->extended_sig());
2941             if (bt == T_OBJECT) {
2942               // Nullable inline type argument: insert an InlineTypeNode::NullMarker field right after the T_METADATA delimiter
2943               _sig_cc->insert_before(last+1, SigEntry(T_BOOLEAN, -1, nullptr, true));
2944               _sig_cc_ro->insert_before(last_ro+1, SigEntry(T_BOOLEAN, -1, nullptr, true));
2945             }
2946           }
2947         } else {
2948           SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2949           SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2950         }
2951         bt = T_OBJECT;
2952       } else {
2953         SigEntry::add_entry(_sig_cc, ss.type(), ss.as_symbol());
2954         SigEntry::add_entry(_sig_cc_ro, ss.type(), ss.as_symbol());
2955       }
2956       SigEntry::add_entry(_sig, bt, ss.as_symbol());
2957       if (bt != T_VOID) {
2958         arg_num++;
2959       }
2960     }
2961   }
2962 
2963   // Compute the non-scalarized calling convention
2964   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
2965   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
2966 
2967   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
2968   if (has_scalarized && !_method->is_native()) {
2969     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
2970     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
2971 
2972     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
2973     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
2974 
2975     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
2976     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
2977 
2978     // Upper bound on stack arguments to avoid hitting the argument limit and
2979     // bailing out of compilation ("unsupported incoming calling sequence").
2980     // TODO we need a reasonable limit (flag?) here
2981     if (MAX2(_args_on_stack_cc, _args_on_stack_cc_ro) <= 60) {
2982       return; // Success
2983     }
2984   }

2985 
2986   // No scalarized args
2987   _sig_cc = _sig;
2988   _regs_cc = _regs;
2989   _args_on_stack_cc = _args_on_stack;
2990 
2991   _sig_cc_ro = _sig;
2992   _regs_cc_ro = _regs;
2993   _args_on_stack_cc_ro = _args_on_stack;
2994 }
2995 
2996 void CompiledEntrySignature::initialize_from_fingerprint(AdapterFingerPrint* fingerprint) {
2997   int value_object_count = 0;
2998   bool is_receiver = true;
2999   BasicType prev_bt = T_ILLEGAL;
3000   bool long_prev = false;
3001   bool has_scalarized_arguments = false;
3002 
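       // Decoding rules, mirroring the fingerprint encoding: T_METADATA opens a
       // scalarized inline type and a bare T_VOID closes it; a T_VOID in the
       // receiver position stands for a scalarized 'this' with an ro adapter;
       // T_LONG denotes either a real long (when followed by T_VOID) or an
       // object pointer (see adapter_encoding()).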
3003   fingerprint->iterate_args([&] (int arg) {
3004     BasicType bt = (BasicType)arg;
3005     if (long_prev) {
3006       long_prev = false;
3007       BasicType bt_to_add;
3008       if (bt == T_VOID) {
3009         bt_to_add = T_LONG;
3010       } else {
3011         bt_to_add = T_OBJECT; // it could be T_ARRAY; it shouldn't matter
3012       }
3013       SigEntry::add_entry(_sig_cc, bt_to_add);
3014       SigEntry::add_entry(_sig_cc_ro, bt_to_add);
3015       if (value_object_count == 0) {
3016         SigEntry::add_entry(_sig, bt_to_add);
3017       }
3018     }
3019     switch (bt) {
3020       case T_VOID:
3021         if (is_receiver) {
3022           // A T_VOID in the receiver position marks a scalarized 'this' (an ro adapter is available)
3023           assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
3024           value_object_count++;
3025           has_scalarized_arguments = true;
3026           _has_inline_recv = true;
3027           SigEntry::add_entry(_sig, T_OBJECT);
3028           SigEntry::add_entry(_sig_cc, T_METADATA);
3029           SigEntry::add_entry(_sig_cc_ro, T_METADATA);
3030         } else if (prev_bt != T_LONG && prev_bt != T_DOUBLE) {
3031           assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
3032           value_object_count--;
3033           SigEntry::add_entry(_sig_cc, T_VOID);
3034           SigEntry::add_entry(_sig_cc_ro, T_VOID);
3035           assert(value_object_count >= 0, "invalid value object count");
3036         } else {
3037           // Nothing to add for _sig: we already added an additional T_VOID in add_entry() when adding T_LONG or T_DOUBLE.
3038         }
3039         break;
3040       case T_INT:
3041       case T_FLOAT:
3042       case T_DOUBLE:
3043         if (value_object_count == 0) {
3044           SigEntry::add_entry(_sig, bt);
3045         }
3046         SigEntry::add_entry(_sig_cc, bt);
3047         SigEntry::add_entry(_sig_cc_ro, bt);
3048         break;
3049       case T_LONG:
3050         long_prev = true;
3051         break;
3052       case T_BOOLEAN:
3053       case T_CHAR:
3054       case T_BYTE:
3055       case T_SHORT:
3056       case T_OBJECT:
3057       case T_ARRAY:
3058         assert(value_object_count > 0 && !is_receiver, "must be value object field");
3059         SigEntry::add_entry(_sig_cc, bt);
3060         SigEntry::add_entry(_sig_cc_ro, bt);
3061         break;
3062       case T_METADATA:
3063         assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
3064         value_object_count++;
3065         has_scalarized_arguments = true;
3066         SigEntry::add_entry(_sig, T_OBJECT);
3067         SigEntry::add_entry(_sig_cc, T_METADATA);
3068         SigEntry::add_entry(_sig_cc_ro, T_METADATA);
3069         break;
3070       default: {
3071         fatal("Unexpected BasicType: %s", basictype_to_str(bt));
3072       }
3073     }
3074     prev_bt = bt;
3075     is_receiver = false;
3076   });
3077 
3078   if (long_prev) {
3079     // If previous bt was T_LONG and we reached the end of the signature, we know that it must be a T_OBJECT.
3080     SigEntry::add_entry(_sig, T_OBJECT);
3081     SigEntry::add_entry(_sig_cc, T_OBJECT);
3082     SigEntry::add_entry(_sig_cc_ro, T_OBJECT);
3083   }
3084   assert(value_object_count == 0, "invalid value object count");
3085 
3086   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
3087   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
3088 
3089   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
3090   if (has_scalarized_arguments) {
3091     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
3092     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
3093 
3094     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
3095     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
3096 
3097     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
3098     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
3099   } else {
3100     // No scalarized args
3101     _sig_cc = _sig;
3102     _regs_cc = _regs;
3103     _args_on_stack_cc = _args_on_stack;
3104 
3105     _sig_cc_ro = _sig;
3106     _regs_cc_ro = _regs;
3107     _args_on_stack_cc_ro = _args_on_stack;
3108   }
3109 
3110 #ifdef ASSERT
3111   {
3112     AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(_sig_cc, _has_inline_recv);
3113     assert(fingerprint->equals(compare_fp), "sanity check");
3114     AdapterFingerPrint::deallocate(compare_fp);
3115   }
3116 #endif
3117 }
3118 
3119 const char* AdapterHandlerEntry::_entry_names[] = {
3120   "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
3121 };
3122 
3123 #ifdef ASSERT
3124 void AdapterHandlerLibrary::verify_adapter_sharing(CompiledEntrySignature& ces, AdapterHandlerEntry* cached_entry) {
3125   // We can only compare code if any was saved; on Zero there is none.
3126 #ifndef ZERO
3127   AdapterBlob* comparison_blob = nullptr;
3128   AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, ces, false, true);
3129   assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
3130   assert(comparison_entry->compare_code(cached_entry), "code must match");
3131   // Release the one just created
3132   AdapterHandlerEntry::deallocate(comparison_entry);
3133 #endif // ZERO
3134 }
3135 #endif /* ASSERT */
3136 
3137 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
3138   // Use customized signature handler.  Need to lock around updates to
3139   // the _adapter_handler_table (it is not safe for concurrent readers
3140   // and a single writer: this could be fixed if it becomes a
3141   // problem).
3142 
3143   // Fast-path for trivial adapters
3144   AdapterHandlerEntry* entry = get_simple_adapter(method);
3145   if (entry != nullptr) {
3146     return entry;
3147   }
3148 
3149   ResourceMark rm;
3150   AdapterBlob* adapter_blob = nullptr;
3151 
3152   CompiledEntrySignature ces(method());
3153   ces.compute_calling_conventions();
3154   if (ces.has_scalarized_args()) {
3155     if (!method->has_scalarized_args()) {
3156       method->set_has_scalarized_args();
3157     }
3158     if (ces.c1_needs_stack_repair()) {
3159       method->set_c1_needs_stack_repair();
3160     }
3161     if (ces.c2_needs_stack_repair() && !method->c2_needs_stack_repair()) {
3162       method->set_c2_needs_stack_repair();
3163     }
3164   } else if (method->is_abstract()) {
3165     return _abstract_method_handler;
3166   }
3167 
3168   {
3169     MutexLocker mu(AdapterHandlerLibrary_lock);
3170 
3171     if (ces.has_scalarized_args() && method->is_abstract()) {
3172       // Save a C heap allocated version of the signature for abstract methods with scalarized inline type arguments
3173       address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
3174       entry = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(nullptr));
3175       entry->set_entry_points(SharedRuntime::throw_AbstractMethodError_entry(),
3176                               wrong_method_abstract, wrong_method_abstract, wrong_method_abstract,
3177                               wrong_method_abstract, wrong_method_abstract);
3178       GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc_ro()->length(), mtInternal);
3179       heap_sig->appendAll(ces.sig_cc_ro());
3180       entry->set_sig_cc(heap_sig);
3181       return entry;
3182     }
3183 
3184     // Lookup method signature's fingerprint
3185     entry = lookup(ces.sig_cc(), ces.has_inline_recv());
3186 
3187     if (entry != nullptr) {
3188       assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
3189 #ifdef ASSERT
3190       if (!entry->is_shared() && VerifyAdapterSharing) {
3191         verify_adapter_sharing(ces, entry);
3192       }
3193 #endif
3194     } else {
3195       entry = create_adapter(adapter_blob, ces, /* allocate_code_blob */ true);
3196     }
3197   }
3198 
3199   // Outside of the lock
3200   if (adapter_blob != nullptr) {
3201     post_adapter_creation(adapter_blob, entry);
3202   }
3203   return entry;
3204 }
3205 
3206 AdapterBlob* AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
3207   ResourceMark rm;
3208   const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
3209   const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
3210   int offsets[AdapterBlob::ENTRY_COUNT];
3211 
3212   AdapterBlob* adapter_blob = nullptr;
3213   CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name);
3214   if (blob != nullptr) {
3215     adapter_blob = blob->as_adapter_blob();
3216     adapter_blob->get_offsets(offsets);
3217     address i2c_entry = adapter_blob->content_begin();
3218     assert(offsets[0] == 0, "sanity check");
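         // offsets[] holds each entry point relative to the i2c entry at
         // content_begin(); -1 marks an entry point that is absent, mapped to
         // nullptr below.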
3219     handler->set_entry_points(
3220       i2c_entry,
3221       (offsets[1] != -1) ? (i2c_entry + offsets[1]) : nullptr,
3222       (offsets[2] != -1) ? (i2c_entry + offsets[2]) : nullptr,
3223       (offsets[3] != -1) ? (i2c_entry + offsets[3]) : nullptr,
3224       (offsets[4] != -1) ? (i2c_entry + offsets[4]) : nullptr,
3225       (offsets[5] != -1) ? (i2c_entry + offsets[5]) : nullptr,
3226       (offsets[6] != -1) ? (i2c_entry + offsets[6]) : nullptr
3227     );
3228   }
3229   return adapter_blob;
3230 }
3231 
3232 #ifndef PRODUCT
3233 void AdapterHandlerLibrary::print_adapter_handler_info(outputStream* st, AdapterHandlerEntry* handler, AdapterBlob* adapter_blob) {
3234   ttyLocker ttyl;
3235   ResourceMark rm;
3236   int insts_size;
3237   // on Zero the blob may be null
3238   handler->print_adapter_on(tty);
3239   if (adapter_blob == nullptr) {
3240     return;
3241   }
3242   insts_size = adapter_blob->code_size();
3243   st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
3244                 handler->fingerprint()->as_basic_args_string(),
3245                 handler->fingerprint()->as_string(), insts_size);
3246   st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
3247   if (Verbose || PrintStubCode) {
3248     address first_pc = handler->base_address();
3249     if (first_pc != nullptr) {
3250       Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
3251       st->cr();
3252     }
3253   }
3254 }
3255 #endif // PRODUCT
3256 
3257 bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob,
3258                                                   AdapterHandlerEntry* handler,
3259                                                   CompiledEntrySignature& ces,
3260                                                   bool allocate_code_blob,
3261                                                   bool is_transient) {
3262   if (log_is_enabled(Info, perf, class, link)) {
3263     ClassLoader::perf_method_adapters_count()->inc();
3264   }
3265 
3266   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
3267   CodeBuffer buffer(buf);
3268   short buffer_locs[20];
3269   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
3270                                          sizeof(buffer_locs)/sizeof(relocInfo));
3271   MacroAssembler masm(&buffer);


3272 
3273   // Generate the i2c/c2i adapters, using the compiled-Java calling conventions computed above

3274   SharedRuntime::generate_i2c2i_adapters(&masm,
3275                                          ces.args_on_stack(),
3276                                          ces.sig(),
3277                                          ces.regs(),
3278                                          ces.sig_cc(),
3279                                          ces.regs_cc(),
3280                                          ces.sig_cc_ro(),
3281                                          ces.regs_cc_ro(),
3282                                          handler,
3283                                          adapter_blob,
3284                                          allocate_code_blob);
3285 
3286   if (ces.has_scalarized_args()) {
3287     // Save a C heap allocated version of the scalarized signature and store it in the adapter
3288     GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
3289     heap_sig->appendAll(ces.sig_cc());
3290     handler->set_sig_cc(heap_sig);
3291   }
3292 #ifdef ZERO
3293   // On Zero there is no code to save, and no need to create a blob
3294   // or relocate the handler.
3295   adapter_blob = nullptr;
3296 #else
3297 #ifdef ASSERT
3298   if (VerifyAdapterSharing) {
3299     handler->save_code(buf->code_begin(), buffer.insts_size());
3300     if (is_transient) {
3301       return true;
3302     }
3303   }
3304 #endif
3305 
3306   if (adapter_blob == nullptr) {
3307     // CodeCache is full, disable compilation.
3308     // We ought to log this, but the compile log is only per compile thread,
3309     // and we're some nondescript Java thread.
3310     return false;
3311   }
3312   if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
3313     // try to save generated code
3314     const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
3315     const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
3316     bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name);
3317     assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
3318   }
3319   handler->relocate(adapter_blob->content_begin());
3320 #endif // ZERO
3321 
3322 #ifndef PRODUCT
3323   // debugging support
3324   if (PrintAdapterHandlers || PrintStubCode) {
3325     print_adapter_handler_info(tty, handler, adapter_blob);
3326   }
3327 #endif
3328 
3329   return true;
3330 }
3331 
3332 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
3333                                                            CompiledEntrySignature& ces,
3334                                                            bool allocate_code_blob,
3335                                                            bool is_transient) {
3336   AdapterFingerPrint* fp = AdapterFingerPrint::allocate(ces.sig_cc(), ces.has_inline_recv());
3337 #ifdef ASSERT
3338   // Verify that we can successfully restore the compiled entry signature object.
3339   CompiledEntrySignature ces_verify;
3340   ces_verify.initialize_from_fingerprint(fp);
3341 #endif
3342   AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
3343   if (!generate_adapter_code(adapter_blob, handler, ces, allocate_code_blob, is_transient)) {
3344     AdapterHandlerEntry::deallocate(handler);
3345     return nullptr;
3346   }
3347   if (!is_transient) {
3348     assert_lock_strong(AdapterHandlerLibrary_lock);
3349     _adapter_handler_table->put(fp, handler);
3350   }
3351   return handler;
3352 }
3353 
3354 #if INCLUDE_CDS
3355 void AdapterHandlerEntry::remove_unshareable_info() {
3356 #ifdef ASSERT
3357    _saved_code = nullptr;
3358    _saved_code_length = 0;
3359 #endif // ASSERT
3360   set_entry_points(nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, false);
3361 }
3362 
3363 class CopyAdapterTableToArchive : StackObj {
3364 private:
3365   CompactHashtableWriter* _writer;
3366   ArchiveBuilder* _builder;
3367 public:
3368   CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
3369                                                              _builder(ArchiveBuilder::current())
3370   {}
3371 
3372   bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
3373     LogStreamHandle(Trace, aot) lsh;
3374     if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
3375       assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
3376       AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
3377       assert(buffered_fp != nullptr,"sanity check");
3378       AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
3379       assert(buffered_entry != nullptr,"sanity check");
3380 

3426 // This method is used during a production run to link archived adapters (stored in the AOT Cache)
3427 // to their code in the AOT Code Cache.
3428 void AdapterHandlerEntry::link() {
3429   AdapterBlob* adapter_blob = nullptr;
3430   ResourceMark rm;
3431   assert(_fingerprint != nullptr, "_fingerprint must not be null");
3432   bool generate_code = false;
3433   // Generate code only if the AOTCodeCache is not available, or caching
3434   // adapters is disabled, or we fail to link the AdapterHandlerEntry to
3435   // its code in the AOTCodeCache.
3436   if (AOTCodeCache::is_using_adapter()) {
3437     adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this);
3438     if (adapter_blob == nullptr) {
3439       log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3440       generate_code = true;
3441     }
3442   } else {
3443     generate_code = true;
3444   }
3445   if (generate_code) {
3446     CompiledEntrySignature ces;
3447     ces.initialize_from_fingerprint(_fingerprint);
3448     if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, ces, true, false)) {
3449       // Don't throw exceptions during VM initialization because java.lang.* classes
3450       // might not have been initialized, causing problems when constructing the
3451       // Java exception object.
3452       vm_exit_during_initialization("Out of space in CodeCache for adapters");
3453     }
3454   }
3455   // Outside of the lock
3456   if (adapter_blob != nullptr) {
3457     post_adapter_creation(adapter_blob, this);
3458   }
3459   assert(_linked, "AdapterHandlerEntry must now be linked");
3460 }
3461 
3462 void AdapterHandlerLibrary::link_aot_adapters() {
3463   assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3464   _aot_adapter_handler_table.iterate([](AdapterHandlerEntry* entry) {
3465     assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3466     entry->link();
3467   });
3468 }
3469 
3470 // This method is called during a production run to look up simple adapters
3471 // in the archived adapter handler table
3472 void AdapterHandlerLibrary::lookup_simple_adapters() {
3473   assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3474 
3475   MutexLocker mu(AdapterHandlerLibrary_lock);
3476   ResourceMark rm;
3477   CompiledEntrySignature no_args;
3478   no_args.compute_calling_conventions();
3479   _no_arg_handler = lookup(no_args.sig_cc(), no_args.has_inline_recv());
3480 
3481   CompiledEntrySignature obj_args;
3482   SigEntry::add_entry(obj_args.sig(), T_OBJECT);
3483   obj_args.compute_calling_conventions();
3484   _obj_arg_handler = lookup(obj_args.sig_cc(), obj_args.has_inline_recv());
3485 
3486   CompiledEntrySignature int_args;
3487   SigEntry::add_entry(int_args.sig(), T_INT);
3488   int_args.compute_calling_conventions();
3489   _int_arg_handler = lookup(int_args.sig_cc(), int_args.has_inline_recv());
3490 
3491   CompiledEntrySignature obj_int_args;
3492   SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
3493   SigEntry::add_entry(obj_int_args.sig(), T_INT);
3494   obj_int_args.compute_calling_conventions();
3495   _obj_int_arg_handler = lookup(obj_int_args.sig_cc(), obj_int_args.has_inline_recv());
3496 
3497   CompiledEntrySignature obj_obj_args;
3498   SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3499   SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3500   obj_obj_args.compute_calling_conventions();
3501   _obj_obj_arg_handler = lookup(obj_obj_args.sig_cc(), obj_obj_args.has_inline_recv());
3502 
3503   assert(_no_arg_handler != nullptr &&
3504          _obj_arg_handler != nullptr &&
3505          _int_arg_handler != nullptr &&
3506          _obj_int_arg_handler != nullptr &&
3507          _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3508   assert(_no_arg_handler->is_linked() &&
3509          _obj_arg_handler->is_linked() &&
3510          _int_arg_handler->is_linked() &&
3511          _obj_int_arg_handler->is_linked() &&
3512          _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3513 }
3514 #endif // INCLUDE_CDS
3515 
3516 address AdapterHandlerEntry::base_address() {
3517   address base = _i2c_entry;
3518   if (base == nullptr)  base = _c2i_entry;
3519   assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
3520   assert(base <= _c2i_inline_entry || _c2i_inline_entry == nullptr, "");
3521   assert(base <= _c2i_inline_ro_entry || _c2i_inline_ro_entry == nullptr, "");
3522   assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
3523   assert(base <= _c2i_unverified_inline_entry || _c2i_unverified_inline_entry == nullptr, "");
3524   assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
3525   return base;
3526 }
3527 
3528 void AdapterHandlerEntry::relocate(address new_base) {
3529   address old_base = base_address();
3530   assert(old_base != nullptr, "");
3531   ptrdiff_t delta = new_base - old_base;
3532   if (_i2c_entry != nullptr)
3533     _i2c_entry += delta;
3534   if (_c2i_entry != nullptr)
3535     _c2i_entry += delta;
3536   if (_c2i_inline_entry != nullptr)
3537     _c2i_inline_entry += delta;
3538   if (_c2i_inline_ro_entry != nullptr)
3539     _c2i_inline_ro_entry += delta;
3540   if (_c2i_unverified_entry != nullptr)
3541     _c2i_unverified_entry += delta;
3542   if (_c2i_unverified_inline_entry != nullptr)
3543     _c2i_unverified_inline_entry += delta;
3544   if (_c2i_no_clinit_check_entry != nullptr)
3545     _c2i_no_clinit_check_entry += delta;
3546   assert(base_address() == new_base, "");
3547 }
3548 
3549 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3550   LogStreamHandle(Trace, aot) lsh;
3551   if (lsh.is_enabled()) {
3552     lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3553     lsh.cr();
3554   }
3555   it->push(&_fingerprint);
3556 }
3557 
3558 AdapterHandlerEntry::~AdapterHandlerEntry() {
3559   if (_fingerprint != nullptr) {
3560     AdapterFingerPrint::deallocate(_fingerprint);
3561     _fingerprint = nullptr;
3562   }
3563   if (_sig_cc != nullptr) {
3564     delete _sig_cc;
3565   }
3566 #ifdef ASSERT
3567   FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3568 #endif
3569   FreeHeap(this);
3570 }
3571 
3572 
3573 #ifdef ASSERT
3574 // Capture the code before relocation so that it can be compared
3575 // against other versions.  If the code is captured after relocation
3576 // then relative instructions won't be equivalent.
3577 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3578   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3579   _saved_code_length = length;
3580   memcpy(_saved_code, buffer, length);
3581 }
3582 
3583 
3584 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3585   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");

3633 
3634       struct { double data[20]; } locs_buf;
3635       struct { double data[20]; } stubs_locs_buf;
3636       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3637 #if defined(AARCH64) || defined(PPC64)
3638       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3639       // in the constant pool to ensure ordering between the barrier and oops
3640       // accesses. For native_wrappers we need a constant.
3641       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3642       // static java call that is resolved in the runtime.
3643       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3644         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3645       }
3646 #endif
3647       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3648       MacroAssembler _masm(&buffer);
3649 
3650       // Fill in the signature array, for the calling-convention call.
3651       const int total_args_passed = method->size_of_parameters();
3652 
3653       BasicType stack_sig_bt[16];
3654       VMRegPair stack_regs[16];
3655       BasicType* sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
3656       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3657 
3658       int i = 0;
3659       if (!method->is_static()) {  // Pass in receiver first
3660         sig_bt[i++] = T_OBJECT;
3661       }
3662       SignatureStream ss(method->signature());
3663       for (; !ss.at_return_type(); ss.next()) {
3664         sig_bt[i++] = ss.type();  // Collect remaining bits of signature
3665         if (ss.type() == T_LONG || ss.type() == T_DOUBLE) {
3666           sig_bt[i++] = T_VOID;   // Longs & doubles take 2 Java slots
3667         }
3668       }
3669       assert(i == total_args_passed, "");
3670       BasicType ret_type = ss.type();
3671 
3672       // Now get the compiled-Java arguments layout.
3673       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3674 
3675       // Generate the compiled-to-native wrapper code
3676       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3677 
3678       if (nm != nullptr) {
3679         {
3680           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3681           if (nm->make_in_use()) {
3682             method->set_code(method, nm);
3683           }
3684         }
3685 
3686         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3687         if (directive->PrintAssemblyOption) {
3688           nm->print_code();
3689         }
3690         DirectivesStack::release(directive);

3933         a->print_adapter_on(st);
3934         return true;
3935       } else {
3936         return false; // keep looking
3937       }
3938     };
3939     assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3940     _adapter_handler_table->iterate(findblob_runtime_table);
3941   }
3942   assert(found, "Should have found handler");
3943 }
3944 
3945 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3946   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3947   if (get_i2c_entry() != nullptr) {
3948     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3949   }
3950   if (get_c2i_entry() != nullptr) {
3951     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3952   }
3953   if (get_c2i_inline_entry() != nullptr) {
3954     st->print(" c2iVE: " INTPTR_FORMAT, p2i(get_c2i_inline_entry()));
3955   }
3956   if (get_c2i_inline_ro_entry() != nullptr) {
3957     st->print(" c2iVROE: " INTPTR_FORMAT, p2i(get_c2i_inline_ro_entry()));
3958   }
3959   if (get_c2i_unverified_entry() != nullptr) {
3960     st->print(" c2iUE: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3961   }
3962   if (get_c2i_unverified_inline_entry() != nullptr) {
3963     st->print(" c2iUVE: " INTPTR_FORMAT, p2i(get_c2i_unverified_inline_entry()));
3964   }
3965   if (get_c2i_no_clinit_check_entry() != nullptr) {
3966     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3967   }
3968   st->cr();
3969 }
3970 
3971 #ifndef PRODUCT
3972 
3973 void AdapterHandlerLibrary::print_statistics() {
3974   print_table_statistics();
3975 }
3976 
3977 #endif /* PRODUCT */
3978 
3979 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3980   if (entry == _abstract_method_handler) {
3981     return true;
3982   }
3983   return false;

4039         event.set_method(method);
4040         event.commit();
4041       }
4042     }
4043   }
4044   return activation;
4045 }
4046 
4047 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
4048   // After any safepoint, just before going back to compiled code,
4049   // we inform the GC that we will be doing initializing writes to
4050   // this object in the future without emitting card-marks, so
4051   // GC may take any compensating steps.
4052 
4053   oop new_obj = current->vm_result_oop();
4054   if (new_obj == nullptr) return;
4055 
4056   BarrierSet *bs = BarrierSet::barrier_set();
4057   bs->on_slowpath_allocation_exit(current, new_obj);
4058 }
4059 
4060 // We are at a compiled-code-to-interpreter call. We need backing
4061 // buffers for all inline type arguments. Allocate an object array to
4062 // hold them (convenient because once we're done with it we don't have
4063 // to worry about freeing it).
4064 oop SharedRuntime::allocate_inline_types_impl(JavaThread* current, methodHandle callee, bool allocate_receiver, TRAPS) {
4065   assert(InlineTypePassFieldsAsArgs, "no reason to call this");
4066   ResourceMark rm;
4067 
4068   int nb_slots = 0;
4069   InstanceKlass* holder = callee->method_holder();
4070   allocate_receiver &= !callee->is_static() && holder->is_inline_klass() && callee->is_scalarized_arg(0);
4071   if (allocate_receiver) {
4072     nb_slots++;
4073   }
4074   int arg_num = callee->is_static() ? 0 : 1;
4075   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4076     BasicType bt = ss.type();
4077     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4078       nb_slots++;
4079     }
4080     if (bt != T_VOID) {
4081       arg_num++;
4082     }
4083   }
4084   objArrayOop array_oop = oopFactory::new_objectArray(nb_slots, CHECK_NULL);
4085   objArrayHandle array(THREAD, array_oop);
4086   arg_num = callee->is_static() ? 0 : 1;
4087   int i = 0;
4088   if (allocate_receiver) {
4089     InlineKlass* vk = InlineKlass::cast(holder);
4090     oop res = vk->allocate_instance(CHECK_NULL);
4091     array->obj_at_put(i++, res);
4092   }
4093   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4094     BasicType bt = ss.type();
4095     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4096       InlineKlass* vk = ss.as_inline_klass(holder);
4097       assert(vk != nullptr, "Unexpected klass");
4098       oop res = vk->allocate_instance(CHECK_NULL);
4099       array->obj_at_put(i++, res);
4100     }
4101     if (bt != T_VOID) {
4102       arg_num++;
4103     }
4104   }
4105   return array();
4106 }
4107 
4108 JRT_ENTRY(void, SharedRuntime::allocate_inline_types(JavaThread* current, Method* callee_method, bool allocate_receiver))
4109   methodHandle callee(current, callee_method);
4110   oop array = SharedRuntime::allocate_inline_types_impl(current, callee, allocate_receiver, CHECK);
4111   current->set_vm_result_oop(array);
4112   current->set_vm_result_metadata(callee()); // TODO: required to keep callee live?
4113 JRT_END
4114 
4115 // We're returning from an interpreted method: load each field into a
4116 // register following the calling convention
4117 JRT_LEAF(void, SharedRuntime::load_inline_type_fields_in_regs(JavaThread* current, oopDesc* res))
4118 {
4119   assert(res->klass()->is_inline_klass(), "only inline types here");
4120   ResourceMark rm;
4121   RegisterMap reg_map(current,
4122                       RegisterMap::UpdateMap::include,
4123                       RegisterMap::ProcessFrames::include,
4124                       RegisterMap::WalkContinuation::skip);
4125   frame stubFrame = current->last_frame();
4126   frame callerFrame = stubFrame.sender(&reg_map);
4127   assert(callerFrame.is_interpreted_frame(), "should be coming from interpreter");
4128 
4129   InlineKlass* vk = InlineKlass::cast(res->klass());
4130 
4131   const Array<SigEntry>* sig_vk = vk->extended_sig();
4132   const Array<VMRegPair>* regs = vk->return_regs();
4133 
4134   if (regs == nullptr) {
4135     // The fields of the inline klass don't fit in registers, bail out
4136     return;
4137   }
4138 
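       // regs->at(0) is reserved for the returned oop itself (checked by
       // the assert at the end); field values start at index 1.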
4139   int j = 1;
4140   for (int i = 0; i < sig_vk->length(); i++) {
4141     BasicType bt = sig_vk->at(i)._bt;
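         // T_METADATA entries are structural markers in the extended
         // signature; they carry no value and map to no register.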
4142     if (bt == T_METADATA) {
4143       continue;
4144     }
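         // A T_VOID entry either marks the second half of a long/double,
         // which consumes an extra entry in the register array, or is a
         // structural marker that maps to no register at all.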
4145     if (bt == T_VOID) {
4146       if (sig_vk->at(i-1)._bt == T_LONG ||
4147           sig_vk->at(i-1)._bt == T_DOUBLE) {
4148         j++;
4149       }
4150       continue;
4151     }
4152     int off = sig_vk->at(i)._offset;
4153     assert(off > 0, "offset in object should be positive");
4154     VMRegPair pair = regs->at(j);
4155     address loc = reg_map.location(pair.first(), nullptr);
4156     switch (bt) {
4157     case T_BOOLEAN:
4158       *(jboolean*)loc = res->bool_field(off);
4159       break;
4160     case T_CHAR:
4161       *(jchar*)loc = res->char_field(off);
4162       break;
4163     case T_BYTE:
4164       *(jbyte*)loc = res->byte_field(off);
4165       break;
4166     case T_SHORT:
4167       *(jshort*)loc = res->short_field(off);
4168       break;
4169     case T_INT: {
4170       *(jint*)loc = res->int_field(off);
4171       break;
4172     }
4173     case T_LONG:
4174 #ifdef _LP64
4175       *(intptr_t*)loc = res->long_field(off);
4176 #else
4177       Unimplemented();
4178 #endif
4179       break;
4180     case T_OBJECT:
4181     case T_ARRAY: {
4182       *(oop*)loc = res->obj_field(off);
4183       break;
4184     }
4185     case T_FLOAT:
4186       *(jfloat*)loc = res->float_field(off);
4187       break;
4188     case T_DOUBLE:
4189       *(jdouble*)loc = res->double_field(off);
4190       break;
4191     default:
4192       ShouldNotReachHere();
4193     }
4194     j++;
4195   }
4196   assert(j == regs->length(), "missed a field?");
4197 
4198 #ifdef ASSERT
4199   VMRegPair pair = regs->at(0);
4200   address loc = reg_map.location(pair.first(), nullptr);
4201   assert(*(oopDesc**)loc == res, "overwritten object");
4202 #endif
4203 
4204   current->set_vm_result_oop(res);
4205 }
4206 JRT_END
4207 
4208 // We've returned to an interpreted method; the interpreter needs a
4209 // reference to an inline type instance. Allocate it and initialize it
4210 // from the field values in registers.
4211 JRT_BLOCK_ENTRY(void, SharedRuntime::store_inline_type_fields_to_buf(JavaThread* current, intptr_t res))
4212 {
4213   ResourceMark rm;
4214   RegisterMap reg_map(current,
4215                       RegisterMap::UpdateMap::include,
4216                       RegisterMap::ProcessFrames::include,
4217                       RegisterMap::WalkContinuation::skip);
4218   frame stubFrame = current->last_frame();
4219   frame callerFrame = stubFrame.sender(&reg_map);
4220 
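       // In debug builds, independently recover the returned klass (or
       // nullptr if the fields are not in registers) to cross-check the
       // calling convention below.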
4221 #ifdef ASSERT
4222   InlineKlass* verif_vk = InlineKlass::returned_inline_klass(reg_map);
4223 #endif
4224 
4225   if (!is_set_nth_bit(res, 0)) {
4226     // We're not returning with inline type fields in registers (the
4227     // calling convention didn't allow it for this inline klass)
4228     assert(!Metaspace::contains((void*)res), "should be oop or pointer in buffer area");
4229     current->set_vm_result_oop((oopDesc*)res);
4230     assert(verif_vk == nullptr, "broken calling convention");
4231     return;
4232   }
4233 
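       // Bit 0 is set: the remaining bits are the InlineKlass pointer of
       // the value whose fields are being returned in registers.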
4234   clear_nth_bit(res, 0);
4235   InlineKlass* vk = (InlineKlass*)res;
4236   assert(verif_vk == vk, "broken calling convention");
4237   assert(Metaspace::contains((void*)res), "should be klass");
4238 
4239   // Allocate handles for every oop field so they stay valid across
4240   // a safepoint during the allocation below.
4241   GrowableArray<Handle> handles;
4242   vk->save_oop_fields(reg_map, handles);
4243 
4244   // Safepointing was unsafe until this point: the oop fields are now
       // protected by handles, so the allocation below may safepoint.
4245   JRT_BLOCK;
4246   {
4247     JavaThread* THREAD = current;
4248     oop vt = vk->realloc_result(reg_map, handles, CHECK);
4249     current->set_vm_result_oop(vt);
4250   }
4251   JRT_BLOCK_END;
4252 }
4253 JRT_END