src/hotspot/share/runtime/sharedRuntime.cpp

  27 #include "classfile/javaClasses.inline.hpp"
  28 #include "classfile/stringTable.hpp"
  29 #include "classfile/vmClasses.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "code/codeCache.hpp"
  32 #include "code/compiledIC.hpp"
  33 #include "code/nmethod.inline.hpp"
  34 #include "code/scopeDesc.hpp"
  35 #include "code/vtableStubs.hpp"
  36 #include "compiler/abstractCompiler.hpp"
  37 #include "compiler/compileBroker.hpp"
  38 #include "compiler/disassembler.hpp"
  39 #include "gc/shared/barrierSet.hpp"
  40 #include "gc/shared/collectedHeap.hpp"
  41 #include "gc/shared/gcLocker.inline.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "interpreter/interpreterRuntime.hpp"
  44 #include "jvm.h"
  45 #include "jfr/jfrEvents.hpp"
  46 #include "logging/log.hpp"
  47 #include "memory/oopFactory.hpp"
  48 #include "memory/resourceArea.hpp"
  49 #include "memory/universe.hpp"
  49 #include "metaprogramming/primitiveConversions.hpp"
  50 #include "oops/klass.hpp"
  51 #include "oops/method.inline.hpp"
  52 #include "oops/objArrayKlass.hpp"
  53 #include "oops/oop.inline.hpp"
  54 #include "prims/forte.hpp"
  55 #include "prims/jvmtiExport.hpp"
  56 #include "prims/jvmtiThreadState.hpp"
  57 #include "prims/methodHandles.hpp"
  58 #include "prims/nativeLookup.hpp"
  59 #include "runtime/arguments.hpp"
  60 #include "runtime/atomic.hpp"
  61 #include "runtime/basicLock.inline.hpp"
  62 #include "runtime/frame.inline.hpp"
  63 #include "runtime/handles.inline.hpp"
  64 #include "runtime/init.hpp"
  65 #include "runtime/interfaceSupport.inline.hpp"
  66 #include "runtime/java.hpp"
  67 #include "runtime/javaCalls.hpp"
  68 #include "runtime/jniHandles.inline.hpp"
  69 #include "runtime/perfData.hpp"
  70 #include "runtime/sharedRuntime.hpp"
  71 #include "runtime/stackWatermarkSet.hpp"
  72 #include "runtime/stubRoutines.hpp"
  73 #include "runtime/synchronizer.inline.hpp"

1164 // for a call currently in progress, i.e., arguments have been pushed on the stack
1165 // but the callee has not been invoked yet.  Caller frame must be compiled.
1166 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1167                                               CallInfo& callinfo, TRAPS) {
1168   Handle receiver;
1169   Handle nullHandle;  // create a handy null handle for exception returns
1170   JavaThread* current = THREAD;
1171 
1172   assert(!vfst.at_end(), "Java frame must exist");
1173 
1174   // Find caller and bci from vframe
1175   methodHandle caller(current, vfst.method());
1176   int          bci   = vfst.bci();
1177 
1178   if (caller->is_continuation_enter_intrinsic()) {
1179     bc = Bytecodes::_invokestatic;
1180     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1181     return receiver;
1182   }
1183 
1184   Bytecode_invoke bytecode(caller, bci);
1185   int bytecode_index = bytecode.index();
1186   bc = bytecode.invoke_code();
1187 
1188   methodHandle attached_method(current, extract_attached_method(vfst));
1189   if (attached_method.not_null()) {
1190     Method* callee = bytecode.static_target(CHECK_NH);
1191     vmIntrinsics::ID id = callee->intrinsic_id();
1192     // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual call,
1193     // it attaches the statically resolved method to the call site.
1194     if (MethodHandles::is_signature_polymorphic(id) &&
1195         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1196       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1197 
1198       // Adjust invocation mode according to the attached method.
1199       switch (bc) {
1200         case Bytecodes::_invokevirtual:
1201           if (attached_method->method_holder()->is_interface()) {
1202             bc = Bytecodes::_invokeinterface;
1203           }
1204           break;
1205         case Bytecodes::_invokeinterface:
1206           if (!attached_method->method_holder()->is_interface()) {
1207             bc = Bytecodes::_invokevirtual;
1208           }
1209           break;
1210         case Bytecodes::_invokehandle:
1211           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1212             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1213                                               : Bytecodes::_invokevirtual;
1214           }
1215           break;
1216         default:
1217           break;
1218       }
1219     }
1220   }
1221 
1222   assert(bc != Bytecodes::_illegal, "not initialized");
1223 
1224   bool has_receiver = bc != Bytecodes::_invokestatic &&
1225                       bc != Bytecodes::_invokedynamic &&
1226                       bc != Bytecodes::_invokehandle;
1227 
1228   // Find receiver for non-static call
1229   if (has_receiver) {
1230     // This register map must be updated since we need to find the receiver for
1231     // compiled frames. The receiver might be in a register.
1232     RegisterMap reg_map2(current,
1233                          RegisterMap::UpdateMap::include,
1234                          RegisterMap::ProcessFrames::include,
1235                          RegisterMap::WalkContinuation::skip);
1236     frame stubFrame   = current->last_frame();
1237     // Caller-frame is a compiled frame
1238     frame callerFrame = stubFrame.sender(&reg_map2);
1239 
1240     if (attached_method.is_null()) {
1241       Method* callee = bytecode.static_target(CHECK_NH);
1242       if (callee == nullptr) {
1243         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1244       }
1245     }
1246 
1247     // Retrieve from a compiled argument list
1248     receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1249     assert(oopDesc::is_oop_or_null(receiver()), "");
1250 
1251     if (receiver.is_null()) {
1252       THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1253     }
1254   }
1255 
1256   // Resolve method
1257   if (attached_method.not_null()) {
1258     // Parameterized by attached method.
1259     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH);
1260   } else {
1261     // Parameterized by bytecode.
1262     constantPoolHandle constants(current, caller->constants());
1263     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1264   }
1265 
1266 #ifdef ASSERT
1267   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1268   if (has_receiver) {
1269     assert(receiver.not_null(), "should have thrown exception");
1270     Klass* receiver_klass = receiver->klass();
1271     Klass* rk = nullptr;
1272     if (attached_method.not_null()) {
1273       // In case there's resolved method attached, use its holder during the check.
1274       rk = attached_method->method_holder();
1275     } else {
1276       // Klass is already loaded.
1277       constantPoolHandle constants(current, caller->constants());
1278       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1279     }
1280     Klass* static_receiver_klass = rk;
1281     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1282            "actual receiver must be subclass of static receiver klass");
1283     if (receiver_klass->is_instance_klass()) {
1284       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1285         tty->print_cr("ERROR: Klass not yet initialized!!");
1286         receiver_klass->print();
1287       }
1288       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1289     }
1290   }
1291 #endif
1292 
1293   return receiver;
1294 }
1295 
1296 methodHandle SharedRuntime::find_callee_method(TRAPS) {
1297   JavaThread* current = THREAD;
1298   ResourceMark rm(current);
1299   // We first need to check whether any Java activations (compiled, interpreted)
1300   // exist on the stack since the last JavaCall.  If not, we need
1301   // to get the target method from the JavaCall wrapper.
1302   vframeStream vfst(current, true);  // Do not skip any javaCalls
1303   methodHandle callee_method;
1304   if (vfst.at_end()) {
1305     // No Java frames were found on the stack since we did the JavaCall.
1306     // Hence the stack can only contain an entry_frame.  We need to
1307     // find the target method from the stub frame.
1308     RegisterMap reg_map(current,
1309                         RegisterMap::UpdateMap::skip,
1310                         RegisterMap::ProcessFrames::include,
1311                         RegisterMap::WalkContinuation::skip);
1312     frame fr = current->last_frame();
1313     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1314     fr = fr.sender(&reg_map);
1315     assert(fr.is_entry_frame(), "must be");
1316     // fr is now pointing to the entry frame.
1317     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1318   } else {
1319     Bytecodes::Code bc;
1320     CallInfo callinfo;
1321     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1322     callee_method = methodHandle(current, callinfo.selected_method());
1323   }
1324   assert(callee_method()->is_method(), "must be");
1325   return callee_method;
1326 }
1327 
1328 // Resolves a call.
1329 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, TRAPS) {
1330   JavaThread* current = THREAD;
1331   ResourceMark rm(current);
1332   RegisterMap cbl_map(current,
1333                       RegisterMap::UpdateMap::skip,
1334                       RegisterMap::ProcessFrames::include,
1335                       RegisterMap::WalkContinuation::skip);
1336   frame caller_frame = current->last_frame().sender(&cbl_map);
1337 
1338   CodeBlob* caller_cb = caller_frame.cb();
1339   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1340   nmethod* caller_nm = caller_cb->as_nmethod();
1341 
1342   // determine call info & receiver
1343   // note: a) receiver is null for static calls
1344   //       b) an exception is thrown if receiver is null for non-static calls
1345   CallInfo call_info;
1346   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1347   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1348 
1349   NoSafepointVerifier nsv;
1350 
1351   methodHandle callee_method(current, call_info.selected_method());
1352 
1353   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1354          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1355          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1356          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1357          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1358 
1359   assert(!caller_nm->is_unloading(), "It should not be unloading");
1360 
1361 #ifndef PRODUCT
1362   // tracing/debugging/statistics
1363   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1364                  (is_virtual) ? (&_resolve_virtual_ctr) :
1365                                 (&_resolve_static_ctr);
1366   Atomic::inc(addr);
1367 
1368   if (TraceCallFixup) {
1369     ResourceMark rm(current);
1370     tty->print("resolving %s%s (%s) call to",
1371                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1372                Bytecodes::name(invoke_code));
1373     callee_method->print_short_name(tty);
1374     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1375                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1376   }
1377 #endif
1378 
1379   if (invoke_code == Bytecodes::_invokestatic) {
1380     assert(callee_method->method_holder()->is_initialized() ||
1381            callee_method->method_holder()->is_reentrant_initialization(current),
1382            "invalid class initialization state for invoke_static");
1383     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1384       // In order to keep the class initialization check, do not patch the call
1385       // site for a static call when the class is not fully initialized.
1386       // The proper check is enforced by call site re-resolution on every invocation.
1387       //
1388       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1389       // explicit class initialization check is put in nmethod entry (VEP).
1390       assert(callee_method->method_holder()->is_linked(), "must be");
1391       return callee_method;
1392     }
1393   }
1394 
1395 
1396   // JSR 292 key invariant:
1397   // If the resolved method is a MethodHandle invoke target, the call
1398   // site must be a MethodHandle call site, because the lambda form might tail-call,
1399   // leaving the stack in a state unknown to either caller or callee.
1400 
1401   // Compute entry points. The computation of the entry points is independent of
1402   // patching the call.
1403 
1404   // Make sure the callee nmethod does not get deoptimized and removed before
1405   // we are done patching the code.
1406 
1407 
1408   CompiledICLocker ml(caller_nm);
1409   if (is_virtual && !is_optimized) {
1410     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1411     inline_cache->update(&call_info, receiver->klass());
1412   } else {
1413     // Callsite is a direct call - set it to the destination method
1414     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1415     callsite->set(callee_method);
1416   }
1417 
1418   return callee_method;
1419 }
1420 
1421 // Inline caches exist only in compiled code
1422 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1423 #ifdef ASSERT
1424   RegisterMap reg_map(current,
1425                       RegisterMap::UpdateMap::skip,
1426                       RegisterMap::ProcessFrames::include,
1427                       RegisterMap::WalkContinuation::skip);
1428   frame stub_frame = current->last_frame();
1429   assert(stub_frame.is_runtime_frame(), "sanity check");
1430   frame caller_frame = stub_frame.sender(&reg_map);
1431   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1432 #endif /* ASSERT */
1433 
1434   methodHandle callee_method;
1435   JRT_BLOCK
1436     callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1437     // Return Method* through TLS
1438     current->set_vm_result_2(callee_method());
1439   JRT_BLOCK_END
1440   // return compiled code entry point after potential safepoints
1441   return get_resolved_entry(current, callee_method);
1442 JRT_END
1443 
1444 
1445 // Handle call site that has been made non-entrant
1446 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1447   // 6243940 We might end up in here if the callee is deoptimized
1448   // as we race to call it.  We don't want to take a safepoint if
1449   // the caller was interpreted because the caller frame will look
1450   // interpreted to the stack walkers and arguments are now
1451   // "compiled" so it is much better to make this transition
1452   // invisible to the stack walking code. The i2c path will
1453   // place the callee method in the callee_target. It is stashed
1454   // there because if we try to find the callee by normal means, a
1455   // safepoint is possible and we would have trouble GC'ing the compiled args.
1456   RegisterMap reg_map(current,
1457                       RegisterMap::UpdateMap::skip,
1458                       RegisterMap::ProcessFrames::include,
1459                       RegisterMap::WalkContinuation::skip);
1460   frame stub_frame = current->last_frame();
1461   assert(stub_frame.is_runtime_frame(), "sanity check");
1462   frame caller_frame = stub_frame.sender(&reg_map);
1463 
1464   if (caller_frame.is_interpreted_frame() ||
1465       caller_frame.is_entry_frame() ||
1466       caller_frame.is_upcall_stub_frame()) {
1467     Method* callee = current->callee_target();
1468     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1469     current->set_vm_result_2(callee);
1470     current->set_callee_target(nullptr);
1471     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1472       // Bypass class initialization checks in c2i when caller is in native.
1473       // JNI calls to static methods don't have class initialization checks.
1474       // Fast class initialization checks are present in c2i adapters and call into
1475       // SharedRuntime::handle_wrong_method() on the slow path.
1476       //
1477       // JVM upcalls may land here as well, but there's a proper check present in
1478       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1479       // so bypassing it in c2i adapter is benign.
1480       return callee->get_c2i_no_clinit_check_entry();
1481     } else {
1482       return callee->get_c2i_entry();
1483     }
1484   }
1485 
1486   // Must be the compiled->compiled path, which is safe to stackwalk
1487   methodHandle callee_method;
1488   JRT_BLOCK
1489     // Force resolving of caller (if we called from compiled frame)
1490     callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1491     current->set_vm_result_2(callee_method());
1492   JRT_BLOCK_END
1493   // return compiled code entry point after potential safepoints
1494   return get_resolved_entry(current, callee_method);
1495 JRT_END
1496 
1497 // Handle abstract method call
1498 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1499   // Verbose error message for AbstractMethodError.
1500   // Get the called method from the invoke bytecode.
1501   vframeStream vfst(current, true);
1502   assert(!vfst.at_end(), "Java frame must exist");
1503   methodHandle caller(current, vfst.method());
1504   Bytecode_invoke invoke(caller, vfst.bci());
1505   DEBUG_ONLY( invoke.verify(); )
1506 
1507   // Find the compiled caller frame.
1508   RegisterMap reg_map(current,
1509                       RegisterMap::UpdateMap::include,
1510                       RegisterMap::ProcessFrames::include,
1511                       RegisterMap::WalkContinuation::skip);
1512   frame stubFrame = current->last_frame();
1513   assert(stubFrame.is_runtime_frame(), "must be");
1514   frame callerFrame = stubFrame.sender(&reg_map);
1515   assert(callerFrame.is_compiled_frame(), "must be");
1516 
1517   // Install exception and return forward entry.
1518   address res = SharedRuntime::throw_AbstractMethodError_entry();
1519   JRT_BLOCK
1520     methodHandle callee(current, invoke.static_target(current));
1521     if (!callee.is_null()) {
1522       oop recv = callerFrame.retrieve_receiver(&reg_map);
1523       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1524       res = StubRoutines::forward_exception_entry();
1525       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1526     }
1527   JRT_BLOCK_END
1528   return res;
1529 JRT_END
1530 
1531 // Return verified_code_entry if interp_only_mode is not set for the current thread;
1532 // otherwise return the c2i entry.
1533 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1534   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1535     // In interp_only_mode we need to go to the interpreted entry
1536     // The c2i won't patch in this mode -- see fixup_callers_callsite
1537     return callee_method->get_c2i_entry();
1538   }
1539   assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1540   return callee_method->verified_code_entry();
1541 }
1542 
1543 // resolve a static call and patch code
1544 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
1545   methodHandle callee_method;
1546   bool enter_special = false;
1547   JRT_BLOCK
1548     callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1549     current->set_vm_result_2(callee_method());
1550   JRT_BLOCK_END
1551   // return compiled code entry point after potential safepoints
1552   return get_resolved_entry(current, callee_method);
1553 JRT_END
1554 
1555 // resolve virtual call and update inline cache to monomorphic
1556 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1557   methodHandle callee_method;
1558   JRT_BLOCK
1559     callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1560     current->set_vm_result_2(callee_method());
1561   JRT_BLOCK_END
1562   // return compiled code entry point after potential safepoints
1563   return get_resolved_entry(current, callee_method);
1564 JRT_END
1565 
1566 
1567 // Resolve a virtual call that can be statically bound (e.g., always
1568 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1569 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1570   methodHandle callee_method;
1571   JRT_BLOCK
1572     callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1573     current->set_vm_result_2(callee_method());
1574   JRT_BLOCK_END
1575   // return compiled code entry point after potential safepoints
1576   return get_resolved_entry(current, callee_method);
1577 JRT_END
1578 
1579 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1580   JavaThread* current = THREAD;
1581   ResourceMark rm(current);
1582   CallInfo call_info;
1583   Bytecodes::Code bc;
1584 
1585   // receiver is null for static calls. An exception is thrown for null
1586   // receivers for non-static calls
1587   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1588 
1589   methodHandle callee_method(current, call_info.selected_method());
1590 
1591 #ifndef PRODUCT
1592   Atomic::inc(&_ic_miss_ctr);
1593 
1594   // Statistics & Tracing
1595   if (TraceCallFixup) {
1596     ResourceMark rm(current);
1597     tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1598     callee_method->print_short_name(tty);
1599     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1600   }
1601 
1602   if (ICMissHistogram) {
1603     MutexLocker m(VMStatistic_lock);
1604     RegisterMap reg_map(current,
1605                         RegisterMap::UpdateMap::skip,
1606                         RegisterMap::ProcessFrames::include,
1607                         RegisterMap::WalkContinuation::skip);
1608     frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1609     // produce statistics under the lock
1610     trace_ic_miss(f.pc());
1611   }
1612 #endif
1613 
1614   // install an event collector so that when a vtable stub is created the
1615   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1616   // event can't be posted when the stub is created as locks are held
1617   // - instead the event will be deferred until the event collector goes
1618   // out of scope.
1619   JvmtiDynamicCodeEventCollector event_collector;
1620 
1621   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1622   RegisterMap reg_map(current,
1623                       RegisterMap::UpdateMap::skip,
1624                       RegisterMap::ProcessFrames::include,
1625                       RegisterMap::WalkContinuation::skip);
1626   frame caller_frame = current->last_frame().sender(&reg_map);
1627   CodeBlob* cb = caller_frame.cb();
1628   nmethod* caller_nm = cb->as_nmethod();
1629 
1630   CompiledICLocker ml(caller_nm);
1631   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1632   inline_cache->update(&call_info, receiver()->klass());
1633 
1634   return callee_method;
1635 }
1636 
1637 //
1638 // Resets a call-site in compiled code so it will get resolved again.
1639 // This routine handles virtual call sites, optimized virtual call
1640 // sites, and static call sites. Typically used to change a call site's
1641 // destination from compiled to interpreted.
1642 //
1643 methodHandle SharedRuntime::reresolve_call_site(TRAPS) {
1644   JavaThread* current = THREAD;
1645   ResourceMark rm(current);
1646   RegisterMap reg_map(current,
1647                       RegisterMap::UpdateMap::skip,
1648                       RegisterMap::ProcessFrames::include,
1649                       RegisterMap::WalkContinuation::skip);
1650   frame stub_frame = current->last_frame();
1651   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1652   frame caller = stub_frame.sender(&reg_map);
1653 
1654   // Do nothing if the frame isn't a live compiled frame.
1655   // The nmethod could be deoptimized by the time we get here
1656   // so no update to the caller is needed.
1657 
1658   if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1659       (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1660 
1661     address pc = caller.pc();
1662 
1663     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1664     assert(caller_nm != nullptr, "did not find caller nmethod");
1665 
1666     // Default call_addr is the location of the "basic" call.
1667     // Determine the address of the call we are re-resolving. With
1668     // Inline Caches we will always find a recognizable call.
1669     // With Inline Caches disabled we may or may not find a
1670     // recognizable call. We will always find a call for static
1671     // calls and for optimized virtual calls. For vanilla virtual
1672     // calls it depends on the state of the UseInlineCaches switch.
1673     //
1674     // With Inline Caches disabled we can get here for a virtual call
1675     // for two reasons:
1676     //   1 - calling an abstract method. The vtable for abstract methods
1677     //       will run us through handle_wrong_method and we will eventually
1678     //       end up in the interpreter to throw the AbstractMethodError.
1679     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1680     //       call and between the time we fetch the entry address and
1681     //       jump to it the target gets deoptimized. Similar to 1,
1682     //       we will wind up in the interpreter (through a c2i adapter with C2).
1683     //
1684     CompiledICLocker ml(caller_nm);
1685     address call_addr = caller_nm->call_instruction_address(pc);
1686 
1687     if (call_addr != nullptr) {
1688       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1689       // bytes back in the instruction stream so we must also check for reloc info.
1690       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1691       bool ret = iter.next(); // Get item
1692       if (ret) {
1693         switch (iter.type()) {
1694           case relocInfo::static_call_type:
1695           case relocInfo::opt_virtual_call_type: {
1696             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1697             cdc->set_to_clean();
1698             break;
1699           }
1700 
1701           case relocInfo::virtual_call_type: {
1702             // compiled, dispatched call (which used to call an interpreted method)
1703             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1704             inline_cache->set_to_clean();
1705             break;
1706           }
1707           default:
1708             break;
1709         }
1710       }
1711     }
1712   }
1713 
1714   methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1715 
1716 
1717 #ifndef PRODUCT
1718   Atomic::inc(&_wrong_method_ctr);
1719 
1720   if (TraceCallFixup) {
1721     ResourceMark rm(current);
1722     tty->print("handle_wrong_method reresolving call to");
1723     callee_method->print_short_name(tty);
1724     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1725   }
1726 #endif
1727 
1728   return callee_method;
1729 }
1730 
1731 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1732   // The faulting unsafe accesses should be changed to throw the error
1733   // synchronously instead. Meanwhile the faulting instruction will be
1734   // skipped over (effectively turning it into a no-op) and an
1735   // asynchronous exception will be raised which the thread will
1736   // handle at a later point. If the instruction is a load it will
1737   // return garbage.
1738 
1739   // Request an async exception.
1740   thread->set_pending_unsafe_access_error();
1741 
1742   // Return address of next instruction to execute.

1908   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
1909 
1910   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
1911   if (message == nullptr) {
1912     // Shouldn't happen, but don't cause even more problems if it does
1913     message = const_cast<char*>(caster_klass->external_name());
1914   } else {
1915     jio_snprintf(message,
1916                  msglen,
1917                  "class %s cannot be cast to class %s (%s%s%s)",
1918                  caster_name,
1919                  target_name,
1920                  caster_klass_description,
1921                  klass_separator,
1922                  target_klass_description
1923                  );
1924   }
1925   return message;
1926 }
1927 
1928 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
1929   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
1930 JRT_END
1931 
1932 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
1933   if (!SafepointSynchronize::is_synchronizing()) {
1934     // Only try quick_enter() if we're not trying to reach a safepoint
1935     // so that the calling thread reaches the safepoint more quickly.
1936     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
1937       return;
1938     }
1939   }
1940   // NO_ASYNC required because an async exception on the state transition destructor
1941   // would leave you with the lock held and it would never be released.
1942   // The normal monitorenter NullPointerException is thrown without acquiring a lock
1943   // and the model is that an exception implies the method failed.
1944   JRT_BLOCK_NO_ASYNC
1945   Handle h_obj(THREAD, obj);
1946   ObjectSynchronizer::enter(h_obj, lock, current);
1947   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2138   tty->print_cr("        %% in nested categories are relative to their category");
2139   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2140   tty->cr();
2141 
2142   MethodArityHistogram h;
2143 }
2144 #endif
2145 
2146 #ifndef PRODUCT
2147 static int _lookups; // number of calls to lookup
2148 static int _equals;  // number of buckets checked with matching hash
2149 static int _hits;    // number of successful lookups
2150 static int _compact; // number of equals calls with compact signature
2151 #endif
2152 
2153 // A simple wrapper class around the calling convention information
2154 // that allows sharing of adapters for the same calling convention.
2155 class AdapterFingerPrint : public CHeapObj<mtCode> {
2156  private:
2157   enum {
2158     _basic_type_bits = 4,
2159     _basic_type_mask = right_n_bits(_basic_type_bits),
2160     _basic_types_per_int = BitsPerInt / _basic_type_bits,
2161     _compact_int_count = 3
2162   };
2163   // TO DO:  Consider integrating this with a more global scheme for compressing signatures.
2164   // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
2165 
2166   union {
2167     int  _compact[_compact_int_count];
2168     int* _fingerprint;
2169   } _value;
2170   int _length; // A negative length indicates the fingerprint is in the compact form;
2171                // otherwise _value._fingerprint points to the array.
2172 
2173   // Remap BasicTypes that are handled equivalently by the adapters.
2174   // These are correct for the current system but someday it might be
2175   // necessary to make this mapping platform dependent.
2176   static int adapter_encoding(BasicType in) {
2177     switch (in) {
2178       case T_BOOLEAN:
2179       case T_BYTE:
2180       case T_SHORT:
2181       case T_CHAR:
2182         // These are all promoted to T_INT in the calling convention
2183         return T_INT;
2184 
2185       case T_OBJECT:
2186       case T_ARRAY:
2187         // In other words, we assume that any register good enough for
2188         // an int or long is good enough for a managed pointer.
2189 #ifdef _LP64
2190         return T_LONG;
2191 #else
2192         return T_INT;
2193 #endif
2194 
2195       case T_INT:
2196       case T_LONG:
2197       case T_FLOAT:
2198       case T_DOUBLE:
2199       case T_VOID:
2200         return in;
2201 
2202       default:
2203         ShouldNotReachHere();
2204         return T_CONFLICT;
2205     }
2206   }
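      // Illustrative example (hypothetical signature): on LP64 an argument list
      // (boolean, String, long) encodes as (T_INT, T_LONG, T_LONG). The
      // distinctions lost here are exactly the ones the i2c/c2i adapters do not
      // care about, since they only move values between registers and stack slots.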
2207 
2208  public:
2209   AdapterFingerPrint(int total_args_passed, BasicType* sig_bt) {
2210     // The fingerprint is based on the BasicType signature encoded
2211     // into an array of ints with eight entries per int.
2212     int* ptr;
2213     int len = (total_args_passed + (_basic_types_per_int-1)) / _basic_types_per_int;
2214     if (len <= _compact_int_count) {
2215       assert(_compact_int_count == 3, "else change next line");
2216       _value._compact[0] = _value._compact[1] = _value._compact[2] = 0;
2217       // Storing the signature encoded as signed chars hits about 98%
2218       // of the time.
2219       _length = -len;
2220       ptr = _value._compact;
2221     } else {
2222       _length = len;
2223       _value._fingerprint = NEW_C_HEAP_ARRAY(int, _length, mtCode);
2224       ptr = _value._fingerprint;
2225     }
2226 
2227     // Now pack the BasicTypes with 8 per int
2228     int sig_index = 0;
2229     for (int index = 0; index < len; index++) {
2230       int value = 0;
2231       for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2232         int bt = adapter_encoding(sig_bt[sig_index++]);
2233         assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2234         value = (value << _basic_type_bits) | bt;
2235       }
2236       ptr[index] = value;
2237     }
2238   }
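      // Worked example (illustrative): an instance method taking (Object, long, int)
      // arrives with total_args_passed == 5 and
      //   sig_bt == { T_OBJECT /*receiver*/, T_OBJECT, T_LONG, T_VOID, T_INT }.
      // On LP64, adapter_encoding() yields { 11, 11, 11, 14, 10 }, so len == 1 and
      // the single compact word is built as
      //   value == ((((11 << 4 | 11) << 4 | 11) << 4 | 14) << 4 | 10) == 0xBBBEA,
      // giving _length == -1 with _value._compact == { 0xBBBEA, 0, 0 }.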
2239 
2240   ~AdapterFingerPrint() {
2241     if (_length > 0) {
2242       FREE_C_HEAP_ARRAY(int, _value._fingerprint);
2243     }
2244   }
2245 
2246   int value(int index) {
2247     if (_length < 0) {
2248       return _value._compact[index];
2249     }
2250     return _value._fingerprint[index];
2251   }
2252   int length() {
2253     if (_length < 0) return -_length;
2254     return _length;
2255   }
2256 
2257   bool is_compact() {

2282   const char* as_basic_args_string() {
2283     stringStream st;
2284     bool long_prev = false;
2285     for (int i = 0; i < length(); i++) {
2286       unsigned val = (unsigned)value(i);
2287       // args are packed so that first/lower arguments are in the highest
2288       // bits of each int value, so iterate from highest to the lowest
2289       for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2290         unsigned v = (val >> j) & _basic_type_mask;
2291         if (v == 0) {
2292           assert(i == length() - 1, "Only expect zeroes in the last word");
2293           continue;
2294         }
2295         if (long_prev) {
2296           long_prev = false;
2297           if (v == T_VOID) {
2298             st.print("J");
2299           } else {
2300             st.print("L");
2301           }
2302         }
2303         switch (v) {
2304           case T_INT:    st.print("I");    break;
2305           case T_LONG:   long_prev = true; break;
2306           case T_FLOAT:  st.print("F");    break;
2307           case T_DOUBLE: st.print("D");    break;
2308           case T_VOID:   break;
2309           default: ShouldNotReachHere();
2310         }
2311       }
2312     }
2313     if (long_prev) {
2314       st.print("L");
2315     }
2316     return st.as_string();
2317   }
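      // Continuing the example above (illustrative): decoding 0xBBBEA scans nibbles
      // from the high end (0, 0, 0, B, B, B, E, A), skips the leading zero padding,
      // and defers every T_LONG code until the following nibble reveals whether it
      // was a managed pointer (prints "L") or a true long (followed by T_VOID,
      // prints "J"), producing "LLJI" for (Object receiver, Object, long, int).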
2318 #endif // !PRODUCT
2319 
2320   bool equals(AdapterFingerPrint* other) {
2321     if (other->_length != _length) {
2322       return false;
2323     }
2324     if (_length < 0) {
2325       assert(_compact_int_count == 3, "else change next line");
2326       return _value._compact[0] == other->_value._compact[0] &&
2327              _value._compact[1] == other->_value._compact[1] &&
2328              _value._compact[2] == other->_value._compact[2];
2329     } else {

2337   }
2338 
2339   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2340     NOT_PRODUCT(_equals++);
2341     return fp1->equals(fp2);
2342   }
2343 
2344   static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2345     return fp->compute_hash();
2346   }
2347 };
2348 
2349 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2350 using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2351                   AnyObj::C_HEAP, mtCode,
2352                   AdapterFingerPrint::compute_hash,
2353                   AdapterFingerPrint::equals>;
2354 static AdapterHandlerTable* _adapter_handler_table;
2355 
2356 // Find an entry with the same fingerprint if it exists
2357 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2358   NOT_PRODUCT(_lookups++);
2359   assert_lock_strong(AdapterHandlerLibrary_lock);
2360   AdapterFingerPrint fp(total_args_passed, sig_bt);
2361   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2362   if (entry != nullptr) {
2363 #ifndef PRODUCT
2364     if (fp.is_compact()) _compact++;
2365     _hits++;
2366 #endif
2367     return *entry;
2368   }
2369   return nullptr;
2370 }
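// Illustrative example (hypothetical methods): "static int max(int, int)" and
// "static boolean test(short, char)" both fingerprint to the same packed
// (T_INT, T_INT) encoding, so the second lookup hits the entry created for the
// first and the two methods share one adapter.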
2371 
2372 #ifndef PRODUCT
2373 static void print_table_statistics() {
2374   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2375     return sizeof(*key) + sizeof(*a);
2376   };
2377   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2378   ts.print(tty, "AdapterHandlerTable");
2379   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2380                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2381   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2382                 _lookups, _equals, _hits, _compact);
2383 }
2384 #endif
2385 
2386 // ---------------------------------------------------------------------------
2387 // Implementation of AdapterHandlerLibrary
2388 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2389 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2390 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2391 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2392 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2393 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2394 const int AdapterHandlerLibrary_size = 16*K;
2395 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2396 
2397 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2398   return _buffer;
2399 }
2400 
2401 static void post_adapter_creation(const AdapterBlob* new_adapter,
2402                                   const AdapterHandlerEntry* entry) {
2403   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2404     char blob_id[256];
2405     jio_snprintf(blob_id,
2406                  sizeof(blob_id),
2407                  "%s(%s)",
2408                  new_adapter->name(),
2409                  entry->fingerprint()->as_string());
2410     if (Forte::is_enabled()) {
2411       Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2412     }
2413 
2414     if (JvmtiExport::should_post_dynamic_code_generated()) {

2417   }
2418 }
2419 
2420 void AdapterHandlerLibrary::initialize() {
2421   ResourceMark rm;
2422   AdapterBlob* no_arg_blob = nullptr;
2423   AdapterBlob* int_arg_blob = nullptr;
2424   AdapterBlob* obj_arg_blob = nullptr;
2425   AdapterBlob* obj_int_arg_blob = nullptr;
2426   AdapterBlob* obj_obj_arg_blob = nullptr;
2427   {
2428     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2429     MutexLocker mu(AdapterHandlerLibrary_lock);
2430 
2431     // Create a special handler for abstract methods.  Abstract methods
2432     // are never compiled so an i2c entry is somewhat meaningless, but
2433     // throw AbstractMethodError just in case.
2434     // Pass wrong_method_abstract for the c2i transitions to return
2435     // AbstractMethodError for invalid invocations.
2436     address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2437     _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, nullptr),
2438                                                                 SharedRuntime::throw_AbstractMethodError_entry(),
2439                                                                 wrong_method_abstract, wrong_method_abstract);
2440 
2441     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2442     _no_arg_handler = create_adapter(no_arg_blob, 0, nullptr, true);
2443 
2444     BasicType obj_args[] = { T_OBJECT };
2445     _obj_arg_handler = create_adapter(obj_arg_blob, 1, obj_args, true);
2446 
2447     BasicType int_args[] = { T_INT };
2448     _int_arg_handler = create_adapter(int_arg_blob, 1, int_args, true);
2449 
2450     BasicType obj_int_args[] = { T_OBJECT, T_INT };
2451     _obj_int_arg_handler = create_adapter(obj_int_arg_blob, 2, obj_int_args, true);
2452 
2453     BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2454     _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, 2, obj_obj_args, true);
2455 
2456     assert(no_arg_blob != nullptr &&
2457           obj_arg_blob != nullptr &&
2458           int_arg_blob != nullptr &&
2459           obj_int_arg_blob != nullptr &&
2460           obj_obj_arg_blob != nullptr, "Initial adapters must be properly created");
2461   }
2462 
2463   // Outside of the lock
2464   post_adapter_creation(no_arg_blob, _no_arg_handler);
2465   post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2466   post_adapter_creation(int_arg_blob, _int_arg_handler);
2467   post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2468   post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2469 }
2470 
2471 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
2472                                                       address i2c_entry,
2473                                                       address c2i_entry,
 2474                                                       address c2i_unverified_entry,
2475                                                       address c2i_no_clinit_check_entry) {
2476   // Insert an entry into the table
2477   return new AdapterHandlerEntry(fingerprint, i2c_entry, c2i_entry, c2i_unverified_entry,
2478                                  c2i_no_clinit_check_entry);
2479 }
2480 
2481 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2482   if (method->is_abstract()) {
2483     return _abstract_method_handler;
2484   }
2485   int total_args_passed = method->size_of_parameters(); // All args on stack
2486   if (total_args_passed == 0) {
2487     return _no_arg_handler;
2488   } else if (total_args_passed == 1) {
2489     if (!method->is_static()) {
2490       return _obj_arg_handler;
2491     }
2492     switch (method->signature()->char_at(1)) {
2493       case JVM_SIGNATURE_CLASS:
2494       case JVM_SIGNATURE_ARRAY:
2495         return _obj_arg_handler;
2496       case JVM_SIGNATURE_INT:
2497       case JVM_SIGNATURE_BOOLEAN:
2498       case JVM_SIGNATURE_CHAR:
2499       case JVM_SIGNATURE_BYTE:
2500       case JVM_SIGNATURE_SHORT:
2501         return _int_arg_handler;
2502     }
2503   } else if (total_args_passed == 2 &&
2504              !method->is_static()) {
2505     switch (method->signature()->char_at(1)) {
2506       case JVM_SIGNATURE_CLASS:
2507       case JVM_SIGNATURE_ARRAY:
2508         return _obj_obj_arg_handler;
2509       case JVM_SIGNATURE_INT:
2510       case JVM_SIGNATURE_BOOLEAN:
2511       case JVM_SIGNATURE_CHAR:
2512       case JVM_SIGNATURE_BYTE:
2513       case JVM_SIGNATURE_SHORT:
2514         return _obj_int_arg_handler;
2515     }
2516   }
2517   return nullptr;
2518 }
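// Illustrative examples (hypothetical methods): an instance "int size()" passes
// only its receiver (total_args_passed == 1) and gets _obj_arg_handler; a
// static "int abs(int)" gets _int_arg_handler; an instance "void add(int)"
// (receiver + int) gets _obj_int_arg_handler. Signatures with float, double,
// or long arguments fall through to the nullptr return and take the general
// lookup path instead.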
2519 
2520 class AdapterSignatureIterator : public SignatureIterator {
2521  private:
2522   BasicType stack_sig_bt[16];
2523   BasicType* sig_bt;
2524   int index;
2525 
2526  public:
2527   AdapterSignatureIterator(Symbol* signature,
2528                            fingerprint_t fingerprint,
2529                            bool is_static,
2530                            int total_args_passed) :
2531     SignatureIterator(signature, fingerprint),
2532     index(0)
2533   {
2534     sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
2535     if (!is_static) { // Pass in receiver first
2536       sig_bt[index++] = T_OBJECT;
2537     }
2538     do_parameters_on(this);
2539   }
2540 
2541   BasicType* basic_types() {
2542     return sig_bt;
 2543   }
2544 
2545 #ifdef ASSERT
2546   int slots() {
2547     return index;
 2548   }
2549 #endif
2550 
2551  private:
2552 
2553   friend class SignatureIterator;  // so do_parameters_on can call do_type
2554   void do_type(BasicType type) {
2555     sig_bt[index++] = type;
2556     if (type == T_LONG || type == T_DOUBLE) {
2557       sig_bt[index++] = T_VOID; // Longs & doubles take 2 Java slots
2558     }
2559   }
2560 };
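// Illustrative example (hypothetical signature): iterating "(JLjava/lang/String;)V"
// for an instance method produces basic_types() == { T_OBJECT /*receiver*/,
// T_LONG, T_VOID, T_OBJECT } and slots() == 4, matching the two interpreter
// stack slots that the long occupies.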
2561 
2562 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
2563   // Use customized signature handler.  Need to lock around updates to
2564   // the _adapter_handler_table (it is not safe for concurrent readers
2565   // and a single writer: this could be fixed if it becomes a
2566   // problem).
2567 
2568   // Fast-path for trivial adapters
2569   AdapterHandlerEntry* entry = get_simple_adapter(method);
2570   if (entry != nullptr) {
2571     return entry;
2572   }
2573 
2574   ResourceMark rm;
2575   AdapterBlob* new_adapter = nullptr;
2576 
2577   // Fill in the signature array, for the calling-convention call.
2578   int total_args_passed = method->size_of_parameters(); // All args on stack
2579 
2580   AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2581                               method->is_static(), total_args_passed);
2582   assert(si.slots() == total_args_passed, "");
2583   BasicType* sig_bt = si.basic_types();
2584   {
2585     MutexLocker mu(AdapterHandlerLibrary_lock);
2586 
2587     // Lookup method signature's fingerprint
2588     entry = lookup(total_args_passed, sig_bt);
2589 
2590     if (entry != nullptr) {
2591 #ifdef ASSERT
2592       if (VerifyAdapterSharing) {
2593         AdapterBlob* comparison_blob = nullptr;
2594         AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, total_args_passed, sig_bt, false);
2595         assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
2596         assert(comparison_entry->compare_code(entry), "code must match");
2597         // Release the one just created and return the original
2598         delete comparison_entry;
2599       }
2600 #endif
2601       return entry;
2602     }
2603 
2604     entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2605   }
2606 
2607   // Outside of the lock
2608   if (new_adapter != nullptr) {
2609     post_adapter_creation(new_adapter, entry);
2610   }
2611   return entry;
2612 }
2613 
2614 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2615                                                            int total_args_passed,
2616                                                            BasicType* sig_bt,
2617                                                            bool allocate_code_blob) {
2618   if (log_is_enabled(Info, perf, class, link)) {
2619     ClassLoader::perf_method_adapters_count()->inc();
2620   }
2621 
2622   // StubRoutines::_final_stubs_code is initialized after this function can be called. As a result,
2623   // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated prior
2624   // to all of StubRoutines::_final_stubs_code being set. The checks refer to runtime range checks generated
2625   // in an I2C stub that ensure that an I2C stub is called from an interpreter frame or stubs.
2626   bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2627 
2628   VMRegPair stack_regs[16];
2629   VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2630 
2631   // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2632   int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2633   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2634   CodeBuffer buffer(buf);
2635   short buffer_locs[20];
2636   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2637                                           sizeof(buffer_locs)/sizeof(relocInfo));
2638 
2639   // Make a C heap allocated version of the fingerprint to store in the adapter
2640   AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);
2641   MacroAssembler _masm(&buffer);
2642   AdapterHandlerEntry* entry = SharedRuntime::generate_i2c2i_adapters(&_masm,
2643                                                 total_args_passed,
2644                                                 comp_args_on_stack,
2645                                                 sig_bt,
2646                                                 regs,
2647                                                 fingerprint);
2648 
2649 #ifdef ASSERT
2650   if (VerifyAdapterSharing) {
2651     entry->save_code(buf->code_begin(), buffer.insts_size());
2652     if (!allocate_code_blob) {
2653       return entry;
2654     }
2655   }
2656 #endif
2657 
2658   new_adapter = AdapterBlob::create(&buffer);
2659   NOT_PRODUCT(int insts_size = buffer.insts_size());
2660   if (new_adapter == nullptr) {
2661     // CodeCache is full, disable compilation
2662     // Ought to log this, but the compile log is only per compile thread
2663     // and we're some nondescript Java thread.
2664     return nullptr;
2665   }
2666   entry->relocate(new_adapter->content_begin());
2667 #ifndef PRODUCT
2668   // debugging support
2669   if (PrintAdapterHandlers || PrintStubCode) {
2670     ttyLocker ttyl;
2671     entry->print_adapter_on(tty);
2672     tty->print_cr("i2c argument handler #%d for: %s %s (%d bytes generated)",
2673                   _adapter_handler_table->number_of_entries(), fingerprint->as_basic_args_string(),
2674                   fingerprint->as_string(), insts_size);
2675     tty->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(entry->get_c2i_entry()));
2676     if (Verbose || PrintStubCode) {
2677       address first_pc = entry->base_address();
2678       if (first_pc != nullptr) {

2680                              NOT_PRODUCT(COMMA &new_adapter->asm_remarks()));
2681         tty->cr();
2682       }
2683     }
2684   }
2685 #endif
2686 
2687   // Add the entry only if the entry contains all required checks (see sharedRuntime_xxx.cpp)
2688   // The checks are inserted only if -XX:+VerifyAdapterCalls is specified.
2689   if (contains_all_checks || !VerifyAdapterCalls) {
2690     assert_lock_strong(AdapterHandlerLibrary_lock);
2691     _adapter_handler_table->put(fingerprint, entry);
2692   }
2693   return entry;
2694 }
2695 
2696 address AdapterHandlerEntry::base_address() {
2697   address base = _i2c_entry;
2698   if (base == nullptr)  base = _c2i_entry;
2699   assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
 2700   assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
2701   assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
2702   return base;
2703 }
2704 
2705 void AdapterHandlerEntry::relocate(address new_base) {
2706   address old_base = base_address();
2707   assert(old_base != nullptr, "");
2708   ptrdiff_t delta = new_base - old_base;
2709   if (_i2c_entry != nullptr)
2710     _i2c_entry += delta;
2711   if (_c2i_entry != nullptr)
2712     _c2i_entry += delta;
2713   if (_c2i_unverified_entry != nullptr)
2714     _c2i_unverified_entry += delta;
2715   if (_c2i_no_clinit_check_entry != nullptr)
2716     _c2i_no_clinit_check_entry += delta;
2717   assert(base_address() == new_base, "");
2718 }
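// Illustrative example (hypothetical addresses): if the temporary buffer placed
// _i2c_entry at 0x1000 and _c2i_entry at 0x1040, relocating to a blob whose
// content_begin() is 0x7000 gives delta == 0x6000, so the entries become
// 0x7000 and 0x7040 while their relative layout is preserved.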
2719 
2720 
2721 AdapterHandlerEntry::~AdapterHandlerEntry() {
2722   delete _fingerprint;
2723 #ifdef ASSERT
2724   FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
2725 #endif
2726 }
2727 
2728 
2729 #ifdef ASSERT
2730 // Capture the code before relocation so that it can be compared
2731 // against other versions.  If the code is captured after relocation
2732 // then relative instructions won't be equivalent.
2733 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
2734   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
2735   _saved_code_length = length;
2736   memcpy(_saved_code, buffer, length);
2737 }
2738 
2739 
2740 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
2741   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");
2742 

2789 
2790       struct { double data[20]; } locs_buf;
2791       struct { double data[20]; } stubs_locs_buf;
2792       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
2793 #if defined(AARCH64) || defined(PPC64)
2794       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
2795       // in the constant pool to ensure ordering between the barrier and oops
2796       // accesses. For native_wrappers we need a constant.
2797       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
2798       // static java call that is resolved in the runtime.
2799       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
2800         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
2801       }
2802 #endif
2803       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
2804       MacroAssembler _masm(&buffer);
2805 
2806       // Fill in the signature array, for the calling-convention call.
2807       const int total_args_passed = method->size_of_parameters();
2808 
 2809       VMRegPair stack_regs[16];
2810       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2811 
2812       AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2813                               method->is_static(), total_args_passed);
2814       BasicType* sig_bt = si.basic_types();
2815       assert(si.slots() == total_args_passed, "");
2816       BasicType ret_type = si.return_type();
2817 
2818       // Now get the compiled-Java arguments layout.
2819       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2820 
2821       // Generate the compiled-to-native wrapper code
2822       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
2823 
2824       if (nm != nullptr) {
2825         {
2826           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
2827           if (nm->make_in_use()) {
2828             method->set_code(method, nm);
2829           }
2830         }
2831 
2832         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
2833         if (directive->PrintAssemblyOption) {
2834           nm->print_code();
2835         }
2836         DirectivesStack::release(directive);

3043       st->print("Adapter for signature: ");
3044       a->print_adapter_on(st);
3045       return true;
3046     } else {
3047       return false; // keep looking
3048     }
3049   };
3050   assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3051   _adapter_handler_table->iterate(findblob);
3052   assert(found, "Should have found handler");
3053 }
3054 
3055 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3056   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3057   if (get_i2c_entry() != nullptr) {
3058     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3059   }
3060   if (get_c2i_entry() != nullptr) {
3061     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3062   }






3063   if (get_c2i_unverified_entry() != nullptr) {
3064     st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));



3065   }
3066   if (get_c2i_no_clinit_check_entry() != nullptr) {
3067     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3068   }
3069   st->cr();
3070 }
3071 
3072 #ifndef PRODUCT
3073 
3074 void AdapterHandlerLibrary::print_statistics() {
3075   print_table_statistics();
3076 }
3077 
3078 #endif /* PRODUCT */
3079 
3080 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3081   assert(current == JavaThread::current(), "pre-condition");
3082   StackOverflow* overflow_state = current->stack_overflow_state();
3083   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3084   overflow_state->set_reserved_stack_activation(current->stack_base());

3133         event.set_method(method);
3134         event.commit();
3135       }
3136     }
3137   }
3138   return activation;
3139 }
3140 
3141 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3142   // After any safepoint, just before going back to compiled code,
3143   // we inform the GC that we will be doing initializing writes to
3144   // this object in the future without emitting card-marks, so
3145   // GC may take any compensating steps.
3146 
3147   oop new_obj = current->vm_result();
3148   if (new_obj == nullptr) return;
3149 
3150   BarrierSet *bs = BarrierSet::barrier_set();
3151   bs->on_slowpath_allocation_exit(current, new_obj);
3152 }

  27 #include "classfile/javaClasses.inline.hpp"
  28 #include "classfile/stringTable.hpp"
  29 #include "classfile/vmClasses.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "code/codeCache.hpp"
  32 #include "code/compiledIC.hpp"
  33 #include "code/nmethod.inline.hpp"
  34 #include "code/scopeDesc.hpp"
  35 #include "code/vtableStubs.hpp"
  36 #include "compiler/abstractCompiler.hpp"
  37 #include "compiler/compileBroker.hpp"
  38 #include "compiler/disassembler.hpp"
  39 #include "gc/shared/barrierSet.hpp"
  40 #include "gc/shared/collectedHeap.hpp"
  41 #include "gc/shared/gcLocker.inline.hpp"
  42 #include "interpreter/interpreter.hpp"
  43 #include "interpreter/interpreterRuntime.hpp"
  44 #include "jvm.h"
  45 #include "jfr/jfrEvents.hpp"
  46 #include "logging/log.hpp"
  47 #include "memory/oopFactory.hpp"
  48 #include "memory/resourceArea.hpp"
  49 #include "memory/universe.hpp"
  50 #include "oops/access.hpp"
  51 #include "oops/fieldStreams.inline.hpp"
  52 #include "metaprogramming/primitiveConversions.hpp"
  53 #include "oops/klass.hpp"
  54 #include "oops/method.inline.hpp"
  55 #include "oops/objArrayKlass.hpp"
  56 #include "oops/objArrayOop.inline.hpp"
  57 #include "oops/oop.inline.hpp"
  58 #include "oops/inlineKlass.inline.hpp"
  59 #include "prims/forte.hpp"
  60 #include "prims/jvmtiExport.hpp"
  61 #include "prims/jvmtiThreadState.hpp"
  62 #include "prims/methodHandles.hpp"
  63 #include "prims/nativeLookup.hpp"
  64 #include "runtime/arguments.hpp"
  65 #include "runtime/atomic.hpp"
  66 #include "runtime/basicLock.inline.hpp"
  67 #include "runtime/frame.inline.hpp"
  68 #include "runtime/handles.inline.hpp"
  69 #include "runtime/init.hpp"
  70 #include "runtime/interfaceSupport.inline.hpp"
  71 #include "runtime/java.hpp"
  72 #include "runtime/javaCalls.hpp"
  73 #include "runtime/jniHandles.inline.hpp"
  74 #include "runtime/perfData.hpp"
  75 #include "runtime/sharedRuntime.hpp"
  76 #include "runtime/stackWatermarkSet.hpp"
  77 #include "runtime/stubRoutines.hpp"
  78 #include "runtime/synchronizer.inline.hpp"

1169 // for a call currently in progress, i.e., arguments have been pushed on the stack
1170 // but the callee has not been invoked yet.  The caller frame must be compiled.
1171 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1172                                               CallInfo& callinfo, TRAPS) {
1173   Handle receiver;
1174   Handle nullHandle;  // create a handy null handle for exception returns
1175   JavaThread* current = THREAD;
1176 
1177   assert(!vfst.at_end(), "Java frame must exist");
1178 
1179   // Find caller and bci from vframe
1180   methodHandle caller(current, vfst.method());
1181   int          bci   = vfst.bci();
1182 
1183   if (caller->is_continuation_enter_intrinsic()) {
1184     bc = Bytecodes::_invokestatic;
1185     LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1186     return receiver;
1187   }
1188 
1189   // The substitutability test implementation piggybacks on static call resolution
1190   Bytecodes::Code code = caller->java_code_at(bci);
1191   if (code == Bytecodes::_if_acmpeq || code == Bytecodes::_if_acmpne) {
1192     bc = Bytecodes::_invokestatic;
1193     methodHandle attached_method(THREAD, extract_attached_method(vfst));
1194     assert(attached_method.not_null(), "must have attached method");
1195     vmClasses::ValueObjectMethods_klass()->initialize(CHECK_NH);
1196     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, false, CHECK_NH);
1197 #ifdef ASSERT
1198     Method* is_subst = vmClasses::ValueObjectMethods_klass()->find_method(vmSymbols::isSubstitutable_name(), vmSymbols::object_object_boolean_signature());
1199     assert(callinfo.selected_method() == is_subst, "must be isSubstitutable method");
1200 #endif
1201     return receiver;
1202   }
1203 
1204   Bytecode_invoke bytecode(caller, bci);
1205   int bytecode_index = bytecode.index();
1206   bc = bytecode.invoke_code();
1207 
1208   methodHandle attached_method(current, extract_attached_method(vfst));
1209   if (attached_method.not_null()) {
1210     Method* callee = bytecode.static_target(CHECK_NH);
1211     vmIntrinsics::ID id = callee->intrinsic_id();
1212     // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call,
1213     // it attaches statically resolved method to the call site.
1214     if (MethodHandles::is_signature_polymorphic(id) &&
1215         MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1216       bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1217 
1218       // Adjust invocation mode according to the attached method.
1219       switch (bc) {
1220         case Bytecodes::_invokevirtual:
1221           if (attached_method->method_holder()->is_interface()) {
1222             bc = Bytecodes::_invokeinterface;
1223           }
1224           break;
1225         case Bytecodes::_invokeinterface:
1226           if (!attached_method->method_holder()->is_interface()) {
1227             bc = Bytecodes::_invokevirtual;
1228           }
1229           break;
1230         case Bytecodes::_invokehandle:
1231           if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1232             bc = attached_method->is_static() ? Bytecodes::_invokestatic
1233                                               : Bytecodes::_invokevirtual;
1234           }
1235           break;
1236         default:
1237           break;
1238       }
1239     } else {
1240       assert(attached_method->has_scalarized_args(), "invalid use of attached method");
1241       if (!attached_method->method_holder()->is_inline_klass()) {
1242         // Ignore the attached method in this case so as not to confuse the code below
1243         attached_method = methodHandle(current, nullptr);
1244       }
1245     }
1246   }
1247 
1248   assert(bc != Bytecodes::_illegal, "not initialized");
1249 
1250   bool has_receiver = bc != Bytecodes::_invokestatic &&
1251                       bc != Bytecodes::_invokedynamic &&
1252                       bc != Bytecodes::_invokehandle;
1253   bool check_null_and_abstract = true;
1254 
1255   // Find receiver for non-static call
1256   if (has_receiver) {
1257     // This register map must be updated since we need to find the receiver for
1258     // compiled frames. The receiver might be in a register.
1259     RegisterMap reg_map2(current,
1260                          RegisterMap::UpdateMap::include,
1261                          RegisterMap::ProcessFrames::include,
1262                          RegisterMap::WalkContinuation::skip);
1263     frame stubFrame   = current->last_frame();
1264     // Caller-frame is a compiled frame
1265     frame callerFrame = stubFrame.sender(&reg_map2);
1266 
1267     Method* callee = attached_method();
1268     if (callee == nullptr) {
1269       callee = bytecode.static_target(CHECK_NH);
1270       if (callee == nullptr) {
1271         THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1272       }
1273     }
1274     bool caller_is_c1 = callerFrame.is_compiled_frame() && callerFrame.cb()->as_nmethod()->is_compiled_by_c1();
1275     if (!caller_is_c1 && callee->is_scalarized_arg(0)) {
1276       // If the receiver is an inline type that is passed as fields, no oop is available.
1277       // Resolve the call without receiver null checking.
1278       assert(!callee->mismatch(), "calls with inline type receivers should never mismatch");
1279       assert(attached_method.not_null() && !attached_method->is_abstract(), "must have non-abstract attached method");
1280       if (bc == Bytecodes::_invokeinterface) {
1281         bc = Bytecodes::_invokevirtual; // C2 optimistically replaces interface calls by virtual calls
1282       }
1283       check_null_and_abstract = false;
1284     } else {
1285       // Retrieve from a compiled argument list
1286       receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1287       assert(oopDesc::is_oop_or_null(receiver()), "");
1288       if (receiver.is_null()) {
1289         THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1290       }
1291     }
1292   }
1293 
1294   // Resolve method
1295   if (attached_method.not_null()) {
1296     // Parameterized by attached method.
1297     LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, check_null_and_abstract, CHECK_NH);
1298   } else {
1299     // Parameterized by bytecode.
1300     constantPoolHandle constants(current, caller->constants());
1301     LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1302   }
1303 
1304 #ifdef ASSERT
1305   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1306   if (has_receiver && check_null_and_abstract) {
1307     assert(receiver.not_null(), "should have thrown exception");
1308     Klass* receiver_klass = receiver->klass();
1309     Klass* rk = nullptr;
1310     if (attached_method.not_null()) {
1311       // In case there's resolved method attached, use its holder during the check.
1312       rk = attached_method->method_holder();
1313     } else {
1314       // Klass is already loaded.
1315       constantPoolHandle constants(current, caller->constants());
1316       rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1317     }
1318     Klass* static_receiver_klass = rk;
1319     assert(receiver_klass->is_subtype_of(static_receiver_klass),
1320            "actual receiver must be subclass of static receiver klass");
1321     if (receiver_klass->is_instance_klass()) {
1322       if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1323         tty->print_cr("ERROR: Klass not yet initialized!!");
1324         receiver_klass->print();
1325       }
1326       assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1327     }
1328   }
1329 #endif
1330 
1331   return receiver;
1332 }
1333 
1334 methodHandle SharedRuntime::find_callee_method(bool is_optimized, bool& caller_is_c1, TRAPS) {
1335   JavaThread* current = THREAD;
1336   ResourceMark rm(current);
1337   // We first need to check if any Java activations (compiled or interpreted)
1338   // exist on the stack since the last JavaCall.  If not, we need
1339   // to get the target method from the JavaCall wrapper.
1340   vframeStream vfst(current, true);  // Do not skip any javaCalls
1341   methodHandle callee_method;
1342   if (vfst.at_end()) {
1343     // No Java frames were found on stack since we did the JavaCall.
1344     // Hence the stack can only contain an entry_frame.  We need to
1345     // find the target method from the stub frame.
1346     RegisterMap reg_map(current,
1347                         RegisterMap::UpdateMap::skip,
1348                         RegisterMap::ProcessFrames::include,
1349                         RegisterMap::WalkContinuation::skip);
1350     frame fr = current->last_frame();
1351     assert(fr.is_runtime_frame(), "must be a runtimeStub");
1352     fr = fr.sender(&reg_map);
1353     assert(fr.is_entry_frame(), "must be");
1354     // fr is now pointing to the entry frame.
1355     callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1356   } else {
1357     Bytecodes::Code bc;
1358     CallInfo callinfo;
1359     find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1360     // Calls via mismatching methods are always non-scalarized
1361     if (callinfo.resolved_method()->mismatch() && !is_optimized) {
1362       caller_is_c1 = true;
1363     }
1364     callee_method = methodHandle(current, callinfo.selected_method());
1365   }
1366   assert(callee_method()->is_method(), "must be");
1367   return callee_method;
1368 }
1369 
1370 // Resolves a call.
1371 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, bool& caller_is_c1, TRAPS) {
1372   JavaThread* current = THREAD;
1373   ResourceMark rm(current);
1374   RegisterMap cbl_map(current,
1375                       RegisterMap::UpdateMap::skip,
1376                       RegisterMap::ProcessFrames::include,
1377                       RegisterMap::WalkContinuation::skip);
1378   frame caller_frame = current->last_frame().sender(&cbl_map);
1379 
1380   CodeBlob* caller_cb = caller_frame.cb();
1381   guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1382   nmethod* caller_nm = caller_cb->as_nmethod();
1383 
1384   // determine call info & receiver
1385   // note: a) receiver is null for static calls
1386   //       b) an exception is thrown if receiver is null for non-static calls
1387   CallInfo call_info;
1388   Bytecodes::Code invoke_code = Bytecodes::_illegal;
1389   Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1390 
1391   NoSafepointVerifier nsv;
1392 
1393   methodHandle callee_method(current, call_info.selected_method());
1394   // Calls via mismatching methods are always non-scalarized
1395   if (caller_nm->is_compiled_by_c1() || (call_info.resolved_method()->mismatch() && !is_optimized)) {
1396     caller_is_c1 = true;
1397   }
1398 
1399   assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1400          (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1401          (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1402          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1403          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1404 
1405   assert(!caller_nm->is_unloading(), "It should not be unloading");
1406 
1407 #ifndef PRODUCT
1408   // tracing/debugging/statistics
1409   uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1410                  (is_virtual) ? (&_resolve_virtual_ctr) :
1411                                 (&_resolve_static_ctr);
1412   Atomic::inc(addr);
1413 
1414   if (TraceCallFixup) {
1415     ResourceMark rm(current);
1416     tty->print("resolving %s%s (%s) call%s to",
1417                (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1418                Bytecodes::name(invoke_code), (caller_is_c1) ? " from C1" : "");
1419     callee_method->print_short_name(tty);
1420     tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1421                   p2i(caller_frame.pc()), p2i(callee_method->code()));
1422   }
1423 #endif
1424 
1425   if (invoke_code == Bytecodes::_invokestatic) {
1426     assert(callee_method->method_holder()->is_initialized() ||
1427            callee_method->method_holder()->is_reentrant_initialization(current),
1428            "invalid class initialization state for invoke_static");
1429     if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1430       // In order to keep the class initialization check, do not patch the call
1431       // site for a static call when the class is not fully initialized.
1432       // The proper check is enforced by call site re-resolution on every invocation.
1433       //
1434       // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1435       // an explicit class initialization check is put in the nmethod entry (VEP).
1436       assert(callee_method->method_holder()->is_linked(), "must be");
1437       return callee_method;
1438     }
1439   }
1440 
1441 
1442   // JSR 292 key invariant:
1443   // If the resolved method is a MethodHandle invoke target, the call
1444   // site must be a MethodHandle call site, because the lambda form might tail-call
1445   // leaving the stack in a state unknown to either caller or callee
1446 
1447   // Compute entry points. The computation of the entry points is independent of
1448   // patching the call.
1449 
1450   // Make sure the callee nmethod does not get deoptimized and removed before
1451   // we are done patching the code.
1452 
1453 
1454   CompiledICLocker ml(caller_nm);
1455   if (is_virtual && !is_optimized) {
1456     CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1457     inline_cache->update(&call_info, receiver->klass(), caller_is_c1);
1458   } else {
1459     // Callsite is a direct call - set it to the destination method
1460     CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1461     callsite->set(callee_method, caller_is_c1);
1462   }
1463 
1464   return callee_method;
1465 }
1466 
1467 // Inline caches exist only in compiled code
1468 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1469 #ifdef ASSERT
1470   RegisterMap reg_map(current,
1471                       RegisterMap::UpdateMap::skip,
1472                       RegisterMap::ProcessFrames::include,
1473                       RegisterMap::WalkContinuation::skip);
1474   frame stub_frame = current->last_frame();
1475   assert(stub_frame.is_runtime_frame(), "sanity check");
1476   frame caller_frame = stub_frame.sender(&reg_map);
1477   assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1478 #endif /* ASSERT */
1479 
1480   methodHandle callee_method;
1481   bool is_optimized = false;
1482   bool caller_is_c1 = false;
1483   JRT_BLOCK
1484     callee_method = SharedRuntime::handle_ic_miss_helper(is_optimized, caller_is_c1, CHECK_NULL);
1485     // Return Method* through TLS
1486     current->set_vm_result_2(callee_method());
1487   JRT_BLOCK_END
1488   // return compiled code entry point after potential safepoints
1489   return get_resolved_entry(current, callee_method, false, is_optimized, caller_is_c1);
1490 JRT_END
1491 
1492 
1493 // Handle call site that has been made non-entrant
1494 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1495   // 6243940 We might end up in here if the callee is deoptimized
1496   // as we race to call it.  We don't want to take a safepoint if
1497   // the caller was interpreted because the caller frame will look
1498   // interpreted to the stack walkers and the arguments are now
1499   // "compiled", so it is much better to make this transition
1500   // invisible to the stack walking code. The i2c path will
1501   // place the callee method in callee_target. It is stashed
1502   // there because if we tried to find the callee by normal means a
1503   // safepoint would be possible and we would have trouble gc'ing the compiled args.
1504   RegisterMap reg_map(current,
1505                       RegisterMap::UpdateMap::skip,
1506                       RegisterMap::ProcessFrames::include,
1507                       RegisterMap::WalkContinuation::skip);
1508   frame stub_frame = current->last_frame();
1509   assert(stub_frame.is_runtime_frame(), "sanity check");
1510   frame caller_frame = stub_frame.sender(&reg_map);
1511 
1512   if (caller_frame.is_interpreted_frame() ||
1513       caller_frame.is_entry_frame() ||
1514       caller_frame.is_upcall_stub_frame()) {
1515     Method* callee = current->callee_target();
1516     guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1517     current->set_vm_result_2(callee);
1518     current->set_callee_target(nullptr);
1519     if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1520       // Bypass class initialization checks in c2i when caller is in native.
1521       // JNI calls to static methods don't have class initialization checks.
1522       // Fast class initialization checks are present in c2i adapters and call into
1523       // SharedRuntime::handle_wrong_method() on the slow path.
1524       //
1525       // JVM upcalls may land here as well, but there's a proper check present in
1526       // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1527       // so bypassing it in c2i adapter is benign.
1528       return callee->get_c2i_no_clinit_check_entry();
1529     } else {
1530       if (caller_frame.is_interpreted_frame()) {
1531         return callee->get_c2i_inline_entry();
1532       } else {
1533         return callee->get_c2i_entry();
1534       }
1535     }
1536   }
1537 
1538   // Must be compiled to compiled path which is safe to stackwalk
1539   methodHandle callee_method;
1540   bool is_static_call = false;
1541   bool is_optimized = false;
1542   bool caller_is_c1 = false;
1543   JRT_BLOCK
1544     // Force resolving of caller (if we called from compiled frame)
1545     callee_method = SharedRuntime::reresolve_call_site(is_static_call, is_optimized, caller_is_c1, CHECK_NULL);
1546     current->set_vm_result_2(callee_method());
1547   JRT_BLOCK_END
1548   // return compiled code entry point after potential safepoints
1549   return get_resolved_entry(current, callee_method, is_static_call, is_optimized, caller_is_c1);
1550 JRT_END
1551 
1552 // Handle abstract method call
1553 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1554   // Verbose error message for AbstractMethodError.
1555   // Get the called method from the invoke bytecode.
1556   vframeStream vfst(current, true);
1557   assert(!vfst.at_end(), "Java frame must exist");
1558   methodHandle caller(current, vfst.method());
1559   Bytecode_invoke invoke(caller, vfst.bci());
1560   DEBUG_ONLY( invoke.verify(); )
1561 
1562   // Find the compiled caller frame.
1563   RegisterMap reg_map(current,
1564                       RegisterMap::UpdateMap::include,
1565                       RegisterMap::ProcessFrames::include,
1566                       RegisterMap::WalkContinuation::skip);
1567   frame stubFrame = current->last_frame();
1568   assert(stubFrame.is_runtime_frame(), "must be");
1569   frame callerFrame = stubFrame.sender(&reg_map);
1570   assert(callerFrame.is_compiled_frame(), "must be");
1571 
1572   // Install exception and return forward entry.
1573   address res = SharedRuntime::throw_AbstractMethodError_entry();
1574   JRT_BLOCK
1575     methodHandle callee(current, invoke.static_target(current));
1576     if (!callee.is_null()) {
1577       oop recv = callerFrame.retrieve_receiver(&reg_map);
1578       Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1579       res = StubRoutines::forward_exception_entry();
1580       LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1581     }
1582   JRT_BLOCK_END
1583   return res;
1584 JRT_END
1585 
1586 // return verified_code_entry if interp_only_mode is not set for the current thread;
1587 // otherwise return c2i entry.
1588 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method,
1589                                           bool is_static_call, bool is_optimized, bool caller_is_c1) {
1590   if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1591     // In interp_only_mode we need to go to the interpreted entry
1592     // The c2i won't patch in this mode -- see fixup_callers_callsite
1593     return callee_method->get_c2i_entry();
1594   }
1595 
1596   if (caller_is_c1) {
1597     assert(callee_method->verified_inline_code_entry() != nullptr, "Jump to zero!");
1598     return callee_method->verified_inline_code_entry();
1599   } else if (is_static_call || is_optimized) {
1600     assert(callee_method->verified_code_entry() != nullptr, "Jump to zero!");
1601     return callee_method->verified_code_entry();
1602   } else {
1603     assert(callee_method->verified_inline_ro_code_entry() != nullptr, "Jump to zero!");
1604     return callee_method->verified_inline_ro_code_entry();
1605   }
1606 }
1607 
1608 // resolve a static call and patch code
1609 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1610   methodHandle callee_method;
1611   bool caller_is_c1 = false;
1612   bool enter_special = false;
1613   JRT_BLOCK
1614     callee_method = SharedRuntime::resolve_helper(false, false, caller_is_c1, CHECK_NULL);
1615     current->set_vm_result_2(callee_method());
1616   JRT_BLOCK_END
1617   // return compiled code entry point after potential safepoints
1618   return get_resolved_entry(current, callee_method, true, false, caller_is_c1);
1619 JRT_END
1620 
1621 // resolve virtual call and update inline cache to monomorphic
1622 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1623   methodHandle callee_method;
1624   bool caller_is_c1 = false;
1625   JRT_BLOCK
1626     callee_method = SharedRuntime::resolve_helper(true, false, caller_is_c1, CHECK_NULL);
1627     current->set_vm_result_2(callee_method());
1628   JRT_BLOCK_END
1629   // return compiled code entry point after potential safepoints
1630   return get_resolved_entry(current, callee_method, false, false, caller_is_c1);
1631 JRT_END
1632 
1633 
1634 // Resolve a virtual call that can be statically bound (e.g., always
1635 // monomorphic, so it has no inline cache).  Patch code to resolved target.
1636 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1637   methodHandle callee_method;
1638   bool caller_is_c1 = false;
1639   JRT_BLOCK
1640     callee_method = SharedRuntime::resolve_helper(true, true, caller_is_c1, CHECK_NULL);
1641     current->set_vm_result_2(callee_method());
1642   JRT_BLOCK_END
1643   // return compiled code entry point after potential safepoints
1644   return get_resolved_entry(current, callee_method, false, true, caller_is_c1);
1645 JRT_END
1646 
1647 
1648 
1649 methodHandle SharedRuntime::handle_ic_miss_helper(bool& is_optimized, bool& caller_is_c1, TRAPS) {
1650   JavaThread* current = THREAD;
1651   ResourceMark rm(current);
1652   CallInfo call_info;
1653   Bytecodes::Code bc;
1654 
1655   // receiver is null for static calls. An exception is thrown for null
1656   // receivers for non-static calls
1657   Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1658 
1659   methodHandle callee_method(current, call_info.selected_method());
1660 
1661 #ifndef PRODUCT
1662   Atomic::inc(&_ic_miss_ctr);
1663 
1664   // Statistics & Tracing
1665   if (TraceCallFixup) {
1666     ResourceMark rm(current);
1667     tty->print("IC miss (%s) call%s to", Bytecodes::name(bc), (caller_is_c1) ? " from C1" : "");
1668     callee_method->print_short_name(tty);
1669     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1670   }
1671 
1672   if (ICMissHistogram) {
1673     MutexLocker m(VMStatistic_lock);
1674     RegisterMap reg_map(current,
1675                         RegisterMap::UpdateMap::skip,
1676                         RegisterMap::ProcessFrames::include,
1677                         RegisterMap::WalkContinuation::skip);
1678     frame f = current->last_frame().real_sender(&reg_map);// skip runtime stub
1679     // produce statistics under the lock
1680     trace_ic_miss(f.pc());
1681   }
1682 #endif
1683 
1684   // install an event collector so that when a vtable stub is created the
1685   // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1686   // event can't be posted when the stub is created as locks are held
1687   // - instead the event will be deferred until the event collector goes
1688   // out of scope.
1689   JvmtiDynamicCodeEventCollector event_collector;
1690 
1691   // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1692   RegisterMap reg_map(current,
1693                       RegisterMap::UpdateMap::skip,
1694                       RegisterMap::ProcessFrames::include,
1695                       RegisterMap::WalkContinuation::skip);
1696   frame caller_frame = current->last_frame().sender(&reg_map);
1697   CodeBlob* cb = caller_frame.cb();
1698   nmethod* caller_nm = cb->as_nmethod();
1699   // Calls via mismatching methods are always non-scalarized
1700   if (caller_nm->is_compiled_by_c1() || call_info.resolved_method()->mismatch()) {
1701     caller_is_c1 = true;
1702   }
1703 
1704   CompiledICLocker ml(caller_nm);
1705   CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1706   inline_cache->update(&call_info, receiver()->klass(), caller_is_c1);
1707 
1708   return callee_method;
1709 }
1710 
1711 //
1712 // Resets a call site in compiled code so it will get resolved again.
1713 // This routine handles virtual call sites, optimized virtual call
1714 // sites, and static call sites. Typically used to change a call site's
1715 // destination from compiled to interpreted.
1716 //
1717 methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_optimized, bool& caller_is_c1, TRAPS) {
1718   JavaThread* current = THREAD;
1719   ResourceMark rm(current);
1720   RegisterMap reg_map(current,
1721                       RegisterMap::UpdateMap::skip,
1722                       RegisterMap::ProcessFrames::include,
1723                       RegisterMap::WalkContinuation::skip);
1724   frame stub_frame = current->last_frame();
1725   assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1726   frame caller = stub_frame.sender(&reg_map);
1727   if (caller.is_compiled_frame()) {
1728     caller_is_c1 = caller.cb()->as_nmethod()->is_compiled_by_c1();
1729   }
1730 
1731   // Do nothing if the frame isn't a live compiled frame.
1732   // The nmethod could be deoptimized by the time we get here,
1733   // so no update to the caller is needed.
1734 
1735   if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1736       (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1737 
1738     address pc = caller.pc();
1739 
1740     nmethod* caller_nm = CodeCache::find_nmethod(pc);
1741     assert(caller_nm != nullptr, "did not find caller nmethod");
1742 
1743     // Default call_addr is the location of the "basic" call.
1744     // Determine the address of the call we are re-resolving. With
1745     // Inline Caches we will always find a recognizable call.
1746     // With Inline Caches disabled we may or may not find a
1747     // recognizable call. We will always find a call for static
1748     // calls and for optimized virtual calls. For vanilla virtual
1749     // calls it depends on the state of the UseInlineCaches switch.
1750     //
1751     // With Inline Caches disabled we can get here for a virtual call
1752     // for two reasons:
1753     //   1 - calling an abstract method. The vtable for abstract methods
1754     //       will run us through handle_wrong_method and we will eventually
1755     //       end up in the interpreter to throw the AbstractMethodError.
1756     //   2 - a racing deoptimization. We could be doing a vanilla vtable
1757     //       call and between the time we fetch the entry address and
1758     //       we jump to it the target gets deoptimized. Similar to 1,
1759     //       we will wind up in the interpreter (through a c2i with c2).
1760     //
1761     CompiledICLocker ml(caller_nm);
1762     address call_addr = caller_nm->call_instruction_address(pc);
1763 
1764     if (call_addr != nullptr) {
1765       // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1766       // bytes back in the instruction stream so we must also check for reloc info.
1767       RelocIterator iter(caller_nm, call_addr, call_addr+1);
1768       bool ret = iter.next(); // Get item
1769       if (ret) {
1770         is_static_call = false;
1771         is_optimized = false;
1772         switch (iter.type()) {
1773           case relocInfo::static_call_type:
1774             is_static_call = true;
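            // Fall through: static and opt-virtual call sites are both direct calls.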
1775           case relocInfo::opt_virtual_call_type: {
1776             is_optimized = (iter.type() == relocInfo::opt_virtual_call_type);
1777             CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1778             cdc->set_to_clean();
1779             break;
1780           }

1781           case relocInfo::virtual_call_type: {
1782             // compiled, dispatched call (which used to call an interpreted method)
1783             CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1784             inline_cache->set_to_clean();
1785             break;
1786           }
1787           default:
1788             break;
1789         }
1790       }
1791     }
1792   }
1793 
1794   methodHandle callee_method = find_callee_method(is_optimized, caller_is_c1, CHECK_(methodHandle()));

1795 
1796 #ifndef PRODUCT
1797   Atomic::inc(&_wrong_method_ctr);
1798 
1799   if (TraceCallFixup) {
1800     ResourceMark rm(current);
1801     tty->print("handle_wrong_method reresolving call%s to", (caller_is_c1) ? " from C1" : "");
1802     callee_method->print_short_name(tty);
1803     tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1804   }
1805 #endif
1806 
1807   return callee_method;
1808 }
1809 
1810 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1811   // The faulting unsafe accesses should be changed to throw the error
1812   // synchronously instead. Meanwhile the faulting instruction will be
1813   // skipped over (effectively turning it into a no-op) and an
1814   // asynchronous exception will be raised which the thread will
1815   // handle at a later point. If the instruction is a load it will
1816   // return garbage.
1817 
1818   // Request an async exception.
1819   thread->set_pending_unsafe_access_error();
1820 
1821   // Return address of next instruction to execute.

1987   msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
1988 
1989   char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
1990   if (message == nullptr) {
1991     // Shouldn't happen, but don't cause even more problems if it does
1992     message = const_cast<char*>(caster_klass->external_name());
1993   } else {
1994     jio_snprintf(message,
1995                  msglen,
1996                  "class %s cannot be cast to class %s (%s%s%s)",
1997                  caster_name,
1998                  target_name,
1999                  caster_klass_description,
2000                  klass_separator,
2001                  target_klass_description
2002                  );
2003   }
2004   return message;
2005 }
2006 
2007 char* SharedRuntime::generate_identity_exception_message(JavaThread* current, Klass* klass) {
2008   assert(klass->is_inline_klass(), "Must be a concrete value class");
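  // Produces a message like "Cannot synchronize on an instance of value class MyValue"
  // (class name shown for illustration only).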
2009   const char* desc = "Cannot synchronize on an instance of value class ";
2010   const char* className = klass->external_name();
2011   size_t msglen = strlen(desc) + strlen(className) + 1;
2012   char* message = NEW_RESOURCE_ARRAY(char, msglen);
2013   if (message == nullptr) {
2014     // Out of memory: can't create detailed error message
2015     message = const_cast<char*>(klass->external_name());
2016   } else {
2017     jio_snprintf(message, msglen, "%s%s", desc, className);
2018   }
2019   return message;
2020 }
2021 
2022 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
2023   (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
2024 JRT_END
2025 
2026 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2027   if (!SafepointSynchronize::is_synchronizing()) {
2028     // Only try quick_enter() if we're not trying to reach a safepoint
2029     // so that the calling thread reaches the safepoint more quickly.
2030     if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
2031       return;
2032     }
2033   }
2034   // NO_ASYNC required because an async exception on the state transition destructor
2035   // would leave you with the lock held and it would never be released.
2036   // The normal monitorenter NullPointerException is thrown without acquiring a lock
2037   // and the model is that an exception implies the method failed.
2038   JRT_BLOCK_NO_ASYNC
2039   Handle h_obj(THREAD, obj);
2040   ObjectSynchronizer::enter(h_obj, lock, current);
2041   assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");

2232   tty->print_cr("        %% in nested categories are relative to their category");
2233   tty->print_cr("        (and thus add up to more than 100%% with inlining)");
2234   tty->cr();
2235 
2236   MethodArityHistogram h;
2237 }
2238 #endif
2239 
2240 #ifndef PRODUCT
2241 static int _lookups; // number of calls to lookup
2242 static int _equals;  // number of buckets checked with matching hash
2243 static int _hits;    // number of successful lookups
2244 static int _compact; // number of equals calls with compact signature
2245 #endif
2246 
2247 // A simple wrapper class around the calling convention information
2248 // that allows sharing of adapters for the same calling convention.
2249 class AdapterFingerPrint : public CHeapObj<mtCode> {
2250  private:
2251   enum {
2252     _basic_type_bits = 5,
2253     _basic_type_mask = right_n_bits(_basic_type_bits),
2254     _basic_types_per_int = BitsPerInt / _basic_type_bits,
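                          // (= 6 with 32-bit ints and 5-bit entries)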
2255     _compact_int_count = 3
2256   };
2257   // TO DO:  Consider integrating this with a more global scheme for compressing signatures.
2258   // For now, 5 bits per component (plus T_VOID gaps after double/long) is not excessive.
2259 
2260   union {
2261     int  _compact[_compact_int_count];
2262     int* _fingerprint;
2263   } _value;
2264   int _length; // A negative length indicates the fingerprint is in the compact form;
2265                // otherwise _value._fingerprint is the array.
2266 
2267   // Remap BasicTypes that are handled equivalently by the adapters.
2268   // These are correct for the current system but someday it might be
2269   // necessary to make this mapping platform dependent.
2270   static BasicType adapter_encoding(BasicType in) {
2271     switch (in) {
2272       case T_BOOLEAN:
2273       case T_BYTE:
2274       case T_SHORT:
2275       case T_CHAR:
2276         // They are all promoted to T_INT in the calling convention
2277         return T_INT;
2278 
2279       case T_OBJECT:
2280       case T_ARRAY:
2281         // In other words, we assume that any register good enough for
2282         // an int or long is good enough for a managed pointer.
2283 #ifdef _LP64
2284         return T_LONG;
2285 #else
2286         return T_INT;
2287 #endif
2288 
2289       case T_INT:
2290       case T_LONG:
2291       case T_FLOAT:
2292       case T_DOUBLE:
2293       case T_VOID:
2294         return in;
2295 
2296       default:
2297         ShouldNotReachHere();
2298         return T_CONFLICT;
2299     }
2300   }
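   // For example, adapter_encoding(T_BOOLEAN) == adapter_encoding(T_INT) == T_INT,
   // and on LP64 adapter_encoding(T_OBJECT) == T_LONG, so signatures differing only
   // in such types map to the same fingerprint and can share one adapter.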
2301 
2302  public:
2303   AdapterFingerPrint(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2304     // The fingerprint is based on the BasicType signature encoded
2305     // into an array of ints with _basic_types_per_int (six) entries per int.
2306     int total_args_passed = (sig != nullptr) ? sig->length() : 0;
2307     int* ptr;
2308     int len = (total_args_passed + (_basic_types_per_int-1)) / _basic_types_per_int;
2309     if (len <= _compact_int_count) {
2310       assert(_compact_int_count == 3, "else change next line");
2311       _value._compact[0] = _value._compact[1] = _value._compact[2] = 0;
2312       // Storing the signature encoded as signed chars hits about 98%
2313       // of the time.
2314       _length = -len;
2315       ptr = _value._compact;
2316     } else {
2317       _length = len;
2318       _value._fingerprint = NEW_C_HEAP_ARRAY(int, _length, mtCode);
2319       ptr = _value._fingerprint;
2320     }
2321 
2322     // Now pack the BasicTypes with _basic_types_per_int (6) per int
2323     int sig_index = 0;
2324     BasicType prev_bt = T_ILLEGAL;
2325     int vt_count = 0;
2326     for (int index = 0; index < len; index++) {
2327       int value = 0;
2328       for (int byte = 0; byte < _basic_types_per_int; byte++) {
2329         BasicType bt = T_ILLEGAL;
2330         if (sig_index < total_args_passed) {
2331           bt = sig->at(sig_index++)._bt;
2332           if (bt == T_METADATA) {
2333             // Found start of inline type in signature
2334             assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
2335             if (sig_index == 1 && has_ro_adapter) {
2336               // With a ro_adapter, replace receiver inline type delimiter by T_VOID to prevent matching
2337               // with other adapters that have the same inline type as first argument and no receiver.
2338               bt = T_VOID;
2339             }
2340             vt_count++;
2341           } else if (bt == T_VOID && prev_bt != T_LONG && prev_bt != T_DOUBLE) {
2342             // Found end of inline type in signature
2343             assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
2344             vt_count--;
2345             assert(vt_count >= 0, "invalid vt_count");
2346           } else if (vt_count == 0) {
2347             // Widen fields that are not part of a scalarized inline type argument
2348             bt = adapter_encoding(bt);
2349           }
2350           prev_bt = bt;
2351         }
2352         int bt_val = (bt == T_ILLEGAL) ? 0 : bt;
2353         assert((bt_val & _basic_type_mask) == bt_val, "must fit in 5 bits");
2354         value = (value << _basic_type_bits) | bt_val;
2355       }
2356       ptr[index] = value;
2357     }
2358     assert(vt_count == 0, "invalid vt_count");
2359   }
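   // Illustrative example (LP64): an (Object, int) signature encodes as
   // T_LONG (11) then T_INT (10); packed six-per-int from the high bits, this gives
   // _value._compact[0] == (11 << 25) | (10 << 20) == 0x16A00000, with _length == -1.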
2360 
2361   ~AdapterFingerPrint() {
2362     if (_length > 0) {
2363       FREE_C_HEAP_ARRAY(int, _value._fingerprint);
2364     }
2365   }
2366 
2367   int value(int index) {
2368     if (_length < 0) {
2369       return _value._compact[index];
2370     }
2371     return _value._fingerprint[index];
2372   }
2373   int length() {
2374     if (_length < 0) return -_length;
2375     return _length;
2376   }
2377 
2378   bool is_compact() {

2403   const char* as_basic_args_string() {
2404     stringStream st;
2405     bool long_prev = false;
2406     for (int i = 0; i < length(); i++) {
2407       unsigned val = (unsigned)value(i);
2408       // args are packed so that first/lower arguments are in the highest
2409       // bits of each int value, so iterate from highest to the lowest
2410       for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2411         unsigned v = (val >> j) & _basic_type_mask;
2412         if (v == 0) {
2413           assert(i == length() - 1, "Only expect zeroes in the last word");
2414           continue;
2415         }
2416         if (long_prev) {
2417           long_prev = false;
2418           if (v == T_VOID) {
2419             st.print("J");
                continue;
2420           }
2421           st.print("L");
2422         }
2423         if (v == T_LONG) {
2424           long_prev = true;
2425         } else if (v != T_VOID) {
2426           st.print("%c", type2char((BasicType)v));
2427         }
2428       }
2429     }
2430     if (long_prev) {
2431       st.print("L");
2432     }
2433     return st.as_string();
2434   }
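   // e.g. a fingerprint for (int, Object) prints as "IL" on LP64; a true long
   // prints as "J" only because it is followed by its T_VOID slot, which is how
   // it is told apart from an object (both encode as T_LONG).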
2435 #endif // !PRODUCT
2436 
2437   bool equals(AdapterFingerPrint* other) {
2438     if (other->_length != _length) {
2439       return false;
2440     }
2441     if (_length < 0) {
2442       assert(_compact_int_count == 3, "else change next line");
2443       return _value._compact[0] == other->_value._compact[0] &&
2444              _value._compact[1] == other->_value._compact[1] &&
2445              _value._compact[2] == other->_value._compact[2];
2446     } else {

2454   }
2455 
2456   static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2457     NOT_PRODUCT(_equals++);
2458     return fp1->equals(fp2);
2459   }
2460 
2461   static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2462     return fp->compute_hash();
2463   }
2464 };
2465 
2466 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2467 using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2468                   AnyObj::C_HEAP, mtCode,
2469                   AdapterFingerPrint::compute_hash,
2470                   AdapterFingerPrint::equals>;
2471 static AdapterHandlerTable* _adapter_handler_table;
2472 
2473 // Find an entry with the same fingerprint, if one exists
2474 static AdapterHandlerEntry* lookup(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2475   NOT_PRODUCT(_lookups++);
2476   assert_lock_strong(AdapterHandlerLibrary_lock);
2477   AdapterFingerPrint fp(sig, has_ro_adapter);
2478   AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2479   if (entry != nullptr) {
2480 #ifndef PRODUCT
2481     if (fp.is_compact()) _compact++;
2482     _hits++;
2483 #endif
2484     return *entry;
2485   }
2486   return nullptr;
2487 }
2488 
2489 #ifndef PRODUCT
2490 static void print_table_statistics() {
2491   auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2492     return sizeof(*key) + sizeof(*a);
2493   };
2494   TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2495   ts.print(tty, "AdapterHandlerTable");
2496   tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2497                 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2498   tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2499                 _lookups, _equals, _hits, _compact);
2500 }
2501 #endif
2502 
2503 // ---------------------------------------------------------------------------
2504 // Implementation of AdapterHandlerLibrary
2505 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2506 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2507 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2508 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2509 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2510 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2511 const int AdapterHandlerLibrary_size = 48*K;
2512 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2513 
2514 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2515   return _buffer;
2516 }
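// Note: _buffer is a single scratch blob into which adapter code is generated
// (serialized by AdapterHandlerLibrary_lock) before being copied into its own
// AdapterBlob in the code cache.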
2517 
2518 static void post_adapter_creation(const AdapterBlob* new_adapter,
2519                                   const AdapterHandlerEntry* entry) {
2520   if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2521     char blob_id[256];
2522     jio_snprintf(blob_id,
2523                  sizeof(blob_id),
2524                  "%s(%s)",
2525                  new_adapter->name(),
2526                  entry->fingerprint()->as_string());
2527     if (Forte::is_enabled()) {
2528       Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2529     }
2530 
2531     if (JvmtiExport::should_post_dynamic_code_generated()) {

2534   }
2535 }
2536 
2537 void AdapterHandlerLibrary::initialize() {
2538   ResourceMark rm;
2539   AdapterBlob* no_arg_blob = nullptr;
2540   AdapterBlob* int_arg_blob = nullptr;
2541   AdapterBlob* obj_arg_blob = nullptr;
2542   AdapterBlob* obj_int_arg_blob = nullptr;
2543   AdapterBlob* obj_obj_arg_blob = nullptr;
2544   {
2545     _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2546     MutexLocker mu(AdapterHandlerLibrary_lock);
2547 
2548     // Create a special handler for abstract methods.  Abstract methods
2549     // are never compiled so an i2c entry is somewhat meaningless, but
2550     // throw AbstractMethodError just in case.
2551     // Pass wrong_method_abstract for the c2i transitions to return
2552     // AbstractMethodError for invalid invocations.
2553     address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2554     _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(nullptr),
2555                                                                 SharedRuntime::throw_AbstractMethodError_entry(),
2556                                                                 wrong_method_abstract, wrong_method_abstract, wrong_method_abstract,
2557                                                                 wrong_method_abstract, wrong_method_abstract);

2558     _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);

2559 
2560     CompiledEntrySignature no_args;
2561     no_args.compute_calling_conventions();
2562     _no_arg_handler = create_adapter(no_arg_blob, no_args, true);
2563 
2564     CompiledEntrySignature obj_args;
2565     SigEntry::add_entry(obj_args.sig(), T_OBJECT, nullptr);
2566     obj_args.compute_calling_conventions();
2567     _obj_arg_handler = create_adapter(obj_arg_blob, obj_args, true);
2568 
2569     CompiledEntrySignature int_args;
2570     SigEntry::add_entry(int_args.sig(), T_INT, nullptr);
2571     int_args.compute_calling_conventions();
2572     _int_arg_handler = create_adapter(int_arg_blob, int_args, true);
2573 
2574     CompiledEntrySignature obj_int_args;
2575     SigEntry::add_entry(obj_int_args.sig(), T_OBJECT, nullptr);
2576     SigEntry::add_entry(obj_int_args.sig(), T_INT, nullptr);
2577     obj_int_args.compute_calling_conventions();
2578     _obj_int_arg_handler = create_adapter(obj_int_arg_blob, obj_int_args, true);
2579 
2580     CompiledEntrySignature obj_obj_args;
2581     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT, nullptr);
2582     SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT, nullptr);
2583     obj_obj_args.compute_calling_conventions();
2584     _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, obj_obj_args, true);
2585 
2586     assert(no_arg_blob != nullptr &&
2587           obj_arg_blob != nullptr &&
2588           int_arg_blob != nullptr &&
2589           obj_int_arg_blob != nullptr &&
2590           obj_obj_arg_blob != nullptr, "Initial adapters must be properly created");
2591   }
2593 
2594   // Outside of the lock
2595   post_adapter_creation(no_arg_blob, _no_arg_handler);
2596   post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2597   post_adapter_creation(int_arg_blob, _int_arg_handler);
2598   post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2599   post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2600 }
2601 
2602 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
2603                                                       address i2c_entry,
2604                                                       address c2i_entry,
2605                                                       address c2i_inline_entry,
2606                                                       address c2i_inline_ro_entry,
2607                                                       address c2i_unverified_entry,
2608                                                       address c2i_unverified_inline_entry,
2609                                                       address c2i_no_clinit_check_entry) {
2610   return new AdapterHandlerEntry(fingerprint, i2c_entry, c2i_entry, c2i_inline_entry, c2i_inline_ro_entry, c2i_unverified_entry,
2611                               c2i_unverified_inline_entry, c2i_no_clinit_check_entry);

2612 }
2613 
2614 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2615   if (method->is_abstract()) {
2616     return nullptr;
2617   }
2618   int total_args_passed = method->size_of_parameters(); // All args on stack
2619   if (total_args_passed == 0) {
2620     return _no_arg_handler;
2621   } else if (total_args_passed == 1) {
2622     if (!method->is_static()) {
2623       if (InlineTypePassFieldsAsArgs && method->method_holder()->is_inline_klass()) {
2624         return nullptr;
2625       }
2626       return _obj_arg_handler;
2627     }
2628     switch (method->signature()->char_at(1)) {
2629       case JVM_SIGNATURE_CLASS: {
2630         if (InlineTypePassFieldsAsArgs) {
2631           SignatureStream ss(method->signature());
2632           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2633           if (vk != nullptr) {
2634             return nullptr;
2635           }
2636         }
2637         return _obj_arg_handler;
2638       }
2639       case JVM_SIGNATURE_ARRAY:
2640         return _obj_arg_handler;
2641       case JVM_SIGNATURE_INT:
2642       case JVM_SIGNATURE_BOOLEAN:
2643       case JVM_SIGNATURE_CHAR:
2644       case JVM_SIGNATURE_BYTE:
2645       case JVM_SIGNATURE_SHORT:
2646         return _int_arg_handler;
2647     }
2648   } else if (total_args_passed == 2 &&
2649              !method->is_static() && (!InlineTypePassFieldsAsArgs || !method->method_holder()->is_inline_klass())) {
2650     switch (method->signature()->char_at(1)) {
2651       case JVM_SIGNATURE_CLASS: {
2652         if (InlineTypePassFieldsAsArgs) {
2653           SignatureStream ss(method->signature());
2654           InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2655           if (vk != nullptr) {
2656             return nullptr;
2657           }
2658         }
2659         return _obj_obj_arg_handler;
2660       }
2661       case JVM_SIGNATURE_ARRAY:
2662         return _obj_obj_arg_handler;
2663       case JVM_SIGNATURE_INT:
2664       case JVM_SIGNATURE_BOOLEAN:
2665       case JVM_SIGNATURE_CHAR:
2666       case JVM_SIGNATURE_BYTE:
2667       case JVM_SIGNATURE_SHORT:
2668         return _obj_int_arg_handler;
2669     }
2670   }
2671   return nullptr;
2672 }
2673 
2674 CompiledEntrySignature::CompiledEntrySignature(Method* method) :
2675   _method(method), _num_inline_args(0), _has_inline_recv(false),
2676   _regs(nullptr), _regs_cc(nullptr), _regs_cc_ro(nullptr),
2677   _args_on_stack(0), _args_on_stack_cc(0), _args_on_stack_cc_ro(0),
2678   _c1_needs_stack_repair(false), _c2_needs_stack_repair(false), _supers(nullptr) {
2679   _sig = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2680   _sig_cc = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2681   _sig_cc_ro = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2682 }
2683 
2684 // See if we can save space by sharing the same entry for VIEP and VIEP(RO),
2685 // or the same entry for VEP and VIEP(RO).
2686 CodeOffsets::Entries CompiledEntrySignature::c1_inline_ro_entry_type() const {
2687   if (!has_scalarized_args()) {
2688     // VEP/VIEP/VIEP(RO) all share the same entry. There's no packing.
2689     return CodeOffsets::Verified_Entry;
2690   }
2691   if (_method->is_static()) {
2692     // Static methods don't need VIEP(RO)
2693     return CodeOffsets::Verified_Entry;
2694   }
2695 
2696   if (has_inline_recv()) {
2697     if (num_inline_args() == 1) {
2698       // Share same entry for VIEP and VIEP(RO).
2699       // This is quite common: we have an instance method in an InlineKlass that has
2700       // no inline type args other than <this>.
2701       return CodeOffsets::Verified_Inline_Entry;
2702     } else {
2703       assert(num_inline_args() > 1, "must be");
2704       // No sharing:
2705       //   VIEP(RO) -- <this> is passed as object
2706       //   VEP      -- <this> is passed as fields
2707       return CodeOffsets::Verified_Inline_Entry_RO;
2708     }

2709   }
2710 
2711   // <this> is not an inline type (the static case already returned above)
2712   if (args_on_stack_cc() != args_on_stack_cc_ro()) {
2713     // No sharing:
2714     // Some arguments are passed on the stack, and we have inserted reserved entries
2715     // into the VEP, but we never insert reserved entries into the VIEP(RO).
2716     return CodeOffsets::Verified_Inline_Entry_RO;
2717   } else {
2718     // Share same entry for VEP and VIEP(RO).
2719     return CodeOffsets::Verified_Entry;
2720   }
2721 }
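
// Editor's note: a compact, standalone restatement of the sharing decision
// above, useful as a reference table. The parameters are hypothetical
// stand-ins mirroring the accessors used by c1_inline_ro_entry_type().
enum class SharedWith { VEP, VIEP, None };

static SharedWith viep_ro_shares_with(bool scalarized, bool is_static,
                                      bool inline_recv, int num_inline_args,
                                      int stack_cc, int stack_cc_ro) {
  if (!scalarized || is_static) return SharedWith::VEP;   // no packing / no VIEP(RO)
  if (inline_recv) {
    // Only <this> is scalarized: VIEP == VIEP(RO). More inline args: no sharing.
    return (num_inline_args == 1) ? SharedWith::VIEP : SharedWith::None;
  }
  // Reserved stack entries inserted into the VEP but never into VIEP(RO)
  // prevent sharing when the stack-arg counts diverge.
  return (stack_cc != stack_cc_ro) ? SharedWith::None : SharedWith::VEP;
}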
2722 
2723 // Returns all super methods (transitive) in classes and interfaces that are overridden by the current method.
2724 GrowableArray<Method*>* CompiledEntrySignature::get_supers() {
2725   if (_supers != nullptr) {
2726     return _supers;
2727   }
2728   _supers = new GrowableArray<Method*>();
2729   // Skip private, static, and <init> methods
2730   if (_method->is_private() || _method->is_static() || _method->is_object_constructor()) {
2731     return _supers;
2732   }
2733   Symbol* name = _method->name();
2734   Symbol* signature = _method->signature();
2735   const Klass* holder = _method->method_holder()->super();
2736   Symbol* holder_name = holder->name();
2737   ThreadInVMfromUnknown tiv;
2738   JavaThread* current = JavaThread::current();
2739   HandleMark hm(current);
2740   Handle loader(current, _method->method_holder()->class_loader());
2741 
2742   // Walk up the class hierarchy and search for super methods
2743   while (holder != nullptr) {
2744     Method* super_method = holder->lookup_method(name, signature);
2745     if (super_method == nullptr) {
2746       break;
2747     }
2748     if (!super_method->is_static() && !super_method->is_private() &&
2749         (!super_method->is_package_private() ||
2750          super_method->method_holder()->is_same_class_package(loader(), holder_name))) {
2751       _supers->push(super_method);
2752     }
2753     holder = super_method->method_holder()->super();
2754   }
2755   // Search interfaces for super methods
2756   Array<InstanceKlass*>* interfaces = _method->method_holder()->transitive_interfaces();
2757   for (int i = 0; i < interfaces->length(); ++i) {
2758     Method* m = interfaces->at(i)->lookup_method(name, signature);
2759     if (m != nullptr && !m->is_static() && m->is_public()) {
2760       _supers->push(m);
2761     }
2762   }
2763   return _supers;
2764 }
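
// Editor's note: a toy sketch (heavily simplified, hypothetical types) of the
// superclass walk in get_supers(): ascend the chain, collect each matching
// method, and stop at the first class that no longer declares one. The real
// code additionally applies private/package-access checks and scans
// transitive interfaces.
#include <map>
#include <string>
#include <vector>

struct ToyKlass {
  ToyKlass* super = nullptr;
  std::map<std::string, int> methods;   // name -> method id
  const int* lookup(const std::string& name) const {
    auto it = methods.find(name);
    return it == methods.end() ? nullptr : &it->second;
  }
};

static std::vector<int> collect_supers(const ToyKlass* holder, const std::string& name) {
  std::vector<int> supers;
  for (const ToyKlass* k = holder->super; k != nullptr; k = k->super) {
    const int* m = k->lookup(name);
    if (m == nullptr) break;   // mirror the early 'break' on the first miss
    supers.push_back(*m);      // accessibility checks omitted in this sketch
  }
  return supers;
}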
2765 
2766 // Iterate over arguments and compute scalarized and non-scalarized signatures
2767 void CompiledEntrySignature::compute_calling_conventions(bool init) {
2768   bool has_scalarized = false;
2769   if (_method != nullptr) {
2770     InstanceKlass* holder = _method->method_holder();
2771     int arg_num = 0;
2772     if (!_method->is_static()) {
2773       // We shouldn't scalarize 'this' in a value class constructor
2774       if (holder->is_inline_klass() && InlineKlass::cast(holder)->can_be_passed_as_fields() && !_method->is_object_constructor() &&
2775           (init || _method->is_scalarized_arg(arg_num))) {
2776         _sig_cc->appendAll(InlineKlass::cast(holder)->extended_sig());
2777         has_scalarized = true;
2778         _has_inline_recv = true;
2779         _num_inline_args++;
2780       } else {
2781         SigEntry::add_entry(_sig_cc, T_OBJECT, holder->name());
2782       }
2783       SigEntry::add_entry(_sig, T_OBJECT, holder->name());
2784       SigEntry::add_entry(_sig_cc_ro, T_OBJECT, holder->name());
2785       arg_num++;
2786     }
2787     for (SignatureStream ss(_method->signature()); !ss.at_return_type(); ss.next()) {
2788       BasicType bt = ss.type();
2789       if (bt == T_OBJECT) {
2790         InlineKlass* vk = ss.as_inline_klass(holder);
2791         if (vk != nullptr && vk->can_be_passed_as_fields() && (init || _method->is_scalarized_arg(arg_num))) {
2792           // Check for a calling convention mismatch with super method(s)
2793           bool scalar_super = false;
2794           bool non_scalar_super = false;
2795           GrowableArray<Method*>* supers = get_supers();
2796           for (int i = 0; i < supers->length(); ++i) {
2797             Method* super_method = supers->at(i);
2798             if (super_method->is_scalarized_arg(arg_num)) {
2799               scalar_super = true;
2800             } else {
2801               non_scalar_super = true;
2802             }
2803           }
2804 #ifdef ASSERT
2805           // Randomly enable below code paths for stress testing
2806           bool stress = init && StressCallingConvention;
2807           if (stress && (os::random() & 1) == 1) {
2808             non_scalar_super = true;
2809             if ((os::random() & 1) == 1) {
2810               scalar_super = true;
2811             }
2812           }
2813 #endif
2814           if (non_scalar_super) {
2815             // Found a super method with a non-scalarized argument. Fall back to the non-scalarized calling convention.
2816             if (scalar_super) {
2817               // Found non-scalar *and* scalar super methods. We can't handle both.
2818               // Mark the scalarized super methods as mismatched and recompile their call sites to use the non-scalarized calling convention.
2819               for (int i = 0; i < supers->length(); ++i) {
2820                 Method* super_method = supers->at(i);
2821                 if (super_method->is_scalarized_arg(arg_num) debug_only(|| (stress && (os::random() & 1) == 1))) {
2822                   super_method->set_mismatch();
2823                   MutexLocker ml(Compile_lock, Mutex::_safepoint_check_flag);
2824                   JavaThread* thread = JavaThread::current();
2825                   HandleMark hm(thread);
2826                   methodHandle mh(thread, super_method);
2827                   DeoptimizationScope deopt_scope;
2828                   CodeCache::mark_for_deoptimization(&deopt_scope, mh());
2829                   deopt_scope.deoptimize_marked();
2830                 }
2831               }
2832             }
2833             // Fall back to non-scalarized calling convention
2834             SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2835             SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2836           } else {
2837             _num_inline_args++;
2838             has_scalarized = true;
2839             int last = _sig_cc->length();
2840             int last_ro = _sig_cc_ro->length();
2841             _sig_cc->appendAll(vk->extended_sig());
2842             _sig_cc_ro->appendAll(vk->extended_sig());
2843             if (bt == T_OBJECT) {
2844               // Nullable inline type argument: insert the InlineTypeNode::IsInit field right after the T_METADATA delimiter
2845               _sig_cc->insert_before(last+1, SigEntry(T_BOOLEAN, -1, nullptr));
2846               _sig_cc_ro->insert_before(last_ro+1, SigEntry(T_BOOLEAN, -1, nullptr));
2847             }
2848           }
2849         } else {
2850           SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2851           SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2852         }
2853         bt = T_OBJECT;
2854       } else {
2855         SigEntry::add_entry(_sig_cc, ss.type(), ss.as_symbol());
2856         SigEntry::add_entry(_sig_cc_ro, ss.type(), ss.as_symbol());
2857       }
2858       SigEntry::add_entry(_sig, bt, ss.as_symbol());
2859       if (bt != T_VOID) {
2860         arg_num++;
2861       }
2862     }
2863   }
2864 
2865   // Compute the non-scalarized calling convention
2866   _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
2867   _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
2868 
2869   // Compute the scalarized calling conventions if there are scalarized inline types in the signature
2870   if (has_scalarized && !_method->is_native()) {
2871     _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
2872     _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
2873 
2874     _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
2875     _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
2876 
2877     _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
2878     _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
2879 
2880     // Upper bound on stack arguments to avoid hitting the argument limit and
2881     // bailing out of compilation ("unsupported incoming calling sequence").
2882     // TODO we need a reasonable limit (flag?) here
2883     if (MAX2(_args_on_stack_cc, _args_on_stack_cc_ro) <= 60) {
2884       return; // Success
2885     }
2886   }
2887 
2888   // No scalarized args
2889   _sig_cc = _sig;
2890   _regs_cc = _regs;
2891   _args_on_stack_cc = _args_on_stack;
2892 
2893   _sig_cc_ro = _sig;
2894   _regs_cc_ro = _regs;
2895   _args_on_stack_cc_ro = _args_on_stack;
2896 }
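
// Editor's note: a worked example (a sketch, not HotSpot code) of the
// scalarized layout implied by the appendAll/insert_before calls above.
// Assume a hypothetical value class Point { int x; int y; } and a method
// void m(Point p). The nominal signature _sig keeps a single T_OBJECT entry,
// while _sig_cc flattens p into delimited fields, with the T_BOOLEAN IsInit
// marker inserted right after the opening T_METADATA delimiter. The two
// conventions can then need different numbers of stack slots, which is what
// drives _c1_needs_stack_repair / _c2_needs_stack_repair.
#include <vector>

enum ToyBT { TOY_METADATA, TOY_BOOLEAN, TOY_INT, TOY_VOID, TOY_OBJECT };

static std::vector<ToyBT> scalarized_sig_for_point() {
  return { TOY_METADATA,   // delimiter: start of the flattened Point argument
           TOY_BOOLEAN,    // IsInit marker for the nullable argument
           TOY_INT,        // field Point.x
           TOY_INT,        // field Point.y
           TOY_VOID };     // delimiter: end of the flattened argument
}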
2897 
2898 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
2899   // Use a customized signature handler. We need to lock around updates to
2900   // _adapter_handler_table: it is not safe for concurrent readers while a
2901   // single writer is active (this could be fixed if it ever becomes a
2902   // problem).
2903 
2904   // Fast-path for trivial adapters
2905   AdapterHandlerEntry* entry = get_simple_adapter(method);
2906   if (entry != nullptr) {
2907     return entry;
2908   }
2909 
2910   ResourceMark rm;
2911   AdapterBlob* new_adapter = nullptr;
2912 
2913   CompiledEntrySignature ces(method());
2914   ces.compute_calling_conventions();
2915   if (ces.has_scalarized_args()) {
2916     if (!method->has_scalarized_args()) {
2917       method->set_has_scalarized_args();
2918     }
2919     if (ces.c1_needs_stack_repair()) {
2920       method->set_c1_needs_stack_repair();
2921     }
2922     if (ces.c2_needs_stack_repair() && !method->c2_needs_stack_repair()) {
2923       method->set_c2_needs_stack_repair();
2924     }
2925   } else if (method->is_abstract()) {
2926     return _abstract_method_handler;
2927   }
2928 




2929   {
2930     MutexLocker mu(AdapterHandlerLibrary_lock);
2931 
2932     if (ces.has_scalarized_args() && method->is_abstract()) {
2933       // Save a C heap allocated version of the signature for abstract methods with scalarized inline type arguments
2934       address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2935       entry = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(nullptr),
2936                                                SharedRuntime::throw_AbstractMethodError_entry(),
2937                                                wrong_method_abstract, wrong_method_abstract, wrong_method_abstract,
2938                                                wrong_method_abstract, wrong_method_abstract);
2939       GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc_ro()->length(), mtInternal);
2940       heap_sig->appendAll(ces.sig_cc_ro());
2941       entry->set_sig_cc(heap_sig);
2942       return entry;
2943     }
2944 
2945     // Look up the method signature's fingerprint
2946     entry = lookup(ces.sig_cc(), ces.has_inline_recv());
2947 
2948     if (entry != nullptr) {
2949 #ifdef ASSERT
2950       if (VerifyAdapterSharing) {
2951         AdapterBlob* comparison_blob = nullptr;
2952         AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, ces, false);
2953         assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
2954         assert(comparison_entry->compare_code(entry), "code must match");
2955         // Release the one just created and return the original
2956         delete comparison_entry;
2957       }
2958 #endif
2959       return entry;
2960     }
2961 
2962     entry = create_adapter(new_adapter, ces, /* allocate_code_blob */ true);
2963   }
2964 
2965   // Outside of the lock
2966   if (new_adapter != nullptr) {
2967     post_adapter_creation(new_adapter, entry);
2968   }
2969   return entry;
2970 }
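
// Editor's note: the control flow above is a classic "look up under a lock,
// create under the lock, finish publication outside" cache keyed by a
// signature fingerprint. A minimal generic sketch (hypothetical types;
// std::mutex standing in for AdapterHandlerLibrary_lock):
#include <mutex>
#include <string>
#include <unordered_map>

static std::unordered_map<std::string, int> g_cache;   // fingerprint -> entry
static std::mutex g_cache_lock;

static int get_or_create(const std::string& fingerprint) {
  int entry;
  {
    std::lock_guard<std::mutex> guard(g_cache_lock);
    auto it = g_cache.find(fingerprint);
    if (it != g_cache.end()) return it->second;   // fast path: already created
    entry = static_cast<int>(g_cache.size());     // stand-in for create_adapter()
    g_cache.emplace(fingerprint, entry);
  }
  // Post-processing (cf. post_adapter_creation above) runs outside the lock.
  return entry;
}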
2971 
2972 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2973                                                            CompiledEntrySignature& ces,

2974                                                            bool allocate_code_blob) {
2975   if (log_is_enabled(Info, perf, class, link)) {
2976     ClassLoader::perf_method_adapters_count()->inc();
2977   }
2978 
2979   // StubRoutines::_final_stubs_code is initialized after this function can already be called. As a
2980   // result, VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated
2981   // before StubRoutines::_final_stubs_code was set. The checks in question are runtime range checks,
2982   // generated into an I2C stub, that ensure the stub is called from an interpreter frame or from stubs.
2983   bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2984 





2985   BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2986   CodeBuffer buffer(buf);
2987   short buffer_locs[20];
2988   buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2989                                           sizeof(buffer_locs)/sizeof(relocInfo));
2990 
2991   // Make a C heap allocated version of the fingerprint to store in the adapter
2992   AdapterFingerPrint* fingerprint = new AdapterFingerPrint(ces.sig_cc(), ces.has_inline_recv());
2993   MacroAssembler _masm(&buffer);
2994   AdapterHandlerEntry* entry = SharedRuntime::generate_i2c2i_adapters(&_masm,
2995                                                 ces.args_on_stack(),
2996                                                 ces.sig(),
2997                                                 ces.regs(),
2998                                                 ces.sig_cc(),
2999                                                 ces.regs_cc(),
3000                                                 ces.sig_cc_ro(),
3001                                                 ces.regs_cc_ro(),
3002                                                 fingerprint,
3003                                                 new_adapter,
3004                                                 allocate_code_blob);
3005 
3006   if (ces.has_scalarized_args()) {
3007     // Save a C heap allocated version of the scalarized signature and store it in the adapter
3008     GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
3009     heap_sig->appendAll(ces.sig_cc());
3010     entry->set_sig_cc(heap_sig);
3011   }
3012 
3013 #ifdef ASSERT
3014   if (VerifyAdapterSharing) {
3015     entry->save_code(buf->code_begin(), buffer.insts_size());
3016     if (!allocate_code_blob) {
3017       return entry;
3018     }
3019   }
3020 #endif
3021 

3022   NOT_PRODUCT(int insts_size = buffer.insts_size());
3023   if (new_adapter == nullptr) {
3024     // CodeCache is full, disable compilation
3025     // Ought to log this, but the compile log is only per compile thread
3026     // and we're some nondescript Java thread.
3027     return nullptr;
3028   }
3029   entry->relocate(new_adapter->content_begin());
3030 #ifndef PRODUCT
3031   // debugging support
3032   if (PrintAdapterHandlers || PrintStubCode) {
3033     ttyLocker ttyl;
3034     entry->print_adapter_on(tty);
3035     tty->print_cr("i2c argument handler #%d for: %s %s (%d bytes generated)",
3036                   _adapter_handler_table->number_of_entries(), fingerprint->as_basic_args_string(),
3037                   fingerprint->as_string(), insts_size);
3038     tty->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(entry->get_c2i_entry()));
3039     if (Verbose || PrintStubCode) {
3040       address first_pc = entry->base_address();
3041       if (first_pc != nullptr) {
3042         Disassembler::decode(first_pc, first_pc + insts_size, tty
3043                              NOT_PRODUCT(COMMA &new_adapter->asm_remarks()));
3044         tty->cr();
3045       }
3046     }
3047   }
3048 #endif
3049 
3050   // Add the entry only if it contains all required checks (see sharedRuntime_xxx.cpp).
3051   // The checks are inserted only if -XX:+VerifyAdapterCalls is specified.
3052   if (contains_all_checks || !VerifyAdapterCalls) {
3053     assert_lock_strong(AdapterHandlerLibrary_lock);
3054     _adapter_handler_table->put(fingerprint, entry);
3055   }
3056   return entry;
3057 }
3058 
3059 address AdapterHandlerEntry::base_address() {
3060   address base = _i2c_entry;
3061   if (base == nullptr)  base = _c2i_entry;
3062   assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
3063   assert(base <= _c2i_inline_entry || _c2i_inline_entry == nullptr, "");
3064   assert(base <= _c2i_inline_ro_entry || _c2i_inline_ro_entry == nullptr, "");
3065   assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
3066   assert(base <= _c2i_unverified_inline_entry || _c2i_unverified_inline_entry == nullptr, "");
3067   assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
3068   return base;
3069 }
3070 
3071 void AdapterHandlerEntry::relocate(address new_base) {
3072   address old_base = base_address();
3073   assert(old_base != nullptr, "");
3074   ptrdiff_t delta = new_base - old_base;
3075   if (_i2c_entry != nullptr)
3076     _i2c_entry += delta;
3077   if (_c2i_entry != nullptr)
3078     _c2i_entry += delta;
3079   if (_c2i_inline_entry != nullptr)
3080     _c2i_inline_entry += delta;
3081   if (_c2i_inline_ro_entry != nullptr)
3082     _c2i_inline_ro_entry += delta;
3083   if (_c2i_unverified_entry != nullptr)
3084     _c2i_unverified_entry += delta;
3085   if (_c2i_unverified_inline_entry != nullptr)
3086     _c2i_unverified_inline_entry += delta;
3087   if (_c2i_no_clinit_check_entry != nullptr)
3088     _c2i_no_clinit_check_entry += delta;
3089   assert(base_address() == new_base, "");
3090 }
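
// Editor's note: relocate() above rebases every entry point by the same
// signed delta between the old and new base addresses. A tiny self-contained
// illustration of the idiom (hypothetical two-entry struct):
#include <cstddef>

struct ToyEntryPoints {
  unsigned char* a = nullptr;
  unsigned char* b = nullptr;
  void relocate(unsigned char* new_base) {
    unsigned char* old_base = (a != nullptr) ? a : b;
    std::ptrdiff_t delta = new_base - old_base;   // may be negative
    if (a != nullptr) a += delta;
    if (b != nullptr) b += delta;                 // null entries stay null
  }
};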
3091 
3092 
3093 AdapterHandlerEntry::~AdapterHandlerEntry() {
3094   delete _fingerprint;
3095   if (_sig_cc != nullptr) {
3096     delete _sig_cc;
3097   }
3098 #ifdef ASSERT
3099   FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3100 #endif
3101 }
3102 
3103 
3104 #ifdef ASSERT
3105 // Capture the code before relocation so that it can be compared
3106 // against other versions.  If the code is captured after relocation
3107 // then relative instructions won't be equivalent.
3108 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3109   _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3110   _saved_code_length = length;
3111   memcpy(_saved_code, buffer, length);
3112 }
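
// Editor's note: the comparison VerifyAdapterSharing then performs (the
// compare_code() body is elided below) amounts to a length check plus a
// byte-wise compare of the pre-relocation snapshots, which is why the code is
// captured before relocation. A plausible sketch, not the actual
// implementation:
#include <cstring>

static bool codes_match(const unsigned char* code_a, int len_a,
                        const unsigned char* code_b, int len_b) {
  // Both snapshots were taken before relocation, so PC-relative
  // instructions are directly comparable.
  return len_a == len_b && std::memcmp(code_a, code_b, len_a) == 0;
}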
3113 
3114 
3115 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3116   assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");
3117 

3164 
3165       struct { double data[20]; } locs_buf;
3166       struct { double data[20]; } stubs_locs_buf;
3167       buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3168 #if defined(AARCH64) || defined(PPC64)
3169       // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3170       // in the constant pool to ensure ordering between the barrier and oops
3171       // accesses. For native_wrappers we need a constant.
3172       // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3173       // static java call that is resolved in the runtime.
3174       if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3175         buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3176       }
3177 #endif
3178       buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3179       MacroAssembler _masm(&buffer);
3180 
3181       // Fill in the signature array, for the calling-convention call.
3182       const int total_args_passed = method->size_of_parameters();
3183 
3184       BasicType stack_sig_bt[16];
3185       VMRegPair stack_regs[16];
3186       BasicType* sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
3187       VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3188 
3189       int i = 0;
3190       if (!method->is_static()) {  // Pass in receiver first
3191         sig_bt[i++] = T_OBJECT;
3192       }
3193       SignatureStream ss(method->signature());
3194       for (; !ss.at_return_type(); ss.next()) {
3195         sig_bt[i++] = ss.type();  // Collect remaining bits of signature
3196         if (ss.type() == T_LONG || ss.type() == T_DOUBLE) {
3197           sig_bt[i++] = T_VOID;   // Longs & doubles take 2 Java slots
3198         }
3199       }
3200       assert(i == total_args_passed, "");
3201       BasicType ret_type = ss.type();
3202 
3203       // Now get the compiled-Java arguments layout.
3204       SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3205 
3206       // Generate the compiled-to-native wrapper code
3207       nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3208 
3209       if (nm != nullptr) {
3210         {
3211           MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3212           if (nm->make_in_use()) {
3213             method->set_code(method, nm);
3214           }
3215         }
3216 
3217         DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3218         if (directive->PrintAssemblyOption) {
3219           nm->print_code();
3220         }
3221         DirectivesStack::release(directive);

3428       st->print("Adapter for signature: ");
3429       a->print_adapter_on(st);
3430       return true;
3431     } else {
3432       return false; // keep looking
3433     }
3434   };
3435   assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3436   _adapter_handler_table->iterate(findblob);
3437   assert(found, "Should have found handler");
3438 }
3439 
3440 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3441   st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3442   if (get_i2c_entry() != nullptr) {
3443     st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3444   }
3445   if (get_c2i_entry() != nullptr) {
3446     st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3447   }
3448   if (get_c2i_inline_entry() != nullptr) {
3449     st->print(" c2iVE: " INTPTR_FORMAT, p2i(get_c2i_inline_entry()));
3450   }
3451   if (get_c2i_inline_ro_entry() != nullptr) {
3452     st->print(" c2iVROE: " INTPTR_FORMAT, p2i(get_c2i_inline_ro_entry()));
3453   }
3454   if (get_c2i_unverified_entry() != nullptr) {
3455     st->print(" c2iUE: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3456   }
3457   if (get_c2i_unverified_inline_entry() != nullptr) {
3458     st->print(" c2iUVE: " INTPTR_FORMAT, p2i(get_c2i_unverified_inline_entry()));
3459   }
3460   if (get_c2i_no_clinit_check_entry() != nullptr) {
3461     st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3462   }
3463   st->cr();
3464 }
3465 
3466 #ifndef PRODUCT
3467 
3468 void AdapterHandlerLibrary::print_statistics() {
3469   print_table_statistics();
3470 }
3471 
3472 #endif /* PRODUCT */
3473 
3474 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3475   assert(current == JavaThread::current(), "pre-condition");
3476   StackOverflow* overflow_state = current->stack_overflow_state();
3477   overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3478   overflow_state->set_reserved_stack_activation(current->stack_base());

3527         event.set_method(method);
3528         event.commit();
3529       }
3530     }
3531   }
3532   return activation;
3533 }
3534 
3535 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3536   // After any safepoint, just before going back to compiled code,
3537   // we inform the GC that we will be doing initializing writes to
3538   // this object in the future without emitting card-marks, so that
3539   // the GC may take any compensating steps.
3540 
3541   oop new_obj = current->vm_result();
3542   if (new_obj == nullptr) return;
3543 
3544   BarrierSet *bs = BarrierSet::barrier_set();
3545   bs->on_slowpath_allocation_exit(current, new_obj);
3546 }
3547 
3548 // We are at a compiled-code-to-interpreter call. We need backing
3549 // buffers for all inline type arguments. Allocate an object array to
3550 // hold them (convenient because once we're done with it we don't have
3551 // to worry about freeing it).
3552 oop SharedRuntime::allocate_inline_types_impl(JavaThread* current, methodHandle callee, bool allocate_receiver, TRAPS) {
3553   assert(InlineTypePassFieldsAsArgs, "no reason to call this");
3554   ResourceMark rm;
3555 
3556   int nb_slots = 0;
3557   InstanceKlass* holder = callee->method_holder();
3558   allocate_receiver &= !callee->is_static() && holder->is_inline_klass() && callee->is_scalarized_arg(0);
3559   if (allocate_receiver) {
3560     nb_slots++;
3561   }
3562   int arg_num = callee->is_static() ? 0 : 1;
3563   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
3564     BasicType bt = ss.type();
3565     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
3566       nb_slots++;
3567     }
3568     if (bt != T_VOID) {
3569       arg_num++;
3570     }
3571   }
3572   objArrayOop array_oop = oopFactory::new_objectArray(nb_slots, CHECK_NULL);
3573   objArrayHandle array(THREAD, array_oop);
3574   arg_num = callee->is_static() ? 0 : 1;
3575   int i = 0;
3576   if (allocate_receiver) {
3577     InlineKlass* vk = InlineKlass::cast(holder);
3578     oop res = vk->allocate_instance(CHECK_NULL);
3579     array->obj_at_put(i++, res);
3580   }
3581   for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
3582     BasicType bt = ss.type();
3583     if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
3584       InlineKlass* vk = ss.as_inline_klass(holder);
3585       assert(vk != nullptr, "Unexpected klass");
3586       oop res = vk->allocate_instance(CHECK_NULL);
3587       array->obj_at_put(i++, res);
3588     }
3589     if (bt != T_VOID) {
3590       arg_num++;
3591     }
3592   }
3593   return array();
3594 }
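
// Editor's note: allocate_inline_types_impl() above uses a two-pass shape:
// first count how many backing buffers are needed, then allocate the array
// once and fill it with a second, identical walk (the real walk also skips
// T_VOID slots when advancing arg_num). A generic sketch of that pattern,
// with a hypothetical predicate:
#include <functional>
#include <vector>

static std::vector<int> count_then_fill(int n, const std::function<bool(int)>& needs_slot) {
  int slots = 0;
  for (int i = 0; i < n; i++) {   // pass 1: size the array
    if (needs_slot(i)) slots++;
  }
  std::vector<int> out;
  out.reserve(slots);             // single allocation, like the one objArray
  for (int i = 0; i < n; i++) {   // pass 2: same walk, now filling
    if (needs_slot(i)) out.push_back(i);
  }
  return out;
}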
3595 
3596 JRT_ENTRY(void, SharedRuntime::allocate_inline_types(JavaThread* current, Method* callee_method, bool allocate_receiver))
3597   methodHandle callee(current, callee_method);
3598   oop array = SharedRuntime::allocate_inline_types_impl(current, callee, allocate_receiver, CHECK);
3599   current->set_vm_result(array);
3600   current->set_vm_result_2(callee()); // TODO: required to keep callee live?
3601 JRT_END
3602 
3603 // We're returning from an interpreted method: load each field into a
3604 // register following the calling convention
3605 JRT_LEAF(void, SharedRuntime::load_inline_type_fields_in_regs(JavaThread* current, oopDesc* res))
3606 {
3607   assert(res->klass()->is_inline_klass(), "only inline types here");
3608   ResourceMark rm;
3609   RegisterMap reg_map(current,
3610                       RegisterMap::UpdateMap::include,
3611                       RegisterMap::ProcessFrames::include,
3612                       RegisterMap::WalkContinuation::skip);
3613   frame stubFrame = current->last_frame();
3614   frame callerFrame = stubFrame.sender(&reg_map);
3615   assert(callerFrame.is_interpreted_frame(), "should be coming from interpreter");
3616 
3617   InlineKlass* vk = InlineKlass::cast(res->klass());
3618 
3619   const Array<SigEntry>* sig_vk = vk->extended_sig();
3620   const Array<VMRegPair>* regs = vk->return_regs();
3621 
3622   if (regs == nullptr) {
3623     // The fields of the inline klass don't fit in registers, bail out
3624     return;
3625   }
3626 
3627   int j = 1;
3628   for (int i = 0; i < sig_vk->length(); i++) {
3629     BasicType bt = sig_vk->at(i)._bt;
3630     if (bt == T_METADATA) {
3631       continue;
3632     }
3633     if (bt == T_VOID) {
3634       if (sig_vk->at(i-1)._bt == T_LONG ||
3635           sig_vk->at(i-1)._bt == T_DOUBLE) {
3636         j++;
3637       }
3638       continue;
3639     }
3640     int off = sig_vk->at(i)._offset;
3641     assert(off > 0, "offset in object should be positive");
3642     VMRegPair pair = regs->at(j);
3643     address loc = reg_map.location(pair.first(), nullptr);
3644     switch(bt) {
3645     case T_BOOLEAN:
3646       *(jboolean*)loc = res->bool_field(off);
3647       break;
3648     case T_CHAR:
3649       *(jchar*)loc = res->char_field(off);
3650       break;
3651     case T_BYTE:
3652       *(jbyte*)loc = res->byte_field(off);
3653       break;
3654     case T_SHORT:
3655       *(jshort*)loc = res->short_field(off);
3656       break;
3657     case T_INT: {
3658       *(jint*)loc = res->int_field(off);
3659       break;
3660     }
3661     case T_LONG:
3662 #ifdef _LP64
3663       *(intptr_t*)loc = res->long_field(off);
3664 #else
3665       Unimplemented();
3666 #endif
3667       break;
3668     case T_OBJECT:
3669     case T_ARRAY: {
3670       *(oop*)loc = res->obj_field(off);
3671       break;
3672     }
3673     case T_FLOAT:
3674       *(jfloat*)loc = res->float_field(off);
3675       break;
3676     case T_DOUBLE:
3677       *(jdouble*)loc = res->double_field(off);
3678       break;
3679     default:
3680       ShouldNotReachHere();
3681     }
3682     j++;
3683   }
3684   assert(j == regs->length(), "missed a field?");
3685 
3686 #ifdef ASSERT
3687   VMRegPair pair = regs->at(0);
3688   address loc = reg_map.location(pair.first(), nullptr);
3689   assert(*(oopDesc**)loc == res, "overwritten object");
3690 #endif
3691 
3692   current->set_vm_result(res);
3693 }
3694 JRT_END
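
// Editor's note: the register index j above advances one pair per field,
// with an extra bump when a T_VOID placeholder follows a long/double (two
// Java slots), and regs->at(0) holds the returned oop itself (see the ASSERT
// block). A standalone sketch of that counting rule, with hypothetical tags:
#include <cstddef>
#include <vector>

enum ToySig { SIG_META, SIG_INT, SIG_LONG, SIG_PAD /* T_VOID placeholder */ };

static int count_register_pairs(const std::vector<ToySig>& sig) {
  int j = 1;                                      // slot 0: the oop itself
  for (std::size_t i = 0; i < sig.size(); i++) {
    if (sig[i] == SIG_META) continue;             // delimiters use no register
    if (sig[i] == SIG_PAD) {
      if (i > 0 && sig[i-1] == SIG_LONG) j++;     // second slot of a long/double
      continue;
    }
    j++;                                          // one register pair per field
  }
  return j;                                       // should equal regs->length()
}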
3695 
3696 // We've returned to an interpreted method; the interpreter needs a
3697 // reference to an inline type instance. Allocate it and initialize it
3698 // from the field values in registers.
3699 JRT_BLOCK_ENTRY(void, SharedRuntime::store_inline_type_fields_to_buf(JavaThread* current, intptr_t res))
3700 {
3701   ResourceMark rm;
3702   RegisterMap reg_map(current,
3703                       RegisterMap::UpdateMap::include,
3704                       RegisterMap::ProcessFrames::include,
3705                       RegisterMap::WalkContinuation::skip);
3706   frame stubFrame = current->last_frame();
3707   frame callerFrame = stubFrame.sender(&reg_map);
3708 
3709 #ifdef ASSERT
3710   InlineKlass* verif_vk = InlineKlass::returned_inline_klass(reg_map);
3711 #endif
3712 
3713   if (!is_set_nth_bit(res, 0)) {
3714     // We're not returning with inline type fields in registers (the
3715     // calling convention didn't allow it for this inline klass)
3716     assert(!Metaspace::contains((void*)res), "should be oop or pointer in buffer area");
3717     current->set_vm_result((oopDesc*)res);
3718     assert(verif_vk == nullptr, "broken calling convention");
3719     return;
3720   }
3721 
3722   clear_nth_bit(res, 0);
3723   InlineKlass* vk = (InlineKlass*)res;
3724   assert(verif_vk == vk, "broken calling convention");
3725   assert(Metaspace::contains((void*)res), "should be klass");
3726 
3727   // Allocate handles for every oop field so they are safe in case of
3728   // a safepoint when allocating
3729   GrowableArray<Handle> handles;
3730   vk->save_oop_fields(reg_map, handles);
3731 
3732   // Only now, with all oop fields protected by handles, is it safe to safepoint
3733   JRT_BLOCK;
3734   {
3735     JavaThread* THREAD = current;
3736     oop vt = vk->realloc_result(reg_map, handles, CHECK);
3737     current->set_vm_result(vt);
3738   }
3739   JRT_BLOCK_END;
3740 }
3741 JRT_END
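
// Editor's note: the return protocol above overloads one machine word: bit 0
// clear means the word is already an oop (heap-buffered result); bit 0 set
// means it is an InlineKlass* and the fields are still live in registers.
// A minimal, self-contained sketch of that tagging scheme (hypothetical
// types, not the HotSpot set/clear_nth_bit helpers):
#include <cassert>
#include <cstdint>

struct ToyKlass { int id; };

static std::intptr_t tag_klass(ToyKlass* k) {
  std::intptr_t res = reinterpret_cast<std::intptr_t>(k);
  assert((res & 1) == 0 && "klass pointers are at least 2-byte aligned");
  return res | 1;                                  // cf. setting bit 0
}

static ToyKlass* untag_klass(std::intptr_t res) {
  assert((res & 1) == 1 && "not a tagged klass word");
  return reinterpret_cast<ToyKlass*>(res & ~std::intptr_t(1));  // cf. clear_nth_bit
}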