#include "classfile/javaClasses.inline.hpp"
#include "classfile/stringTable.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/aotCodeCache.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/nmethod.inline.hpp"
#include "code/scopeDesc.hpp"
#include "code/vtableStubs.hpp"
#include "compiler/abstractCompiler.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "jfr/jfrEvents.hpp"
#include "jvm.h"
#include "logging/log.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "metaprogramming/primitiveConversions.hpp"
#include "oops/klass.hpp"
#include "oops/method.inline.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/forte.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "prims/methodHandles.hpp"
#include "prims/nativeLookup.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/basicLock.inline.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/java.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/perfData.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stackWatermarkSet.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.inline.hpp"
// for a call currently in progress, i.e., arguments have been pushed on the stack
// but the callee has not been invoked yet. The caller frame must be compiled.
Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
                                              CallInfo& callinfo, TRAPS) {
  Handle receiver;
  Handle nullHandle;  // create a handy null handle for exception returns
  JavaThread* current = THREAD;

  assert(!vfst.at_end(), "Java frame must exist");

  // Find caller and bci from vframe
  methodHandle caller(current, vfst.method());
  int bci = vfst.bci();

  if (caller->is_continuation_enter_intrinsic()) {
    bc = Bytecodes::_invokestatic;
    LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
    return receiver;
  }

  Bytecode_invoke bytecode(caller, bci);
  int bytecode_index = bytecode.index();
  bc = bytecode.invoke_code();

  methodHandle attached_method(current, extract_attached_method(vfst));
  if (attached_method.not_null()) {
    Method* callee = bytecode.static_target(CHECK_NH);
    vmIntrinsics::ID id = callee->intrinsic_id();
    // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual call,
    // it attaches the statically resolved method to the call site.
    if (MethodHandles::is_signature_polymorphic(id) &&
        MethodHandles::is_signature_polymorphic_intrinsic(id)) {
      bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);

      // Adjust invocation mode according to the attached method.
      switch (bc) {
        case Bytecodes::_invokevirtual:
          if (attached_method->method_holder()->is_interface()) {
            bc = Bytecodes::_invokeinterface;
          }
          break;
        case Bytecodes::_invokeinterface:
          if (!attached_method->method_holder()->is_interface()) {
            bc = Bytecodes::_invokevirtual;
          }
          break;
        case Bytecodes::_invokehandle:
          if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
            bc = attached_method->is_static() ? Bytecodes::_invokestatic
                                              : Bytecodes::_invokevirtual;
          }
          break;
        default:
          break;
      }
    }
  }

  assert(bc != Bytecodes::_illegal, "not initialized");

  bool has_receiver = bc != Bytecodes::_invokestatic &&
                      bc != Bytecodes::_invokedynamic &&
                      bc != Bytecodes::_invokehandle;

  // Find receiver for non-static call
  if (has_receiver) {
    // This register map must be updated since we need to find the receiver for
    // compiled frames. The receiver might be in a register.
    RegisterMap reg_map2(current,
                         RegisterMap::UpdateMap::include,
                         RegisterMap::ProcessFrames::include,
                         RegisterMap::WalkContinuation::skip);
    frame stubFrame = current->last_frame();
    // Caller-frame is a compiled frame
    frame callerFrame = stubFrame.sender(&reg_map2);

    if (attached_method.is_null()) {
      Method* callee = bytecode.static_target(CHECK_NH);
      if (callee == nullptr) {
        THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
      }
    }

    // Retrieve from a compiled argument list
    receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
    assert(oopDesc::is_oop_or_null(receiver()), "");

    if (receiver.is_null()) {
      THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
    }
  }

  // Resolve method
  if (attached_method.not_null()) {
    // Parameterized by attached method.
    LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH);
  } else {
    // Parameterized by bytecode.
    constantPoolHandle constants(current, caller->constants());
    LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
  }

#ifdef ASSERT
  // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
  if (has_receiver) {
    assert(receiver.not_null(), "should have thrown exception");
    Klass* receiver_klass = receiver->klass();
    Klass* rk = nullptr;
    if (attached_method.not_null()) {
      // In case there's a resolved method attached, use its holder during the check.
      rk = attached_method->method_holder();
    } else {
      // Klass is already loaded.
      constantPoolHandle constants(current, caller->constants());
      rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
    }
    Klass* static_receiver_klass = rk;
    assert(receiver_klass->is_subtype_of(static_receiver_klass),
           "actual receiver must be subclass of static receiver klass");
    if (receiver_klass->is_instance_klass()) {
      if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
        tty->print_cr("ERROR: Klass not yet initialized!!");
        receiver_klass->print();
      }
      assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
    }
  }
#endif

  return receiver;
}

methodHandle SharedRuntime::find_callee_method(TRAPS) {
  JavaThread* current = THREAD;
  ResourceMark rm(current);
  // We first need to check whether any Java activations (compiled or
  // interpreted) exist on the stack since the last JavaCall. If not, we need
  // to get the target method from the JavaCall wrapper.
  vframeStream vfst(current, true);  // Do not skip any javaCalls
  methodHandle callee_method;
  if (vfst.at_end()) {
    // No Java frames were found on the stack since we did the JavaCall.
    // Hence the stack can only contain an entry_frame. We need to
    // find the target method from the stub frame.
    RegisterMap reg_map(current,
                        RegisterMap::UpdateMap::skip,
                        RegisterMap::ProcessFrames::include,
                        RegisterMap::WalkContinuation::skip);
    frame fr = current->last_frame();
    assert(fr.is_runtime_frame(), "must be a runtimeStub");
    fr = fr.sender(&reg_map);
    assert(fr.is_entry_frame(), "must be");
    // fr is now pointing to the entry frame.
    callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
  } else {
    Bytecodes::Code bc;
    CallInfo callinfo;
    find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
    callee_method = methodHandle(current, callinfo.selected_method());
  }
  assert(callee_method()->is_method(), "must be");
  return callee_method;
}

// Resolves a call.
methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, TRAPS) {
  JavaThread* current = THREAD;
  ResourceMark rm(current);
  RegisterMap cbl_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame caller_frame = current->last_frame().sender(&cbl_map);

  CodeBlob* caller_cb = caller_frame.cb();
  guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
  nmethod* caller_nm = caller_cb->as_nmethod();

  // Determine call info & receiver
  // note: a) receiver is null for static calls
  //       b) an exception is thrown if receiver is null for non-static calls
  CallInfo call_info;
  Bytecodes::Code invoke_code = Bytecodes::_illegal;
  Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));

  NoSafepointVerifier nsv;

  methodHandle callee_method(current, call_info.selected_method());

  assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
         (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
         (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
         (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
         ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");

  assert(!caller_nm->is_unloading(), "It should not be unloading");

#ifndef PRODUCT
  // tracing/debugging/statistics
  uint* addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
               (is_virtual)   ? (&_resolve_virtual_ctr)     :
                                (&_resolve_static_ctr);
  Atomic::inc(addr);

  if (TraceCallFixup) {
    ResourceMark rm(current);
    tty->print("resolving %s%s (%s) call to",
               (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
               Bytecodes::name(invoke_code));
    callee_method->print_short_name(tty);
    tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
                  p2i(caller_frame.pc()), p2i(callee_method->code()));
  }
#endif

  if (invoke_code == Bytecodes::_invokestatic) {
    assert(callee_method->method_holder()->is_initialized() ||
           callee_method->method_holder()->is_reentrant_initialization(current),
           "invalid class initialization state for invoke_static");
    if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
      // In order to keep the class initialization check, do not patch the call
      // site for a static call while the class is not fully initialized.
      // The proper check is enforced by call site re-resolution on every invocation.
      //
      // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
      // an explicit class initialization check is put in the nmethod entry (VEP).
      assert(callee_method->method_holder()->is_linked(), "must be");
      return callee_method;
    }
  }

  // JSR 292 key invariant:
  // If the resolved method is a MethodHandle invoke target, the call
  // site must be a MethodHandle call site, because the lambda form might tail-call,
  // leaving the stack in a state unknown to either caller or callee.

  // Compute entry points. The computation of the entry points is independent of
  // patching the call.

  // Make sure the callee nmethod does not get deoptimized and removed before
  // we are done patching the code.

  CompiledICLocker ml(caller_nm);
  if (is_virtual && !is_optimized) {
    CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
    inline_cache->update(&call_info, receiver->klass());
  } else {
    // Callsite is a direct call - set it to the destination method
    CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
    callsite->set(callee_method);
  }

  return callee_method;
}
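
// Overview of how resolve_helper() patches the call site, given the flags
// passed by the resolve_*_call_C entry points below:
//   resolve_static_call_C:       is_virtual=false, is_optimized=false -> CompiledDirectCall
//   resolve_virtual_call_C:      is_virtual=true,  is_optimized=false -> CompiledIC (inline cache)
//   resolve_opt_virtual_call_C:  is_virtual=true,  is_optimized=true  -> CompiledDirectCall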

// Inline caches exist only in compiled code
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
#ifdef ASSERT
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stub_frame = current->last_frame();
  assert(stub_frame.is_runtime_frame(), "sanity check");
  frame caller_frame = stub_frame.sender(&reg_map);
  assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
#endif /* ASSERT */

  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
    // Return Method* through TLS
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END

// Handle call site that has been made non-entrant
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
  // 6243940 We might end up in here if the callee is deoptimized
  // as we race to call it. We don't want to take a safepoint if
  // the caller was interpreted, because the caller frame will look
  // interpreted to the stack walkers and arguments are now
  // "compiled", so it is much better to make this transition
  // invisible to the stack walking code. The i2c path will
  // place the callee method in callee_target. It is stashed
  // there because if we tried to find the callee by normal means a
  // safepoint would be possible and we would have trouble GC'ing the
  // compiled args.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stub_frame = current->last_frame();
  assert(stub_frame.is_runtime_frame(), "sanity check");
  frame caller_frame = stub_frame.sender(&reg_map);

  if (caller_frame.is_interpreted_frame() ||
      caller_frame.is_entry_frame() ||
      caller_frame.is_upcall_stub_frame()) {
    Method* callee = current->callee_target();
    guarantee(callee != nullptr && callee->is_method(), "bad handshake");
    current->set_vm_result_metadata(callee);
    current->set_callee_target(nullptr);
    if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
      // Bypass class initialization checks in c2i when the caller is in native.
      // JNI calls to static methods don't have class initialization checks.
      // Fast class initialization checks are present in c2i adapters and call into
      // SharedRuntime::handle_wrong_method() on the slow path.
      //
      // JVM upcalls may land here as well, but there's a proper check present in
      // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
      // so bypassing it in the c2i adapter is benign.
      return callee->get_c2i_no_clinit_check_entry();
    } else {
      return callee->get_c2i_entry();
    }
  }

  // Must be the compiled-to-compiled path, which is safe to stackwalk.
  methodHandle callee_method;
  JRT_BLOCK
    // Force re-resolving of the caller's call site (we came from a compiled frame).
    callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END

// Handle abstract method call
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
  // Verbose error message for AbstractMethodError.
  // Get the called method from the invoke bytecode.
  vframeStream vfst(current, true);
  assert(!vfst.at_end(), "Java frame must exist");
  methodHandle caller(current, vfst.method());
  Bytecode_invoke invoke(caller, vfst.bci());
  DEBUG_ONLY( invoke.verify(); )

  // Find the compiled caller frame.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::include,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stubFrame = current->last_frame();
  assert(stubFrame.is_runtime_frame(), "must be");
  frame callerFrame = stubFrame.sender(&reg_map);
  assert(callerFrame.is_compiled_frame(), "must be");

  // Install exception and return forward entry.
  address res = SharedRuntime::throw_AbstractMethodError_entry();
  JRT_BLOCK
    methodHandle callee(current, invoke.static_target(current));
    if (!callee.is_null()) {
      oop recv = callerFrame.retrieve_receiver(&reg_map);
      Klass* recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
      res = StubRoutines::forward_exception_entry();
      LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
    }
  JRT_BLOCK_END
  return res;
JRT_END

// Return verified_code_entry if interp_only_mode is not set for the current thread;
// otherwise return the c2i entry.
address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
  if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
    // In interp_only_mode we need to go to the interpreted entry.
    // The c2i won't patch in this mode -- see fixup_callers_callsite.
    return callee_method->get_c2i_entry();
  }
  assert(callee_method->verified_code_entry() != nullptr, "Jump to zero!");
  return callee_method->verified_code_entry();
}

// Resolve a static call and patch code.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END

// Resolve a virtual call and update the inline cache to monomorphic.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END

// Resolve a virtual call that can be statically bound (e.g., always
// monomorphic, so it has no inline cache). Patch code to the resolved target.
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
  methodHandle callee_method;
  JRT_BLOCK
    callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
    current->set_vm_result_metadata(callee_method());
  JRT_BLOCK_END
  // return compiled code entry point after potential safepoints
  return get_resolved_entry(current, callee_method);
JRT_END

methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
  JavaThread* current = THREAD;
  ResourceMark rm(current);
  CallInfo call_info;
  Bytecodes::Code bc;

  // The receiver is null for static calls. An exception is thrown for null
  // receivers of non-static calls.
  Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));

  methodHandle callee_method(current, call_info.selected_method());

#ifndef PRODUCT
  Atomic::inc(&_ic_miss_ctr);

  // Statistics & tracing
  if (TraceCallFixup) {
    ResourceMark rm(current);
    tty->print("IC miss (%s) call to", Bytecodes::name(bc));
    callee_method->print_short_name(tty);
    tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
  }

  if (ICMissHistogram) {
    MutexLocker m(VMStatistic_lock);
    RegisterMap reg_map(current,
                        RegisterMap::UpdateMap::skip,
                        RegisterMap::ProcessFrames::include,
                        RegisterMap::WalkContinuation::skip);
    frame f = current->last_frame().real_sender(&reg_map);  // skip runtime stub
    // produce statistics under the lock
    trace_ic_miss(f.pc());
  }
#endif

  // Install an event collector so that when a vtable stub is created the
  // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
  // event can't be posted when the stub is created as locks are held
  // - instead the event will be deferred until the event collector goes
  // out of scope.
  JvmtiDynamicCodeEventCollector event_collector;

  // Update inline cache to megamorphic. Skip update if we are called from interpreted.
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame caller_frame = current->last_frame().sender(&reg_map);
  CodeBlob* cb = caller_frame.cb();
  nmethod* caller_nm = cb->as_nmethod();

  CompiledICLocker ml(caller_nm);
  CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
  inline_cache->update(&call_info, receiver()->klass());

  return callee_method;
}

//
// Resets a call-site in compiled code so it will get resolved again.
// This routine handles virtual call sites, optimized virtual call
// sites, and static call sites. Typically used to change a call site's
// destination from compiled to interpreted.
//
methodHandle SharedRuntime::reresolve_call_site(TRAPS) {
  JavaThread* current = THREAD;
  ResourceMark rm(current);
  RegisterMap reg_map(current,
                      RegisterMap::UpdateMap::skip,
                      RegisterMap::ProcessFrames::include,
                      RegisterMap::WalkContinuation::skip);
  frame stub_frame = current->last_frame();
  assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
  frame caller = stub_frame.sender(&reg_map);

  // Do nothing if the frame isn't a live compiled frame.
  // The nmethod could have been deoptimized by the time we get here,
  // in which case no update to the caller is needed.

  if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
      (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {

    address pc = caller.pc();

    nmethod* caller_nm = CodeCache::find_nmethod(pc);
    assert(caller_nm != nullptr, "did not find caller nmethod");

    // Default call_addr is the location of the "basic" call.
    // Determine the address of the call we are re-resolving. With
    // Inline Caches we will always find a recognizable call.
    // With Inline Caches disabled we may or may not find a
    // recognizable call. We will always find a call for static
    // calls and for optimized virtual calls. For vanilla virtual
    // calls it depends on the state of the UseInlineCaches switch.
    //
    // With Inline Caches disabled we can get here for a virtual call
    // for two reasons:
    //   1 - calling an abstract method. The vtable for abstract methods
    //       will run us through handle_wrong_method and we will eventually
    //       end up in the interpreter to throw the AbstractMethodError.
    //   2 - a racing deoptimization. We could be doing a vanilla vtable
    //       call and between the time we fetch the entry address and
    //       jump to it the target gets deoptimized. Similar to 1,
    //       we will wind up in the interpreter (through a c2i with c2).
    //
    CompiledICLocker ml(caller_nm);
    address call_addr = caller_nm->call_instruction_address(pc);

    if (call_addr != nullptr) {
      // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
      // bytes back in the instruction stream, so we must also check for reloc info.
      RelocIterator iter(caller_nm, call_addr, call_addr + 1);
      bool ret = iter.next();  // Get item
      if (ret) {
        switch (iter.type()) {
          case relocInfo::static_call_type:
          case relocInfo::opt_virtual_call_type: {
            CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
            cdc->set_to_clean();
            break;
          }

          case relocInfo::virtual_call_type: {
            // compiled, dispatched call (which used to call an interpreted method)
            CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
            inline_cache->set_to_clean();
            break;
          }
          default:
            break;
        }
      }
    }
  }

  methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));

#ifndef PRODUCT
  Atomic::inc(&_wrong_method_ctr);

  if (TraceCallFixup) {
    ResourceMark rm(current);
    tty->print("handle_wrong_method re-resolving call to");
    callee_method->print_short_name(tty);
    tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
  }
#endif

  return callee_method;
}

address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
  // The faulting unsafe accesses should be changed to throw the error
  // synchronously instead. Meanwhile the faulting instruction will be
  // skipped over (effectively turning it into a no-op) and an
  // asynchronous exception will be raised which the thread will
  // handle at a later point. If the instruction is a load it will
  // return garbage.

  // Request an async exception.
  thread->set_pending_unsafe_access_error();

  // Return address of next instruction to execute.
  return next_pc;
}

  msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;

  char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
  if (message == nullptr) {
    // Shouldn't happen, but don't cause even more problems if it does
    message = const_cast<char*>(caster_klass->external_name());
  } else {
    jio_snprintf(message,
                 msglen,
                 "class %s cannot be cast to class %s (%s%s%s)",
                 caster_name,
                 target_name,
                 caster_klass_description,
                 klass_separator,
                 target_klass_description
                 );
  }
  return message;
}

JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
  (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
JRT_END

void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
  if (!SafepointSynchronize::is_synchronizing()) {
    // Only try quick_enter() if we're not trying to reach a safepoint
    // so that the calling thread reaches the safepoint more quickly.
    if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
      return;
    }
  }
  // NO_ASYNC required because an async exception on the state transition destructor
  // would leave you with the lock held and it would never be released.
  // The normal monitorenter NullPointerException is thrown without acquiring a lock
  // and the model is that an exception implies the method failed.
  JRT_BLOCK_NO_ASYNC
    Handle h_obj(THREAD, obj);
    ObjectSynchronizer::enter(h_obj, lock, current);
    assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");
  JRT_BLOCK_END
}

  tty->print_cr(" %% in nested categories are relative to their category");
  tty->print_cr(" (and thus add up to more than 100%% with inlining)");
  tty->cr();

  MethodArityHistogram h;
}
#endif

#ifndef PRODUCT
static int _lookups;       // number of calls to lookup
static int _equals;        // number of buckets checked with matching hash
static int _archived_hits; // number of successful lookups in archived table
static int _runtime_hits;  // number of successful lookups in runtime table
#endif

// A simple wrapper class around the calling convention information
// that allows sharing of adapters for the same calling convention.
class AdapterFingerPrint : public MetaspaceObj {
 private:
  enum {
    _basic_type_bits = 4,
    _basic_type_mask = right_n_bits(_basic_type_bits),
    _basic_types_per_int = BitsPerInt / _basic_type_bits,
  };
  // TO DO: Consider integrating this with a more global scheme for compressing signatures.
  // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
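  //
  // Worked example of the packing (illustrative; it assumes the usual HotSpot
  // BasicType values T_INT=10, T_LONG=11, T_VOID=14 and an LP64 build, where
  // adapter_encoding() below maps T_OBJECT to T_LONG):
  //
  //   args:    [T_OBJECT, T_INT, T_LONG, T_VOID]  // e.g. static (Ljava/lang/Object;IJ)V
  //   nibbles:    0xb      0xa    0xb     0xe
  //   packed:  data[0] == 0x0000babe              // first arg in the highest occupied nibble
  //
  // iterate_args() below walks each int from its top nibble down and skips
  // zero nibbles, which is safe because no valid encoding is 0.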

  int _length;

  static int data_offset() { return sizeof(AdapterFingerPrint); }
  int* data_pointer() {
    return (int*)((address)this + data_offset());
  }

  // Private constructor. Use allocate() to get an instance.
  AdapterFingerPrint(int total_args_passed, BasicType* sig_bt, int len) {
    int* data = data_pointer();
    // Pack the BasicTypes with 8 per int
    assert(len == length(total_args_passed), "sanity");
    _length = len;
    int sig_index = 0;
    for (int index = 0; index < _length; index++) {
      int value = 0;
      for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
        int bt = adapter_encoding(sig_bt[sig_index++]);
        assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
        value = (value << _basic_type_bits) | bt;
      }
      data[index] = value;
    }
  }

  // Call deallocate instead
  ~AdapterFingerPrint() {
    ShouldNotCallThis();
  }

  static int length(int total_args) {
    return (total_args + (_basic_types_per_int - 1)) / _basic_types_per_int;
  }

  static int compute_size_in_words(int len) {
    return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(int)));
  }

  // Remap BasicTypes that are handled equivalently by the adapters.
  // These are correct for the current system but someday it might be
  // necessary to make this mapping platform dependent.
  static int adapter_encoding(BasicType in) {
    switch (in) {
      case T_BOOLEAN:
      case T_BYTE:
      case T_SHORT:
      case T_CHAR:
        // These are all promoted to T_INT in the calling convention
        return T_INT;

      case T_OBJECT:
      case T_ARRAY:
        // In other words, we assume that any register good enough for
        // an int or long is good enough for a managed pointer.
#ifdef _LP64
        return T_LONG;
#else
        return T_INT;
#endif

      case T_INT:
      case T_LONG:
      case T_FLOAT:
      case T_DOUBLE:
      case T_VOID:
        return in;

      default:
        ShouldNotReachHere();
        return T_CONFLICT;
    }
  }

  void* operator new(size_t size, size_t fp_size) throw() {
    assert(fp_size >= size, "sanity check");
    void* p = AllocateHeap(fp_size, mtCode);
    memset(p, 0, fp_size);
    return p;
  }

  template<typename Function>
  void iterate_args(Function function) {
    for (int i = 0; i < length(); i++) {
      unsigned val = (unsigned)value(i);
      // Args are packed so that the first/lower arguments are in the highest
      // bits of each int value, so iterate from the highest bits to the lowest.
      for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
        unsigned v = (val >> j) & _basic_type_mask;
        if (v == 0) {
          continue;
        }
        function(v);
      }
    }
  }

 public:
  static AdapterFingerPrint* allocate(int total_args_passed, BasicType* sig_bt) {
    int len = length(total_args_passed);
    int size_in_bytes = BytesPerWord * compute_size_in_words(len);
    AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(total_args_passed, sig_bt, len);
    assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
    return afp;
  }

  static void deallocate(AdapterFingerPrint* fp) {
    FreeHeap(fp);
  }

  int value(int index) {
    int* data = data_pointer();
    return data[index];
  }

  int length() {
    return _length;
  }
  unsigned int compute_hash() {
    int hash = 0;
    for (int i = 0; i < length(); i++) {
      int v = value(i);
      // Add an arithmetic operation to the hash, like +3, to improve hashing
      hash = ((hash << 8) ^ v ^ (hash >> 5)) + 3;
    }
    return (unsigned int)hash;
  }

  const char* as_string() {
    stringStream st;
    st.print("0x");
    for (int i = 0; i < length(); i++) {
      st.print("%x", value(i));
    }
    return st.as_string();
  }

  const char* as_basic_args_string() {
    stringStream st;
    bool long_prev = false;
    iterate_args([&] (int arg) {
      if (long_prev) {
        long_prev = false;
        if (arg == T_VOID) {
          st.print("J");
        } else {
          st.print("L");
        }
      }
      switch (arg) {
        case T_INT:    st.print("I");    break;
        case T_LONG:   long_prev = true; break;
        case T_FLOAT:  st.print("F");    break;
        case T_DOUBLE: st.print("D");    break;
        case T_VOID:   break;
        default: ShouldNotReachHere();
      }
    });
    if (long_prev) {
      st.print("L");
    }
    return st.as_string();
  }

  BasicType* as_basic_type(int& nargs) {
    nargs = 0;
    GrowableArray<BasicType> btarray;
    bool long_prev = false;

    iterate_args([&] (int arg) {
      if (long_prev) {
        long_prev = false;
        if (arg == T_VOID) {
          btarray.append(T_LONG);
        } else {
          btarray.append(T_OBJECT); // it could be T_ARRAY; it shouldn't matter
        }
      }
      switch (arg) {
        case T_INT:    // fallthrough
        case T_FLOAT:  // fallthrough
        case T_DOUBLE:
        case T_VOID:
          btarray.append((BasicType)arg);
          break;
        case T_LONG:
          long_prev = true;
          break;
        default: ShouldNotReachHere();
      }
    });

    if (long_prev) {
      btarray.append(T_OBJECT);
    }

    nargs = btarray.length();
    BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, nargs);
    int index = 0;
    GrowableArrayIterator<BasicType> iter = btarray.begin();
    while (iter != btarray.end()) {
      sig_bt[index++] = *iter;
      ++iter;
    }
    assert(index == btarray.length(), "sanity check");
#ifdef ASSERT
    {
      AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(nargs, sig_bt);
      assert(this->equals(compare_fp), "sanity check");
      AdapterFingerPrint::deallocate(compare_fp);
    }
#endif
    return sig_bt;
  }

  bool equals(AdapterFingerPrint* other) {
    if (other->_length != _length) {
      return false;
    } else {
      for (int i = 0; i < _length; i++) {
        if (value(i) != other->value(i)) {
          return false;
        }
      }
    }
    return true;
  }

  // Methods required by virtue of being a MetaspaceObj
  void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
  int size() const { return compute_size_in_words(_length); }
  MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
  static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
    NOT_PRODUCT(_equals++);
    return fp1->equals(fp2);
  }

  static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
    return fp->compute_hash();
  }
};

#if INCLUDE_CDS
static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
  return AdapterFingerPrint::equals(entry->fingerprint(), fp);
}

class ArchivedAdapterTable : public OffsetCompactHashtable<
                                      AdapterFingerPrint*,
                                      AdapterHandlerEntry*,
                                      adapter_fp_equals_compact_hashtable_entry> {};
#endif // INCLUDE_CDS

// A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
                                              AnyObj::C_HEAP, mtCode,
                                              AdapterFingerPrint::compute_hash,
                                              AdapterFingerPrint::equals>;
static AdapterHandlerTable* _adapter_handler_table;
static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;

// Find an entry with the same fingerprint if it exists
AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicType* sig_bt) {
  NOT_PRODUCT(_lookups++);
  assert_lock_strong(AdapterHandlerLibrary_lock);
  AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
  AdapterHandlerEntry* entry = nullptr;
#if INCLUDE_CDS
  // If we are building the archive, the archived adapter table is
  // not valid and we need to use the ones added to the runtime table.
  if (AOTCodeCache::is_using_adapter()) {
    // Search the archived table first. It is read-only, so it can be searched without a lock.
    entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
#ifndef PRODUCT
    if (entry != nullptr) {
      _archived_hits++;
    }
#endif
  }
#endif // INCLUDE_CDS
  if (entry == nullptr) {
    assert_lock_strong(AdapterHandlerLibrary_lock);
    AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
    if (entry_p != nullptr) {
      entry = *entry_p;
      assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
  ts.print(tty, "AdapterHandlerTable");
  tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
                _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
  int total_hits = _archived_hits + _runtime_hits;
  tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
                _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
}
#endif

// ---------------------------------------------------------------------------
// Implementation of AdapterHandlerLibrary
AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
#if INCLUDE_CDS
ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
#endif // INCLUDE_CDS
static const int AdapterHandlerLibrary_size = 16*K;
BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;

BufferBlob* AdapterHandlerLibrary::buffer_blob() {
  assert(_buffer != nullptr, "should be initialized");
  return _buffer;
}

static void post_adapter_creation(const AdapterBlob* new_adapter,
                                  const AdapterHandlerEntry* entry) {
  if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
    char blob_id[256];
    jio_snprintf(blob_id,
                 sizeof(blob_id),
                 "%s(%s)",
                 new_adapter->name(),
                 entry->fingerprint()->as_string());
    if (Forte::is_enabled()) {
      Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
    }

    if (JvmtiExport::should_post_dynamic_code_generated()) {
      JvmtiExport::post_dynamic_code_generated(blob_id, new_adapter->content_begin(), new_adapter->content_end());
    }
  }
}
void AdapterHandlerLibrary::create_abstract_method_handler() {
  assert_lock_strong(AdapterHandlerLibrary_lock);
  // Create a special handler for abstract methods. Abstract methods
  // are never compiled, so an i2c entry is somewhat meaningless, but
  // throw AbstractMethodError just in case.
  // Pass wrong_method_abstract for the c2i transitions to return
  // AbstractMethodError for invalid invocations.
  address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
  _abstract_method_handler = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(0, nullptr));
  _abstract_method_handler->set_entry_points(SharedRuntime::throw_AbstractMethodError_entry(),
                                             wrong_method_abstract,
                                             wrong_method_abstract,
                                             nullptr);
}

void AdapterHandlerLibrary::initialize() {
  {
    ResourceMark rm;
    MutexLocker mu(AdapterHandlerLibrary_lock);
    _adapter_handler_table = new (mtCode) AdapterHandlerTable();
    _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
    create_abstract_method_handler();
  }

#if INCLUDE_CDS
  // Link adapters in the AOT Cache to their code in the AOT Code Cache
  if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
    link_aot_adapters();
    lookup_simple_adapters();
    return;
  }
#endif // INCLUDE_CDS

  ResourceMark rm;
  AdapterBlob* no_arg_blob = nullptr;
  AdapterBlob* int_arg_blob = nullptr;
  AdapterBlob* obj_arg_blob = nullptr;
  AdapterBlob* obj_int_arg_blob = nullptr;
  AdapterBlob* obj_obj_arg_blob = nullptr;
  {
    MutexLocker mu(AdapterHandlerLibrary_lock);

    _no_arg_handler = create_adapter(no_arg_blob, 0, nullptr);

    BasicType obj_args[] = { T_OBJECT };
    _obj_arg_handler = create_adapter(obj_arg_blob, 1, obj_args);

    BasicType int_args[] = { T_INT };
    _int_arg_handler = create_adapter(int_arg_blob, 1, int_args);

    BasicType obj_int_args[] = { T_OBJECT, T_INT };
    _obj_int_arg_handler = create_adapter(obj_int_arg_blob, 2, obj_int_args);

    BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
    _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, 2, obj_obj_args);

    assert(no_arg_blob != nullptr &&
           obj_arg_blob != nullptr &&
           int_arg_blob != nullptr &&
           obj_int_arg_blob != nullptr &&
           obj_obj_arg_blob != nullptr, "Initial adapters must be properly created");
  }

  // Outside of the lock
  post_adapter_creation(no_arg_blob, _no_arg_handler);
  post_adapter_creation(obj_arg_blob, _obj_arg_handler);
  post_adapter_creation(int_arg_blob, _int_arg_handler);
  post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
  post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
}

AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
  return AdapterHandlerEntry::allocate(fingerprint);
}

AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
  if (method->is_abstract()) {
    return _abstract_method_handler;
  }
  int total_args_passed = method->size_of_parameters(); // All args on stack
  if (total_args_passed == 0) {
    return _no_arg_handler;
  } else if (total_args_passed == 1) {
    if (!method->is_static()) {
      return _obj_arg_handler;
    }
    switch (method->signature()->char_at(1)) {
      case JVM_SIGNATURE_CLASS:
      case JVM_SIGNATURE_ARRAY:
        return _obj_arg_handler;
      case JVM_SIGNATURE_INT:
      case JVM_SIGNATURE_BOOLEAN:
      case JVM_SIGNATURE_CHAR:
      case JVM_SIGNATURE_BYTE:
      case JVM_SIGNATURE_SHORT:
        return _int_arg_handler;
    }
  } else if (total_args_passed == 2 &&
             !method->is_static()) {
    switch (method->signature()->char_at(1)) {
      case JVM_SIGNATURE_CLASS:
      case JVM_SIGNATURE_ARRAY:
        return _obj_obj_arg_handler;
      case JVM_SIGNATURE_INT:
      case JVM_SIGNATURE_BOOLEAN:
      case JVM_SIGNATURE_CHAR:
      case JVM_SIGNATURE_BYTE:
      case JVM_SIGNATURE_SHORT:
        return _obj_int_arg_handler;
    }
  }
  return nullptr;
}
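
// Illustrative examples (hypothetical methods, matching the dispatch above):
//   static int f(int)          -> one int-like slot -> _int_arg_handler
//   instance void g()          -> receiver only     -> _obj_arg_handler
//   instance void h(String s)  -> receiver + object -> _obj_obj_arg_handler
// Methods with float/long/double arguments, or more than two argument slots,
// fall through and return nullptr, so the caller does a fingerprint lookup.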

class AdapterSignatureIterator : public SignatureIterator {
 private:
  BasicType stack_sig_bt[16];
  BasicType* sig_bt;
  int index;

 public:
  AdapterSignatureIterator(Symbol* signature,
                           fingerprint_t fingerprint,
                           bool is_static,
                           int total_args_passed) :
      SignatureIterator(signature, fingerprint),
      index(0)
  {
    sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
    if (!is_static) { // Pass in receiver first
      sig_bt[index++] = T_OBJECT;
    }
    do_parameters_on(this);
  }

  BasicType* basic_types() {
    return sig_bt;
  }

#ifdef ASSERT
  int slots() {
    return index;
  }
#endif

 private:

  friend class SignatureIterator;  // so do_parameters_on can call do_type
  void do_type(BasicType type) {
    sig_bt[index++] = type;
    if (type == T_LONG || type == T_DOUBLE) {
      sig_bt[index++] = T_VOID;   // Longs & doubles take 2 Java slots
    }
  }
};
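
// Usage sketch (illustrative): for an instance method with signature (IJ)V,
// the iterator above produces
//   sig_bt == { T_OBJECT /* receiver */, T_INT, T_LONG, T_VOID /* high half */ }
// and slots() == 4 == method->size_of_parameters(), matching Java's slot
// counting where longs and doubles occupy two slots.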


const char* AdapterHandlerEntry::_entry_names[] = {
  "i2c", "c2i", "c2i_unverified", "c2i_no_clinit_check"
};

#ifdef ASSERT
void AdapterHandlerLibrary::verify_adapter_sharing(int total_args_passed, BasicType* sig_bt, AdapterHandlerEntry* cached_entry) {
  AdapterBlob* comparison_blob = nullptr;
  AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, total_args_passed, sig_bt, true);
  assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
  assert(comparison_entry->compare_code(cached_entry), "code must match");
  // Release the one just created
  AdapterHandlerEntry::deallocate(comparison_entry);
}
#endif /* ASSERT */

AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
  // Use a customized signature handler. We need to lock around updates to
  // the _adapter_handler_table (it is not safe for concurrent readers
  // and a single writer: this could be fixed if it becomes a
  // problem).

  // Fast path for trivial adapters
  AdapterHandlerEntry* entry = get_simple_adapter(method);
  if (entry != nullptr) {
    return entry;
  }

  ResourceMark rm;
  AdapterBlob* adapter_blob = nullptr;

  // Fill in the signature array, for the calling-convention call.
  int total_args_passed = method->size_of_parameters(); // All args on stack

  AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
                              method->is_static(), total_args_passed);
  assert(si.slots() == total_args_passed, "");
  BasicType* sig_bt = si.basic_types();
  {
    MutexLocker mu(AdapterHandlerLibrary_lock);

    // Look up the method signature's fingerprint
    entry = lookup(total_args_passed, sig_bt);

    if (entry != nullptr) {
      assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
#ifdef ASSERT
      if (!entry->is_shared() && VerifyAdapterSharing) {
        verify_adapter_sharing(total_args_passed, sig_bt, entry);
      }
#endif
    } else {
      entry = create_adapter(adapter_blob, total_args_passed, sig_bt);
    }
  }

  // Outside of the lock
  if (adapter_blob != nullptr) {
    post_adapter_creation(adapter_blob, entry);
  }
  return entry;
}

AdapterBlob* AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
  ResourceMark rm;
  const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
  const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
  int offsets[AdapterHandlerEntry::ENTRIES_COUNT];

  AdapterBlob* adapter_blob = nullptr;
  CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, offsets);
  if (blob != nullptr) {
    adapter_blob = blob->as_adapter_blob();
    address i2c_entry = adapter_blob->content_begin();
    assert(offsets[0] == 0, "sanity check");
    handler->set_entry_points(i2c_entry, i2c_entry + offsets[1], i2c_entry + offsets[2], i2c_entry + offsets[3]);
  }
  return adapter_blob;
}

#ifndef PRODUCT
void AdapterHandlerLibrary::print_adapter_handler_info(outputStream* st, AdapterHandlerEntry* handler, AdapterBlob* adapter_blob) {
  ttyLocker ttyl;
  ResourceMark rm;
  int insts_size = adapter_blob->code_size();
  handler->print_adapter_on(tty);
  st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
               handler->fingerprint()->as_basic_args_string(),
               handler->fingerprint()->as_string(), insts_size);
  st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
  if (Verbose || PrintStubCode) {
    address first_pc = handler->base_address();
    if (first_pc != nullptr) {
      Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
      st->cr();
    }
  }
}
#endif // PRODUCT

bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob,
                                                  AdapterHandlerEntry* handler,
                                                  int total_args_passed,
                                                  BasicType* sig_bt,
                                                  bool is_transient) {
  if (log_is_enabled(Info, perf, class, link)) {
    ClassLoader::perf_method_adapters_count()->inc();
  }

  BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
  CodeBuffer buffer(buf);
  short buffer_locs[20];
  buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
                                         sizeof(buffer_locs)/sizeof(relocInfo));
  MacroAssembler masm(&buffer);
  VMRegPair stack_regs[16];
  VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);

  // Get a description of the compiled Java calling convention and the largest used (VMReg) stack slot usage
  int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
  SharedRuntime::generate_i2c2i_adapters(&masm,
                                         total_args_passed,
                                         comp_args_on_stack,
                                         sig_bt,
                                         regs,
                                         handler);
#ifdef ASSERT
  if (VerifyAdapterSharing) {
    handler->save_code(buf->code_begin(), buffer.insts_size());
    if (is_transient) {
      return true;
    }
  }
#endif

  adapter_blob = AdapterBlob::create(&buffer);
  if (adapter_blob == nullptr) {
    // CodeCache is full, disable compilation.
    // We ought to log this, but the compile log is only per compile thread
    // and we're some nondescript Java thread.
    return false;
  }
  if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
    // try to save the generated code
    const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
    const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
    int entry_offset[AdapterHandlerEntry::ENTRIES_COUNT];
    assert(AdapterHandlerEntry::ENTRIES_COUNT == 4, "sanity");
    address i2c_entry = handler->get_i2c_entry();
    entry_offset[0] = 0; // i2c_entry offset
    entry_offset[1] = handler->get_c2i_entry() - i2c_entry;
    entry_offset[2] = handler->get_c2i_unverified_entry() - i2c_entry;
    entry_offset[3] = handler->get_c2i_no_clinit_check_entry() - i2c_entry;
    bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, entry_offset);
    assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
  }
  handler->relocate(adapter_blob->content_begin());
#ifndef PRODUCT
  // debugging support
  if (PrintAdapterHandlers || PrintStubCode) {
    print_adapter_handler_info(tty, handler, adapter_blob);
  }
#endif
  return true;
}

AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
                                                           int total_args_passed,
                                                           BasicType* sig_bt,
                                                           bool is_transient) {
  AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
  AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
  if (!generate_adapter_code(adapter_blob, handler, total_args_passed, sig_bt, is_transient)) {
    AdapterHandlerEntry::deallocate(handler);
    return nullptr;
  }
  if (!is_transient) {
    assert_lock_strong(AdapterHandlerLibrary_lock);
    _adapter_handler_table->put(fp, handler);
  }
  return handler;
}

#if INCLUDE_CDS
void AdapterHandlerEntry::remove_unshareable_info() {
#ifdef ASSERT
  _saved_code = nullptr;
  _saved_code_length = 0;
#endif // ASSERT
  set_entry_points(nullptr, nullptr, nullptr, nullptr, false);
}

class CopyAdapterTableToArchive : StackObj {
 private:
  CompactHashtableWriter* _writer;
  ArchiveBuilder* _builder;
 public:
  CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
                                                              _builder(ArchiveBuilder::current())
  {}

  bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
    LogStreamHandle(Trace, aot) lsh;
    if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
      assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
      AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
      assert(buffered_fp != nullptr, "sanity check");
      AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
      assert(buffered_entry != nullptr, "sanity check");

// This method is used during a production run to link archived adapters (stored in the AOT Cache)
// to their code in the AOT Code Cache.
void AdapterHandlerEntry::link() {
  AdapterBlob* adapter_blob = nullptr;
  ResourceMark rm;
  assert(_fingerprint != nullptr, "_fingerprint must not be null");
  bool generate_code = false;
  // Generate code only if the AOTCodeCache is not available, or
  // caching adapters is disabled, or we fail to link
  // the AdapterHandlerEntry to its code in the AOTCodeCache.
  if (AOTCodeCache::is_using_adapter()) {
    adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this);
    if (adapter_blob == nullptr) {
      log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
      generate_code = true;
    }
  } else {
    generate_code = true;
  }
  if (generate_code) {
    int nargs;
    BasicType* bt = _fingerprint->as_basic_type(nargs);
    if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, nargs, bt, /* is_transient */ false)) {
      // Don't throw exceptions during VM initialization because java.lang.* classes
      // might not have been initialized, causing problems when constructing the
      // Java exception object.
      vm_exit_during_initialization("Out of space in CodeCache for adapters");
    }
  }
  // Outside of the lock
  if (adapter_blob != nullptr) {
    post_adapter_creation(adapter_blob, this);
  }
  assert(_linked, "AdapterHandlerEntry must now be linked");
}

void AdapterHandlerLibrary::link_aot_adapters() {
  assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
  _aot_adapter_handler_table.iterate([](AdapterHandlerEntry* entry) {
    assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
    entry->link();
  });
}

// This method is called during a production run to look up simple adapters
// in the archived adapter handler table.
void AdapterHandlerLibrary::lookup_simple_adapters() {
  assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");

  MutexLocker mu(AdapterHandlerLibrary_lock);
  _no_arg_handler = lookup(0, nullptr);

  BasicType obj_args[] = { T_OBJECT };
  _obj_arg_handler = lookup(1, obj_args);

  BasicType int_args[] = { T_INT };
  _int_arg_handler = lookup(1, int_args);

  BasicType obj_int_args[] = { T_OBJECT, T_INT };
  _obj_int_arg_handler = lookup(2, obj_int_args);

  BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
  _obj_obj_arg_handler = lookup(2, obj_obj_args);

  assert(_no_arg_handler != nullptr &&
         _obj_arg_handler != nullptr &&
         _int_arg_handler != nullptr &&
         _obj_int_arg_handler != nullptr &&
         _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
  assert(_no_arg_handler->is_linked() &&
         _obj_arg_handler->is_linked() &&
         _int_arg_handler->is_linked() &&
         _obj_int_arg_handler->is_linked() &&
         _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
}
#endif // INCLUDE_CDS

address AdapterHandlerEntry::base_address() {
  address base = _i2c_entry;
  if (base == nullptr) base = _c2i_entry;
  assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
  assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
  assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
  return base;
}

void AdapterHandlerEntry::relocate(address new_base) {
  address old_base = base_address();
  assert(old_base != nullptr, "");
  ptrdiff_t delta = new_base - old_base;
  if (_i2c_entry != nullptr)
    _i2c_entry += delta;
  if (_c2i_entry != nullptr)
    _c2i_entry += delta;
  if (_c2i_unverified_entry != nullptr)
    _c2i_unverified_entry += delta;
  if (_c2i_no_clinit_check_entry != nullptr)
    _c2i_no_clinit_check_entry += delta;
  assert(base_address() == new_base, "");
}
3057
3058 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3059 LogStreamHandle(Trace, aot) lsh;
3060 if (lsh.is_enabled()) {
3061 lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3062 lsh.cr();
3063 }
3064 it->push(&_fingerprint);
3065 }
3066
3067 AdapterHandlerEntry::~AdapterHandlerEntry() {
3068 if (_fingerprint != nullptr) {
3069 AdapterFingerPrint::deallocate(_fingerprint);
3070 _fingerprint = nullptr;
3071 }
3072 #ifdef ASSERT
3073 FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3074 #endif
3075 FreeHeap(this);
3076 }
3077
3078
3079 #ifdef ASSERT
3080 // Capture the code before relocation so that it can be compared
3081 // against other versions. If the code is captured after relocation
3082 // then relative instructions won't be equivalent.
3083 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3084 _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3085 _saved_code_length = length;
3086 memcpy(_saved_code, buffer, length);
3087 }
3088
3089
3090 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3091 assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");
3139
3140 struct { double data[20]; } locs_buf;
3141 struct { double data[20]; } stubs_locs_buf;
3142 buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3143 #if defined(AARCH64) || defined(PPC64)
3144 // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3145 // in the constant pool to ensure ordering between the barrier and oops
3146 // accesses. For native_wrappers we need a constant.
3147 // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3148 // static java call that is resolved in the runtime.
3149 if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3150 buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3151 }
3152 #endif
3153 buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3154 MacroAssembler _masm(&buffer);
3155
3156 // Fill in the signature array for the calling-convention call.
3157 const int total_args_passed = method->size_of_parameters();
3158
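// For short signatures use an on-stack array; fall back to a
// resource-allocated array for longer ones.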
3159 VMRegPair stack_regs[16];
3160 VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3161
3162 AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
3163 method->is_static(), total_args_passed);
3164 BasicType* sig_bt = si.basic_types();
3165 assert(si.slots() == total_args_passed, "");
3166 BasicType ret_type = si.return_type();
3167
3168 // Now get the compiled-Java arguments layout.
3169 SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3170
3171 // Generate the compiled-to-native wrapper code
3172 nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3173
3174 if (nm != nullptr) {
3175 {
3176 MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3177 if (nm->make_in_use()) {
3178 method->set_code(method, nm);
3179 }
3180 }
3181
3182 DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3183 if (directive->PrintAssemblyOption) {
3184 nm->print_code();
3185 }
3186 DirectivesStack::release(directive);
3429 a->print_adapter_on(st);
3430 return true;
3431 } else {
3432 return false; // keep looking
3433 }
3434 };
3435 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3436 _adapter_handler_table->iterate(findblob_runtime_table);
3437 }
3438 assert(found, "Should have found handler");
3439 }
3440
3441 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3442 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3443 if (get_i2c_entry() != nullptr) {
3444 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3445 }
3446 if (get_c2i_entry() != nullptr) {
3447 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3448 }
3449 if (get_c2i_unverified_entry() != nullptr) {
3450 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3451 }
3452 if (get_c2i_no_clinit_check_entry() != nullptr) {
3453 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3454 }
3455 st->cr();
3456 }
3457
3458 #ifndef PRODUCT
3459
3460 void AdapterHandlerLibrary::print_statistics() {
3461 print_table_statistics();
3462 }
3463
3464 #endif /* PRODUCT */
3465
3466 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3467 if (entry == _abstract_method_handler) {
3468 return true;
3469 }
3470 return false;
3526 event.set_method(method);
3527 event.commit();
3528 }
3529 }
3530 }
3531 return activation;
3532 }
3533
3534 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
3535 // After any safepoint, just before going back to compiled code,
3536 // we inform the GC that we will be doing initializing writes to
3537 // this object in the future without emitting card-marks, so
3538 // GC may take any compensating steps.
3539
3540 oop new_obj = current->vm_result_oop();
3541 if (new_obj == nullptr) return;
3542
3543 BarrierSet *bs = BarrierSet::barrier_set();
3544 bs->on_slowpath_allocation_exit(current, new_obj);
3545 }
28 #include "classfile/javaClasses.inline.hpp"
29 #include "classfile/stringTable.hpp"
30 #include "classfile/vmClasses.hpp"
31 #include "classfile/vmSymbols.hpp"
32 #include "code/aotCodeCache.hpp"
33 #include "code/codeCache.hpp"
34 #include "code/compiledIC.hpp"
35 #include "code/nmethod.inline.hpp"
36 #include "code/scopeDesc.hpp"
37 #include "code/vtableStubs.hpp"
38 #include "compiler/abstractCompiler.hpp"
39 #include "compiler/compileBroker.hpp"
40 #include "compiler/disassembler.hpp"
41 #include "gc/shared/barrierSet.hpp"
42 #include "gc/shared/collectedHeap.hpp"
43 #include "interpreter/interpreter.hpp"
44 #include "interpreter/interpreterRuntime.hpp"
45 #include "jvm.h"
46 #include "jfr/jfrEvents.hpp"
47 #include "logging/log.hpp"
48 #include "memory/oopFactory.hpp"
49 #include "memory/resourceArea.hpp"
50 #include "memory/universe.hpp"
51 #include "oops/access.hpp"
52 #include "oops/fieldStreams.inline.hpp"
53 #include "metaprogramming/primitiveConversions.hpp"
54 #include "oops/klass.hpp"
55 #include "oops/method.inline.hpp"
56 #include "oops/objArrayKlass.hpp"
57 #include "oops/objArrayOop.inline.hpp"
58 #include "oops/oop.inline.hpp"
59 #include "oops/inlineKlass.inline.hpp"
60 #include "prims/forte.hpp"
61 #include "prims/jvmtiExport.hpp"
62 #include "prims/jvmtiThreadState.hpp"
63 #include "prims/methodHandles.hpp"
64 #include "prims/nativeLookup.hpp"
65 #include "runtime/arguments.hpp"
66 #include "runtime/atomic.hpp"
67 #include "runtime/basicLock.inline.hpp"
68 #include "runtime/frame.inline.hpp"
69 #include "runtime/handles.inline.hpp"
70 #include "runtime/init.hpp"
71 #include "runtime/interfaceSupport.inline.hpp"
72 #include "runtime/java.hpp"
73 #include "runtime/javaCalls.hpp"
74 #include "runtime/jniHandles.inline.hpp"
75 #include "runtime/perfData.hpp"
76 #include "runtime/sharedRuntime.hpp"
77 #include "runtime/stackWatermarkSet.hpp"
78 #include "runtime/stubRoutines.hpp"
79 #include "runtime/synchronizer.inline.hpp"
1179 // for a call currently in progress, i.e., arguments have been pushed on the stack
1180 // but the callee has not been invoked yet. The caller frame must be compiled.
1181 Handle SharedRuntime::find_callee_info_helper(vframeStream& vfst, Bytecodes::Code& bc,
1182 CallInfo& callinfo, TRAPS) {
1183 Handle receiver;
1184 Handle nullHandle; // create a handy null handle for exception returns
1185 JavaThread* current = THREAD;
1186
1187 assert(!vfst.at_end(), "Java frame must exist");
1188
1189 // Find caller and bci from vframe
1190 methodHandle caller(current, vfst.method());
1191 int bci = vfst.bci();
1192
1193 if (caller->is_continuation_enter_intrinsic()) {
1194 bc = Bytecodes::_invokestatic;
1195 LinkResolver::resolve_continuation_enter(callinfo, CHECK_NH);
1196 return receiver;
1197 }
1198
1199 // The substitutability test implementation piggybacks on static call resolution.
1200 Bytecodes::Code code = caller->java_code_at(bci);
1201 if (code == Bytecodes::_if_acmpeq || code == Bytecodes::_if_acmpne) {
1202 bc = Bytecodes::_invokestatic;
1203 methodHandle attached_method(THREAD, extract_attached_method(vfst));
1204 assert(attached_method.not_null(), "must have attached method");
1205 vmClasses::ValueObjectMethods_klass()->initialize(CHECK_NH);
1206 LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, false, CHECK_NH);
1207 #ifdef ASSERT
1208 Method* is_subst = vmClasses::ValueObjectMethods_klass()->find_method(vmSymbols::isSubstitutable_name(), vmSymbols::object_object_boolean_signature());
1209 assert(callinfo.selected_method() == is_subst, "must be isSubstitutable method");
1210 #endif
1211 return receiver;
1212 }
1213
1214 Bytecode_invoke bytecode(caller, bci);
1215 int bytecode_index = bytecode.index();
1216 bc = bytecode.invoke_code();
1217
1218 methodHandle attached_method(current, extract_attached_method(vfst));
1219 if (attached_method.not_null()) {
1220 Method* callee = bytecode.static_target(CHECK_NH);
1221 vmIntrinsics::ID id = callee->intrinsic_id();
1222 // When the VM replaces an MH.invokeBasic/linkTo* call with a direct/virtual
1223 // call, it attaches the statically resolved method to the call site.
1224 if (MethodHandles::is_signature_polymorphic(id) &&
1225 MethodHandles::is_signature_polymorphic_intrinsic(id)) {
1226 bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id);
1227
1228 // Adjust invocation mode according to the attached method.
1229 switch (bc) {
1230 case Bytecodes::_invokevirtual:
1231 if (attached_method->method_holder()->is_interface()) {
1232 bc = Bytecodes::_invokeinterface;
1233 }
1234 break;
1235 case Bytecodes::_invokeinterface:
1236 if (!attached_method->method_holder()->is_interface()) {
1237 bc = Bytecodes::_invokevirtual;
1238 }
1239 break;
1240 case Bytecodes::_invokehandle:
1241 if (!MethodHandles::is_signature_polymorphic_method(attached_method())) {
1242 bc = attached_method->is_static() ? Bytecodes::_invokestatic
1243 : Bytecodes::_invokevirtual;
1244 }
1245 break;
1246 default:
1247 break;
1248 }
1249 } else {
1250 assert(attached_method->has_scalarized_args(), "invalid use of attached method");
1251 if (!attached_method->method_holder()->is_inline_klass()) {
1252 // Ignore the attached method in this case so as not to confuse the code below
1253 attached_method = methodHandle(current, nullptr);
1254 }
1255 }
1256 }
1257
1258 assert(bc != Bytecodes::_illegal, "not initialized");
1259
1260 bool has_receiver = bc != Bytecodes::_invokestatic &&
1261 bc != Bytecodes::_invokedynamic &&
1262 bc != Bytecodes::_invokehandle;
1263 bool check_null_and_abstract = true;
1264
1265 // Find receiver for non-static call
1266 if (has_receiver) {
1267 // This register map must be updated since we need to find the receiver for
1268 // compiled frames. The receiver might be in a register.
1269 RegisterMap reg_map2(current,
1270 RegisterMap::UpdateMap::include,
1271 RegisterMap::ProcessFrames::include,
1272 RegisterMap::WalkContinuation::skip);
1273 frame stubFrame = current->last_frame();
1274 // Caller-frame is a compiled frame
1275 frame callerFrame = stubFrame.sender(&reg_map2);
1276
1277 Method* callee = attached_method();
1278 if (callee == nullptr) {
1279 callee = bytecode.static_target(CHECK_NH);
1280 if (callee == nullptr) {
1281 THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle);
1282 }
1283 }
1284 bool caller_is_c1 = callerFrame.is_compiled_frame() && callerFrame.cb()->as_nmethod()->is_compiled_by_c1();
1285 if (!caller_is_c1 && callee->is_scalarized_arg(0)) {
1286 // If the receiver is an inline type that is passed as fields, no oop is available
1287 // Resolve the call without receiver null checking.
1288 assert(!callee->mismatch(), "calls with inline type receivers should never mismatch");
1289 assert(attached_method.not_null() && !attached_method->is_abstract(), "must have non-abstract attached method");
1290 if (bc == Bytecodes::_invokeinterface) {
1291 bc = Bytecodes::_invokevirtual; // C2 optimistically replaces interface calls by virtual calls
1292 }
1293 check_null_and_abstract = false;
1294 } else {
1295 // Retrieve from a compiled argument list
1296 receiver = Handle(current, callerFrame.retrieve_receiver(&reg_map2));
1297 assert(oopDesc::is_oop_or_null(receiver()), "");
1298 if (receiver.is_null()) {
1299 THROW_(vmSymbols::java_lang_NullPointerException(), nullHandle);
1300 }
1301 }
1302 }
1303
1304 // Resolve method
1305 if (attached_method.not_null()) {
1306 // Parameterized by attached method.
1307 LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, check_null_and_abstract, CHECK_NH);
1308 } else {
1309 // Parameterized by bytecode.
1310 constantPoolHandle constants(current, caller->constants());
1311 LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
1312 }
1313
1314 #ifdef ASSERT
1315 // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
1316 if (has_receiver && check_null_and_abstract) {
1317 assert(receiver.not_null(), "should have thrown exception");
1318 Klass* receiver_klass = receiver->klass();
1319 Klass* rk = nullptr;
1320 if (attached_method.not_null()) {
1321 // In case there's a resolved method attached, use its holder during the check.
1322 rk = attached_method->method_holder();
1323 } else {
1324 // Klass is already loaded.
1325 constantPoolHandle constants(current, caller->constants());
1326 rk = constants->klass_ref_at(bytecode_index, bc, CHECK_NH);
1327 }
1328 Klass* static_receiver_klass = rk;
1329 assert(receiver_klass->is_subtype_of(static_receiver_klass),
1330 "actual receiver must be subclass of static receiver klass");
1331 if (receiver_klass->is_instance_klass()) {
1332 if (InstanceKlass::cast(receiver_klass)->is_not_initialized()) {
1333 tty->print_cr("ERROR: Klass not yet initialized!!");
1334 receiver_klass->print();
1335 }
1336 assert(!InstanceKlass::cast(receiver_klass)->is_not_initialized(), "receiver_klass must be initialized");
1337 }
1338 }
1339 #endif
1340
1341 return receiver;
1342 }
1343
1344 methodHandle SharedRuntime::find_callee_method(bool is_optimized, bool& caller_is_c1, TRAPS) {
1345 JavaThread* current = THREAD;
1346 ResourceMark rm(current);
1347 // We first need to check whether any Java activations (compiled or interpreted)
1348 // exist on the stack since the last JavaCall. If not, we need
1349 // to get the target method from the JavaCall wrapper.
1350 vframeStream vfst(current, true); // Do not skip any javaCalls
1351 methodHandle callee_method;
1352 if (vfst.at_end()) {
1353 // No Java frames were found on stack since we did the JavaCall.
1354 // Hence the stack can only contain an entry_frame. We need to
1355 // find the target method from the stub frame.
1356 RegisterMap reg_map(current,
1357 RegisterMap::UpdateMap::skip,
1358 RegisterMap::ProcessFrames::include,
1359 RegisterMap::WalkContinuation::skip);
1360 frame fr = current->last_frame();
1361 assert(fr.is_runtime_frame(), "must be a runtimeStub");
1362 fr = fr.sender(&reg_map);
1363 assert(fr.is_entry_frame(), "must be");
1364 // fr is now pointing to the entry frame.
1365 callee_method = methodHandle(current, fr.entry_frame_call_wrapper()->callee_method());
1366 } else {
1367 Bytecodes::Code bc;
1368 CallInfo callinfo;
1369 find_callee_info_helper(vfst, bc, callinfo, CHECK_(methodHandle()));
1370 // Calls via mismatching methods are always non-scalarized
1371 if (callinfo.resolved_method()->mismatch() && !is_optimized) {
1372 caller_is_c1 = true;
1373 }
1374 callee_method = methodHandle(current, callinfo.selected_method());
1375 }
1376 assert(callee_method()->is_method(), "must be");
1377 return callee_method;
1378 }
1379
1380 // Resolves a call.
1381 methodHandle SharedRuntime::resolve_helper(bool is_virtual, bool is_optimized, bool& caller_is_c1, TRAPS) {
1382 JavaThread* current = THREAD;
1383 ResourceMark rm(current);
1384 RegisterMap cbl_map(current,
1385 RegisterMap::UpdateMap::skip,
1386 RegisterMap::ProcessFrames::include,
1387 RegisterMap::WalkContinuation::skip);
1388 frame caller_frame = current->last_frame().sender(&cbl_map);
1389
1390 CodeBlob* caller_cb = caller_frame.cb();
1391 guarantee(caller_cb != nullptr && caller_cb->is_nmethod(), "must be called from compiled method");
1392 nmethod* caller_nm = caller_cb->as_nmethod();
1393
1394 // determine call info & receiver
1395 // note: a) receiver is null for static calls
1396 // b) an exception is thrown if receiver is null for non-static calls
1397 CallInfo call_info;
1398 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1399 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1400
1401 NoSafepointVerifier nsv;
1402
1403 methodHandle callee_method(current, call_info.selected_method());
1404 // Calls via mismatching methods are always non-scalarized
1405 if (caller_nm->is_compiled_by_c1() || (call_info.resolved_method()->mismatch() && !is_optimized)) {
1406 caller_is_c1 = true;
1407 }
1408
1409 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1410 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1411 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1412 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1413 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1414
1415 assert(!caller_nm->is_unloading(), "It should not be unloading");
1416
1417 #ifndef PRODUCT
1418 // tracing/debugging/statistics
1419 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1420 (is_virtual) ? (&_resolve_virtual_ctr) :
1421 (&_resolve_static_ctr);
1422 Atomic::inc(addr);
1423
1424 if (TraceCallFixup) {
1425 ResourceMark rm(current);
1426 tty->print("resolving %s%s (%s) call%s to",
1427 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1428 Bytecodes::name(invoke_code), (caller_is_c1) ? " from C1" : "");
1429 callee_method->print_short_name(tty);
1430 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1431 p2i(caller_frame.pc()), p2i(callee_method->code()));
1432 }
1433 #endif
1434
1435 if (invoke_code == Bytecodes::_invokestatic) {
1436 assert(callee_method->method_holder()->is_initialized() ||
1437 callee_method->method_holder()->is_reentrant_initialization(current),
1438 "invalid class initialization state for invoke_static");
1439 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1440 // In order to keep class initialization check, do not patch call
1441 // site for static call when the class is not fully initialized.
1442 // Proper check is enforced by call site re-resolution on every invocation.
1443 //
1444 // When fast class initialization checks are supported (VM_Version::supports_fast_class_init_checks() == true),
1445 // explicit class initialization check is put in nmethod entry (VEP).
1446 assert(callee_method->method_holder()->is_linked(), "must be");
1447 return callee_method;
1448 }
1449 }
1450
1451
1452 // JSR 292 key invariant:
1453 // If the resolved method is a MethodHandle invoke target, the call
1454 // site must be a MethodHandle call site, because the lambda form might tail-call
1455 // leaving the stack in a state unknown to either caller or callee
1456
1457 // Compute entry points. The computation of the entry points is independent of
1458 // patching the call.
1459
1460 // Make sure the callee nmethod does not get deoptimized and removed before
1461 // we are done patching the code.
1462
1463
1464 CompiledICLocker ml(caller_nm);
1465 if (is_virtual && !is_optimized) {
1466 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1467 inline_cache->update(&call_info, receiver->klass(), caller_is_c1);
1468 } else {
1469 // Callsite is a direct call - set it to the destination method
1470 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1471 callsite->set(callee_method, caller_is_c1);
1472 }
1473
1474 return callee_method;
1475 }
1476
1477 // Inline caches exist only in compiled code
1478 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1479 #ifdef ASSERT
1480 RegisterMap reg_map(current,
1481 RegisterMap::UpdateMap::skip,
1482 RegisterMap::ProcessFrames::include,
1483 RegisterMap::WalkContinuation::skip);
1484 frame stub_frame = current->last_frame();
1485 assert(stub_frame.is_runtime_frame(), "sanity check");
1486 frame caller_frame = stub_frame.sender(&reg_map);
1487 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1488 #endif /* ASSERT */
1489
1490 methodHandle callee_method;
1491 bool is_optimized = false;
1492 bool caller_is_c1 = false;
1493 JRT_BLOCK
1494 callee_method = SharedRuntime::handle_ic_miss_helper(is_optimized, caller_is_c1, CHECK_NULL);
1495 // Return Method* through TLS
1496 current->set_vm_result_metadata(callee_method());
1497 JRT_BLOCK_END
1498 // return compiled code entry point after potential safepoints
1499 return get_resolved_entry(current, callee_method, false, is_optimized, caller_is_c1);
1500 JRT_END
1501
1502
1503 // Handle call site that has been made non-entrant
1504 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1505 // 6243940 We might end up in here if the callee is deoptimized
1506 // as we race to call it. We don't want to take a safepoint if
1507 // the caller was interpreted because the caller frame will look
1508 // interpreted to the stack walkers and arguments are now
1509 // "compiled" so it is much better to make this transition
1510 // invisible to the stack walking code. The i2c path will
1511 // place the callee method in the callee_target. It is stashed
1512 // there because if we tried to find the callee by normal means a
1513 // safepoint would be possible and we would have trouble GC'ing the compiled args.
1514 RegisterMap reg_map(current,
1515 RegisterMap::UpdateMap::skip,
1516 RegisterMap::ProcessFrames::include,
1517 RegisterMap::WalkContinuation::skip);
1518 frame stub_frame = current->last_frame();
1519 assert(stub_frame.is_runtime_frame(), "sanity check");
1520 frame caller_frame = stub_frame.sender(&reg_map);
1521
1522 if (caller_frame.is_interpreted_frame() ||
1523 caller_frame.is_entry_frame() ||
1524 caller_frame.is_upcall_stub_frame()) {
1525 Method* callee = current->callee_target();
1526 guarantee(callee != nullptr && callee->is_method(), "bad handshake");
1527 current->set_vm_result_metadata(callee);
1528 current->set_callee_target(nullptr);
1529 if (caller_frame.is_entry_frame() && VM_Version::supports_fast_class_init_checks()) {
1530 // Bypass class initialization checks in c2i when caller is in native.
1531 // JNI calls to static methods don't have class initialization checks.
1532 // Fast class initialization checks are present in c2i adapters and call into
1533 // SharedRuntime::handle_wrong_method() on the slow path.
1534 //
1535 // JVM upcalls may land here as well, but there's a proper check present in
1536 // LinkResolver::resolve_static_call (called from JavaCalls::call_static),
1537 // so bypassing it in c2i adapter is benign.
1538 return callee->get_c2i_no_clinit_check_entry();
1539 } else {
1540 if (caller_frame.is_interpreted_frame()) {
1541 return callee->get_c2i_inline_entry();
1542 } else {
1543 return callee->get_c2i_entry();
1544 }
1545 }
1546 }
1547
1548 // Must be the compiled-to-compiled path, which is safe to stack walk
1549 methodHandle callee_method;
1550 bool is_static_call = false;
1551 bool is_optimized = false;
1552 bool caller_is_c1 = false;
1553 JRT_BLOCK
1554 // Force resolving of caller (if we called from compiled frame)
1555 callee_method = SharedRuntime::reresolve_call_site(is_static_call, is_optimized, caller_is_c1, CHECK_NULL);
1556 current->set_vm_result_metadata(callee_method());
1557 JRT_BLOCK_END
1558 // return compiled code entry point after potential safepoints
1559 return get_resolved_entry(current, callee_method, is_static_call, is_optimized, caller_is_c1);
1560 JRT_END
1561
1562 // Handle abstract method call
1563 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1564 // Verbose error message for AbstractMethodError.
1565 // Get the called method from the invoke bytecode.
1566 vframeStream vfst(current, true);
1567 assert(!vfst.at_end(), "Java frame must exist");
1568 methodHandle caller(current, vfst.method());
1569 Bytecode_invoke invoke(caller, vfst.bci());
1570 DEBUG_ONLY( invoke.verify(); )
1571
1572 // Find the compiled caller frame.
1573 RegisterMap reg_map(current,
1574 RegisterMap::UpdateMap::include,
1575 RegisterMap::ProcessFrames::include,
1576 RegisterMap::WalkContinuation::skip);
1577 frame stubFrame = current->last_frame();
1578 assert(stubFrame.is_runtime_frame(), "must be");
1579 frame callerFrame = stubFrame.sender(&reg_map);
1580 assert(callerFrame.is_compiled_frame(), "must be");
1581
1582 // Install exception and return forward entry.
1583 address res = SharedRuntime::throw_AbstractMethodError_entry();
1584 JRT_BLOCK
1585 methodHandle callee(current, invoke.static_target(current));
1586 if (!callee.is_null()) {
1587 oop recv = callerFrame.retrieve_receiver(&reg_map);
1588 Klass *recv_klass = (recv != nullptr) ? recv->klass() : nullptr;
1589 res = StubRoutines::forward_exception_entry();
1590 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1591 }
1592 JRT_BLOCK_END
1593 return res;
1594 JRT_END
1595
1596 // return verified_code_entry if interp_only_mode is not set for the current thread;
1597 // otherwise return c2i entry.
1598 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method,
1599 bool is_static_call, bool is_optimized, bool caller_is_c1) {
1600 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1601 // In interp_only_mode we need to go to the interpreted entry
1602 // The c2i won't patch in this mode -- see fixup_callers_callsite
1603 return callee_method->get_c2i_entry();
1604 }
1605
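// Select the entry point that matches the caller's calling convention:
// C1 callers use the inline entry (inline-type args passed as oops), optimized
// and static calls use the fully scalarized entry, and other virtual calls use
// the scalarized entry with the receiver passed as an oop (see the VEP/VIEP/
// VIEP(RO) discussion in CompiledEntrySignature below).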
1606 if (caller_is_c1) {
1607 assert(callee_method->verified_inline_code_entry() != nullptr, "Jump to zero!");
1608 return callee_method->verified_inline_code_entry();
1609 } else if (is_static_call || is_optimized) {
1610 assert(callee_method->verified_code_entry() != nullptr, "Jump to zero!");
1611 return callee_method->verified_code_entry();
1612 } else {
1613 assert(callee_method->verified_inline_ro_code_entry() != nullptr, "Jump to zero!");
1614 return callee_method->verified_inline_ro_code_entry();
1615 }
1616 }
1617
1618 // resolve a static call and patch code
1619 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current))
1620 methodHandle callee_method;
1621 bool caller_is_c1 = false;
1622 bool enter_special = false;
1623 JRT_BLOCK
1624 callee_method = SharedRuntime::resolve_helper(false, false, caller_is_c1, CHECK_NULL);
1625 current->set_vm_result_metadata(callee_method());
1626 JRT_BLOCK_END
1627 // return compiled code entry point after potential safepoints
1628 return get_resolved_entry(current, callee_method, true, false, caller_is_c1);
1629 JRT_END
1630
1631 // resolve virtual call and update inline cache to monomorphic
1632 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1633 methodHandle callee_method;
1634 bool caller_is_c1 = false;
1635 JRT_BLOCK
1636 callee_method = SharedRuntime::resolve_helper(true, false, caller_is_c1, CHECK_NULL);
1637 current->set_vm_result_metadata(callee_method());
1638 JRT_BLOCK_END
1639 // return compiled code entry point after potential safepoints
1640 return get_resolved_entry(current, callee_method, false, false, caller_is_c1);
1641 JRT_END
1642
1643
1644 // Resolve a virtual call that can be statically bound (e.g., always
1645 // monomorphic, so it has no inline cache). Patch code to resolved target.
1646 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1647 methodHandle callee_method;
1648 bool caller_is_c1 = false;
1649 JRT_BLOCK
1650 callee_method = SharedRuntime::resolve_helper(true, true, caller_is_c1, CHECK_NULL);
1651 current->set_vm_result_metadata(callee_method());
1652 JRT_BLOCK_END
1653 // return compiled code entry point after potential safepoints
1654 return get_resolved_entry(current, callee_method, false, true, caller_is_c1);
1655 JRT_END
1656
1657
1658
1659 methodHandle SharedRuntime::handle_ic_miss_helper(bool& is_optimized, bool& caller_is_c1, TRAPS) {
1660 JavaThread* current = THREAD;
1661 ResourceMark rm(current);
1662 CallInfo call_info;
1663 Bytecodes::Code bc;
1664
1665 // receiver is null for static calls. An exception is thrown for null
1666 // receivers for non-static calls
1667 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1668
1669 methodHandle callee_method(current, call_info.selected_method());
1670
1671 #ifndef PRODUCT
1672 Atomic::inc(&_ic_miss_ctr);
1673
1674 // Statistics & Tracing
1675 if (TraceCallFixup) {
1676 ResourceMark rm(current);
1677 tty->print("IC miss (%s) call%s to", Bytecodes::name(bc), (caller_is_c1) ? " from C1" : "");
1678 callee_method->print_short_name(tty);
1679 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1680 }
1681
1682 if (ICMissHistogram) {
1683 MutexLocker m(VMStatistic_lock);
1684 RegisterMap reg_map(current,
1685 RegisterMap::UpdateMap::skip,
1686 RegisterMap::ProcessFrames::include,
1687 RegisterMap::WalkContinuation::skip);
1688 frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1689 // produce statistics under the lock
1690 trace_ic_miss(f.pc());
1691 }
1692 #endif
1693
1694 // Install an event collector so that when a vtable stub is created the
1695 // profiler can be notified via a DYNAMIC_CODE_GENERATED event. The
1696 // event can't be posted when the stub is created as locks are held
1697 // - instead the event will be deferred until the event collector goes
1698 // out of scope.
1699 JvmtiDynamicCodeEventCollector event_collector;
1700
1701 // Update inline cache to megamorphic. Skip update if we are called from interpreted.
1702 RegisterMap reg_map(current,
1703 RegisterMap::UpdateMap::skip,
1704 RegisterMap::ProcessFrames::include,
1705 RegisterMap::WalkContinuation::skip);
1706 frame caller_frame = current->last_frame().sender(&reg_map);
1707 CodeBlob* cb = caller_frame.cb();
1708 nmethod* caller_nm = cb->as_nmethod();
1709 // Calls via mismatching methods are always non-scalarized
1710 if (caller_nm->is_compiled_by_c1() || call_info.resolved_method()->mismatch()) {
1711 caller_is_c1 = true;
1712 }
1713
1714 CompiledICLocker ml(caller_nm);
1715 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1716 inline_cache->update(&call_info, receiver()->klass(), caller_is_c1);
1717
1718 return callee_method;
1719 }
1720
1721 //
1722 // Resets a call site in compiled code so it will get resolved again.
1723 // This routine handles virtual call sites, optimized virtual call
1724 // sites, and static call sites. It is typically used to change a call
1725 // site's destination from compiled to interpreted.
1726 //
1727 methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_optimized, bool& caller_is_c1, TRAPS) {
1728 JavaThread* current = THREAD;
1729 ResourceMark rm(current);
1730 RegisterMap reg_map(current,
1731 RegisterMap::UpdateMap::skip,
1732 RegisterMap::ProcessFrames::include,
1733 RegisterMap::WalkContinuation::skip);
1734 frame stub_frame = current->last_frame();
1735 assert(stub_frame.is_runtime_frame(), "must be a runtimeStub");
1736 frame caller = stub_frame.sender(&reg_map);
1737 if (caller.is_compiled_frame()) {
1738 caller_is_c1 = caller.cb()->as_nmethod()->is_compiled_by_c1();
1739 }
1740
1741 // Do nothing if the frame isn't a live compiled frame.
1742 // nmethod could be deoptimized by the time we get here
1743 // so no update to the caller is needed.
1744
1745 if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
1746 (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
1747
1748 address pc = caller.pc();
1749
1750 nmethod* caller_nm = CodeCache::find_nmethod(pc);
1751 assert(caller_nm != nullptr, "did not find caller nmethod");
1752
1753 // Default call_addr is the location of the "basic" call.
1754 // Determine the address of the call we are re-resolving. With
1755 // Inline Caches we will always find a recognizable call.
1756 // With Inline Caches disabled we may or may not find a
1757 // recognizable call. We will always find a call for static
1758 // calls and for optimized virtual calls. For vanilla virtual
1759 // calls it depends on the state of the UseInlineCaches switch.
1760 //
1761 // With Inline Caches disabled we can get here for a virtual call
1762 // for two reasons:
1763 // 1 - calling an abstract method. The vtable for abstract methods
1764 // will run us thru handle_wrong_method and we will eventually
1765 // end up in the interpreter to throw the AbstractMethodError.
1766 // 2 - a racing deoptimization. We could be doing a vanilla vtable
1767 // call and between the time we fetch the entry address and
1768 // we jump to it the target gets deoptimized. Similar to 1
1769 // we will wind up in the interpreter (thru a c2i with c2).
1770 //
1771 CompiledICLocker ml(caller_nm);
1772 address call_addr = caller_nm->call_instruction_address(pc);
1773
1774 if (call_addr != nullptr) {
1775 // On x86 the logic for finding a call instruction is blindly checking for a call opcode 5
1776 // bytes back in the instruction stream so we must also check for reloc info.
1777 RelocIterator iter(caller_nm, call_addr, call_addr+1);
1778 bool ret = iter.next(); // Get item
1779 if (ret) {
1780 is_static_call = false;
1781 is_optimized = false;
1782 switch (iter.type()) {
1783 case relocInfo::static_call_type:
1784 is_static_call = true;
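// fall through: static calls are direct calls and share the cleaning code below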
1785 case relocInfo::opt_virtual_call_type: {
1786 is_optimized = (iter.type() == relocInfo::opt_virtual_call_type);
1787 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1788 cdc->set_to_clean();
1789 break;
1790 }
1791 case relocInfo::virtual_call_type: {
1792 // compiled, dispatched call (which used to call an interpreted method)
1793 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1794 inline_cache->set_to_clean();
1795 break;
1796 }
1797 default:
1798 break;
1799 }
1800 }
1801 }
1802 }
1803
1804 methodHandle callee_method = find_callee_method(is_optimized, caller_is_c1, CHECK_(methodHandle()));
1805
1806 #ifndef PRODUCT
1807 Atomic::inc(&_wrong_method_ctr);
1808
1809 if (TraceCallFixup) {
1810 ResourceMark rm(current);
1811 tty->print("handle_wrong_method reresolving call%s to", (caller_is_c1) ? " from C1" : "");
1812 callee_method->print_short_name(tty);
1813 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1814 }
1815 #endif
1816
1817 return callee_method;
1818 }
1819
1820 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1821 // The faulting unsafe accesses should be changed to throw the error
1822 // synchronously instead. Meanwhile the faulting instruction will be
1823 // skipped over (effectively turning it into a no-op) and an
1824 // asynchronous exception will be raised which the thread will
1825 // handle at a later point. If the instruction is a load it will
1826 // return garbage.
1827
1828 // Request an async exception.
1829 thread->set_pending_unsafe_access_error();
1830
1831 // Return address of next instruction to execute.
1997 msglen += strlen(caster_klass_description) + strlen(target_klass_description) + strlen(klass_separator) + 3;
1998
1999 char* message = NEW_RESOURCE_ARRAY_RETURN_NULL(char, msglen);
2000 if (message == nullptr) {
2001 // Shouldn't happen, but don't cause even more problems if it does
2002 message = const_cast<char*>(caster_klass->external_name());
2003 } else {
2004 jio_snprintf(message,
2005 msglen,
2006 "class %s cannot be cast to class %s (%s%s%s)",
2007 caster_name,
2008 target_name,
2009 caster_klass_description,
2010 klass_separator,
2011 target_klass_description
2012 );
2013 }
2014 return message;
2015 }
2016
2017 char* SharedRuntime::generate_identity_exception_message(JavaThread* current, Klass* klass) {
2018 assert(klass->is_inline_klass(), "Must be a concrete value class");
2019 const char* desc = "Cannot synchronize on an instance of value class ";
2020 const char* className = klass->external_name();
2021 size_t msglen = strlen(desc) + strlen(className) + 1;
2022 char* message = NEW_RESOURCE_ARRAY(char, msglen);
2023 if (message == nullptr) {
2024 // Out of memory: can't create detailed error message
2025 message = const_cast<char*>(klass->external_name());
2026 } else {
2027 jio_snprintf(message, msglen, "%s%s", desc, className);
2028 }
2029 return message;
2030 }
2031
2032 JRT_LEAF(void, SharedRuntime::reguard_yellow_pages())
2033 (void) JavaThread::current()->stack_overflow_state()->reguard_stack();
2034 JRT_END
2035
2036 void SharedRuntime::monitor_enter_helper(oopDesc* obj, BasicLock* lock, JavaThread* current) {
2037 if (!SafepointSynchronize::is_synchronizing()) {
2038 // Only try quick_enter() if we're not trying to reach a safepoint
2039 // so that the calling thread reaches the safepoint more quickly.
2040 if (ObjectSynchronizer::quick_enter(obj, lock, current)) {
2041 return;
2042 }
2043 }
2044 // NO_ASYNC required because an async exception on the state transition destructor
2045 // would leave you with the lock held and it would never be released.
2046 // The normal monitorenter NullPointerException is thrown without acquiring a lock
2047 // and the model is that an exception implies the method failed.
2048 JRT_BLOCK_NO_ASYNC
2049 Handle h_obj(THREAD, obj);
2050 ObjectSynchronizer::enter(h_obj, lock, current);
2051 assert(!HAS_PENDING_EXCEPTION, "Should have no exception here");
2262 tty->print_cr(" %% in nested categories are relative to their category");
2263 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2264 tty->cr();
2265
2266 MethodArityHistogram h;
2267 }
2268 #endif
2269
2270 #ifndef PRODUCT
2271 static int _lookups; // number of calls to lookup
2272 static int _equals; // number of buckets checked with matching hash
2273 static int _archived_hits; // number of successful lookups in archived table
2274 static int _runtime_hits; // number of successful lookups in runtime table
2275 #endif
2276
2277 // A simple wrapper class around the calling convention information
2278 // that allows sharing of adapters for the same calling convention.
2279 class AdapterFingerPrint : public MetaspaceObj {
2280 private:
2281 enum {
2282 _basic_type_bits = 5,
2283 _basic_type_mask = right_n_bits(_basic_type_bits),
2284 _basic_types_per_int = BitsPerInt / _basic_type_bits,
2285 };
2286 // TO DO: Consider integrating this with a more global scheme for compressing signatures.
2287 // For now, 5 bits per component (plus T_VOID gaps after double/long) is not excessive.
2288
2289 int _length;
2290
2291 static int data_offset() { return sizeof(AdapterFingerPrint); }
2292 int* data_pointer() {
2293 return (int*)((address)this + data_offset());
2294 }
2295
2296 // Private constructor. Use allocate() to get an instance.
2297 AdapterFingerPrint(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2298 int* data = data_pointer();
2299 // Pack the BasicTypes into the data array, _basic_types_per_int entries per int
2300 int total_args_passed = total_args_passed_in_sig(sig);
2301 _length = length(total_args_passed);
2302 int sig_index = 0;
2303 BasicType prev_bt = T_ILLEGAL;
2304 int vt_count = 0;
2305 for (int index = 0; index < _length; index++) {
2306 int value = 0;
2307 for (int byte = 0; byte < _basic_types_per_int; byte++) {
2308 BasicType bt = T_ILLEGAL;
2309 if (sig_index < total_args_passed) {
2310 bt = sig->at(sig_index++)._bt;
2311 if (bt == T_METADATA) {
2312 // Found start of inline type in signature
2313 assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
2314 if (sig_index == 1 && has_ro_adapter) {
2315 // With a ro_adapter, replace receiver inline type delimiter by T_VOID to prevent matching
2316 // with other adapters that have the same inline type as first argument and no receiver.
2317 bt = T_VOID;
2318 }
2319 vt_count++;
2320 } else if (bt == T_VOID && prev_bt != T_LONG && prev_bt != T_DOUBLE) {
2321 // Found end of inline type in signature
2322 assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
2323 vt_count--;
2324 assert(vt_count >= 0, "invalid vt_count");
2325 } else if (vt_count == 0) {
2326 // Widen fields that are not part of a scalarized inline type argument
2327 bt = adapter_encoding(bt);
2328 }
2329 prev_bt = bt;
2330 }
2331 int bt_val = (bt == T_ILLEGAL) ? 0 : bt;
2332 assert((bt_val & _basic_type_mask) == bt_val, "must fit in 5 bits");
2333 value = (value << _basic_type_bits) | bt_val;
2334 }
2335 data[index] = value;
2336 }
2337 assert(vt_count == 0, "invalid vt_count");
2338 }
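// Illustrative example (not from the source): with _basic_type_bits == 5 and
// 6 entries per 32-bit int, a signature (Object, int, long) on LP64 encodes as
// [T_LONG, T_INT, T_LONG, T_VOID] packed from the highest bits downwards, with
// unused low slots left as 0 (T_ILLEGAL).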
2339
2340 // Call deallocate instead
2341 ~AdapterFingerPrint() {
2342 ShouldNotCallThis();
2343 }
2344
2345 static int total_args_passed_in_sig(const GrowableArray<SigEntry>* sig) {
2346 return (sig != nullptr) ? sig->length() : 0;
2347 }
2348
2349 static int length(int total_args) {
2350 return (total_args + (_basic_types_per_int-1)) / _basic_types_per_int;
2351 }
2352
2353 static int compute_size_in_words(int len) {
2354 return (int)heap_word_size(sizeof(AdapterFingerPrint) + (len * sizeof(int)));
2355 }
2356
2357 // Remap BasicTypes that are handled equivalently by the adapters.
2358 // These are correct for the current system but someday it might be
2359 // necessary to make this mapping platform dependent.
2360 static BasicType adapter_encoding(BasicType in) {
2361 switch (in) {
2362 case T_BOOLEAN:
2363 case T_BYTE:
2364 case T_SHORT:
2365 case T_CHAR:
2366 // They are all promoted to T_INT in the calling convention
2367 return T_INT;
2368
2369 case T_OBJECT:
2370 case T_ARRAY:
2371 // In other words, we assume that any register good enough for
2372 // an int or long is good enough for a managed pointer.
2373 #ifdef _LP64
2374 return T_LONG;
2375 #else
2376 return T_INT;
2377 #endif
2378
2379 case T_INT:
2380 case T_LONG:
2381 case T_FLOAT:
2382 case T_DOUBLE:
2383 case T_VOID:
2384 return in;
2385
2386 default:
2387 ShouldNotReachHere();
2388 return T_CONFLICT;
2389 }
2390 }
2391
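// Variable-size placement allocation: fp_size is the full footprint (header
// plus packed data words) computed by allocate(); the memory is zeroed so
// unused slots read back as 0 (T_ILLEGAL).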
2392 void* operator new(size_t size, size_t fp_size) throw() {
2393 assert(fp_size >= size, "sanity check");
2394 void* p = AllocateHeap(fp_size, mtCode);
2395 memset(p, 0, fp_size);
2396 return p;
2397 }
2398
2399 public:
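// Applies 'function' to each packed 5-bit basic-type value in argument order,
// skipping empty (zero) slots.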
2400 template<typename Function>
2401 void iterate_args(Function function) {
2402 for (int i = 0; i < length(); i++) {
2403 unsigned val = (unsigned)value(i);
2404 // args are packed so that first/lower arguments are in the highest
2405 // bits of each int value, so iterate from highest to the lowest
2406 int first_entry = _basic_types_per_int * _basic_type_bits;
2407 for (int j = first_entry; j >= 0; j -= _basic_type_bits) {
2408 unsigned v = (val >> j) & _basic_type_mask;
2409 if (v == 0) {
2410 continue;
2411 }
2412 function(v);
2413 }
2414 }
2415 }
2416
2417 static AdapterFingerPrint* allocate(const GrowableArray<SigEntry>* sig, bool has_ro_adapter = false) {
2418 int total_args_passed = total_args_passed_in_sig(sig);
2419 int len = length(total_args_passed);
2420 int size_in_bytes = BytesPerWord * compute_size_in_words(len);
2421 AdapterFingerPrint* afp = new (size_in_bytes) AdapterFingerPrint(sig, has_ro_adapter);
2422 assert((afp->size() * BytesPerWord) == size_in_bytes, "should match");
2423 return afp;
2424 }
2425
2426 static void deallocate(AdapterFingerPrint* fp) {
2427 FreeHeap(fp);
2428 }
2429
2430 int value(int index) {
2431 int* data = data_pointer();
2432 return data[index];
2433 }
2434
2435 int length() {
2436 return _length;
2437 }
2438
2439 unsigned int compute_hash() {
2440 int hash = 0;
2441 for (int i = 0; i < length(); i++) {
2450 stringStream st;
2451 st.print("0x");
2452 for (int i = 0; i < length(); i++) {
2453 st.print("%x", value(i));
2454 }
2455 return st.as_string();
2456 }
2457
2458 const char* as_basic_args_string() {
2459 stringStream st;
2460 bool long_prev = false;
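// Note: on 64-bit platforms adapter_encoding() maps T_OBJECT/T_ARRAY to
// T_LONG, so a T_LONG here denotes a real long only when followed by its
// T_VOID half slot; otherwise it is printed as an object ("L").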
2461 iterate_args([&] (int arg) {
2462 if (long_prev) {
2463 long_prev = false;
2464 if (arg == T_VOID) {
2465 st.print("J");
2466 } else {
2467 st.print("L");
2468 }
2469 }
2470 if (arg == T_LONG) {
2471 long_prev = true;
2472 } else if (arg != T_VOID) {
2473 st.print("%c", type2char((BasicType)arg));
2474 }
2475 });
2476 if (long_prev) {
2477 st.print("L");
2478 }
2479 return st.as_string();
2480 }
2481
2482 bool equals(AdapterFingerPrint* other) {
2483 if (other->_length != _length) {
2484 return false;
2485 } else {
2486 for (int i = 0; i < _length; i++) {
2487 if (value(i) != other->value(i)) {
2488 return false;
2489 }
2490 }
2491 }
2492 return true;
2493 }
2494
2495 // methods required by virtue of being a MetaspaceObj
2496 void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2497 int size() const { return compute_size_in_words(_length); }
2498 MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2499
2500 static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2501 NOT_PRODUCT(_equals++);
2510 #if INCLUDE_CDS
2511 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2512 return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2513 }
2514
2515 class ArchivedAdapterTable : public OffsetCompactHashtable<
2516 AdapterFingerPrint*,
2517 AdapterHandlerEntry*,
2518 adapter_fp_equals_compact_hashtable_entry> {};
2519 #endif // INCLUDE_CDS
2520
2521 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2522 using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2523 AnyObj::C_HEAP, mtCode,
2524 AdapterFingerPrint::compute_hash,
2525 AdapterFingerPrint::equals>;
2526 static AdapterHandlerTable* _adapter_handler_table;
2527 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2528
2529 // Find an entry with the same fingerprint if it exists
2530 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(const GrowableArray<SigEntry>* sig, bool has_ro_adapter) {
2531 NOT_PRODUCT(_lookups++);
2532 assert_lock_strong(AdapterHandlerLibrary_lock);
2533 AdapterFingerPrint* fp = AdapterFingerPrint::allocate(sig, has_ro_adapter);
2534 AdapterHandlerEntry* entry = nullptr;
2535 #if INCLUDE_CDS
2536 // The archived adapter table is only valid when we are using the AOT cache;
2537 // when building the archive we must use the entries added to the runtime table.
2538 if (AOTCodeCache::is_using_adapter()) {
2539 // Search the archived table first. It is read-only, so it can be searched without a lock.
2540 entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2541 #ifndef PRODUCT
2542 if (entry != nullptr) {
2543 _archived_hits++;
2544 }
2545 #endif
2546 }
2547 #endif // INCLUDE_CDS
2548 if (entry == nullptr) {
2549 assert_lock_strong(AdapterHandlerLibrary_lock);
2550 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2551 if (entry_p != nullptr) {
2552 entry = *entry_p;
2553 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2571 ts.print(tty, "AdapterHandlerTable");
2572 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2573 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2574 int total_hits = _archived_hits + _runtime_hits;
2575 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d)",
2576 _lookups, _equals, total_hits, _archived_hits, _runtime_hits);
2577 }
2578 #endif
2579
2580 // ---------------------------------------------------------------------------
2581 // Implementation of AdapterHandlerLibrary
2582 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2583 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2584 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2585 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2586 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2587 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2588 #if INCLUDE_CDS
2589 ArchivedAdapterTable AdapterHandlerLibrary::_aot_adapter_handler_table;
2590 #endif // INCLUDE_CDS
2591 static const int AdapterHandlerLibrary_size = 48*K;
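// Size of the shared scratch BufferBlob used while generating adapter code.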
2592 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2593
2594 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2595 assert(_buffer != nullptr, "should be initialized");
2596 return _buffer;
2597 }
2598
2599 static void post_adapter_creation(const AdapterBlob* new_adapter,
2600 const AdapterHandlerEntry* entry) {
2601 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2602 char blob_id[256];
2603 jio_snprintf(blob_id,
2604 sizeof(blob_id),
2605 "%s(%s)",
2606 new_adapter->name(),
2607 entry->fingerprint()->as_string());
2608 if (Forte::is_enabled()) {
2609 Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2610 }
2611
2612 if (JvmtiExport::should_post_dynamic_code_generated()) {
2613 JvmtiExport::post_dynamic_code_generated(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2614 }
2615 }
2616 }
2617
2618 void AdapterHandlerLibrary::create_abstract_method_handler() {
2619 assert_lock_strong(AdapterHandlerLibrary_lock);
2620 // Create a special handler for abstract methods. Abstract methods
2621 // are never compiled so an i2c entry is somewhat meaningless, but
2622 // throw AbstractMethodError just in case.
2623 // Pass wrong_method_abstract for the c2i transitions to return
2624 // AbstractMethodError for invalid invocations.
2625 address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2626 _abstract_method_handler = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(nullptr));
2627 _abstract_method_handler->set_entry_points(SharedRuntime::throw_AbstractMethodError_entry(),
2628 wrong_method_abstract, wrong_method_abstract, wrong_method_abstract,
2629 wrong_method_abstract, wrong_method_abstract);
2630 }
2631
2632 void AdapterHandlerLibrary::initialize() {
2633 {
2634 ResourceMark rm;
2635 MutexLocker mu(AdapterHandlerLibrary_lock);
2636 _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2637 _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2638 create_abstract_method_handler();
2639 }
2640
2641 #if INCLUDE_CDS
2642 // Link adapters in AOT Cache to their code in AOT Code Cache
2643 if (AOTCodeCache::is_using_adapter() && !_aot_adapter_handler_table.empty()) {
2644 link_aot_adapters();
2645 lookup_simple_adapters();
2646 return;
2647 }
2648 #endif // INCLUDE_CDS
2649
2650 ResourceMark rm;
2651 AdapterBlob* no_arg_blob = nullptr;
2652 AdapterBlob* int_arg_blob = nullptr;
2653 AdapterBlob* obj_arg_blob = nullptr;
2654 AdapterBlob* obj_int_arg_blob = nullptr;
2655 AdapterBlob* obj_obj_arg_blob = nullptr;
2656 {
2657 MutexLocker mu(AdapterHandlerLibrary_lock);
2658
2659 CompiledEntrySignature no_args;
2660 no_args.compute_calling_conventions();
2661 _no_arg_handler = create_adapter(no_arg_blob, no_args, true);
2662
2663 CompiledEntrySignature obj_args;
2664 SigEntry::add_entry(obj_args.sig(), T_OBJECT);
2665 obj_args.compute_calling_conventions();
2666 _obj_arg_handler = create_adapter(obj_arg_blob, obj_args, true);
2667
2668 CompiledEntrySignature int_args;
2669 SigEntry::add_entry(int_args.sig(), T_INT);
2670 int_args.compute_calling_conventions();
2671 _int_arg_handler = create_adapter(int_arg_blob, int_args, true);
2672
2673 CompiledEntrySignature obj_int_args;
2674 SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
2675 SigEntry::add_entry(obj_int_args.sig(), T_INT);
2676 obj_int_args.compute_calling_conventions();
2677 _obj_int_arg_handler = create_adapter(obj_int_arg_blob, obj_int_args, true);
2678
2679 CompiledEntrySignature obj_obj_args;
2680 SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2681 SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
2682 obj_obj_args.compute_calling_conventions();
2683 _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, obj_obj_args, true);
2684
2685 assert(no_arg_blob != nullptr &&
2686 obj_arg_blob != nullptr &&
2687 int_arg_blob != nullptr &&
2688 obj_int_arg_blob != nullptr &&
2689 obj_obj_arg_blob != nullptr, "Initial adapters must be properly created");
2690 }
2691
2692 // Outside of the lock
2693 post_adapter_creation(no_arg_blob, _no_arg_handler);
2694 post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2695 post_adapter_creation(int_arg_blob, _int_arg_handler);
2696 post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2697 post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2698 }
2699
2700 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint) {
2701 return AdapterHandlerEntry::allocate(fingerprint);
2702 }
2703
2704 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2705 if (method->is_abstract()) {
2706 return nullptr;
2707 }
2708 int total_args_passed = method->size_of_parameters(); // All args on stack
2709 if (total_args_passed == 0) {
2710 return _no_arg_handler;
2711 } else if (total_args_passed == 1) {
2712 if (!method->is_static()) {
2713 if (InlineTypePassFieldsAsArgs && method->method_holder()->is_inline_klass()) {
2714 return nullptr;
2715 }
2716 return _obj_arg_handler;
2717 }
2718 switch (method->signature()->char_at(1)) {
2719 case JVM_SIGNATURE_CLASS: {
2720 if (InlineTypePassFieldsAsArgs) {
2721 SignatureStream ss(method->signature());
2722 InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2723 if (vk != nullptr) {
2724 return nullptr;
2725 }
2726 }
2727 return _obj_arg_handler;
2728 }
2729 case JVM_SIGNATURE_ARRAY:
2730 return _obj_arg_handler;
2731 case JVM_SIGNATURE_INT:
2732 case JVM_SIGNATURE_BOOLEAN:
2733 case JVM_SIGNATURE_CHAR:
2734 case JVM_SIGNATURE_BYTE:
2735 case JVM_SIGNATURE_SHORT:
2736 return _int_arg_handler;
2737 }
2738 } else if (total_args_passed == 2 &&
2739 !method->is_static() && (!InlineTypePassFieldsAsArgs || !method->method_holder()->is_inline_klass())) {
2740 switch (method->signature()->char_at(1)) {
2741 case JVM_SIGNATURE_CLASS: {
2742 if (InlineTypePassFieldsAsArgs) {
2743 SignatureStream ss(method->signature());
2744 InlineKlass* vk = ss.as_inline_klass(method->method_holder());
2745 if (vk != nullptr) {
2746 return nullptr;
2747 }
2748 }
2749 return _obj_obj_arg_handler;
2750 }
2751 case JVM_SIGNATURE_ARRAY:
2752 return _obj_obj_arg_handler;
2753 case JVM_SIGNATURE_INT:
2754 case JVM_SIGNATURE_BOOLEAN:
2755 case JVM_SIGNATURE_CHAR:
2756 case JVM_SIGNATURE_BYTE:
2757 case JVM_SIGNATURE_SHORT:
2758 return _obj_int_arg_handler;
2759 }
2760 }
2761 return nullptr;
2762 }
2763
2764 CompiledEntrySignature::CompiledEntrySignature(Method* method) :
2765 _method(method), _num_inline_args(0), _has_inline_recv(false),
2766 _regs(nullptr), _regs_cc(nullptr), _regs_cc_ro(nullptr),
2767 _args_on_stack(0), _args_on_stack_cc(0), _args_on_stack_cc_ro(0),
2768 _c1_needs_stack_repair(false), _c2_needs_stack_repair(false), _supers(nullptr) {
2769 _sig = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2770 _sig_cc = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2771 _sig_cc_ro = new GrowableArray<SigEntry>((method != nullptr) ? method->size_of_parameters() : 1);
2772 }
2773
2774 // See if we can save space by sharing the same entry for VIEP and VIEP(RO),
2775 // or the same entry for VEP and VIEP(RO).
2776 CodeOffsets::Entries CompiledEntrySignature::c1_inline_ro_entry_type() const {
2777 if (!has_scalarized_args()) {
2778 // VEP/VIEP/VIEP(RO) all share the same entry. There's no packing.
2779 return CodeOffsets::Verified_Entry;
2780 }
2781 if (_method->is_static()) {
2782 // Static methods don't need VIEP(RO)
2783 return CodeOffsets::Verified_Entry;
2784 }
2785
2786 if (has_inline_recv()) {
2787 if (num_inline_args() == 1) {
2788 // Share same entry for VIEP and VIEP(RO).
2789 // This is quite common: we have an instance method in an InlineKlass that has
2790 // no inline type args other than <this>.
2791 return CodeOffsets::Verified_Inline_Entry;
2792 } else {
2793 assert(num_inline_args() > 1, "must be");
2794 // No sharing:
2795 // VIEP(RO) -- <this> is passed as object
2796 // VEP -- <this> is passed as fields
2797 return CodeOffsets::Verified_Inline_Entry_RO;
2798 }
2799 }
2800
2801   // Not a static method (that case returned above), and <this> is not an inline type
2802 if (args_on_stack_cc() != args_on_stack_cc_ro()) {
2803 // No sharing:
2804 // Some arguments are passed on the stack, and we have inserted reserved entries
2805 // into the VEP, but we never insert reserved entries into the VIEP(RO).
2806 return CodeOffsets::Verified_Inline_Entry_RO;
2807 } else {
2808 // Share same entry for VEP and VIEP(RO).
2809 return CodeOffsets::Verified_Entry;
2810 }
2811 }
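
// For example (illustrative): an instance method of an InlineKlass whose only
// inline type argument is <this> shares one entry for VIEP and VIEP(RO), whereas
// a method that additionally takes a scalarized inline type argument needs a
// separate VIEP(RO) in which <this> alone is passed as an object.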
2812
2813 // Returns all (transitive) super methods in classes and interfaces that are overridden by the current method.
2814 GrowableArray<Method*>* CompiledEntrySignature::get_supers() {
2815 if (_supers != nullptr) {
2816 return _supers;
2817 }
2818 _supers = new GrowableArray<Method*>();
2819 // Skip private, static, and <init> methods
2820 if (_method->is_private() || _method->is_static() || _method->is_object_constructor()) {
2821 return _supers;
2822 }
2823 Symbol* name = _method->name();
2824 Symbol* signature = _method->signature();
2825 const Klass* holder = _method->method_holder()->super();
2826 Symbol* holder_name = holder->name();
2827 ThreadInVMfromUnknown tiv;
2828 JavaThread* current = JavaThread::current();
2829 HandleMark hm(current);
2830 Handle loader(current, _method->method_holder()->class_loader());
2831
2832 // Walk up the class hierarchy and search for super methods
2833 while (holder != nullptr) {
2834 Method* super_method = holder->lookup_method(name, signature);
2835 if (super_method == nullptr) {
2836 break;
2837 }
2838 if (!super_method->is_static() && !super_method->is_private() &&
2839 (!super_method->is_package_private() ||
2840 super_method->method_holder()->is_same_class_package(loader(), holder_name))) {
2841 _supers->push(super_method);
2842 }
2843 holder = super_method->method_holder()->super();
2844 }
2845 // Search interfaces for super methods
2846 Array<InstanceKlass*>* interfaces = _method->method_holder()->transitive_interfaces();
2847 for (int i = 0; i < interfaces->length(); ++i) {
2848 Method* m = interfaces->at(i)->lookup_method(name, signature);
2849 if (m != nullptr && !m->is_static() && m->is_public()) {
2850 _supers->push(m);
2851 }
2852 }
2853 return _supers;
2854 }
2855
2856 // Iterate over arguments and compute scalarized and non-scalarized signatures
2857 void CompiledEntrySignature::compute_calling_conventions(bool init) {
2858 bool has_scalarized = false;
2859 if (_method != nullptr) {
2860 InstanceKlass* holder = _method->method_holder();
2861 int arg_num = 0;
2862 if (!_method->is_static()) {
2863 // We shouldn't scalarize 'this' in a value class constructor
2864 if (holder->is_inline_klass() && InlineKlass::cast(holder)->can_be_passed_as_fields() && !_method->is_object_constructor() &&
2865 (init || _method->is_scalarized_arg(arg_num))) {
2866 _sig_cc->appendAll(InlineKlass::cast(holder)->extended_sig());
2867 has_scalarized = true;
2868 _has_inline_recv = true;
2869 _num_inline_args++;
2870 } else {
2871 SigEntry::add_entry(_sig_cc, T_OBJECT, holder->name());
2872 }
2873 SigEntry::add_entry(_sig, T_OBJECT, holder->name());
2874 SigEntry::add_entry(_sig_cc_ro, T_OBJECT, holder->name());
2875 arg_num++;
2876 }
2877 for (SignatureStream ss(_method->signature()); !ss.at_return_type(); ss.next()) {
2878 BasicType bt = ss.type();
2879 if (bt == T_OBJECT) {
2880 InlineKlass* vk = ss.as_inline_klass(holder);
2881 if (vk != nullptr && vk->can_be_passed_as_fields() && (init || _method->is_scalarized_arg(arg_num))) {
2882 // Check for a calling convention mismatch with super method(s)
2883 bool scalar_super = false;
2884 bool non_scalar_super = false;
2885 GrowableArray<Method*>* supers = get_supers();
2886 for (int i = 0; i < supers->length(); ++i) {
2887 Method* super_method = supers->at(i);
2888 if (super_method->is_scalarized_arg(arg_num)) {
2889 scalar_super = true;
2890 } else {
2891 non_scalar_super = true;
2892 }
2893 }
2894 #ifdef ASSERT
2895           // Randomly enable the code paths below for stress testing
2896 bool stress = init && StressCallingConvention;
2897 if (stress && (os::random() & 1) == 1) {
2898 non_scalar_super = true;
2899 if ((os::random() & 1) == 1) {
2900 scalar_super = true;
2901 }
2902 }
2903 #endif
2904 if (non_scalar_super) {
2905 // Found a super method with a non-scalarized argument. Fall back to the non-scalarized calling convention.
2906 if (scalar_super) {
2907 // Found non-scalar *and* scalar super methods. We can't handle both.
2908             // Mark the scalarized super methods as mismatched and recompile their call sites to use the non-scalarized calling convention.
2909 for (int i = 0; i < supers->length(); ++i) {
2910 Method* super_method = supers->at(i);
2911 if (super_method->is_scalarized_arg(arg_num) DEBUG_ONLY(|| (stress && (os::random() & 1) == 1))) {
2912 super_method->set_mismatch();
2913 MutexLocker ml(Compile_lock, Mutex::_safepoint_check_flag);
2914 JavaThread* thread = JavaThread::current();
2915 HandleMark hm(thread);
2916 methodHandle mh(thread, super_method);
2917 DeoptimizationScope deopt_scope;
2918 CodeCache::mark_for_deoptimization(&deopt_scope, mh());
2919 deopt_scope.deoptimize_marked();
2920 }
2921 }
2922 }
2923 // Fall back to non-scalarized calling convention
2924 SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2925 SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2926 } else {
2927 _num_inline_args++;
2928 has_scalarized = true;
2929 int last = _sig_cc->length();
2930 int last_ro = _sig_cc_ro->length();
2931 _sig_cc->appendAll(vk->extended_sig());
2932 _sig_cc_ro->appendAll(vk->extended_sig());
2933 if (bt == T_OBJECT) {
2934             // Nullable inline type argument: insert the InlineTypeNode::NullMarker field right after the T_METADATA delimiter
2935 _sig_cc->insert_before(last+1, SigEntry(T_BOOLEAN, -1, nullptr, true));
2936 _sig_cc_ro->insert_before(last_ro+1, SigEntry(T_BOOLEAN, -1, nullptr, true));
2937 }
2938 }
2939 } else {
2940 SigEntry::add_entry(_sig_cc, T_OBJECT, ss.as_symbol());
2941 SigEntry::add_entry(_sig_cc_ro, T_OBJECT, ss.as_symbol());
2942 }
2943 bt = T_OBJECT;
2944 } else {
2945 SigEntry::add_entry(_sig_cc, ss.type(), ss.as_symbol());
2946 SigEntry::add_entry(_sig_cc_ro, ss.type(), ss.as_symbol());
2947 }
2948 SigEntry::add_entry(_sig, bt, ss.as_symbol());
2949 if (bt != T_VOID) {
2950 arg_num++;
2951 }
2952 }
2953 }
2954
2955 // Compute the non-scalarized calling convention
2956 _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
2957 _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
2958
2959 // Compute the scalarized calling conventions if there are scalarized inline types in the signature
2960 if (has_scalarized && !_method->is_native()) {
2961 _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
2962 _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
2963
2964 _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
2965 _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
2966
2967 _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
2968 _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
2969
2970 // Upper bound on stack arguments to avoid hitting the argument limit and
2971 // bailing out of compilation ("unsupported incoming calling sequence").
2972 // TODO we need a reasonable limit (flag?) here
2973 if (MAX2(_args_on_stack_cc, _args_on_stack_cc_ro) <= 60) {
2974 return; // Success
2975 }
2976 }
2977
2978 // No scalarized args
2979 _sig_cc = _sig;
2980 _regs_cc = _regs;
2981 _args_on_stack_cc = _args_on_stack;
2982
2983 _sig_cc_ro = _sig;
2984 _regs_cc_ro = _regs;
2985 _args_on_stack_cc_ro = _args_on_stack;
2986 }
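
// A rough sketch of the signatures computed above (the exact layout comes from
// InlineKlass::extended_sig()): for 'void m(V v)' on a regular receiver, where V
// is a nullable, scalarizable value class with fields (int x, long y):
//   _sig:    [ T_OBJECT(recv), T_OBJECT(v) ]
//   _sig_cc: [ T_OBJECT(recv), T_METADATA, T_BOOLEAN(null marker), T_INT,
//              T_LONG, T_VOID, T_VOID(end delimiter) ]
// with the T_BOOLEAN null marker inserted right after the T_METADATA delimiter,
// as done for nullable inline type arguments above.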
2987
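// Reconstructs the signatures from an AdapterFingerPrint. The encoding consumed
// below: T_METADATA opens a scalarized value object and a matching T_VOID closes
// it; a T_VOID in the receiver position marks a scalarized 'this'; T_LONG denotes
// a two-slot long only when followed by T_VOID and otherwise stands for an oop
// (the fingerprint does not distinguish T_OBJECT from T_ARRAY).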
2988 void CompiledEntrySignature::initialize_from_fingerprint(AdapterFingerPrint* fingerprint) {
2989 int value_object_count = 0;
2990 bool is_receiver = true;
2991 BasicType prev_bt = T_ILLEGAL;
2992 bool long_prev = false;
2993 bool has_scalarized_arguments = false;
2994
2995 fingerprint->iterate_args([&] (int arg) {
2996 BasicType bt = (BasicType)arg;
2997 if (long_prev) {
2998 long_prev = false;
2999 BasicType bt_to_add;
3000 if (bt == T_VOID) {
3001 bt_to_add = T_LONG;
3002 } else {
3003 bt_to_add = T_OBJECT; // it could be T_ARRAY; it shouldn't matter
3004 }
3005 SigEntry::add_entry(_sig_cc, bt_to_add);
3006 SigEntry::add_entry(_sig_cc_ro, bt_to_add);
3007 if (value_object_count == 0) {
3008 SigEntry::add_entry(_sig, bt_to_add);
3009 }
3010 }
3011 switch (bt) {
3012 case T_VOID:
3013 if (is_receiver) {
3014         // A leading T_VOID encodes a scalarized 'this' (present when an ro adapter is available)
3015 assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
3016 value_object_count++;
3017 has_scalarized_arguments = true;
3018 _has_inline_recv = true;
3019 SigEntry::add_entry(_sig, T_OBJECT);
3020 SigEntry::add_entry(_sig_cc, T_METADATA);
3021 SigEntry::add_entry(_sig_cc_ro, T_METADATA);
3022 } else if (prev_bt != T_LONG && prev_bt != T_DOUBLE) {
3023 assert(InlineTypePassFieldsAsArgs, "unexpected end of inline type");
3024 value_object_count--;
3025 SigEntry::add_entry(_sig_cc, T_VOID);
3026 SigEntry::add_entry(_sig_cc_ro, T_VOID);
3027 assert(value_object_count >= 0, "invalid value object count");
3028 } else {
3029       // Nothing to add to _sig: we already added an additional T_VOID in add_entry() when adding T_LONG or T_DOUBLE.
3030 }
3031 break;
3032 case T_INT:
3033 case T_FLOAT:
3034 case T_DOUBLE:
3035 if (value_object_count == 0) {
3036 SigEntry::add_entry(_sig, bt);
3037 }
3038 SigEntry::add_entry(_sig_cc, bt);
3039 SigEntry::add_entry(_sig_cc_ro, bt);
3040 break;
3041 case T_LONG:
3042 long_prev = true;
3043 break;
3044 case T_BOOLEAN:
3045 case T_CHAR:
3046 case T_BYTE:
3047 case T_SHORT:
3048 case T_OBJECT:
3049 case T_ARRAY:
3050 assert(value_object_count > 0 && !is_receiver, "must be value object field");
3051 SigEntry::add_entry(_sig_cc, bt);
3052 SigEntry::add_entry(_sig_cc_ro, bt);
3053 break;
3054 case T_METADATA:
3055 assert(InlineTypePassFieldsAsArgs, "unexpected start of inline type");
3056 value_object_count++;
3057 has_scalarized_arguments = true;
3058 SigEntry::add_entry(_sig, T_OBJECT);
3059 SigEntry::add_entry(_sig_cc, T_METADATA);
3060 SigEntry::add_entry(_sig_cc_ro, T_METADATA);
3061 break;
3062 default: {
3063 fatal("Unexpected BasicType: %s", basictype_to_str(bt));
3064 }
3065 }
3066 prev_bt = bt;
3067 is_receiver = false;
3068 });
3069
3070 if (long_prev) {
3071     // If the previous bt was T_LONG and we reached the end of the signature, it must have encoded a T_OBJECT.
3072 SigEntry::add_entry(_sig, T_OBJECT);
3073 SigEntry::add_entry(_sig_cc, T_OBJECT);
3074 SigEntry::add_entry(_sig_cc_ro, T_OBJECT);
3075 }
3076 assert(value_object_count == 0, "invalid value object count");
3077
3078 _regs = NEW_RESOURCE_ARRAY(VMRegPair, _sig->length());
3079 _args_on_stack = SharedRuntime::java_calling_convention(_sig, _regs);
3080
3081 // Compute the scalarized calling conventions if there are scalarized inline types in the signature
3082 if (has_scalarized_arguments) {
3083 _regs_cc = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc->length());
3084 _args_on_stack_cc = SharedRuntime::java_calling_convention(_sig_cc, _regs_cc);
3085
3086 _regs_cc_ro = NEW_RESOURCE_ARRAY(VMRegPair, _sig_cc_ro->length());
3087 _args_on_stack_cc_ro = SharedRuntime::java_calling_convention(_sig_cc_ro, _regs_cc_ro);
3088
3089 _c1_needs_stack_repair = (_args_on_stack_cc < _args_on_stack) || (_args_on_stack_cc_ro < _args_on_stack);
3090 _c2_needs_stack_repair = (_args_on_stack_cc > _args_on_stack) || (_args_on_stack_cc > _args_on_stack_cc_ro);
3091 } else {
3092 // No scalarized args
3093 _sig_cc = _sig;
3094 _regs_cc = _regs;
3095 _args_on_stack_cc = _args_on_stack;
3096
3097 _sig_cc_ro = _sig;
3098 _regs_cc_ro = _regs;
3099 _args_on_stack_cc_ro = _args_on_stack;
3100 }
3101
3102 #ifdef ASSERT
3103 {
3104 AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(_sig_cc, _has_inline_recv);
3105 assert(fingerprint->equals(compare_fp), "sanity check");
3106 AdapterFingerPrint::deallocate(compare_fp);
3107 }
3108 #endif
3109 }
3110
3111 const char* AdapterHandlerEntry::_entry_names[] = {
3112   "i2c", "c2i", "c2i_inline", "c2i_inline_ro", "c2i_unverified", "c2i_unverified_inline", "c2i_no_clinit_check"
3113 };
3114
3115 #ifdef ASSERT
3116 void AdapterHandlerLibrary::verify_adapter_sharing(CompiledEntrySignature& ces, AdapterHandlerEntry* cached_entry) {
3117 AdapterBlob* comparison_blob = nullptr;
3118 AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, ces, false, true);
3119 assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
3120 assert(comparison_entry->compare_code(cached_entry), "code must match");
3121 // Release the one just created
3122 AdapterHandlerEntry::deallocate(comparison_entry);
3123 }
3124 #endif // ASSERT
3125
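// With -XX:+VerifyAdapterSharing (debug builds), a cache hit in get_adapter()
// below re-generates the adapter code for the signature as a transient entry and
// asserts that it is identical to the code saved for the cached entry.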
3126 AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& method) {
3127   // Look up, or create, the adapter for this method's signature. We need to
3128   // lock around updates to the _adapter_handler_table (it is not safe for
3129   // concurrent readers and a single writer; this could be fixed if it
3130   // becomes a problem).
3131
3132 // Fast-path for trivial adapters
3133 AdapterHandlerEntry* entry = get_simple_adapter(method);
3134 if (entry != nullptr) {
3135 return entry;
3136 }
3137
3138 ResourceMark rm;
3139 AdapterBlob* adapter_blob = nullptr;
3140
3141 CompiledEntrySignature ces(method());
3142 ces.compute_calling_conventions();
3143 if (ces.has_scalarized_args()) {
3144 if (!method->has_scalarized_args()) {
3145 method->set_has_scalarized_args();
3146 }
3147 if (ces.c1_needs_stack_repair()) {
3148 method->set_c1_needs_stack_repair();
3149 }
3150 if (ces.c2_needs_stack_repair() && !method->c2_needs_stack_repair()) {
3151 method->set_c2_needs_stack_repair();
3152 }
3153 } else if (method->is_abstract()) {
3154 return _abstract_method_handler;
3155 }
3156
3157 {
3158 MutexLocker mu(AdapterHandlerLibrary_lock);
3159
3160 if (ces.has_scalarized_args() && method->is_abstract()) {
3161 // Save a C heap allocated version of the signature for abstract methods with scalarized inline type arguments
3162 address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
3163 entry = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(nullptr));
3164 entry->set_entry_points(SharedRuntime::throw_AbstractMethodError_entry(),
3165 wrong_method_abstract, wrong_method_abstract, wrong_method_abstract,
3166                               wrong_method_abstract, wrong_method_abstract, wrong_method_abstract);
3167 GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc_ro()->length(), mtInternal);
3168 heap_sig->appendAll(ces.sig_cc_ro());
3169 entry->set_sig_cc(heap_sig);
3170 return entry;
3171 }
3172
3173     // Look up the method signature's fingerprint
3174 entry = lookup(ces.sig_cc(), ces.has_inline_recv());
3175
3176 if (entry != nullptr) {
3177 assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
3178 #ifdef ASSERT
3179 if (!entry->is_shared() && VerifyAdapterSharing) {
3180 verify_adapter_sharing(ces, entry);
3181 }
3182 #endif
3183 } else {
3184 entry = create_adapter(adapter_blob, ces, /* allocate_code_blob */ true);
3185 }
3186 }
3187
3188 // Outside of the lock
3189 if (adapter_blob != nullptr) {
3190 post_adapter_creation(adapter_blob, entry);
3191 }
3192 return entry;
3193 }
3194
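// Tries to load the handler's adapter blob from the AOT code cache. The cached
// blob records ENTRIES_COUNT entry offsets relative to the i2c entry (which is
// at offset 0), in the same order in which generate_adapter_code() writes them.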
3195 AdapterBlob* AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler) {
3196 ResourceMark rm;
3197 const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
3198 const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
3199 int offsets[AdapterHandlerEntry::ENTRIES_COUNT];
3200
3201 AdapterBlob* adapter_blob = nullptr;
3202 CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, offsets);
3203 if (blob != nullptr) {
3204 adapter_blob = blob->as_adapter_blob();
3205 address i2c_entry = adapter_blob->content_begin();
3206 assert(offsets[0] == 0, "sanity check");
3207 handler->set_entry_points(i2c_entry, i2c_entry + offsets[1], i2c_entry + offsets[2], i2c_entry + offsets[3],
3208 i2c_entry + offsets[4], i2c_entry + offsets[5], i2c_entry + offsets[6]);
3209 }
3210 return adapter_blob;
3211 }
3212
3213 #ifndef PRODUCT
3214 void AdapterHandlerLibrary::print_adapter_handler_info(outputStream* st, AdapterHandlerEntry* handler, AdapterBlob* adapter_blob) {
3215 ttyLocker ttyl;
3216 ResourceMark rm;
3217 int insts_size = adapter_blob->code_size();
3218 handler->print_adapter_on(tty);
3219 st->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
3220 handler->fingerprint()->as_basic_args_string(),
3221 handler->fingerprint()->as_string(), insts_size);
3222 st->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
3223 if (Verbose || PrintStubCode) {
3224 address first_pc = handler->base_address();
3225 if (first_pc != nullptr) {
3226 Disassembler::decode(first_pc, first_pc + insts_size, st, &adapter_blob->asm_remarks());
3227 st->cr();
3228 }
3229 }
3230 }
3231 #endif // PRODUCT
3232
3233 bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob,
3234 AdapterHandlerEntry* handler,
3235 CompiledEntrySignature& ces,
3236 bool allocate_code_blob,
3237 bool is_transient) {
3238 if (log_is_enabled(Info, perf, class, link)) {
3239 ClassLoader::perf_method_adapters_count()->inc();
3240 }
3241
3242 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
3243 CodeBuffer buffer(buf);
3244 short buffer_locs[20];
3245 buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
3246 sizeof(buffer_locs)/sizeof(relocInfo));
3247 MacroAssembler masm(&buffer);
3248
3249 // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
3250 SharedRuntime::generate_i2c2i_adapters(&masm,
3251 ces.args_on_stack(),
3252 ces.sig(),
3253 ces.regs(),
3254 ces.sig_cc(),
3255 ces.regs_cc(),
3256 ces.sig_cc_ro(),
3257 ces.regs_cc_ro(),
3258 handler,
3259 adapter_blob,
3260 allocate_code_blob);
3261
3262 if (ces.has_scalarized_args()) {
3263 // Save a C heap allocated version of the scalarized signature and store it in the adapter
3264 GrowableArray<SigEntry>* heap_sig = new (mtInternal) GrowableArray<SigEntry>(ces.sig_cc()->length(), mtInternal);
3265 heap_sig->appendAll(ces.sig_cc());
3266 handler->set_sig_cc(heap_sig);
3267 }
3268 #ifdef ASSERT
3269 if (VerifyAdapterSharing) {
3270 handler->save_code(buf->code_begin(), buffer.insts_size());
3271 if (is_transient) {
3272 return true;
3273 }
3274 }
3275 #endif
3276
3277 if (adapter_blob == nullptr) {
3278 // CodeCache is full, disable compilation
3279     // We ought to log this, but the compile log is only per compiler thread,
3280     // and we are on some nondescript Java thread.
3281 return false;
3282 }
3283 if (!is_transient && AOTCodeCache::is_dumping_adapter()) {
3284 // try to save generated code
3285 const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
3286 const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
3287 int entry_offset[AdapterHandlerEntry::ENTRIES_COUNT];
3288 assert(AdapterHandlerEntry::ENTRIES_COUNT == 7, "sanity");
3289 address i2c_entry = handler->get_i2c_entry();
3290 entry_offset[0] = 0; // i2c_entry offset
3291 entry_offset[1] = handler->get_c2i_entry() - i2c_entry;
3292 entry_offset[2] = handler->get_c2i_inline_entry() - i2c_entry;
3293 entry_offset[3] = handler->get_c2i_inline_ro_entry() - i2c_entry;
3294 entry_offset[4] = handler->get_c2i_unverified_entry() - i2c_entry;
3295 entry_offset[5] = handler->get_c2i_unverified_inline_entry() - i2c_entry;
3296 entry_offset[6] = handler->get_c2i_no_clinit_check_entry() - i2c_entry;
3297 bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, entry_offset);
3298 assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled");
3299 }
3300 handler->relocate(adapter_blob->content_begin());
3301 #ifndef PRODUCT
3302 // debugging support
3303 if (PrintAdapterHandlers || PrintStubCode) {
3304 print_adapter_handler_info(tty, handler, adapter_blob);
3305 }
3306 #endif
3307 return true;
3308 }
3309
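// Allocates a fresh AdapterHandlerEntry for the fingerprint of 'ces' and
// generates its code. Transient entries (used only for the VerifyAdapterSharing
// comparison) are not inserted into _adapter_handler_table.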
3310 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
3311 CompiledEntrySignature& ces,
3312 bool allocate_code_blob,
3313 bool is_transient) {
3314 AdapterFingerPrint* fp = AdapterFingerPrint::allocate(ces.sig_cc(), ces.has_inline_recv());
3315 #ifdef ASSERT
3316 // Verify that we can successfully restore the compiled entry signature object.
3317 CompiledEntrySignature ces_verify;
3318 ces_verify.initialize_from_fingerprint(fp);
3319 #endif
3320 AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fp);
3321 if (!generate_adapter_code(adapter_blob, handler, ces, allocate_code_blob, is_transient)) {
3322 AdapterHandlerEntry::deallocate(handler);
3323 return nullptr;
3324 }
3325 if (!is_transient) {
3326 assert_lock_strong(AdapterHandlerLibrary_lock);
3327 _adapter_handler_table->put(fp, handler);
3328 }
3329 return handler;
3330 }
3331
3332 #if INCLUDE_CDS
3333 void AdapterHandlerEntry::remove_unshareable_info() {
3334 #ifdef ASSERT
3335 _saved_code = nullptr;
3336 _saved_code_length = 0;
3337 #endif // ASSERT
3338 set_entry_points(nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, false);
3339 }
3340
3341 class CopyAdapterTableToArchive : StackObj {
3342 private:
3343 CompactHashtableWriter* _writer;
3344 ArchiveBuilder* _builder;
3345 public:
3346 CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
3347 _builder(ArchiveBuilder::current())
3348 {}
3349
3350 bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
3351 LogStreamHandle(Trace, aot) lsh;
3352 if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
3353 assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
3354 AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
3355 assert(buffered_fp != nullptr,"sanity check");
3356 AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
3357 assert(buffered_entry != nullptr,"sanity check");
3358
3404 // This method is used during a production run to link archived adapters (stored in the AOT cache)
3405 // to their code in the AOT code cache.
3406 void AdapterHandlerEntry::link() {
3407 AdapterBlob* adapter_blob = nullptr;
3408 ResourceMark rm;
3409 assert(_fingerprint != nullptr, "_fingerprint must not be null");
3410 bool generate_code = false;
3411   // Generate code only if the AOTCodeCache is not available, adapter caching
3412   // is disabled, or we failed to link the AdapterHandlerEntry to its code in
3413   // the AOTCodeCache.
3414 if (AOTCodeCache::is_using_adapter()) {
3415 adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this);
3416 if (adapter_blob == nullptr) {
3417 log_warning(aot)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string());
3418 generate_code = true;
3419 }
3420 } else {
3421 generate_code = true;
3422 }
3423 if (generate_code) {
3424 CompiledEntrySignature ces;
3425 ces.initialize_from_fingerprint(_fingerprint);
3426 if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, ces, true, false)) {
3427 // Don't throw exceptions during VM initialization because java.lang.* classes
3428 // might not have been initialized, causing problems when constructing the
3429 // Java exception object.
3430 vm_exit_during_initialization("Out of space in CodeCache for adapters");
3431 }
3432 }
3433 // Outside of the lock
3434 if (adapter_blob != nullptr) {
3435 post_adapter_creation(adapter_blob, this);
3436 }
3437 assert(_linked, "AdapterHandlerEntry must now be linked");
3438 }
3439
3440 void AdapterHandlerLibrary::link_aot_adapters() {
3441 assert(AOTCodeCache::is_using_adapter(), "AOT adapters code should be available");
3442 _aot_adapter_handler_table.iterate([](AdapterHandlerEntry* entry) {
3443 assert(!entry->is_linked(), "AdapterHandlerEntry is already linked!");
3444 entry->link();
3445 });
3446 }
3447
3448 // This method is called during a production run to look up the simple adapters
3449 // in the archived adapter handler table.
3450 void AdapterHandlerLibrary::lookup_simple_adapters() {
3451 assert(!_aot_adapter_handler_table.empty(), "archived adapter handler table is empty");
3452
3453 MutexLocker mu(AdapterHandlerLibrary_lock);
3454 ResourceMark rm;
3455 CompiledEntrySignature no_args;
3456 no_args.compute_calling_conventions();
3457 _no_arg_handler = lookup(no_args.sig_cc(), no_args.has_inline_recv());
3458
3459 CompiledEntrySignature obj_args;
3460 SigEntry::add_entry(obj_args.sig(), T_OBJECT);
3461 obj_args.compute_calling_conventions();
3462 _obj_arg_handler = lookup(obj_args.sig_cc(), obj_args.has_inline_recv());
3463
3464 CompiledEntrySignature int_args;
3465 SigEntry::add_entry(int_args.sig(), T_INT);
3466 int_args.compute_calling_conventions();
3467 _int_arg_handler = lookup(int_args.sig_cc(), int_args.has_inline_recv());
3468
3469 CompiledEntrySignature obj_int_args;
3470 SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
3471 SigEntry::add_entry(obj_int_args.sig(), T_INT);
3472 obj_int_args.compute_calling_conventions();
3473 _obj_int_arg_handler = lookup(obj_int_args.sig_cc(), obj_int_args.has_inline_recv());
3474
3475 CompiledEntrySignature obj_obj_args;
3476 SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3477 SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
3478 obj_obj_args.compute_calling_conventions();
3479 _obj_obj_arg_handler = lookup(obj_obj_args.sig_cc(), obj_obj_args.has_inline_recv());
3480
3481 assert(_no_arg_handler != nullptr &&
3482 _obj_arg_handler != nullptr &&
3483 _int_arg_handler != nullptr &&
3484 _obj_int_arg_handler != nullptr &&
3485 _obj_obj_arg_handler != nullptr, "Initial adapters not found in archived adapter handler table");
3486 assert(_no_arg_handler->is_linked() &&
3487 _obj_arg_handler->is_linked() &&
3488 _int_arg_handler->is_linked() &&
3489 _obj_int_arg_handler->is_linked() &&
3490 _obj_obj_arg_handler->is_linked(), "Initial adapters not in linked state");
3491 }
3492 #endif // INCLUDE_CDS
3493
3494 address AdapterHandlerEntry::base_address() {
3495 address base = _i2c_entry;
3496 if (base == nullptr) base = _c2i_entry;
3497 assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
3498 assert(base <= _c2i_inline_entry || _c2i_inline_entry == nullptr, "");
3499 assert(base <= _c2i_inline_ro_entry || _c2i_inline_ro_entry == nullptr, "");
3500 assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
3501 assert(base <= _c2i_unverified_inline_entry || _c2i_unverified_inline_entry == nullptr, "");
3502 assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
3503 return base;
3504 }
3505
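// Shifts every non-null entry point by (new_base - old_base). Called once the
// generated code has been copied to its final location, e.g. from
// generate_adapter_code() with the AdapterBlob's content_begin().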
3506 void AdapterHandlerEntry::relocate(address new_base) {
3507 address old_base = base_address();
3508 assert(old_base != nullptr, "");
3509 ptrdiff_t delta = new_base - old_base;
3510 if (_i2c_entry != nullptr)
3511 _i2c_entry += delta;
3512 if (_c2i_entry != nullptr)
3513 _c2i_entry += delta;
3514 if (_c2i_inline_entry != nullptr)
3515 _c2i_inline_entry += delta;
3516 if (_c2i_inline_ro_entry != nullptr)
3517 _c2i_inline_ro_entry += delta;
3518 if (_c2i_unverified_entry != nullptr)
3519 _c2i_unverified_entry += delta;
3520 if (_c2i_unverified_inline_entry != nullptr)
3521 _c2i_unverified_inline_entry += delta;
3522 if (_c2i_no_clinit_check_entry != nullptr)
3523 _c2i_no_clinit_check_entry += delta;
3524 assert(base_address() == new_base, "");
3525 }
3526
3527 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3528 LogStreamHandle(Trace, aot) lsh;
3529 if (lsh.is_enabled()) {
3530 lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3531 lsh.cr();
3532 }
3533 it->push(&_fingerprint);
3534 }
3535
3536 AdapterHandlerEntry::~AdapterHandlerEntry() {
3537 if (_fingerprint != nullptr) {
3538 AdapterFingerPrint::deallocate(_fingerprint);
3539 _fingerprint = nullptr;
3540 }
3541 if (_sig_cc != nullptr) {
3542 delete _sig_cc;
3543 }
3544 #ifdef ASSERT
3545 FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3546 #endif
3547 FreeHeap(this);
3548 }
3549
3550
3551 #ifdef ASSERT
3552 // Capture the code before relocation so that it can be compared
3553 // against other versions. If the code is captured after relocation
3554 // then relative instructions won't be equivalent.
3555 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3556 _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3557 _saved_code_length = length;
3558 memcpy(_saved_code, buffer, length);
3559 }
3560
3561
3562 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3563 assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");
3611
3612 struct { double data[20]; } locs_buf;
3613 struct { double data[20]; } stubs_locs_buf;
3614 buffer.insts()->initialize_shared_locs((relocInfo*)&locs_buf, sizeof(locs_buf) / sizeof(relocInfo));
3615 #if defined(AARCH64) || defined(PPC64)
3616 // On AArch64 with ZGC and nmethod entry barriers, we need all oops to be
3617 // in the constant pool to ensure ordering between the barrier and oops
3618 // accesses. For native_wrappers we need a constant.
3619 // On PPC64 the continuation enter intrinsic needs the constant pool for the compiled
3620 // static java call that is resolved in the runtime.
3621 if (PPC64_ONLY(method->is_continuation_enter_intrinsic() &&) true) {
3622 buffer.initialize_consts_size(8 PPC64_ONLY(+ 24));
3623 }
3624 #endif
3625 buffer.stubs()->initialize_shared_locs((relocInfo*)&stubs_locs_buf, sizeof(stubs_locs_buf) / sizeof(relocInfo));
3626 MacroAssembler _masm(&buffer);
3627
3628 // Fill in the signature array, for the calling-convention call.
3629 const int total_args_passed = method->size_of_parameters();
3630
3631 BasicType stack_sig_bt[16];
3632 VMRegPair stack_regs[16];
3633 BasicType* sig_bt = (total_args_passed <= 16) ? stack_sig_bt : NEW_RESOURCE_ARRAY(BasicType, total_args_passed);
3634 VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
3635
3636 int i = 0;
3637 if (!method->is_static()) { // Pass in receiver first
3638 sig_bt[i++] = T_OBJECT;
3639 }
3640 SignatureStream ss(method->signature());
3641 for (; !ss.at_return_type(); ss.next()) {
3642 sig_bt[i++] = ss.type(); // Collect remaining bits of signature
3643 if (ss.type() == T_LONG || ss.type() == T_DOUBLE) {
3644 sig_bt[i++] = T_VOID; // Longs & doubles take 2 Java slots
3645 }
3646 }
3647 assert(i == total_args_passed, "");
3648 BasicType ret_type = ss.type();
3649
3650 // Now get the compiled-Java arguments layout.
3651 SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
3652
3653 // Generate the compiled-to-native wrapper code
3654 nm = SharedRuntime::generate_native_wrapper(&_masm, method, compile_id, sig_bt, regs, ret_type);
3655
3656 if (nm != nullptr) {
3657 {
3658 MutexLocker pl(NMethodState_lock, Mutex::_no_safepoint_check_flag);
3659 if (nm->make_in_use()) {
3660 method->set_code(method, nm);
3661 }
3662 }
3663
3664 DirectiveSet* directive = DirectivesStack::getMatchingDirective(method, CompileBroker::compiler(CompLevel_simple));
3665 if (directive->PrintAssemblyOption) {
3666 nm->print_code();
3667 }
3668 DirectivesStack::release(directive);
3911 a->print_adapter_on(st);
3912 return true;
3913 } else {
3914 return false; // keep looking
3915 }
3916 };
3917 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3918 _adapter_handler_table->iterate(findblob_runtime_table);
3919 }
3920 assert(found, "Should have found handler");
3921 }
3922
3923 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3924 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3925 if (get_i2c_entry() != nullptr) {
3926 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3927 }
3928 if (get_c2i_entry() != nullptr) {
3929 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3930 }
3931   if (get_c2i_inline_entry() != nullptr) {
3932     st->print(" c2iVE: " INTPTR_FORMAT, p2i(get_c2i_inline_entry()));
3933   }
3934   if (get_c2i_inline_ro_entry() != nullptr) {
3935     st->print(" c2iVROE: " INTPTR_FORMAT, p2i(get_c2i_inline_ro_entry()));
3936   }
3937 if (get_c2i_unverified_entry() != nullptr) {
3938 st->print(" c2iUE: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3939 }
3940   if (get_c2i_unverified_inline_entry() != nullptr) {
3941 st->print(" c2iUVE: " INTPTR_FORMAT, p2i(get_c2i_unverified_inline_entry()));
3942 }
3943 if (get_c2i_no_clinit_check_entry() != nullptr) {
3944 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3945 }
3946 st->cr();
3947 }
3948
3949 #ifndef PRODUCT
3950
3951 void AdapterHandlerLibrary::print_statistics() {
3952 print_table_statistics();
3953 }
3954
3955 #endif /* PRODUCT */
3956
3957 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3958 if (entry == _abstract_method_handler) {
3959 return true;
3960 }
3961 return false;
4017 event.set_method(method);
4018 event.commit();
4019 }
4020 }
4021 }
4022 return activation;
4023 }
4024
4025 void SharedRuntime::on_slowpath_allocation_exit(JavaThread* current) {
4026 // After any safepoint, just before going back to compiled code,
4027 // we inform the GC that we will be doing initializing writes to
4028 // this object in the future without emitting card-marks, so
4029 // GC may take any compensating steps.
4030
4031 oop new_obj = current->vm_result_oop();
4032 if (new_obj == nullptr) return;
4033
4034 BarrierSet *bs = BarrierSet::barrier_set();
4035 bs->on_slowpath_allocation_exit(current, new_obj);
4036 }
4037
4038 // We are at a compiled-code-to-interpreter call. We need backing
4039 // buffers for all inline type arguments. Allocate an object array to
4040 // hold them (convenient because once we're done with it we don't have
4041 // to worry about freeing it).
4042 oop SharedRuntime::allocate_inline_types_impl(JavaThread* current, methodHandle callee, bool allocate_receiver, TRAPS) {
4043 assert(InlineTypePassFieldsAsArgs, "no reason to call this");
4044 ResourceMark rm;
4045
4046 int nb_slots = 0;
4047 InstanceKlass* holder = callee->method_holder();
4048 allocate_receiver &= !callee->is_static() && holder->is_inline_klass() && callee->is_scalarized_arg(0);
4049 if (allocate_receiver) {
4050 nb_slots++;
4051 }
4052 int arg_num = callee->is_static() ? 0 : 1;
4053 for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4054 BasicType bt = ss.type();
4055 if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4056 nb_slots++;
4057 }
4058 if (bt != T_VOID) {
4059 arg_num++;
4060 }
4061 }
4062 objArrayOop array_oop = oopFactory::new_objectArray(nb_slots, CHECK_NULL);
4063 objArrayHandle array(THREAD, array_oop);
4064 arg_num = callee->is_static() ? 0 : 1;
4065 int i = 0;
4066 if (allocate_receiver) {
4067 InlineKlass* vk = InlineKlass::cast(holder);
4068 oop res = vk->allocate_instance(CHECK_NULL);
4069 array->obj_at_put(i++, res);
4070 }
4071 for (SignatureStream ss(callee->signature()); !ss.at_return_type(); ss.next()) {
4072 BasicType bt = ss.type();
4073 if (bt == T_OBJECT && callee->is_scalarized_arg(arg_num)) {
4074 InlineKlass* vk = ss.as_inline_klass(holder);
4075 assert(vk != nullptr, "Unexpected klass");
4076 oop res = vk->allocate_instance(CHECK_NULL);
4077 array->obj_at_put(i++, res);
4078 }
4079 if (bt != T_VOID) {
4080 arg_num++;
4081 }
4082 }
4083 return array();
4084 }
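
// For example (illustrative): for a call to 'void m(V1 a, V2 b)' where the
// receiver and both arguments are scalarized, the array returned above holds
// three freshly allocated buffer instances, receiver first, then the arguments
// in signature order.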
4085
4086 JRT_ENTRY(void, SharedRuntime::allocate_inline_types(JavaThread* current, Method* callee_method, bool allocate_receiver))
4087 methodHandle callee(current, callee_method);
4088 oop array = SharedRuntime::allocate_inline_types_impl(current, callee, allocate_receiver, CHECK);
4089 current->set_vm_result_oop(array);
4090 current->set_vm_result_metadata(callee()); // TODO: required to keep callee live?
4091 JRT_END
4092
4093 // We're returning from an interpreted method: load each field into a
4094 // register following the calling convention
4095 JRT_LEAF(void, SharedRuntime::load_inline_type_fields_in_regs(JavaThread* current, oopDesc* res))
4096 {
4097 assert(res->klass()->is_inline_klass(), "only inline types here");
4098 ResourceMark rm;
4099 RegisterMap reg_map(current,
4100 RegisterMap::UpdateMap::include,
4101 RegisterMap::ProcessFrames::include,
4102 RegisterMap::WalkContinuation::skip);
4103 frame stubFrame = current->last_frame();
4104   frame callerFrame = stubFrame.sender(&reg_map);
4105 assert(callerFrame.is_interpreted_frame(), "should be coming from interpreter");
4106
4107 InlineKlass* vk = InlineKlass::cast(res->klass());
4108
4109 const Array<SigEntry>* sig_vk = vk->extended_sig();
4110 const Array<VMRegPair>* regs = vk->return_regs();
4111
4112 if (regs == nullptr) {
4113 // The fields of the inline klass don't fit in registers, bail out
4114 return;
4115 }
4116
4117 int j = 1;
4118 for (int i = 0; i < sig_vk->length(); i++) {
4119 BasicType bt = sig_vk->at(i)._bt;
4120 if (bt == T_METADATA) {
4121 continue;
4122 }
4123 if (bt == T_VOID) {
4124 if (sig_vk->at(i-1)._bt == T_LONG ||
4125 sig_vk->at(i-1)._bt == T_DOUBLE) {
4126 j++;
4127 }
4128 continue;
4129 }
4130 int off = sig_vk->at(i)._offset;
4131 assert(off > 0, "offset in object should be positive");
4132 VMRegPair pair = regs->at(j);
4133 address loc = reg_map.location(pair.first(), nullptr);
4134 switch(bt) {
4135 case T_BOOLEAN:
4136 *(jboolean*)loc = res->bool_field(off);
4137 break;
4138 case T_CHAR:
4139 *(jchar*)loc = res->char_field(off);
4140 break;
4141 case T_BYTE:
4142 *(jbyte*)loc = res->byte_field(off);
4143 break;
4144 case T_SHORT:
4145 *(jshort*)loc = res->short_field(off);
4146 break;
4147 case T_INT: {
4148 *(jint*)loc = res->int_field(off);
4149 break;
4150 }
4151 case T_LONG:
4152 #ifdef _LP64
4153 *(intptr_t*)loc = res->long_field(off);
4154 #else
4155 Unimplemented();
4156 #endif
4157 break;
4158 case T_OBJECT:
4159 case T_ARRAY: {
4160 *(oop*)loc = res->obj_field(off);
4161 break;
4162 }
4163 case T_FLOAT:
4164 *(jfloat*)loc = res->float_field(off);
4165 break;
4166 case T_DOUBLE:
4167 *(jdouble*)loc = res->double_field(off);
4168 break;
4169 default:
4170 ShouldNotReachHere();
4171 }
4172 j++;
4173 }
4174 assert(j == regs->length(), "missed a field?");
4175
4176 #ifdef ASSERT
4177 VMRegPair pair = regs->at(0);
4178 address loc = reg_map.location(pair.first(), nullptr);
4179 assert(*(oopDesc**)loc == res, "overwritten object");
4180 #endif
4181
4182 current->set_vm_result_oop(res);
4183 }
4184 JRT_END
4185
4186 // We've returned to an interpreted method; the interpreter needs a
4187 // reference to an inline type instance. Allocate it and initialize it
4188 // from the field values in registers.
4189 JRT_BLOCK_ENTRY(void, SharedRuntime::store_inline_type_fields_to_buf(JavaThread* current, intptr_t res))
4190 {
4191 ResourceMark rm;
4192 RegisterMap reg_map(current,
4193 RegisterMap::UpdateMap::include,
4194 RegisterMap::ProcessFrames::include,
4195 RegisterMap::WalkContinuation::skip);
4196 frame stubFrame = current->last_frame();
4197   frame callerFrame = stubFrame.sender(&reg_map);
4198
4199 #ifdef ASSERT
4200 InlineKlass* verif_vk = InlineKlass::returned_inline_klass(reg_map);
4201 #endif
4202
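  // Tagged-pointer convention for inline type returns, as decoded below: if bit 0
  // of res is set, res is the InlineKlass* and the field values are still in
  // registers; otherwise res is the buffered oop itself.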
4203 if (!is_set_nth_bit(res, 0)) {
4204 // We're not returning with inline type fields in registers (the
4205 // calling convention didn't allow it for this inline klass)
4206 assert(!Metaspace::contains((void*)res), "should be oop or pointer in buffer area");
4207 current->set_vm_result_oop((oopDesc*)res);
4208 assert(verif_vk == nullptr, "broken calling convention");
4209 return;
4210 }
4211
4212 clear_nth_bit(res, 0);
4213 InlineKlass* vk = (InlineKlass*)res;
4214 assert(verif_vk == vk, "broken calling convention");
4215 assert(Metaspace::contains((void*)res), "should be klass");
4216
4217 // Allocate handles for every oop field so they are safe in case of
4218 // a safepoint when allocating
4219 GrowableArray<Handle> handles;
4220 vk->save_oop_fields(reg_map, handles);
4221
4222   // It was unsafe to safepoint until the oop fields were handlized above; the JRT_BLOCK below may safepoint
4223 JRT_BLOCK;
4224 {
4225 JavaThread* THREAD = current;
4226 oop vt = vk->realloc_result(reg_map, handles, CHECK);
4227 current->set_vm_result_oop(vt);
4228 }
4229 JRT_BLOCK_END;
4230 }
4231 JRT_END
|