5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classLoader.hpp"
26 #include "classfile/javaClasses.inline.hpp"
27 #include "classfile/stringTable.hpp"
28 #include "classfile/vmClasses.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "code/codeCache.hpp"
31 #include "code/compiledIC.hpp"
32 #include "code/nmethod.inline.hpp"
33 #include "code/scopeDesc.hpp"
34 #include "code/vtableStubs.hpp"
35 #include "compiler/abstractCompiler.hpp"
36 #include "compiler/compileBroker.hpp"
37 #include "compiler/disassembler.hpp"
38 #include "gc/shared/barrierSet.hpp"
39 #include "gc/shared/collectedHeap.hpp"
40 #include "interpreter/interpreter.hpp"
41 #include "interpreter/interpreterRuntime.hpp"
42 #include "jvm.h"
43 #include "jfr/jfrEvents.hpp"
44 #include "logging/log.hpp"
45 #include "memory/resourceArea.hpp"
46 #include "memory/universe.hpp"
47 #include "metaprogramming/primitiveConversions.hpp"
48 #include "oops/klass.hpp"
49 #include "oops/method.inline.hpp"
50 #include "oops/objArrayKlass.hpp"
51 #include "oops/oop.inline.hpp"
52 #include "prims/forte.hpp"
53 #include "prims/jvmtiExport.hpp"
54 #include "prims/jvmtiThreadState.hpp"
55 #include "prims/methodHandles.hpp"
56 #include "prims/nativeLookup.hpp"
57 #include "runtime/arguments.hpp"
58 #include "runtime/atomic.hpp"
59 #include "runtime/basicLock.inline.hpp"
60 #include "runtime/frame.inline.hpp"
61 #include "runtime/handles.inline.hpp"
62 #include "runtime/init.hpp"
63 #include "runtime/interfaceSupport.inline.hpp"
64 #include "runtime/java.hpp"
65 #include "runtime/javaCalls.hpp"
66 #include "runtime/jniHandles.inline.hpp"
67 #include "runtime/perfData.hpp"
68 #include "runtime/sharedRuntime.hpp"
69 #include "runtime/stackWatermarkSet.hpp"
70 #include "runtime/stubRoutines.hpp"
71 #include "runtime/synchronizer.inline.hpp"
72 #include "runtime/timerTrace.hpp"
73 #include "runtime/vframe.inline.hpp"
74 #include "runtime/vframeArray.hpp"
75 #include "runtime/vm_version.hpp"
76 #include "utilities/copy.hpp"
77 #include "utilities/dtrace.hpp"
78 #include "utilities/events.hpp"
79 #include "utilities/globalDefinitions.hpp"
80 #include "utilities/resourceHash.hpp"
81 #include "utilities/macros.hpp"
82 #include "utilities/xmlstream.hpp"
83 #ifdef COMPILER1
84 #include "c1/c1_Runtime1.hpp"
85 #endif
86 #if INCLUDE_JFR
87 #include "jfr/jfr.hpp"
88 #endif
89
90 // Shared runtime stub routines reside in their own unique blob with a
91 // single entry point
92
93
94 #define SHARED_STUB_FIELD_DEFINE(name, type) \
95 type SharedRuntime::BLOB_FIELD_NAME(name);
96 SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
97 #undef SHARED_STUB_FIELD_DEFINE
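// Illustrative note (not generated code): SHARED_STUBS_DO is an x-macro that
// applies SHARED_STUB_FIELD_DEFINE to each (name, type) pair declared in
// sharedRuntime.hpp. Assuming BLOB_FIELD_NAME(name) expands to _##name##_blob,
// an entry such as (wrong_method, RuntimeStub*) would expand to:
//
//   RuntimeStub* SharedRuntime::_wrong_method_blob;
//
// which matches the _wrong_method_blob field assigned in generate_stubs() below.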
98
99 nmethod* SharedRuntime::_cont_doYield_stub;
100
101 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
102 const char *SharedRuntime::_stub_names[] = {
103 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
104 };
105
106 //----------------------------generate_stubs-----------------------------------
107 void SharedRuntime::generate_initial_stubs() {
108 // Build this early so it's available for the interpreter.
109 _throw_StackOverflowError_blob =
110 generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
111 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
112 }
113
114 void SharedRuntime::generate_stubs() {
115 _wrong_method_blob =
116 generate_resolve_blob(SharedStubId::wrong_method_id,
117 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
118 _wrong_method_abstract_blob =
119 generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
120 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));
130 _resolve_static_call_blob =
131 generate_resolve_blob(SharedStubId::resolve_static_call_id,
132 CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C));
133
134 _throw_delayed_StackOverflowError_blob =
135 generate_throw_exception(SharedStubId::throw_delayed_StackOverflowError_id,
136 CAST_FROM_FN_PTR(address, SharedRuntime::throw_delayed_StackOverflowError));
137
138 _throw_AbstractMethodError_blob =
139 generate_throw_exception(SharedStubId::throw_AbstractMethodError_id,
140 CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
141
142 _throw_IncompatibleClassChangeError_blob =
143 generate_throw_exception(SharedStubId::throw_IncompatibleClassChangeError_id,
144 CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
145
146 _throw_NullPointerException_at_call_blob =
147 generate_throw_exception(SharedStubId::throw_NullPointerException_at_call_id,
148 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
149
150 AdapterHandlerLibrary::initialize();
151
152 #if COMPILER2_OR_JVMCI
153 // Vectors are generated only by C2 and JVMCI.
154 bool support_wide = is_wide_vector(MaxVectorSize);
155 if (support_wide) {
156 _polling_page_vectors_safepoint_handler_blob =
157 generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
158 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
159 }
160 #endif // COMPILER2_OR_JVMCI
161 _polling_page_safepoint_handler_blob =
162 generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
163 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
164 _polling_page_return_handler_blob =
165 generate_handler_blob(SharedStubId::polling_page_return_handler_id,
166 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
167
168 generate_deopt_blob();
169 }
170
171 #if INCLUDE_JFR
172 //------------------------------generate jfr runtime stubs ------
173 void SharedRuntime::generate_jfr_stubs() {
174 ResourceMark rm;
175 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
176 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
177
178 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
179 _jfr_return_lease_blob = generate_jfr_return_lease();
180 }
181
182 #endif // INCLUDE_JFR
183
184 #include <math.h>
185
186 // Implementation of SharedRuntime
187
188 #ifndef PRODUCT
189 // For statistics
190 uint SharedRuntime::_ic_miss_ctr = 0;
191 uint SharedRuntime::_wrong_method_ctr = 0;
192 uint SharedRuntime::_resolve_static_ctr = 0;
193 uint SharedRuntime::_resolve_virtual_ctr = 0;
194 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
195 uint SharedRuntime::_implicit_null_throws = 0;
196 uint SharedRuntime::_implicit_div0_throws = 0;
197
198 int64_t SharedRuntime::_nof_normal_calls = 0;
199 int64_t SharedRuntime::_nof_inlined_calls = 0;
200 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
201 int64_t SharedRuntime::_nof_static_calls = 0;
202 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
203 int64_t SharedRuntime::_nof_interface_calls = 0;
204 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
205
206 uint SharedRuntime::_new_instance_ctr=0;
207 uint SharedRuntime::_new_array_ctr=0;
208 uint SharedRuntime::_multi2_ctr=0;
209 uint SharedRuntime::_multi3_ctr=0;
210 uint SharedRuntime::_multi4_ctr=0;
211 uint SharedRuntime::_multi5_ctr=0;
212 uint SharedRuntime::_mon_enter_stub_ctr=0;
213 uint SharedRuntime::_mon_exit_stub_ctr=0;
214 uint SharedRuntime::_mon_enter_ctr=0;
228 uint SharedRuntime::_unsafe_set_memory_ctr=0;
229
230 int SharedRuntime::_ICmiss_index = 0;
231 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
232 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
233
234
235 void SharedRuntime::trace_ic_miss(address at) {
236 for (int i = 0; i < _ICmiss_index; i++) {
237 if (_ICmiss_at[i] == at) {
238 _ICmiss_count[i]++;
239 return;
240 }
241 }
242 int index = _ICmiss_index++;
243 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
244 _ICmiss_at[index] = at;
245 _ICmiss_count[index] = 1;
246 }
247
248 void SharedRuntime::print_ic_miss_histogram() {
249 if (ICMissHistogram) {
250 tty->print_cr("IC Miss Histogram:");
251 int tot_misses = 0;
252 for (int i = 0; i < _ICmiss_index; i++) {
253 tty->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
254 tot_misses += _ICmiss_count[i];
255 }
256 tty->print_cr("Total IC misses: %7d", tot_misses);
257 }
258 }
259 #endif // PRODUCT
260
261
262 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
263 return x * y;
264 JRT_END
265
266
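// Note on the special case in ldiv/lrem below: dividing min_jlong (-2^63) by -1
// overflows, because +2^63 is not representable as a jlong (on x86, for example,
// the idiv instruction raises a divide error for this input). Java instead defines
// the quotient to be the dividend itself and the remainder to be 0, so ldiv
// returns x and lrem returns 0 for that one input pair.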
267 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
268 if (x == min_jlong && y == CONST64(-1)) {
269 return x;
270 } else {
271 return x / y;
272 }
273 JRT_END
274
275
276 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
277 if (x == min_jlong && y == CONST64(-1)) {
278 return 0;
279 } else {
706 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
707 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
708 JNIHandles::destroy_local(vthread);
709 JRT_END
710 #endif // INCLUDE_JVMTI
711
712 // The interpreter code that calls this tracing function is only
713 // generated when Unified Logging (UL) is enabled for the redefine, class
714 // tags at the right level. Since obsolete methods are never compiled, we
715 // don't have to modify the compilers to generate calls to this function.
716 //
717 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
718 JavaThread* thread, Method* method))
719 if (method->is_obsolete()) {
720 // We are calling an obsolete method, but this is not necessarily
721 // an error. Our method could have been redefined just after we
722 // fetched the Method* from the constant pool.
723 ResourceMark rm;
724 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
725 }
726 return 0;
727 JRT_END
728
729 // ret_pc points into the caller; we are returning the caller's exception handler
730 // for the given exception.
731 // Note that the implementation of this method assumes it's only called when an exception has actually occurred.
732 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
733 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
734 assert(nm != nullptr, "must exist");
735 ResourceMark rm;
736
737 #if INCLUDE_JVMCI
738 if (nm->is_compiled_by_jvmci()) {
739 // lookup exception handler for this pc
740 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
741 ExceptionHandlerTable table(nm);
742 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
743 if (t != nullptr) {
744 return nm->code_begin() + t->pco();
745 } else {
1345
1346 // determine call info & receiver
1347 // note: a) receiver is null for static calls
1348 // b) an exception is thrown if receiver is null for non-static calls
1349 CallInfo call_info;
1350 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1351 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1352
1353 NoSafepointVerifier nsv;
1354
1355 methodHandle callee_method(current, call_info.selected_method());
1356
1357 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1358 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1359 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1360 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1361 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1362
1363 assert(!caller_nm->is_unloading(), "It should not be unloading");
1364
1365 #ifndef PRODUCT
1366 // tracing/debugging/statistics
1367 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1368 (is_virtual) ? (&_resolve_virtual_ctr) :
1369 (&_resolve_static_ctr);
1370 Atomic::inc(addr);
1371
1372 if (TraceCallFixup) {
1373 ResourceMark rm(current);
1374 tty->print("resolving %s%s (%s) call to",
1375 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1376 Bytecodes::name(invoke_code));
1377 callee_method->print_short_name(tty);
1378 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1379 p2i(caller_frame.pc()), p2i(callee_method->code()));
1380 }
1381 #endif
1382
1383 if (invoke_code == Bytecodes::_invokestatic) {
1384 assert(callee_method->method_holder()->is_initialized() ||
1385 callee_method->method_holder()->is_reentrant_initialization(current),
1386 "invalid class initialization state for invoke_static");
1387 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1388 // In order to keep class initialization check, do not patch call
1389 // site for static call when the class is not fully initialized.
1390 // Proper check is enforced by call site re-resolution on every invocation.
1391 //
1407
1408 // Make sure the callee nmethod does not get deoptimized and removed before
1409 // we are done patching the code.
1410
1411
1412 CompiledICLocker ml(caller_nm);
1413 if (is_virtual && !is_optimized) {
1414 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1415 inline_cache->update(&call_info, receiver->klass());
1416 } else {
1417 // Callsite is a direct call - set it to the destination method
1418 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1419 callsite->set(callee_method);
1420 }
1421
1422 return callee_method;
1423 }
1424
1425 // Inline caches exist only in compiled code
1426 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1427 #ifdef ASSERT
1428 RegisterMap reg_map(current,
1429 RegisterMap::UpdateMap::skip,
1430 RegisterMap::ProcessFrames::include,
1431 RegisterMap::WalkContinuation::skip);
1432 frame stub_frame = current->last_frame();
1433 assert(stub_frame.is_runtime_frame(), "sanity check");
1434 frame caller_frame = stub_frame.sender(&reg_map);
1435 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1436 #endif /* ASSERT */
1437
1438 methodHandle callee_method;
1439 JRT_BLOCK
1440 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1441 // Return Method* through TLS
1442 current->set_vm_result_2(callee_method());
1443 JRT_BLOCK_END
1444 // return compiled code entry point after potential safepoints
1445 return get_resolved_entry(current, callee_method);
1446 JRT_END
1447
1448
1449 // Handle call site that has been made non-entrant
1450 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1451 // 6243940 We might end up in here if the callee is deoptimized
1452 // as we race to call it. We don't want to take a safepoint if
1453 // the caller was interpreted because the caller frame will look
1454 // interpreted to the stack walkers and arguments are now
1455 // "compiled" so it is much better to make this transition
1456 // invisible to the stack walking code. The i2c path will
1457 // place the callee method in the callee_target. It is stashed
1458 // there because if we try to find the callee by normal means a
1459 // safepoint is possible and we would have trouble gc'ing the compiled args.
1460 RegisterMap reg_map(current,
1461 RegisterMap::UpdateMap::skip,
1462 RegisterMap::ProcessFrames::include,
1463 RegisterMap::WalkContinuation::skip);
1464 frame stub_frame = current->last_frame();
1465 assert(stub_frame.is_runtime_frame(), "sanity check");
1466 frame caller_frame = stub_frame.sender(&reg_map);
1467
1468 if (caller_frame.is_interpreted_frame() ||
1469 caller_frame.is_entry_frame() ||
1470 caller_frame.is_upcall_stub_frame()) {
1483 // so bypassing it in c2i adapter is benign.
1484 return callee->get_c2i_no_clinit_check_entry();
1485 } else {
1486 return callee->get_c2i_entry();
1487 }
1488 }
1489
1490 // Must be compiled to compiled path which is safe to stackwalk
1491 methodHandle callee_method;
1492 JRT_BLOCK
1493 // Force resolving of caller (if we called from compiled frame)
1494 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1495 current->set_vm_result_2(callee_method());
1496 JRT_BLOCK_END
1497 // return compiled code entry point after potential safepoints
1498 return get_resolved_entry(current, callee_method);
1499 JRT_END
1500
1501 // Handle abstract method call
1502 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1503 // Verbose error message for AbstractMethodError.
1504 // Get the called method from the invoke bytecode.
1505 vframeStream vfst(current, true);
1506 assert(!vfst.at_end(), "Java frame must exist");
1507 methodHandle caller(current, vfst.method());
1508 Bytecode_invoke invoke(caller, vfst.bci());
1509 DEBUG_ONLY( invoke.verify(); )
1510
1511 // Find the compiled caller frame.
1512 RegisterMap reg_map(current,
1513 RegisterMap::UpdateMap::include,
1514 RegisterMap::ProcessFrames::include,
1515 RegisterMap::WalkContinuation::skip);
1516 frame stubFrame = current->last_frame();
1517 assert(stubFrame.is_runtime_frame(), "must be");
1518 frame callerFrame = stubFrame.sender(&reg_map);
1519 assert(callerFrame.is_compiled_frame(), "must be");
1520
1521 // Install exception and return forward entry.
1522 address res = SharedRuntime::throw_AbstractMethodError_entry();
1529 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1530 }
1531 JRT_BLOCK_END
1532 return res;
1533 JRT_END
1534
1535 // Return the verified code entry if interp_only_mode is not set for the current thread;
1536 // otherwise return the c2i entry.
1537 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1538 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1539 // In interp_only_mode we need to go to the interpreted entry
1540 // The c2i won't patch in this mode -- see fixup_callers_callsite
1541 return callee_method->get_c2i_entry();
1542 }
1543 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1544 return callee_method->verified_code_entry();
1545 }
1546
1547 // resolve a static call and patch code
1548 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1549 methodHandle callee_method;
1550 bool enter_special = false;
1551 JRT_BLOCK
1552 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1553 current->set_vm_result_2(callee_method());
1554 JRT_BLOCK_END
1555 // return compiled code entry point after potential safepoints
1556 return get_resolved_entry(current, callee_method);
1557 JRT_END
1558
1559 // resolve virtual call and update inline cache to monomorphic
1560 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1561 methodHandle callee_method;
1562 JRT_BLOCK
1563 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1564 current->set_vm_result_2(callee_method());
1565 JRT_BLOCK_END
1566 // return compiled code entry point after potential safepoints
1567 return get_resolved_entry(current, callee_method);
1568 JRT_END
1569
1570
1571 // Resolve a virtual call that can be statically bound (e.g., always
1572 // monomorphic, so it has no inline cache). Patch code to resolved target.
1573 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1574 methodHandle callee_method;
1575 JRT_BLOCK
1576 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1577 current->set_vm_result_2(callee_method());
1578 JRT_BLOCK_END
1579 // return compiled code entry point after potential safepoints
1580 return get_resolved_entry(current, callee_method);
1581 JRT_END
1582
1583 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1584 JavaThread* current = THREAD;
1585 ResourceMark rm(current);
1586 CallInfo call_info;
1587 Bytecodes::Code bc;
1588
1589 // receiver is null for static calls. An exception is thrown for null
1590 // receivers for non-static calls
1591 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1592
1593 methodHandle callee_method(current, call_info.selected_method());
1594
1595 #ifndef PRODUCT
1596 Atomic::inc(&_ic_miss_ctr);
1597
1598 // Statistics & Tracing
1599 if (TraceCallFixup) {
1600 ResourceMark rm(current);
1601 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1602 callee_method->print_short_name(tty);
1603 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1604 }
1605
1606 if (ICMissHistogram) {
1607 MutexLocker m(VMStatistic_lock);
1608 RegisterMap reg_map(current,
1609 RegisterMap::UpdateMap::skip,
1610 RegisterMap::ProcessFrames::include,
1611 RegisterMap::WalkContinuation::skip);
1612 frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1613 // produce statistics under the lock
1614 trace_ic_miss(f.pc());
1615 }
1616 #endif
1617
1700 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1701 cdc->set_to_clean();
1702 break;
1703 }
1704
1705 case relocInfo::virtual_call_type: {
1706 // compiled, dispatched call (which used to call an interpreted method)
1707 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1708 inline_cache->set_to_clean();
1709 break;
1710 }
1711 default:
1712 break;
1713 }
1714 }
1715 }
1716 }
1717
1718 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1719
1720
1721 #ifndef PRODUCT
1722 Atomic::inc(&_wrong_method_ctr);
1723
1724 if (TraceCallFixup) {
1725 ResourceMark rm(current);
1726 tty->print("handle_wrong_method reresolving call to");
1727 callee_method->print_short_name(tty);
1728 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1729 }
1730 #endif
1731
1732 return callee_method;
1733 }
1734
1735 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1736 // The faulting unsafe accesses should be changed to throw the error
1737 // synchronously instead. Meanwhile the faulting instruction will be
1738 // skipped over (effectively turning it into a no-op) and an
1739 // asynchronous exception will be raised which the thread will
1740 // handle at a later point. If the instruction is a load it will
1741 // return garbage.
1742
1743 // Request an async exception.
2001 // This is only called when CheckJNICalls is true, and only
2002 // for virtual thread termination.
2003 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2004 assert(CheckJNICalls, "Only call this when checking JNI usage");
2005 if (log_is_enabled(Debug, jni)) {
2006 JavaThread* current = JavaThread::current();
2007 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2008 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2009 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2010 ") exiting with Objects still locked by JNI MonitorEnter.",
2011 vthread_id, carrier_id);
2012 }
2013 JRT_END
2014
2015 #ifndef PRODUCT
2016
2017 void SharedRuntime::print_statistics() {
2018 ttyLocker ttyl;
2019 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2020
2021 SharedRuntime::print_ic_miss_histogram();
2022
2023 // Dump the JRT_ENTRY counters
2024 if (_new_instance_ctr) tty->print_cr("%5u new instance requires GC", _new_instance_ctr);
2025 if (_new_array_ctr) tty->print_cr("%5u new array requires GC", _new_array_ctr);
2026 if (_multi2_ctr) tty->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2027 if (_multi3_ctr) tty->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2028 if (_multi4_ctr) tty->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2029 if (_multi5_ctr) tty->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2030
2031 tty->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2032 tty->print_cr("%5u wrong method", _wrong_method_ctr);
2033 tty->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2034 tty->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2035 tty->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2036
2037 if (_mon_enter_stub_ctr) tty->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2038 if (_mon_exit_stub_ctr) tty->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2039 if (_mon_enter_ctr) tty->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2040 if (_mon_exit_ctr) tty->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2041 if (_partial_subtype_ctr) tty->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2042 if (_jbyte_array_copy_ctr) tty->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2043 if (_jshort_array_copy_ctr) tty->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2044 if (_jint_array_copy_ctr) tty->print_cr("%5u int array copies", _jint_array_copy_ctr);
2045 if (_jlong_array_copy_ctr) tty->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2046 if (_oop_array_copy_ctr) tty->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2047 if (_checkcast_array_copy_ctr) tty->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2048 if (_unsafe_array_copy_ctr) tty->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2049 if (_generic_array_copy_ctr) tty->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2050 if (_slow_array_copy_ctr) tty->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2051 if (_find_handler_ctr) tty->print_cr("%5u find exception handler", _find_handler_ctr);
2052 if (_rethrow_ctr) tty->print_cr("%5u rethrow handler", _rethrow_ctr);
2053 if (_unsafe_set_memory_ctr) tty->print_cr("%5u unsafe set memorys", _unsafe_set_memory_ctr);
2054
2055 AdapterHandlerLibrary::print_statistics();
2056
2057 if (xtty != nullptr) xtty->tail("statistics");
2058 }
2059
2060 inline double percent(int64_t x, int64_t y) {
2061 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2062 }
2063
2064 class MethodArityHistogram {
2065 public:
2066 enum { MAX_ARITY = 256 };
2067 private:
2068 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2069 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2070 static uint64_t _total_compiled_calls;
2071 static uint64_t _max_compiled_calls_per_method;
2072 static int _max_arity; // max. arity seen
2073 static int _max_size; // max. arg size seen
2074
2075 static void add_method_to_histogram(nmethod* nm) {
2076 Method* method = (nm == nullptr) ? nullptr : nm->method();
2077 if (method != nullptr) {
2078 ArgumentCount args(method->signature());
2079 int arity = args.size() + (method->is_static() ? 0 : 1);
2124 // Take the Compile_lock to protect against changes in the CodeBlob structures
2125 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2126 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2127 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2128 _max_arity = _max_size = 0;
2129 _total_compiled_calls = 0;
2130 _max_compiled_calls_per_method = 0;
2131 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2132 CodeCache::nmethods_do(add_method_to_histogram);
2133 print_histogram();
2134 }
2135 };
2136
2137 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2138 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2139 uint64_t MethodArityHistogram::_total_compiled_calls;
2140 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2141 int MethodArityHistogram::_max_arity;
2142 int MethodArityHistogram::_max_size;
2143
2144 void SharedRuntime::print_call_statistics(uint64_t comp_total) {
2145 tty->print_cr("Calls from compiled code:");
2146 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2147 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2148 int64_t mono_i = _nof_interface_calls;
2149 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2150 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2151 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2152 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2153 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2154 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2155 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2156 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2157 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2158 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2159 tty->cr();
2160 tty->print_cr("Note 1: counter updates are not MT-safe.");
2161 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2162 tty->print_cr(" %% in nested categories are relative to their category");
2163 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2164 tty->cr();
2165
2166 MethodArityHistogram h;
2167 }
2168 #endif
2169
2170 #ifndef PRODUCT
2171 static int _lookups; // number of calls to lookup
2172 static int _equals; // number of buckets checked with matching hash
2173 static int _hits; // number of successful lookups
2174 static int _compact; // number of equals calls with compact signature
2175 #endif
2176
2177 // A simple wrapper class around the calling convention information
2178 // that allows sharing of adapters for the same calling convention.
2179 class AdapterFingerPrint : public CHeapObj<mtCode> {
2180 private:
2181 enum {
2182 _basic_type_bits = 4,
2183 _basic_type_mask = right_n_bits(_basic_type_bits),
2184 _basic_types_per_int = BitsPerInt / _basic_type_bits,
2185 _compact_int_count = 3
2186 };
2187 // TO DO: Consider integrating this with a more global scheme for compressing signatures.
2188 // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
2189
2190 union {
2191 int _compact[_compact_int_count];
2192 int* _fingerprint;
2193 } _value;
2194 int _length; // A negative length indicates the fingerprint is in the compact form;
2195 // otherwise _value._fingerprint points to the heap-allocated array.
2196
2197 // Remap BasicTypes that are handled equivalently by the adapters.
2198 // These are correct for the current system but someday it might be
2199 // necessary to make this mapping platform dependent.
2200 static int adapter_encoding(BasicType in) {
2201 switch (in) {
2202 case T_BOOLEAN:
2203 case T_BYTE:
2204 case T_SHORT:
2205 case T_CHAR:
2206 // These are all promoted to T_INT in the calling convention
2207 return T_INT;
2208
2209 case T_OBJECT:
2210 case T_ARRAY:
2211 // In other words, we assume that any register good enough for
2212 // an int or long is good enough for a managed pointer.
2213 #ifdef _LP64
2214 return T_LONG;
2215 #else
2216 return T_INT;
2217 #endif
2218
2219 case T_INT:
2220 case T_LONG:
2221 case T_FLOAT:
2222 case T_DOUBLE:
2223 case T_VOID:
2224 return in;
2225
2226 default:
2227 ShouldNotReachHere();
2228 return T_CONFLICT;
2229 }
2230 }
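// A practical consequence of the remapping above (a sketch, not something the VM
// asserts): on LP64, signatures that differ only in which sub-int primitive or
// reference type they take -- e.g. (Z)V, (B)V, (C)V, (S)V and (I)V, or
// (Ljava/lang/Object;)V versus any other single-reference signature -- produce
// identical fingerprints and can therefore share one i2c/c2i adapter.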
2231
2232 public:
2233 AdapterFingerPrint(int total_args_passed, BasicType* sig_bt) {
2234 // The fingerprint is based on the BasicType signature encoded
2235 // into an array of ints with eight entries per int.
2236 int* ptr;
2237 int len = (total_args_passed + (_basic_types_per_int-1)) / _basic_types_per_int;
2238 if (len <= _compact_int_count) {
2239 assert(_compact_int_count == 3, "else change next line");
2240 _value._compact[0] = _value._compact[1] = _value._compact[2] = 0;
2241 // Storing the signature encoded as signed chars hits about 98%
2242 // of the time.
2243 _length = -len;
2244 ptr = _value._compact;
2245 } else {
2246 _length = len;
2247 _value._fingerprint = NEW_C_HEAP_ARRAY(int, _length, mtCode);
2248 ptr = _value._fingerprint;
2249 }
2250
2251 // Now pack the BasicTypes with 8 per int
2252 int sig_index = 0;
2253 for (int index = 0; index < len; index++) {
2254 int value = 0;
2255 for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2256 int bt = adapter_encoding(sig_bt[sig_index++]);
2257 assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2258 value = (value << _basic_type_bits) | bt;
2259 }
2260 ptr[index] = value;
2261 }
2262 }
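// Worked example (illustrative only; the numeric values assume HotSpot's usual
// BasicType encoding, e.g. T_INT == 10, T_LONG == 11, T_VOID == 14):
// a static method with signature (IJ)V arrives here as
//   total_args_passed == 3, sig_bt == { T_INT, T_LONG, T_VOID }
// (the T_VOID is the placeholder slot that follows a long). Then len == 1, so the
// compact form is used with _length == -1 and
//   _value._compact[0] == (T_INT << 8) | (T_LONG << 4) | T_VOID == 0xABE,
// which as_string() renders as "0xabe" and as_basic_args_string() as "IJ".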
2263
2264 ~AdapterFingerPrint() {
2265 if (_length > 0) {
2266 FREE_C_HEAP_ARRAY(int, _value._fingerprint);
2267 }
2268 }
2269
2270 int value(int index) {
2271 if (_length < 0) {
2272 return _value._compact[index];
2273 }
2274 return _value._fingerprint[index];
2275 }
2276 int length() {
2277 if (_length < 0) return -_length;
2278 return _length;
2279 }
2280
2281 bool is_compact() {
2282 return _length <= 0;
2283 }
2284
2285 unsigned int compute_hash() {
2286 int hash = 0;
2287 for (int i = 0; i < length(); i++) {
2288 int v = value(i);
2289 hash = (hash << 8) ^ v ^ (hash >> 5);
2290 }
2291 return (unsigned int)hash;
2292 }
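// Note (follows directly from the loop above): for a fingerprint that fits in a
// single word the hash is simply that word, e.g. the single-word fingerprint 0xABE
// from the example above hashes to 0xABE; longer fingerprints mix successive words
// with shifts and xors.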
2293
2294 const char* as_string() {
2295 stringStream st;
2296 st.print("0x");
2297 for (int i = 0; i < length(); i++) {
2298 st.print("%x", value(i));
2299 }
2300 return st.as_string();
2301 }
2302
2303 #ifndef PRODUCT
2304 // Reconstitutes the basic type arguments from the fingerprint,
2305 // producing strings like LIJDF
2306 const char* as_basic_args_string() {
2307 stringStream st;
2308 bool long_prev = false;
2309 for (int i = 0; i < length(); i++) {
2310 unsigned val = (unsigned)value(i);
2311 // args are packed so that first/lower arguments are in the highest
2312 // bits of each int value, so iterate from highest to the lowest
2313 for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2314 unsigned v = (val >> j) & _basic_type_mask;
2315 if (v == 0) {
2316 assert(i == length() - 1, "Only expect zeroes in the last word");
2317 continue;
2318 }
2319 if (long_prev) {
2320 long_prev = false;
2321 if (v == T_VOID) {
2322 st.print("J");
2323 } else {
2324 st.print("L");
2325 }
2326 }
2327 switch (v) {
2328 case T_INT: st.print("I"); break;
2329 case T_LONG: long_prev = true; break;
2330 case T_FLOAT: st.print("F"); break;
2331 case T_DOUBLE: st.print("D"); break;
2332 case T_VOID: break;
2333 default: ShouldNotReachHere();
2334 }
2335 }
2336 }
2337 if (long_prev) {
2338 st.print("L");
2339 }
2340 return st.as_string();
2341 }
2342 #endif // !PRODUCT
2343
2344 bool equals(AdapterFingerPrint* other) {
2345 if (other->_length != _length) {
2346 return false;
2347 }
2348 if (_length < 0) {
2349 assert(_compact_int_count == 3, "else change next line");
2350 return _value._compact[0] == other->_value._compact[0] &&
2351 _value._compact[1] == other->_value._compact[1] &&
2352 _value._compact[2] == other->_value._compact[2];
2353 } else {
2354 for (int i = 0; i < _length; i++) {
2355 if (_value._fingerprint[i] != other->_value._fingerprint[i]) {
2356 return false;
2357 }
2358 }
2359 }
2360 return true;
2361 }
2362
2363 static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2364 NOT_PRODUCT(_equals++);
2365 return fp1->equals(fp2);
2366 }
2367
2368 static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2369 return fp->compute_hash();
2370 }
2371 };
2372
2373 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2374 using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2375 AnyObj::C_HEAP, mtCode,
2376 AdapterFingerPrint::compute_hash,
2377 AdapterFingerPrint::equals>;
2378 static AdapterHandlerTable* _adapter_handler_table;
2379
2380 // Find an entry with the same fingerprint, if one exists
2381 static AdapterHandlerEntry* lookup(int total_args_passed, BasicType* sig_bt) {
2382 NOT_PRODUCT(_lookups++);
2383 assert_lock_strong(AdapterHandlerLibrary_lock);
2384 AdapterFingerPrint fp(total_args_passed, sig_bt);
2385 AdapterHandlerEntry** entry = _adapter_handler_table->get(&fp);
2386 if (entry != nullptr) {
2387 #ifndef PRODUCT
2388 if (fp.is_compact()) _compact++;
2389 _hits++;
2390 #endif
2391 return *entry;
2392 }
2393 return nullptr;
2394 }
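// Usage sketch (drawn from the call sites later in this file): callers hold
// AdapterHandlerLibrary_lock and build a throwaway AdapterFingerPrint on the
// stack purely as a lookup key; only when no match is found does create_adapter()
// allocate a C-heap AdapterFingerPrint to store in the table, roughly:
//
//   MutexLocker mu(AdapterHandlerLibrary_lock);
//   AdapterHandlerEntry* entry = lookup(total_args_passed, sig_bt);
//   if (entry == nullptr) {
//     entry = create_adapter(new_adapter, total_args_passed, sig_bt, true);
//   }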
2395
2396 #ifndef PRODUCT
2397 static void print_table_statistics() {
2398 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2399 return sizeof(*key) + sizeof(*a);
2400 };
2401 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2402 ts.print(tty, "AdapterHandlerTable");
2403 tty->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2404 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2405 tty->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d compact %d",
2406 _lookups, _equals, _hits, _compact);
2407 }
2408 #endif
2409
2410 // ---------------------------------------------------------------------------
2411 // Implementation of AdapterHandlerLibrary
2412 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2413 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2414 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2415 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2416 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2417 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2418 const int AdapterHandlerLibrary_size = 16*K;
2419 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2420
2421 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2422 return _buffer;
2423 }
2424
2425 static void post_adapter_creation(const AdapterBlob* new_adapter,
2426 const AdapterHandlerEntry* entry) {
2427 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2428 char blob_id[256];
2429 jio_snprintf(blob_id,
2430 sizeof(blob_id),
2431 "%s(%s)",
2432 new_adapter->name(),
2433 entry->fingerprint()->as_string());
2434 if (Forte::is_enabled()) {
2435 Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2436 }
2437
2441 }
2442 }
2443
2444 void AdapterHandlerLibrary::initialize() {
2445 ResourceMark rm;
2446 AdapterBlob* no_arg_blob = nullptr;
2447 AdapterBlob* int_arg_blob = nullptr;
2448 AdapterBlob* obj_arg_blob = nullptr;
2449 AdapterBlob* obj_int_arg_blob = nullptr;
2450 AdapterBlob* obj_obj_arg_blob = nullptr;
2451 {
2452 _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2453 MutexLocker mu(AdapterHandlerLibrary_lock);
2454
2455 // Create a special handler for abstract methods. Abstract methods
2456 // are never compiled so an i2c entry is somewhat meaningless, but
2457 // throw AbstractMethodError just in case.
2458 // Pass wrong_method_abstract for the c2i transitions to return
2459 // AbstractMethodError for invalid invocations.
2460 address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2461 _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, nullptr),
2462 SharedRuntime::throw_AbstractMethodError_entry(),
2463 wrong_method_abstract, wrong_method_abstract);
2464
2465 _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2466 _no_arg_handler = create_adapter(no_arg_blob, 0, nullptr, true);
2467
2468 BasicType obj_args[] = { T_OBJECT };
2469 _obj_arg_handler = create_adapter(obj_arg_blob, 1, obj_args, true);
2470
2471 BasicType int_args[] = { T_INT };
2472 _int_arg_handler = create_adapter(int_arg_blob, 1, int_args, true);
2473
2474 BasicType obj_int_args[] = { T_OBJECT, T_INT };
2475 _obj_int_arg_handler = create_adapter(obj_int_arg_blob, 2, obj_int_args, true);
2476
2477 BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2478 _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, 2, obj_obj_args, true);
2479
2480 assert(no_arg_blob != nullptr &&
2481 obj_arg_blob != nullptr &&
2482 int_arg_blob != nullptr &&
2483 obj_int_arg_blob != nullptr &&
2484 obj_obj_arg_blob != nullptr, "Initial adapters must be properly created");
2485 }
2486
2487 // Outside of the lock
2488 post_adapter_creation(no_arg_blob, _no_arg_handler);
2489 post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2490 post_adapter_creation(int_arg_blob, _int_arg_handler);
2491 post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2492 post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2493 }
2494
2495 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
2496 address i2c_entry,
2497 address c2i_entry,
2498 address c2i_unverified_entry,
2499 address c2i_no_clinit_check_entry) {
2500 // Insert an entry into the table
2501 return new AdapterHandlerEntry(fingerprint, i2c_entry, c2i_entry, c2i_unverified_entry,
2502 c2i_no_clinit_check_entry);
2503 }
2504
2505 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2506 if (method->is_abstract()) {
2507 return _abstract_method_handler;
2508 }
2509 int total_args_passed = method->size_of_parameters(); // All args on stack
2510 if (total_args_passed == 0) {
2511 return _no_arg_handler;
2512 } else if (total_args_passed == 1) {
2513 if (!method->is_static()) {
2514 return _obj_arg_handler;
2515 }
2516 switch (method->signature()->char_at(1)) {
2517 case JVM_SIGNATURE_CLASS:
2518 case JVM_SIGNATURE_ARRAY:
2519 return _obj_arg_handler;
2520 case JVM_SIGNATURE_INT:
2521 case JVM_SIGNATURE_BOOLEAN:
2522 case JVM_SIGNATURE_CHAR:
2592 // Fast-path for trivial adapters
2593 AdapterHandlerEntry* entry = get_simple_adapter(method);
2594 if (entry != nullptr) {
2595 return entry;
2596 }
2597
2598 ResourceMark rm;
2599 AdapterBlob* new_adapter = nullptr;
2600
2601 // Fill in the signature array, for the calling-convention call.
2602 int total_args_passed = method->size_of_parameters(); // All args on stack
2603
2604 AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2605 method->is_static(), total_args_passed);
2606 assert(si.slots() == total_args_passed, "");
2607 BasicType* sig_bt = si.basic_types();
2608 {
2609 MutexLocker mu(AdapterHandlerLibrary_lock);
2610
2611 // Lookup method signature's fingerprint
2612 entry = lookup(total_args_passed, sig_bt);
2613
2614 if (entry != nullptr) {
2615 #ifdef ASSERT
2616 if (VerifyAdapterSharing) {
2617 AdapterBlob* comparison_blob = nullptr;
2618 AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, total_args_passed, sig_bt, false);
2619 assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
2620 assert(comparison_entry->compare_code(entry), "code must match");
2621 // Release the one just created and return the original
2622 delete comparison_entry;
2623 }
2624 #endif
2625 return entry;
2626 }
2627
2628 entry = create_adapter(new_adapter, total_args_passed, sig_bt, /* allocate_code_blob */ true);
2629 }
2630
2631 // Outside of the lock
2632 if (new_adapter != nullptr) {
2633 post_adapter_creation(new_adapter, entry);
2634 }
2635 return entry;
2636 }
2637
2638 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& new_adapter,
2639 int total_args_passed,
2640 BasicType* sig_bt,
2641 bool allocate_code_blob) {
2642 if (log_is_enabled(Info, perf, class, link)) {
2643 ClassLoader::perf_method_adapters_count()->inc();
2644 }
2645
2646 // StubRoutines::_final_stubs_code is initialized after this function can already be called. As a result,
2647 // VerifyAdapterCalls and VerifyAdapterSharing can fail if we re-use code that was generated prior
2648 // to StubRoutines::_final_stubs_code being set. The checks in question are runtime range checks,
2649 // generated in an I2C stub, that ensure an I2C stub is called from an interpreter frame or from stubs.
2650 bool contains_all_checks = StubRoutines::final_stubs_code() != nullptr;
2651
2652 VMRegPair stack_regs[16];
2653 VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2654
2655 // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2656 int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2657 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2658 CodeBuffer buffer(buf);
2659 short buffer_locs[20];
2660 buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2661 sizeof(buffer_locs)/sizeof(relocInfo));
2662
2663 // Make a C heap allocated version of the fingerprint to store in the adapter
2664 AdapterFingerPrint* fingerprint = new AdapterFingerPrint(total_args_passed, sig_bt);
2665 MacroAssembler _masm(&buffer);
2666 AdapterHandlerEntry* entry = SharedRuntime::generate_i2c2i_adapters(&_masm,
2667 total_args_passed,
2668 comp_args_on_stack,
2669 sig_bt,
2670 regs,
2671 fingerprint);
2672
2673 #ifdef ASSERT
2674 if (VerifyAdapterSharing) {
2675 entry->save_code(buf->code_begin(), buffer.insts_size());
2676 if (!allocate_code_blob) {
2677 return entry;
2678 }
2679 }
2680 #endif
2681
2682 new_adapter = AdapterBlob::create(&buffer);
2683 NOT_PRODUCT(int insts_size = buffer.insts_size());
2684 if (new_adapter == nullptr) {
2685 // CodeCache is full, disable compilation
2686 // We ought to log this, but the compile log is only per compile thread
2687 // and we're some nondescript Java thread.
2688 return nullptr;
2689 }
2690 entry->relocate(new_adapter->content_begin());
2691 #ifndef PRODUCT
2692 // debugging support
2693 if (PrintAdapterHandlers || PrintStubCode) {
2694 ttyLocker ttyl;
2695 entry->print_adapter_on(tty);
2696 tty->print_cr("i2c argument handler #%d for: %s %s (%d bytes generated)",
2697 _adapter_handler_table->number_of_entries(), fingerprint->as_basic_args_string(),
2698 fingerprint->as_string(), insts_size);
2699 tty->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(entry->get_c2i_entry()));
2700 if (Verbose || PrintStubCode) {
2701 address first_pc = entry->base_address();
2702 if (first_pc != nullptr) {
2703 Disassembler::decode(first_pc, first_pc + insts_size, tty
2704 NOT_PRODUCT(COMMA &new_adapter->asm_remarks()));
2705 tty->cr();
2706 }
2707 }
2708 }
2709 #endif
2710
2711 // Add the entry only if the entry contains all required checks (see sharedRuntime_xxx.cpp)
2712 // The checks are inserted only if -XX:+VerifyAdapterCalls is specified.
2713 if (contains_all_checks || !VerifyAdapterCalls) {
2714 assert_lock_strong(AdapterHandlerLibrary_lock);
2715 _adapter_handler_table->put(fingerprint, entry);
2716 }
2717 return entry;
2718 }
2719
2720 address AdapterHandlerEntry::base_address() {
2721 address base = _i2c_entry;
2722 if (base == nullptr) base = _c2i_entry;
2723 assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
2724 assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
2725 assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
2726 return base;
2727 }
2728
2729 void AdapterHandlerEntry::relocate(address new_base) {
2730 address old_base = base_address();
2731 assert(old_base != nullptr, "");
2732 ptrdiff_t delta = new_base - old_base;
2733 if (_i2c_entry != nullptr)
2734 _i2c_entry += delta;
2735 if (_c2i_entry != nullptr)
2736 _c2i_entry += delta;
2737 if (_c2i_unverified_entry != nullptr)
2738 _c2i_unverified_entry += delta;
2739 if (_c2i_no_clinit_check_entry != nullptr)
2740 _c2i_no_clinit_check_entry += delta;
2741 assert(base_address() == new_base, "");
2742 }
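// Purely illustrative (hypothetical addresses): if the adapter code was generated
// in the temporary buffer with _i2c_entry == 0x1000, and AdapterBlob::create()
// copied the instructions so that content_begin() == 0x8000, then relocate(0x8000)
// computes delta == 0x7000 and shifts every non-null entry point by that amount
// so each one refers into the final blob.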
2743
2744
2745 AdapterHandlerEntry::~AdapterHandlerEntry() {
2746 delete _fingerprint;
2747 #ifdef ASSERT
2748 FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
2749 #endif
2750 }
2751
2752
2753 #ifdef ASSERT
2754 // Capture the code before relocation so that it can be compared
2755 // against other versions. If the code is captured after relocation
2756 // then relative instructions won't be equivalent.
2757 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
2758 _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
2759 _saved_code_length = length;
2760 memcpy(_saved_code, buffer, length);
2761 }
2762
2763
2764 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
2765 assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");
2766
2767 if (other->_saved_code_length != _saved_code_length) {
2768 return false;
2769 }
3034 assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
3035
3036 RegisterMap map(current,
3037 RegisterMap::UpdateMap::skip,
3038 RegisterMap::ProcessFrames::include,
3039 RegisterMap::WalkContinuation::skip);
3040 frame sender = fr.sender(&map);
3041 if (sender.is_interpreted_frame()) {
3042 current->push_cont_fastpath(sender.sp());
3043 }
3044
3045 return buf;
3046 JRT_END
3047
3048 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
3049 FREE_C_HEAP_ARRAY(intptr_t, buf);
3050 JRT_END
3051
3052 bool AdapterHandlerLibrary::contains(const CodeBlob* b) {
3053 bool found = false;
3054 auto findblob = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
3055 return (found = (b == CodeCache::find_blob(a->get_i2c_entry())));
3056 };
3057 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3058 _adapter_handler_table->iterate(findblob);
3059 return found;
3060 }
3061
3062 void AdapterHandlerLibrary::print_handler_on(outputStream* st, const CodeBlob* b) {
3063 bool found = false;
3064 auto findblob = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
3065 if (b == CodeCache::find_blob(a->get_i2c_entry())) {
3066 found = true;
3067 st->print("Adapter for signature: ");
3068 a->print_adapter_on(st);
3069 return true;
3070 } else {
3071 return false; // keep looking
3072 }
3073 };
3074 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3075 _adapter_handler_table->iterate(findblob);
3076 assert(found, "Should have found handler");
3077 }
3078
3079 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3080 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3081 if (get_i2c_entry() != nullptr) {
3082 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3083 }
3084 if (get_c2i_entry() != nullptr) {
3085 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3086 }
3087 if (get_c2i_unverified_entry() != nullptr) {
3088 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3089 }
3090 if (get_c2i_no_clinit_check_entry() != nullptr) {
3091 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3092 }
3093 st->cr();
3094 }
3095
3096 #ifndef PRODUCT
3097
3098 void AdapterHandlerLibrary::print_statistics() {
3099 print_table_statistics();
3100 }
3101
3102 #endif /* PRODUCT */
3103
3104 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3105 assert(current == JavaThread::current(), "pre-condition");
3106 StackOverflow* overflow_state = current->stack_overflow_state();
3107 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3108 overflow_state->set_reserved_stack_activation(current->stack_base());
3109 JRT_END
3110
3111 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3112 ResourceMark rm(current);
3113 frame activation;
3114 nmethod* nm = nullptr;
3115 int count = 1;
3116
3117 assert(fr.is_java_frame(), "Must start on Java frame");
3118
3119 RegisterMap map(JavaThread::current(),
3120 RegisterMap::UpdateMap::skip,
3121 RegisterMap::ProcessFrames::skip,
3122 RegisterMap::WalkContinuation::skip); // don't walk continuations
3123 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {
|
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/archiveBuilder.hpp"
26 #include "cds/archiveUtils.inline.hpp"
27 #include "cds/cdsConfig.hpp"
28 #include "classfile/classLoader.hpp"
29 #include "classfile/javaClasses.inline.hpp"
30 #include "classfile/stringTable.hpp"
31 #include "classfile/vmClasses.hpp"
32 #include "classfile/vmSymbols.hpp"
33 #include "code/SCCache.hpp"
34 #include "code/codeCache.hpp"
35 #include "code/compiledIC.hpp"
36 #include "code/nmethod.inline.hpp"
37 #include "code/scopeDesc.hpp"
38 #include "code/vtableStubs.hpp"
39 #include "compiler/abstractCompiler.hpp"
40 #include "compiler/compileBroker.hpp"
41 #include "compiler/disassembler.hpp"
42 #include "gc/shared/barrierSet.hpp"
43 #include "gc/shared/collectedHeap.hpp"
44 #include "interpreter/interpreter.hpp"
45 #include "interpreter/interpreterRuntime.hpp"
46 #include "jvm.h"
47 #include "jfr/jfrEvents.hpp"
48 #include "logging/log.hpp"
49 #include "memory/resourceArea.hpp"
50 #include "memory/universe.hpp"
51 #include "metaprogramming/primitiveConversions.hpp"
52 #include "oops/klass.hpp"
53 #include "oops/method.inline.hpp"
54 #include "oops/objArrayKlass.hpp"
55 #include "oops/oop.inline.hpp"
56 #include "prims/forte.hpp"
57 #include "prims/jvmtiExport.hpp"
58 #include "prims/jvmtiThreadState.hpp"
59 #include "prims/methodHandles.hpp"
60 #include "prims/nativeLookup.hpp"
61 #include "runtime/arguments.hpp"
62 #include "runtime/atomic.hpp"
63 #include "runtime/basicLock.inline.hpp"
64 #include "runtime/frame.inline.hpp"
65 #include "runtime/handles.inline.hpp"
66 #include "runtime/init.hpp"
67 #include "runtime/interfaceSupport.inline.hpp"
68 #include "runtime/java.hpp"
69 #include "runtime/javaCalls.hpp"
70 #include "runtime/jniHandles.inline.hpp"
71 #include "runtime/perfData.inline.hpp"
72 #include "runtime/sharedRuntime.hpp"
73 #include "runtime/stackWatermarkSet.hpp"
74 #include "runtime/stubRoutines.hpp"
75 #include "runtime/synchronizer.inline.hpp"
76 #include "runtime/timerTrace.hpp"
77 #include "runtime/vframe.inline.hpp"
78 #include "runtime/vframeArray.hpp"
79 #include "runtime/vm_version.hpp"
80 #include "services/management.hpp"
81 #include "utilities/copy.hpp"
82 #include "utilities/dtrace.hpp"
83 #include "utilities/events.hpp"
84 #include "utilities/globalDefinitions.hpp"
85 #include "utilities/resourceHash.hpp"
86 #include "utilities/macros.hpp"
87 #include "utilities/xmlstream.hpp"
88 #ifdef COMPILER1
89 #include "c1/c1_Runtime1.hpp"
90 #endif
91 #if INCLUDE_JFR
92 #include "jfr/jfr.hpp"
93 #endif
94
95 // Shared runtime stub routines reside in their own unique blob with a
96 // single entry point
97
98
99 #define SHARED_STUB_FIELD_DEFINE(name, type) \
100 type SharedRuntime::BLOB_FIELD_NAME(name);
101 SHARED_STUBS_DO(SHARED_STUB_FIELD_DEFINE)
102 #undef SHARED_STUB_FIELD_DEFINE
103
104 nmethod* SharedRuntime::_cont_doYield_stub;
105
106 PerfTickCounters* SharedRuntime::_perf_resolve_opt_virtual_total_time = nullptr;
107 PerfTickCounters* SharedRuntime::_perf_resolve_virtual_total_time = nullptr;
108 PerfTickCounters* SharedRuntime::_perf_resolve_static_total_time = nullptr;
109 PerfTickCounters* SharedRuntime::_perf_handle_wrong_method_total_time = nullptr;
110 PerfTickCounters* SharedRuntime::_perf_ic_miss_total_time = nullptr;
111
112 #define SHARED_STUB_NAME_DECLARE(name, type) "Shared Runtime " # name "_blob",
113 const char *SharedRuntime::_stub_names[] = {
114 SHARED_STUBS_DO(SHARED_STUB_NAME_DECLARE)
115 };
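// For example, for the wrong_method stub the macro above expands to the name
// "Shared Runtime wrong_method_blob".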
116
117 //----------------------------generate_stubs-----------------------------------
118 void SharedRuntime::generate_initial_stubs() {
119 // Build this early so it's available for the interpreter.
120 _throw_StackOverflowError_blob =
121 generate_throw_exception(SharedStubId::throw_StackOverflowError_id,
122 CAST_FROM_FN_PTR(address, SharedRuntime::throw_StackOverflowError));
123 }
124
125 void SharedRuntime::generate_stubs() {
126 _wrong_method_blob =
127 generate_resolve_blob(SharedStubId::wrong_method_id,
128 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method));
129 _wrong_method_abstract_blob =
130 generate_resolve_blob(SharedStubId::wrong_method_abstract_id,
131 CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract));
141 _resolve_static_call_blob =
142 generate_resolve_blob(SharedStubId::resolve_static_call_id,
143 CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C));
144
145 _throw_delayed_StackOverflowError_blob =
146 generate_throw_exception(SharedStubId::throw_delayed_StackOverflowError_id,
147 CAST_FROM_FN_PTR(address, SharedRuntime::throw_delayed_StackOverflowError));
148
149 _throw_AbstractMethodError_blob =
150 generate_throw_exception(SharedStubId::throw_AbstractMethodError_id,
151 CAST_FROM_FN_PTR(address, SharedRuntime::throw_AbstractMethodError));
152
153 _throw_IncompatibleClassChangeError_blob =
154 generate_throw_exception(SharedStubId::throw_IncompatibleClassChangeError_id,
155 CAST_FROM_FN_PTR(address, SharedRuntime::throw_IncompatibleClassChangeError));
156
157 _throw_NullPointerException_at_call_blob =
158 generate_throw_exception(SharedStubId::throw_NullPointerException_at_call_id,
159 CAST_FROM_FN_PTR(address, SharedRuntime::throw_NullPointerException_at_call));
160
161 #if COMPILER2_OR_JVMCI
162 // Vectors are generated only by C2 and JVMCI.
163 bool support_wide = is_wide_vector(MaxVectorSize);
164 if (support_wide) {
165 _polling_page_vectors_safepoint_handler_blob =
166 generate_handler_blob(SharedStubId::polling_page_vectors_safepoint_handler_id,
167 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
168 }
169 #endif // COMPILER2_OR_JVMCI
170 _polling_page_safepoint_handler_blob =
171 generate_handler_blob(SharedStubId::polling_page_safepoint_handler_id,
172 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
173 _polling_page_return_handler_blob =
174 generate_handler_blob(SharedStubId::polling_page_return_handler_id,
175 CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
176
177 generate_deopt_blob();
178
179 if (UsePerfData) {
180 EXCEPTION_MARK;
181 NEWPERFTICKCOUNTERS(_perf_resolve_opt_virtual_total_time, SUN_CI, "resolve_opt_virtual_call");
182 NEWPERFTICKCOUNTERS(_perf_resolve_virtual_total_time, SUN_CI, "resolve_virtual_call");
183 NEWPERFTICKCOUNTERS(_perf_resolve_static_total_time, SUN_CI, "resolve_static_call");
184 NEWPERFTICKCOUNTERS(_perf_handle_wrong_method_total_time, SUN_CI, "handle_wrong_method");
185 NEWPERFTICKCOUNTERS(_perf_ic_miss_total_time, SUN_CI, "ic_miss");
186 if (HAS_PENDING_EXCEPTION) {
187 vm_exit_during_initialization("SharedRuntime::generate_stubs() failed unexpectedly");
188 }
189 }
190 }
191
192 void SharedRuntime::init_adapter_library() {
193 AdapterHandlerLibrary::initialize();
194 }
195
196 static void print_counter_on(outputStream* st, const char* name, PerfTickCounters* counter, uint cnt) {
197 st->print(" %-28s " JLONG_FORMAT_W(6) "us", name, counter->elapsed_counter_value_us());
198 if (TraceThreadTime) {
199 st->print(" (elapsed) " JLONG_FORMAT_W(6) "us (thread)", counter->thread_counter_value_us());
200 }
201 st->print(" / %5d events", cnt);
202 st->cr();
203 }
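// With UsePerfData and TraceThreadTime enabled, a line produced by
// print_counter_on() looks roughly like this (values are illustrative only):
//   resolve_static_call:            1234us (elapsed)    567us (thread) /   250 events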
204
205 void SharedRuntime::print_counters_on(outputStream* st) {
206 st->print_cr("SharedRuntime:");
207 if (UsePerfData) {
208 print_counter_on(st, "resolve_opt_virtual_call:", _perf_resolve_opt_virtual_total_time, _resolve_opt_virtual_ctr);
209 print_counter_on(st, "resolve_virtual_call:", _perf_resolve_virtual_total_time, _resolve_virtual_ctr);
210 print_counter_on(st, "resolve_static_call:", _perf_resolve_static_total_time, _resolve_static_ctr);
211 print_counter_on(st, "handle_wrong_method:", _perf_handle_wrong_method_total_time, _wrong_method_ctr);
212 print_counter_on(st, "ic_miss:", _perf_ic_miss_total_time, _ic_miss_ctr);
213
214 jlong total_elapsed_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->elapsed_counter_value() +
215 _perf_resolve_virtual_total_time->elapsed_counter_value() +
216 _perf_resolve_static_total_time->elapsed_counter_value() +
217 _perf_handle_wrong_method_total_time->elapsed_counter_value() +
218 _perf_ic_miss_total_time->elapsed_counter_value());
219 st->print("Total: " JLONG_FORMAT_W(5) "us", total_elapsed_time_us);
220 if (TraceThreadTime) {
221 jlong total_thread_time_us = Management::ticks_to_us(_perf_resolve_opt_virtual_total_time->thread_counter_value() +
222 _perf_resolve_virtual_total_time->thread_counter_value() +
223 _perf_resolve_static_total_time->thread_counter_value() +
224 _perf_handle_wrong_method_total_time->thread_counter_value() +
225 _perf_ic_miss_total_time->thread_counter_value());
226 st->print(" (elapsed) " JLONG_FORMAT_W(5) "us (thread)", total_thread_time_us);
227
228 }
229 st->cr();
230 } else {
231 st->print_cr(" no data (UsePerfData is turned off)");
232 }
233 }
234
235 #if INCLUDE_JFR
236 //------------------------------generate jfr runtime stubs ------
237 void SharedRuntime::generate_jfr_stubs() {
238 ResourceMark rm;
239 const char* timer_msg = "SharedRuntime generate_jfr_stubs";
240 TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
241
242 _jfr_write_checkpoint_blob = generate_jfr_write_checkpoint();
243 _jfr_return_lease_blob = generate_jfr_return_lease();
244 }
245
246 #endif // INCLUDE_JFR
247
248 #include <math.h>
249
250 // Implementation of SharedRuntime
251
252 // For statistics
253 uint SharedRuntime::_ic_miss_ctr = 0;
254 uint SharedRuntime::_wrong_method_ctr = 0;
255 uint SharedRuntime::_resolve_static_ctr = 0;
256 uint SharedRuntime::_resolve_virtual_ctr = 0;
257 uint SharedRuntime::_resolve_opt_virtual_ctr = 0;
258
259 #ifndef PRODUCT
260 uint SharedRuntime::_implicit_null_throws = 0;
261 uint SharedRuntime::_implicit_div0_throws = 0;
262
263 int64_t SharedRuntime::_nof_normal_calls = 0;
264 int64_t SharedRuntime::_nof_inlined_calls = 0;
265 int64_t SharedRuntime::_nof_megamorphic_calls = 0;
266 int64_t SharedRuntime::_nof_static_calls = 0;
267 int64_t SharedRuntime::_nof_inlined_static_calls = 0;
268 int64_t SharedRuntime::_nof_interface_calls = 0;
269 int64_t SharedRuntime::_nof_inlined_interface_calls = 0;
270
271 uint SharedRuntime::_new_instance_ctr=0;
272 uint SharedRuntime::_new_array_ctr=0;
273 uint SharedRuntime::_multi2_ctr=0;
274 uint SharedRuntime::_multi3_ctr=0;
275 uint SharedRuntime::_multi4_ctr=0;
276 uint SharedRuntime::_multi5_ctr=0;
277 uint SharedRuntime::_mon_enter_stub_ctr=0;
278 uint SharedRuntime::_mon_exit_stub_ctr=0;
279 uint SharedRuntime::_mon_enter_ctr=0;
293 uint SharedRuntime::_unsafe_set_memory_ctr=0;
294
295 int SharedRuntime::_ICmiss_index = 0;
296 int SharedRuntime::_ICmiss_count[SharedRuntime::maxICmiss_count];
297 address SharedRuntime::_ICmiss_at[SharedRuntime::maxICmiss_count];
298
299
300 void SharedRuntime::trace_ic_miss(address at) {
301 for (int i = 0; i < _ICmiss_index; i++) {
302 if (_ICmiss_at[i] == at) {
303 _ICmiss_count[i]++;
304 return;
305 }
306 }
307 int index = _ICmiss_index++;
308 if (_ICmiss_index >= maxICmiss_count) _ICmiss_index = maxICmiss_count - 1;
309 _ICmiss_at[index] = at;
310 _ICmiss_count[index] = 1;
311 }
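// Once maxICmiss_count distinct call sites have been recorded, _ICmiss_index
// saturates at the last slot, so every additional previously-unseen site
// overwrites that slot and restarts its count at 1.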
312
313 void SharedRuntime::print_ic_miss_histogram_on(outputStream* st) {
314 if (ICMissHistogram) {
315 st->print_cr("IC Miss Histogram:");
316 int tot_misses = 0;
317 for (int i = 0; i < _ICmiss_index; i++) {
318 st->print_cr(" at: " INTPTR_FORMAT " nof: %d", p2i(_ICmiss_at[i]), _ICmiss_count[i]);
319 tot_misses += _ICmiss_count[i];
320 }
321 st->print_cr("Total IC misses: %7d", tot_misses);
322 }
323 }
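// A histogram entry printed above looks roughly like (illustrative values):
//   at: 0x00007f3b1c0123a0 nof: 42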
324 #endif // !PRODUCT
325
326
327 JRT_LEAF(jlong, SharedRuntime::lmul(jlong y, jlong x))
328 return x * y;
329 JRT_END
330
331
332 JRT_LEAF(jlong, SharedRuntime::ldiv(jlong y, jlong x))
333 if (x == min_jlong && y == CONST64(-1)) {
334 return x;
335 } else {
336 return x / y;
337 }
338 JRT_END
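// The explicit min_jlong / -1 check above matters because that quotient (2^63)
// is not representable as a jlong; on x86, for example, the corresponding idiv
// instruction would raise a hardware fault instead of wrapping around.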
339
340
341 JRT_LEAF(jlong, SharedRuntime::lrem(jlong y, jlong x))
342 if (x == min_jlong && y == CONST64(-1)) {
343 return 0;
344 } else {
771 jobject vthread = JNIHandles::make_local(const_cast<oopDesc*>(vt));
772 JvmtiVTMSTransitionDisabler::VTMS_vthread_unmount(vthread, hide);
773 JNIHandles::destroy_local(vthread);
774 JRT_END
775 #endif // INCLUDE_JVMTI
776
777 // The interpreter code that calls this tracing function is only
778 // called/generated when unified logging (UL) is enabled for the redefine,
779 // class and obsolete tags at the right level. Since obsolete methods are never
780 // compiled, we don't have to modify the compilers to generate calls to this function.
781 //
782 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
783 JavaThread* thread, Method* method))
784 if (method->is_obsolete()) {
785 // We are calling an obsolete method, but this is not necessarily
786 // an error. Our method could have been redefined just after we
787 // fetched the Method* from the constant pool.
788 ResourceMark rm;
789 log_trace(redefine, class, obsolete)("calling obsolete method '%s'", method->name_and_sig_as_C_string());
790 }
791
792 LogStreamHandle(Trace, interpreter, bytecode) log;
793 if (log.is_enabled()) {
794 ResourceMark rm;
795 log.print("method entry: " INTPTR_FORMAT " %s %s%s%s%s",
796 p2i(thread),
797 (method->is_static() ? "static" : "virtual"),
798 method->name_and_sig_as_C_string(),
799 (method->is_native() ? " native" : ""),
800 (thread->class_being_initialized() != nullptr ? " clinit" : ""),
801 (method->method_holder()->is_initialized() ? "" : " being_initialized"));
802 }
803 return 0;
804 JRT_END
805
806 // ret_pc points into caller; we are returning caller's exception handler
807 // for given exception
808 // Note that the implementation of this method assumes it's only called when an exception has actually occurred
809 address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc, Handle& exception,
810 bool force_unwind, bool top_frame_only, bool& recursive_exception_occurred) {
811 assert(nm != nullptr, "must exist");
812 ResourceMark rm;
813
814 #if INCLUDE_JVMCI
815 if (nm->is_compiled_by_jvmci()) {
816 // lookup exception handler for this pc
817 int catch_pco = pointer_delta_as_int(ret_pc, nm->code_begin());
818 ExceptionHandlerTable table(nm);
819 HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0);
820 if (t != nullptr) {
821 return nm->code_begin() + t->pco();
822 } else {
1422
1423 // determine call info & receiver
1424 // note: a) receiver is null for static calls
1425 // b) an exception is thrown if receiver is null for non-static calls
1426 CallInfo call_info;
1427 Bytecodes::Code invoke_code = Bytecodes::_illegal;
1428 Handle receiver = find_callee_info(invoke_code, call_info, CHECK_(methodHandle()));
1429
1430 NoSafepointVerifier nsv;
1431
1432 methodHandle callee_method(current, call_info.selected_method());
1433
1434 assert((!is_virtual && invoke_code == Bytecodes::_invokestatic ) ||
1435 (!is_virtual && invoke_code == Bytecodes::_invokespecial) ||
1436 (!is_virtual && invoke_code == Bytecodes::_invokehandle ) ||
1437 (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
1438 ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
1439
1440 assert(!caller_nm->is_unloading(), "It should not be unloading");
1441
1442 // tracing/debugging/statistics
1443 uint *addr = (is_optimized) ? (&_resolve_opt_virtual_ctr) :
1444 (is_virtual) ? (&_resolve_virtual_ctr) :
1445 (&_resolve_static_ctr);
1446 Atomic::inc(addr);
1447
1448 #ifndef PRODUCT
1449 if (TraceCallFixup) {
1450 ResourceMark rm(current);
1451 tty->print("resolving %s%s (%s) call to",
1452 (is_optimized) ? "optimized " : "", (is_virtual) ? "virtual" : "static",
1453 Bytecodes::name(invoke_code));
1454 callee_method->print_short_name(tty);
1455 tty->print_cr(" at pc: " INTPTR_FORMAT " to code: " INTPTR_FORMAT,
1456 p2i(caller_frame.pc()), p2i(callee_method->code()));
1457 }
1458 #endif
1459
1460 if (invoke_code == Bytecodes::_invokestatic) {
1461 assert(callee_method->method_holder()->is_initialized() ||
1462 callee_method->method_holder()->is_reentrant_initialization(current),
1463 "invalid class initialization state for invoke_static");
1464 if (!VM_Version::supports_fast_class_init_checks() && callee_method->needs_clinit_barrier()) {
1465 // In order to keep the class initialization check, do not patch the call
1466 // site for a static call when the class is not fully initialized.
1467 // The proper check is enforced by re-resolving the call site on every invocation.
1468 //
1484
1485 // Make sure the callee nmethod does not get deoptimized and removed before
1486 // we are done patching the code.
1487
1488
1489 CompiledICLocker ml(caller_nm);
1490 if (is_virtual && !is_optimized) {
1491 CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_frame.pc());
1492 inline_cache->update(&call_info, receiver->klass());
1493 } else {
1494 // Callsite is a direct call - set it to the destination method
1495 CompiledDirectCall* callsite = CompiledDirectCall::before(caller_frame.pc());
1496 callsite->set(callee_method);
1497 }
1498
1499 return callee_method;
1500 }
1501
1502 // Inline caches exist only in compiled code
1503 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread* current))
1504 PerfTraceTime timer(_perf_ic_miss_total_time);
1505
1506 #ifdef ASSERT
1507 RegisterMap reg_map(current,
1508 RegisterMap::UpdateMap::skip,
1509 RegisterMap::ProcessFrames::include,
1510 RegisterMap::WalkContinuation::skip);
1511 frame stub_frame = current->last_frame();
1512 assert(stub_frame.is_runtime_frame(), "sanity check");
1513 frame caller_frame = stub_frame.sender(&reg_map);
1514 assert(!caller_frame.is_interpreted_frame() && !caller_frame.is_entry_frame() && !caller_frame.is_upcall_stub_frame(), "unexpected frame");
1515 #endif /* ASSERT */
1516
1517 methodHandle callee_method;
1518 JRT_BLOCK
1519 callee_method = SharedRuntime::handle_ic_miss_helper(CHECK_NULL);
1520 // Return Method* through TLS
1521 current->set_vm_result_2(callee_method());
1522 JRT_BLOCK_END
1523 // return compiled code entry point after potential safepoints
1524 return get_resolved_entry(current, callee_method);
1525 JRT_END
1526
1527
1528 // Handle call site that has been made non-entrant
1529 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current))
1530 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1531
1532 // 6243940 We might end up in here if the callee is deoptimized
1533 // as we race to call it. We don't want to take a safepoint if
1534 // the caller was interpreted because the caller frame will look
1535 // interpreted to the stack walkers and arguments are now
1536 // "compiled" so it is much better to make this transition
1537 // invisible to the stack walking code. The i2c path will
1538 // place the callee method in the callee_target. It is stashed
1539 // there because if we try to find the callee by normal means a
1540 // safepoint is possible and we would have trouble gc'ing the compiled args.
1541 RegisterMap reg_map(current,
1542 RegisterMap::UpdateMap::skip,
1543 RegisterMap::ProcessFrames::include,
1544 RegisterMap::WalkContinuation::skip);
1545 frame stub_frame = current->last_frame();
1546 assert(stub_frame.is_runtime_frame(), "sanity check");
1547 frame caller_frame = stub_frame.sender(&reg_map);
1548
1549 if (caller_frame.is_interpreted_frame() ||
1550 caller_frame.is_entry_frame() ||
1551 caller_frame.is_upcall_stub_frame()) {
1564 // so bypassing it in c2i adapter is benign.
1565 return callee->get_c2i_no_clinit_check_entry();
1566 } else {
1567 return callee->get_c2i_entry();
1568 }
1569 }
1570
1571 // Must be compiled to compiled path which is safe to stackwalk
1572 methodHandle callee_method;
1573 JRT_BLOCK
1574 // Force resolving of caller (if we called from compiled frame)
1575 callee_method = SharedRuntime::reresolve_call_site(CHECK_NULL);
1576 current->set_vm_result_2(callee_method());
1577 JRT_BLOCK_END
1578 // return compiled code entry point after potential safepoints
1579 return get_resolved_entry(current, callee_method);
1580 JRT_END
1581
1582 // Handle abstract method call
1583 JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* current))
1584 PerfTraceTime timer(_perf_handle_wrong_method_total_time);
1585
1586 // Verbose error message for AbstractMethodError.
1587 // Get the called method from the invoke bytecode.
1588 vframeStream vfst(current, true);
1589 assert(!vfst.at_end(), "Java frame must exist");
1590 methodHandle caller(current, vfst.method());
1591 Bytecode_invoke invoke(caller, vfst.bci());
1592 DEBUG_ONLY( invoke.verify(); )
1593
1594 // Find the compiled caller frame.
1595 RegisterMap reg_map(current,
1596 RegisterMap::UpdateMap::include,
1597 RegisterMap::ProcessFrames::include,
1598 RegisterMap::WalkContinuation::skip);
1599 frame stubFrame = current->last_frame();
1600 assert(stubFrame.is_runtime_frame(), "must be");
1601 frame callerFrame = stubFrame.sender(&reg_map);
1602 assert(callerFrame.is_compiled_frame(), "must be");
1603
1604 // Install exception and return forward entry.
1605 address res = SharedRuntime::throw_AbstractMethodError_entry();
1612 LinkResolver::throw_abstract_method_error(callee, recv_klass, CHECK_(res));
1613 }
1614 JRT_BLOCK_END
1615 return res;
1616 JRT_END
1617
1618 // return verified_code_entry if interp_only_mode is not set for the current thread;
1619 // otherwise return c2i entry.
1620 address SharedRuntime::get_resolved_entry(JavaThread* current, methodHandle callee_method) {
1621 if (current->is_interp_only_mode() && !callee_method->is_special_native_intrinsic()) {
1622 // In interp_only_mode we need to go to the interpreted entry
1623 // The c2i won't patch in this mode -- see fixup_callers_callsite
1624 return callee_method->get_c2i_entry();
1625 }
1626 assert(callee_method->verified_code_entry() != nullptr, " Jump to zero!");
1627 return callee_method->verified_code_entry();
1628 }
1629
1630 // resolve a static call and patch code
1631 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread* current ))
1632 PerfTraceTime timer(_perf_resolve_static_total_time);
1633
1634 methodHandle callee_method;
1635 bool enter_special = false;
1636 JRT_BLOCK
1637 callee_method = SharedRuntime::resolve_helper(false, false, CHECK_NULL);
1638 current->set_vm_result_2(callee_method());
1639 JRT_BLOCK_END
1640 // return compiled code entry point after potential safepoints
1641 return get_resolved_entry(current, callee_method);
1642 JRT_END
1643
1644 // resolve virtual call and update inline cache to monomorphic
1645 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_virtual_call_C(JavaThread* current))
1646 PerfTraceTime timer(_perf_resolve_virtual_total_time);
1647
1648 methodHandle callee_method;
1649 JRT_BLOCK
1650 callee_method = SharedRuntime::resolve_helper(true, false, CHECK_NULL);
1651 current->set_vm_result_2(callee_method());
1652 JRT_BLOCK_END
1653 // return compiled code entry point after potential safepoints
1654 return get_resolved_entry(current, callee_method);
1655 JRT_END
1656
1657
1658 // Resolve a virtual call that can be statically bound (e.g., always
1659 // monomorphic, so it has no inline cache). Patch code to resolved target.
1660 JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_opt_virtual_call_C(JavaThread* current))
1661 PerfTraceTime timer(_perf_resolve_opt_virtual_total_time);
1662
1663 methodHandle callee_method;
1664 JRT_BLOCK
1665 callee_method = SharedRuntime::resolve_helper(true, true, CHECK_NULL);
1666 current->set_vm_result_2(callee_method());
1667 JRT_BLOCK_END
1668 // return compiled code entry point after potential safepoints
1669 return get_resolved_entry(current, callee_method);
1670 JRT_END
1671
1672 methodHandle SharedRuntime::handle_ic_miss_helper(TRAPS) {
1673 JavaThread* current = THREAD;
1674 ResourceMark rm(current);
1675 CallInfo call_info;
1676 Bytecodes::Code bc;
1677
1678 // receiver is null for static calls. An exception is thrown for null
1679 // receivers for non-static calls
1680 Handle receiver = find_callee_info(bc, call_info, CHECK_(methodHandle()));
1681
1682 methodHandle callee_method(current, call_info.selected_method());
1683
1684 Atomic::inc(&_ic_miss_ctr);
1685
1686 #ifndef PRODUCT
1687 // Statistics & Tracing
1688 if (TraceCallFixup) {
1689 ResourceMark rm(current);
1690 tty->print("IC miss (%s) call to", Bytecodes::name(bc));
1691 callee_method->print_short_name(tty);
1692 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1693 }
1694
1695 if (ICMissHistogram) {
1696 MutexLocker m(VMStatistic_lock);
1697 RegisterMap reg_map(current,
1698 RegisterMap::UpdateMap::skip,
1699 RegisterMap::ProcessFrames::include,
1700 RegisterMap::WalkContinuation::skip);
1701 frame f = current->last_frame().real_sender(&reg_map); // skip runtime stub
1702 // produce statistics under the lock
1703 trace_ic_miss(f.pc());
1704 }
1705 #endif
1706
1789 CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
1790 cdc->set_to_clean();
1791 break;
1792 }
1793
1794 case relocInfo::virtual_call_type: {
1795 // compiled, dispatched call (which used to call an interpreted method)
1796 CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
1797 inline_cache->set_to_clean();
1798 break;
1799 }
1800 default:
1801 break;
1802 }
1803 }
1804 }
1805 }
1806
1807 methodHandle callee_method = find_callee_method(CHECK_(methodHandle()));
1808
1809 Atomic::inc(&_wrong_method_ctr);
1810
1811 #ifndef PRODUCT
1812 if (TraceCallFixup) {
1813 ResourceMark rm(current);
1814 tty->print("handle_wrong_method reresolving call to");
1815 callee_method->print_short_name(tty);
1816 tty->print_cr(" code: " INTPTR_FORMAT, p2i(callee_method->code()));
1817 }
1818 #endif
1819
1820 return callee_method;
1821 }
1822
1823 address SharedRuntime::handle_unsafe_access(JavaThread* thread, address next_pc) {
1824 // The faulting unsafe accesses should be changed to throw the error
1825 // synchronously instead. Meanwhile the faulting instruction will be
1826 // skipped over (effectively turning it into a no-op) and an
1827 // asynchronous exception will be raised which the thread will
1828 // handle at a later point. If the instruction is a load it will
1829 // return garbage.
1830
1831 // Request an async exception.
2089 // This is only called when CheckJNICalls is true, and only
2090 // for virtual thread termination.
2091 JRT_LEAF(void, SharedRuntime::log_jni_monitor_still_held())
2092 assert(CheckJNICalls, "Only call this when checking JNI usage");
2093 if (log_is_enabled(Debug, jni)) {
2094 JavaThread* current = JavaThread::current();
2095 int64_t vthread_id = java_lang_Thread::thread_id(current->vthread());
2096 int64_t carrier_id = java_lang_Thread::thread_id(current->threadObj());
2097 log_debug(jni)("VirtualThread (tid: " INT64_FORMAT ", carrier id: " INT64_FORMAT
2098 ") exiting with Objects still locked by JNI MonitorEnter.",
2099 vthread_id, carrier_id);
2100 }
2101 JRT_END
2102
2103 #ifndef PRODUCT
2104
2105 void SharedRuntime::print_statistics() {
2106 ttyLocker ttyl;
2107 if (xtty != nullptr) xtty->head("statistics type='SharedRuntime'");
2108
2109 SharedRuntime::print_ic_miss_histogram_on(tty);
2110 SharedRuntime::print_counters_on(tty);
2111 AdapterHandlerLibrary::print_statistics_on(tty);
2112
2113 if (xtty != nullptr) xtty->tail("statistics");
2114 }
2115
2116 //void SharedRuntime::print_counters_on(outputStream* st) {
2117 // // Dump the JRT_ENTRY counters
2118 // if (_new_instance_ctr) st->print_cr("%5u new instance requires GC", _new_instance_ctr);
2119 // if (_new_array_ctr) st->print_cr("%5u new array requires GC", _new_array_ctr);
2120 // if (_multi2_ctr) st->print_cr("%5u multianewarray 2 dim", _multi2_ctr);
2121 // if (_multi3_ctr) st->print_cr("%5u multianewarray 3 dim", _multi3_ctr);
2122 // if (_multi4_ctr) st->print_cr("%5u multianewarray 4 dim", _multi4_ctr);
2123 // if (_multi5_ctr) st->print_cr("%5u multianewarray 5 dim", _multi5_ctr);
2124 //
2125 // st->print_cr("%5u inline cache miss in compiled", _ic_miss_ctr);
2126 // st->print_cr("%5u wrong method", _wrong_method_ctr);
2127 // st->print_cr("%5u unresolved static call site", _resolve_static_ctr);
2128 // st->print_cr("%5u unresolved virtual call site", _resolve_virtual_ctr);
2129 // st->print_cr("%5u unresolved opt virtual call site", _resolve_opt_virtual_ctr);
2130 //
2131 // if (_mon_enter_stub_ctr) st->print_cr("%5u monitor enter stub", _mon_enter_stub_ctr);
2132 // if (_mon_exit_stub_ctr) st->print_cr("%5u monitor exit stub", _mon_exit_stub_ctr);
2133 // if (_mon_enter_ctr) st->print_cr("%5u monitor enter slow", _mon_enter_ctr);
2134 // if (_mon_exit_ctr) st->print_cr("%5u monitor exit slow", _mon_exit_ctr);
2135 // if (_partial_subtype_ctr) st->print_cr("%5u slow partial subtype", _partial_subtype_ctr);
2136 // if (_jbyte_array_copy_ctr) st->print_cr("%5u byte array copies", _jbyte_array_copy_ctr);
2137 // if (_jshort_array_copy_ctr) st->print_cr("%5u short array copies", _jshort_array_copy_ctr);
2138 // if (_jint_array_copy_ctr) st->print_cr("%5u int array copies", _jint_array_copy_ctr);
2139 // if (_jlong_array_copy_ctr) st->print_cr("%5u long array copies", _jlong_array_copy_ctr);
2140 // if (_oop_array_copy_ctr) st->print_cr("%5u oop array copies", _oop_array_copy_ctr);
2141 // if (_checkcast_array_copy_ctr) st->print_cr("%5u checkcast array copies", _checkcast_array_copy_ctr);
2142 // if (_unsafe_array_copy_ctr) st->print_cr("%5u unsafe array copies", _unsafe_array_copy_ctr);
2143 // if (_generic_array_copy_ctr) st->print_cr("%5u generic array copies", _generic_array_copy_ctr);
2144 // if (_slow_array_copy_ctr) st->print_cr("%5u slow array copies", _slow_array_copy_ctr);
2145 // if (_find_handler_ctr) st->print_cr("%5u find exception handler", _find_handler_ctr);
2146 // if (_rethrow_ctr) st->print_cr("%5u rethrow handler", _rethrow_ctr);
2147 // if (_unsafe_set_memory_ctr) st->print_cr("%5u unsafe set memory ops", _unsafe_set_memory_ctr);
2148 //}
2149
2150 inline double percent(int64_t x, int64_t y) {
2151 return 100.0 * (double)x / (double)MAX2(y, (int64_t)1);
2152 }
2153
2154 class MethodArityHistogram {
2155 public:
2156 enum { MAX_ARITY = 256 };
2157 private:
2158 static uint64_t _arity_histogram[MAX_ARITY]; // histogram of #args
2159 static uint64_t _size_histogram[MAX_ARITY]; // histogram of arg size in words
2160 static uint64_t _total_compiled_calls;
2161 static uint64_t _max_compiled_calls_per_method;
2162 static int _max_arity; // max. arity seen
2163 static int _max_size; // max. arg size seen
2164
2165 static void add_method_to_histogram(nmethod* nm) {
2166 Method* method = (nm == nullptr) ? nullptr : nm->method();
2167 if (method != nullptr) {
2168 ArgumentCount args(method->signature());
2169 int arity = args.size() + (method->is_static() ? 0 : 1);
2214 // Take the Compile_lock to protect against changes in the CodeBlob structures
2215 MutexLocker mu1(Compile_lock, Mutex::_safepoint_check_flag);
2216 // Take the CodeCache_lock to protect against changes in the CodeHeap structure
2217 MutexLocker mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2218 _max_arity = _max_size = 0;
2219 _total_compiled_calls = 0;
2220 _max_compiled_calls_per_method = 0;
2221 for (int i = 0; i < MAX_ARITY; i++) _arity_histogram[i] = _size_histogram[i] = 0;
2222 CodeCache::nmethods_do(add_method_to_histogram);
2223 print_histogram();
2224 }
2225 };
2226
2227 uint64_t MethodArityHistogram::_arity_histogram[MethodArityHistogram::MAX_ARITY];
2228 uint64_t MethodArityHistogram::_size_histogram[MethodArityHistogram::MAX_ARITY];
2229 uint64_t MethodArityHistogram::_total_compiled_calls;
2230 uint64_t MethodArityHistogram::_max_compiled_calls_per_method;
2231 int MethodArityHistogram::_max_arity;
2232 int MethodArityHistogram::_max_size;
2233
2234 void SharedRuntime::print_call_statistics_on(outputStream* st) {
2235 tty->print_cr("Calls from compiled code:");
2236 int64_t total = _nof_normal_calls + _nof_interface_calls + _nof_static_calls;
2237 int64_t mono_c = _nof_normal_calls - _nof_megamorphic_calls;
2238 int64_t mono_i = _nof_interface_calls;
2239 tty->print_cr("\t" INT64_FORMAT_W(12) " (100%%) total non-inlined ", total);
2240 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- virtual calls ", _nof_normal_calls, percent(_nof_normal_calls, total));
2241 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_calls, percent(_nof_inlined_calls, _nof_normal_calls));
2242 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_c, percent(mono_c, _nof_normal_calls));
2243 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- megamorphic ", _nof_megamorphic_calls, percent(_nof_megamorphic_calls, _nof_normal_calls));
2244 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- interface calls ", _nof_interface_calls, percent(_nof_interface_calls, total));
2245 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_interface_calls, percent(_nof_inlined_interface_calls, _nof_interface_calls));
2246 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- monomorphic ", mono_i, percent(mono_i, _nof_interface_calls));
2247 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.1f%%) |- static/special calls", _nof_static_calls, percent(_nof_static_calls, total));
2248 tty->print_cr("\t" INT64_FORMAT_W(12) " (%4.0f%%) | |- inlined ", _nof_inlined_static_calls, percent(_nof_inlined_static_calls, _nof_static_calls));
2249 tty->cr();
2250 tty->print_cr("Note 1: counter updates are not MT-safe.");
2251 tty->print_cr("Note 2: %% in major categories are relative to total non-inlined calls;");
2252 tty->print_cr(" %% in nested categories are relative to their category");
2253 tty->print_cr(" (and thus add up to more than 100%% with inlining)");
2254 tty->cr();
2255
2256 MethodArityHistogram h;
2257 }
2258 #endif
2259
2260 #ifndef PRODUCT
2261 static int _lookups; // number of calls to lookup
2262 static int _equals; // number of buckets checked with matching hash
2263 static int _archived_hits; // number of successful lookups in archived table
2264 static int _runtime_hits; // number of successful lookups in runtime table
2265 static int _compact; // number of equals calls with compact signature
2266 #endif
2267
2268 // A simple wrapper class around the calling convention information
2269 // that allows sharing of adapters for the same calling convention.
2270 class AdapterFingerPrint : public MetaspaceObj {
2271 private:
2272 enum {
2273 _basic_type_bits = 4,
2274 _basic_type_mask = right_n_bits(_basic_type_bits),
2275 _basic_types_per_int = BitsPerInt / _basic_type_bits,
2276 _compact_int_count = 3
2277 };
2278 // TO DO: Consider integrating this with a more global scheme for compressing signatures.
2279 // For now, 4 bits per component (plus T_VOID gaps after double/long) is not excessive.
2280
2281 int _length;
2282 int _value[_compact_int_count];
2283
2284 // Private constructor. Use allocate() to get an instance.
2285 AdapterFingerPrint(int total_args_passed, BasicType* sig_bt) {
2286 // Pack the BasicTypes with 8 per int
2287 _length = (total_args_passed + (_basic_types_per_int-1)) / _basic_types_per_int;
2288 int sig_index = 0;
2289 for (int index = 0; index < _length; index++) {
2290 int value = 0;
2291 for (int byte = 0; sig_index < total_args_passed && byte < _basic_types_per_int; byte++) {
2292 int bt = adapter_encoding(sig_bt[sig_index++]);
2293 assert((bt & _basic_type_mask) == bt, "must fit in 4 bits");
2294 value = (value << _basic_type_bits) | bt;
2295 }
2296 _value[index] = value;
2297 }
2298 }
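// As an example of the packing above: a static method taking (Object, int, long)
// arrives here as sig_bt = {T_OBJECT, T_INT, T_LONG, T_VOID} (the T_VOID is the
// usual gap after a long). Assuming the standard BasicType numbering
// (T_INT=10, T_LONG=11, T_VOID=14) and the LP64 remapping of T_OBJECT to T_LONG
// in adapter_encoding() below, the four 4-bit codes pack into a single word:
// _length == 1 and _value[0] == 0xBABE.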
2299
2300 // Call deallocate instead
2301 ~AdapterFingerPrint() {
2302 FreeHeap(this);
2303 }
2304
2305 // Remap BasicTypes that are handled equivalently by the adapters.
2306 // These are correct for the current system but someday it might be
2307 // necessary to make this mapping platform dependent.
2308 static int adapter_encoding(BasicType in) {
2309 switch (in) {
2310 case T_BOOLEAN:
2311 case T_BYTE:
2312 case T_SHORT:
2313 case T_CHAR:
2314 // These are all promoted to T_INT in the calling convention
2315 return T_INT;
2316
2317 case T_OBJECT:
2318 case T_ARRAY:
2319 // In other words, we assume that any register good enough for
2320 // an int or long is good enough for a managed pointer.
2321 #ifdef _LP64
2322 return T_LONG;
2323 #else
2324 return T_INT;
2325 #endif
2326
2327 case T_INT:
2328 case T_LONG:
2329 case T_FLOAT:
2330 case T_DOUBLE:
2331 case T_VOID:
2332 return in;
2333
2334 default:
2335 ShouldNotReachHere();
2336 return T_CONFLICT;
2337 }
2338 }
2339
2340 void* operator new(size_t size, size_t fp_size) throw() {
2341 assert(fp_size >= size, "sanity check");
2342 void* p = AllocateHeap(fp_size, mtCode);
2343 memset(p, 0, fp_size);
2344 return p;
2345 }
2346
2347 template<typename Function>
2348 void iterate_args(Function function) {
2349 for (int i = 0; i < length(); i++) {
2350 unsigned val = (unsigned)value(i);
2351 // args are packed so that first/lower arguments are in the highest
2352 // bits of each int value, so iterate from highest to the lowest
2353 for (int j = 32 - _basic_type_bits; j >= 0; j -= _basic_type_bits) {
2354 unsigned v = (val >> j) & _basic_type_mask;
2355 if (v == 0) {
2356 continue;
2357 }
2358 function(v);
2359 }
2360 }
2361 }
2362
2363 public:
2364 static int allocation_size(int total_args_passed, BasicType* sig_bt) {
2365 int len = (total_args_passed + (_basic_types_per_int-1)) / _basic_types_per_int;
2366 return sizeof(AdapterFingerPrint) + (len > _compact_int_count ? (len - _compact_int_count) * sizeof(int) : 0);
2367 }
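// For instance, a signature with 4 packed BasicType entries gives len == 1,
// which fits in the inline _value[_compact_int_count] array, so no extra space
// is requested; one with 30 entries gives len == 4 and adds room for one int
// beyond the compact three.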
2368
2369 static AdapterFingerPrint* allocate(int total_args_passed, BasicType* sig_bt) {
2370 int size_in_bytes = allocation_size(total_args_passed, sig_bt);
2371 return new (size_in_bytes) AdapterFingerPrint(total_args_passed, sig_bt);
2372 }
2373
2374 static void deallocate(AdapterFingerPrint* fp) {
2375 fp->~AdapterFingerPrint();
2376 }
2377
2378 int value(int index) {
2379 return _value[index];
2380 }
2381
2382 int length() {
2383 if (_length < 0) return -_length;
2384 return _length;
2385 }
2386
2387 bool is_compact() {
2388 return _length <= _compact_int_count;
2389 }
2390
2391 unsigned int compute_hash() {
2392 int hash = 0;
2393 for (int i = 0; i < length(); i++) {
2394 int v = value(i);
2395 // Add an arithmetic operation to the hash (here +3) to improve hashing
2396 hash = ((hash << 8) ^ v ^ (hash >> 5)) + 3;
2397 }
2398 return (unsigned int)hash;
2399 }
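// For a single-word fingerprint whose packed value is 0xBABE, the loop above
// runs once and yields (0 ^ 0xBABE ^ 0) + 3 == 0xBAC1.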
2400
2401 const char* as_string() {
2402 stringStream st;
2403 st.print("0x");
2404 for (int i = 0; i < length(); i++) {
2405 st.print("%x", value(i));
2406 }
2407 return st.as_string();
2408 }
2409
2410 const char* as_basic_args_string() {
2411 stringStream st;
2412 bool long_prev = false;
2413 iterate_args([&] (int arg) {
2414 if (long_prev) {
2415 long_prev = false;
2416 if (arg == T_VOID) {
2417 st.print("J");
2418 } else {
2419 st.print("L");
2420 }
2421 }
2422 switch (arg) {
2423 case T_INT: st.print("I"); break;
2424 case T_LONG: long_prev = true; break;
2425 case T_FLOAT: st.print("F"); break;
2426 case T_DOUBLE: st.print("D"); break;
2427 case T_VOID: break;
2428 default: ShouldNotReachHere();
2429 }
2430 });
2431 if (long_prev) {
2432 st.print("L");
2433 }
2434 return st.as_string();
2435 }
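// For instance, a fingerprint packed from {T_OBJECT, T_INT, T_LONG, T_VOID}
// prints as "LIJ": the first T_LONG code is not followed by T_VOID, so it is
// reported as a managed pointer ("L"), while the second one is, so it is
// reported as a genuine long ("J").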
2436
2437 BasicType* as_basic_type(int& nargs) {
2438 nargs = 0;
2439 GrowableArray<BasicType> btarray;
2440 bool long_prev = false;
2441
2442 iterate_args([&] (int arg) {
2443 if (long_prev) {
2444 long_prev = false;
2445 if (arg == T_VOID) {
2446 btarray.append(T_LONG);
2447 } else {
2448 btarray.append(T_OBJECT); // it could be T_ARRAY; it shouldn't matter
2449 }
2450 }
2451 switch (arg) {
2452 case T_INT: // fallthrough
2453 case T_FLOAT: // fallthrough
2454 case T_DOUBLE:
2455 case T_VOID:
2456 btarray.append((BasicType)arg);
2457 break;
2458 case T_LONG:
2459 long_prev = true;
2460 break;
2461 default: ShouldNotReachHere();
2462 }
2463 });
2464
2465 if (long_prev) {
2466 btarray.append(T_OBJECT);
2467 }
2468
2469 nargs = btarray.length();
2470 BasicType* sig_bt = NEW_RESOURCE_ARRAY(BasicType, nargs);
2471 int index = 0;
2472 GrowableArrayIterator<BasicType> iter = btarray.begin();
2473 while (iter != btarray.end()) {
2474 sig_bt[index++] = *iter;
2475 ++iter;
2476 }
2477 assert(index == btarray.length(), "sanity check");
2478 #ifdef ASSERT
2479 {
2480 AdapterFingerPrint* compare_fp = AdapterFingerPrint::allocate(nargs, sig_bt);
2481 assert(this->equals(compare_fp), "sanity check");
2482 AdapterFingerPrint::deallocate(compare_fp);
2483 }
2484 #endif
2485 return sig_bt;
2486 }
2487
2488 bool equals(AdapterFingerPrint* other) {
2489 if (other->_length != _length) {
2490 return false;
2491 } else {
2492 for (int i = 0; i < _length; i++) {
2493 if (_value[i] != other->_value[i]) {
2494 return false;
2495 }
2496 }
2497 }
2498 return true;
2499 }
2500
2501 // methods required by virtue of being a MetaspaceObj
2502 void metaspace_pointers_do(MetaspaceClosure* it) { return; /* nothing to do here */ }
2503 int size() const { return (int)heap_word_size(sizeof(AdapterFingerPrint) + (_length > _compact_int_count ? (_length - _compact_int_count) * sizeof(int) : 0)); }
2504 MetaspaceObj::Type type() const { return AdapterFingerPrintType; }
2505
2506 static bool equals(AdapterFingerPrint* const& fp1, AdapterFingerPrint* const& fp2) {
2507 NOT_PRODUCT(_equals++);
2508 return fp1->equals(fp2);
2509 }
2510
2511 static unsigned int compute_hash(AdapterFingerPrint* const& fp) {
2512 return fp->compute_hash();
2513 }
2514 };
2515
2516 #if INCLUDE_CDS
2517 static inline bool adapter_fp_equals_compact_hashtable_entry(AdapterHandlerEntry* entry, AdapterFingerPrint* fp, int len_unused) {
2518 return AdapterFingerPrint::equals(entry->fingerprint(), fp);
2519 }
2520
2521 class ArchivedAdapterTable : public OffsetCompactHashtable<
2522 AdapterFingerPrint*,
2523 AdapterHandlerEntry*,
2524 adapter_fp_equals_compact_hashtable_entry> {};
2525 #endif // INCLUDE_CDS
2526
2527 // A hashtable mapping from AdapterFingerPrints to AdapterHandlerEntries
2528 using AdapterHandlerTable = ResourceHashtable<AdapterFingerPrint*, AdapterHandlerEntry*, 293,
2529 AnyObj::C_HEAP, mtCode,
2530 AdapterFingerPrint::compute_hash,
2531 AdapterFingerPrint::equals>;
2532 static AdapterHandlerTable* _adapter_handler_table;
2533 static GrowableArray<AdapterHandlerEntry*>* _adapter_handler_list = nullptr;
2534
2535 // Find an entry with the same fingerprint, if it exists
2536 AdapterHandlerEntry* AdapterHandlerLibrary::lookup(AdapterFingerPrint* fp) {
2537 NOT_PRODUCT(_lookups++);
2538 AdapterHandlerEntry* entry = nullptr;
2539 #if INCLUDE_CDS
2540 // If we are building the archive then the archived adapter table is
2541 // not valid and we need to use the entries added to the runtime table.
2542 if (!CDSConfig::is_dumping_adapters()) {
2543 // Search the archived table first. It is a read-only table, so it can be searched without a lock.
2544 entry = _archived_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */);
2545 if (entry != nullptr) {
2546 #ifndef PRODUCT
2547 if (fp->is_compact()) {
2548 _compact++;
2549 }
2550 _archived_hits++;
2551 #endif
2552 return entry;
2553 }
2554 }
2555 #endif // INCLUDE_CDS
2556 assert_lock_strong(AdapterHandlerLibrary_lock);
2557 AdapterHandlerEntry** entry_p = _adapter_handler_table->get(fp);
2558 if (entry_p != nullptr) {
2559 entry = *entry_p;
2560 assert(entry->fingerprint()->equals(fp), "fingerprint mismatch key fp %s %s (hash=%d) != found fp %s %s (hash=%d)",
2561 entry->fingerprint()->as_basic_args_string(), entry->fingerprint()->as_string(), entry->fingerprint()->compute_hash(),
2562 fp->as_basic_args_string(), fp->as_string(), fp->compute_hash());
2563 #ifndef PRODUCT
2564 if (fp->is_compact()) _compact++;
2565 _runtime_hits++;
2566 #endif
2567 return entry;
2568 }
2569 return nullptr;
2570 }
2571
2572 #ifndef PRODUCT
2573 void AdapterHandlerLibrary::print_statistics_on(outputStream* st) {
2574 auto size = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
2575 return sizeof(*key) + sizeof(*a);
2576 };
2577 TableStatistics ts = _adapter_handler_table->statistics_calculate(size);
2578 ts.print(st, "AdapterHandlerTable");
2579 st->print_cr("AdapterHandlerTable (table_size=%d, entries=%d)",
2580 _adapter_handler_table->table_size(), _adapter_handler_table->number_of_entries());
2581 int total_hits = _archived_hits + _runtime_hits;
2582 st->print_cr("AdapterHandlerTable: lookups %d equals %d hits %d (archived=%d+runtime=%d) compact %d",
2583 _lookups, _equals, total_hits, _archived_hits, _runtime_hits, _compact);
2584 }
2585 #endif // !PRODUCT
2586
2587 // ---------------------------------------------------------------------------
2588 // Implementation of AdapterHandlerLibrary
2589 AdapterHandlerEntry* AdapterHandlerLibrary::_abstract_method_handler = nullptr;
2590 AdapterHandlerEntry* AdapterHandlerLibrary::_no_arg_handler = nullptr;
2591 AdapterHandlerEntry* AdapterHandlerLibrary::_int_arg_handler = nullptr;
2592 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_arg_handler = nullptr;
2593 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_int_arg_handler = nullptr;
2594 AdapterHandlerEntry* AdapterHandlerLibrary::_obj_obj_arg_handler = nullptr;
2595 #if INCLUDE_CDS
2596 ArchivedAdapterTable AdapterHandlerLibrary::_archived_adapter_handler_table;
2597 #endif // INCLUDE_CDS
2598 const int AdapterHandlerLibrary_size = 16*K;
2599 BufferBlob* AdapterHandlerLibrary::_buffer = nullptr;
2600
2601 BufferBlob* AdapterHandlerLibrary::buffer_blob() {
2602 return _buffer;
2603 }
2604
2605 static void post_adapter_creation(const AdapterBlob* new_adapter,
2606 const AdapterHandlerEntry* entry) {
2607 if (Forte::is_enabled() || JvmtiExport::should_post_dynamic_code_generated()) {
2608 char blob_id[256];
2609 jio_snprintf(blob_id,
2610 sizeof(blob_id),
2611 "%s(%s)",
2612 new_adapter->name(),
2613 entry->fingerprint()->as_string());
2614 if (Forte::is_enabled()) {
2615 Forte::register_stub(blob_id, new_adapter->content_begin(), new_adapter->content_end());
2616 }
2617
2621 }
2622 }
2623
2624 void AdapterHandlerLibrary::initialize() {
2625 ResourceMark rm;
2626 AdapterBlob* no_arg_blob = nullptr;
2627 AdapterBlob* int_arg_blob = nullptr;
2628 AdapterBlob* obj_arg_blob = nullptr;
2629 AdapterBlob* obj_int_arg_blob = nullptr;
2630 AdapterBlob* obj_obj_arg_blob = nullptr;
2631 {
2632 _adapter_handler_table = new (mtCode) AdapterHandlerTable();
2633 MutexLocker mu(AdapterHandlerLibrary_lock);
2634
2635 // Create a special handler for abstract methods. Abstract methods
2636 // are never compiled so an i2c entry is somewhat meaningless, but
2637 // throw AbstractMethodError just in case.
2638 // Pass wrong_method_abstract for the c2i transitions to return
2639 // AbstractMethodError for invalid invocations.
2640 address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
2641 _abstract_method_handler = AdapterHandlerLibrary::new_entry(AdapterFingerPrint::allocate(0, nullptr),
2642 SharedRuntime::throw_AbstractMethodError_entry(),
2643 wrong_method_abstract, wrong_method_abstract);
2644
2645 _buffer = BufferBlob::create("adapters", AdapterHandlerLibrary_size);
2646 _no_arg_handler = create_simple_adapter(no_arg_blob, 0, nullptr);
2647
2648 BasicType obj_args[] = { T_OBJECT };
2649 _obj_arg_handler = create_simple_adapter(obj_arg_blob, 1, obj_args);
2650
2651 BasicType int_args[] = { T_INT };
2652 _int_arg_handler = create_simple_adapter(int_arg_blob, 1, int_args);
2653
2654 BasicType obj_int_args[] = { T_OBJECT, T_INT };
2655 _obj_int_arg_handler = create_simple_adapter(obj_int_arg_blob, 2, obj_int_args);
2656
2657 BasicType obj_obj_args[] = { T_OBJECT, T_OBJECT };
2658 _obj_obj_arg_handler = create_simple_adapter(obj_obj_arg_blob, 2, obj_obj_args);
2659
2660 assert(no_arg_blob != nullptr &&
2661 obj_arg_blob != nullptr &&
2662 int_arg_blob != nullptr &&
2663 obj_int_arg_blob != nullptr &&
2664 obj_obj_arg_blob != nullptr, "Initial adapters must be properly created");
2665 }
2666
2667 // Outside of the lock
2668 post_adapter_creation(no_arg_blob, _no_arg_handler);
2669 post_adapter_creation(obj_arg_blob, _obj_arg_handler);
2670 post_adapter_creation(int_arg_blob, _int_arg_handler);
2671 post_adapter_creation(obj_int_arg_blob, _obj_int_arg_handler);
2672 post_adapter_creation(obj_obj_arg_blob, _obj_obj_arg_handler);
2673 }
2674
2675 AdapterHandlerEntry* AdapterHandlerLibrary::create_simple_adapter(AdapterBlob*& adapter_blob,
2676 int total_args_passed,
2677 BasicType* sig_bt) {
2678 AdapterFingerPrint* fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2679 // We may find the adapter in the table if it is loaded from the AOT cache
2680 AdapterHandlerEntry* entry = lookup(fp);
2681 if (entry != nullptr) {
2682 assert(entry->is_shared() && !entry->is_linked(), "Non null AdapterHandlerEntry should be in the AOT cache in unlinked state");
2683 if (!link_adapter_handler(entry, adapter_blob)) {
2684 if (!generate_adapter_code(adapter_blob, entry, total_args_passed, sig_bt, /* is_transient */ false)) {
2685 return nullptr;
2686 }
2687 }
2688 // AdapterFingerPrint is already in the cache, no need to keep this one
2689 AdapterFingerPrint::deallocate(fp);
2690 } else {
2691 entry = create_adapter(adapter_blob, fp, total_args_passed, sig_bt, /* is_transient */ false);
2692 }
2693 return entry;
2694 }
2695
2696 AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
2697 address i2c_entry,
2698 address c2i_entry,
2699 address c2i_unverified_entry,
2700 address c2i_no_clinit_check_entry) {
2701 // Insert an entry into the table
2702 return AdapterHandlerEntry::allocate(fingerprint, i2c_entry, c2i_entry, c2i_unverified_entry,
2703 c2i_no_clinit_check_entry);
2704 }
2705
2706 AdapterHandlerEntry* AdapterHandlerLibrary::get_simple_adapter(const methodHandle& method) {
2707 if (method->is_abstract()) {
2708 return _abstract_method_handler;
2709 }
2710 int total_args_passed = method->size_of_parameters(); // All args on stack
2711 if (total_args_passed == 0) {
2712 return _no_arg_handler;
2713 } else if (total_args_passed == 1) {
2714 if (!method->is_static()) {
2715 return _obj_arg_handler;
2716 }
2717 switch (method->signature()->char_at(1)) {
2718 case JVM_SIGNATURE_CLASS:
2719 case JVM_SIGNATURE_ARRAY:
2720 return _obj_arg_handler;
2721 case JVM_SIGNATURE_INT:
2722 case JVM_SIGNATURE_BOOLEAN:
2723 case JVM_SIGNATURE_CHAR:
2793 // Fast-path for trivial adapters
2794 AdapterHandlerEntry* entry = get_simple_adapter(method);
2795 if (entry != nullptr) {
2796 return entry;
2797 }
2798
2799 ResourceMark rm;
2800 AdapterBlob* new_adapter = nullptr;
2801
2802 // Fill in the signature array, for the calling-convention call.
2803 int total_args_passed = method->size_of_parameters(); // All args on stack
2804
2805 AdapterSignatureIterator si(method->signature(), method->constMethod()->fingerprint(),
2806 method->is_static(), total_args_passed);
2807 assert(si.slots() == total_args_passed, "");
2808 BasicType* sig_bt = si.basic_types();
2809 {
2810 MutexLocker mu(AdapterHandlerLibrary_lock);
2811
2812 // Lookup method signature's fingerprint
2813 AdapterFingerPrint *fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2814 entry = lookup(fp);
2815
2816 if (entry != nullptr) {
2817 #ifdef ASSERT
2818 if (VerifyAdapterSharing) {
2819 AdapterBlob* comparison_blob = nullptr;
2820 AdapterFingerPrint* comparison_fp = AdapterFingerPrint::allocate(total_args_passed, sig_bt);
2821 AdapterHandlerEntry* comparison_entry = create_adapter(comparison_blob, comparison_fp, total_args_passed, sig_bt, true);
2822 assert(comparison_blob == nullptr, "no blob should be created when creating an adapter for comparison");
2823 assert(comparison_entry->compare_code(entry), "code must match");
2824 AdapterFingerPrint::deallocate(comparison_fp);
2825 // Release the one just created and return the original
2826 AdapterHandlerEntry::deallocate(comparison_entry);
2827 }
2828 #endif
2829 AdapterFingerPrint::deallocate(fp);
2830 return entry;
2831 }
2832
2833 entry = create_adapter(new_adapter, fp, total_args_passed, sig_bt, /* is_transient */ false);
2834 }
2835
2836 // Outside of the lock
2837 if (new_adapter != nullptr) {
2838 post_adapter_creation(new_adapter, entry);
2839 }
2840 return entry;
2841 }
2842
2843 bool AdapterHandlerLibrary::lookup_aot_cache(AdapterHandlerEntry* handler, CodeBuffer* buffer) {
2844 ResourceMark rm;
2845 const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
2846 const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
2847 uint32_t offsets[4];
2848 if (SCCache::load_adapter(buffer, id, name, offsets)) {
2849 address i2c_entry = buffer->insts_begin();
2850 assert(offsets[0] == 0, "sanity check");
2851 handler->set_entry_points(i2c_entry, i2c_entry + offsets[1], i2c_entry + offsets[2], i2c_entry + offsets[3]);
2852 return true;
2853 }
2854 return false;
2855 }
2856
2857 #ifndef PRODUCT
2858 void AdapterHandlerLibrary::print_adapter_handler_info(AdapterHandlerEntry* handler, AdapterBlob* adapter_blob) {
2859 ttyLocker ttyl;
2860 ResourceMark rm;
2861 int insts_size = adapter_blob->code_size();
2862 handler->print_adapter_on(tty);
2863 tty->print_cr("i2c argument handler for: %s %s (%d bytes generated)",
2864 handler->fingerprint()->as_basic_args_string(),
2865 handler->fingerprint()->as_string(), insts_size);
2866 tty->print_cr("c2i argument handler starts at " INTPTR_FORMAT, p2i(handler->get_c2i_entry()));
2867 if (Verbose || PrintStubCode) {
2868 address first_pc = handler->base_address();
2869 if (first_pc != nullptr) {
2870 Disassembler::decode(first_pc, first_pc + insts_size, tty, &adapter_blob->asm_remarks());
2871 tty->cr();
2872 }
2873 }
2874 }
2875 #endif // PRODUCT
2876
2877 bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob,
2878 AdapterHandlerEntry* handler,
2879 int total_args_passed,
2880 BasicType* sig_bt,
2881 bool is_transient) {
2882 if (log_is_enabled(Info, perf, class, link)) {
2883 ClassLoader::perf_method_adapters_count()->inc();
2884 }
2885
2886 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2887 CodeBuffer buffer(buf);
2888 short buffer_locs[20];
2889 buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2890 sizeof(buffer_locs)/sizeof(relocInfo));
2891 MacroAssembler masm(&buffer);
2892 VMRegPair stack_regs[16];
2893 VMRegPair* regs = (total_args_passed <= 16) ? stack_regs : NEW_RESOURCE_ARRAY(VMRegPair, total_args_passed);
2894
2895 // Get a description of the compiled java calling convention and the largest used (VMReg) stack slot usage
2896 int comp_args_on_stack = SharedRuntime::java_calling_convention(sig_bt, regs, total_args_passed);
2897 SharedRuntime::generate_i2c2i_adapters(&masm,
2898 total_args_passed,
2899 comp_args_on_stack,
2900 sig_bt,
2901 regs,
2902 handler);
2903 if (CDSConfig::is_dumping_adapters()) {
2904 // try to save generated code
2905 const char* name = AdapterHandlerLibrary::name(handler->fingerprint());
2906 const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint());
2907 uint32_t offsets[4];
2908 offsets[0] = 0;
2909 offsets[1] = handler->get_c2i_entry() - handler->get_i2c_entry();
2910 offsets[2] = handler->get_c2i_unverified_entry() - handler->get_i2c_entry();
2911 offsets[3] = handler->get_c2i_no_clinit_check_entry() - handler->get_i2c_entry();
2912 SCCache::store_adapter(&buffer, id, name, offsets);
2913 }
2914 #ifdef ASSERT
2915 if (VerifyAdapterSharing) {
2916 handler->save_code(buf->code_begin(), buffer.insts_size());
2917 if (is_transient) {
2918 return true;
2919 }
2920 }
2921 #endif
2922
2923 adapter_blob = AdapterBlob::create(&buffer);
2924 if (adapter_blob == nullptr) {
2925 // CodeCache is full, disable compilation
2926 // Ought to log this but the compile log is only per compile thread
2927 // and we're some nondescript Java thread.
2928 return false;
2929 }
2930 handler->relocate(adapter_blob->content_begin());
2931 #ifndef PRODUCT
2932 // debugging support
2933 if (PrintAdapterHandlers || PrintStubCode) {
2934 print_adapter_handler_info(handler, adapter_blob);
2935 }
2936 #endif
2937 return true;
2938 }
2939
2940 AdapterHandlerEntry* AdapterHandlerLibrary::create_adapter(AdapterBlob*& adapter_blob,
2941 AdapterFingerPrint* fingerprint,
2942 int total_args_passed,
2943 BasicType* sig_bt,
2944 bool is_transient) {
2945 AdapterHandlerEntry* handler = AdapterHandlerLibrary::new_entry(fingerprint);
2946 if (!generate_adapter_code(adapter_blob, handler, total_args_passed, sig_bt, is_transient)) {
2947 return nullptr;
2948 }
2949 if (!is_transient) {
2950 assert_lock_strong(AdapterHandlerLibrary_lock);
2951 _adapter_handler_table->put(fingerprint, handler);
2952 }
2953 return handler;
2954 }
2955
2956 #if INCLUDE_CDS
2957 bool AdapterHandlerLibrary::link_adapter_handler(AdapterHandlerEntry* handler, AdapterBlob*& adapter_blob) {
2958 #ifndef PRODUCT
2959 if (TestAdapterLinkFailure) {
2960 return false;
2961 }
2962 #endif
2963 BufferBlob* buf = buffer_blob(); // the temporary code buffer in CodeCache
2964 CodeBuffer buffer(buf);
2965 short buffer_locs[20];
2966 buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
2967 sizeof(buffer_locs)/sizeof(relocInfo));
2968
2969 if (!lookup_aot_cache(handler, &buffer)) {
2970 return false;
2971 }
2972 adapter_blob = AdapterBlob::create(&buffer);
2973 if (adapter_blob == nullptr) {
2974 // CodeCache is full, disable compilation
2975 // Ought to log this but the compile log is only per compile thread
2976 // and we're some nondescript Java thread.
2977 return false;
2978 }
2979 handler->relocate(adapter_blob->content_begin());
2980 #ifndef PRODUCT
2981 // debugging support
2982 if (PrintAdapterHandlers || PrintStubCode) {
2983 print_adapter_handler_info(handler, adapter_blob);
2984 }
2985 #endif
2986 return true;
2987 }
2988
2989 class CopyAdapterTableToArchive : StackObj {
2990 private:
2991 CompactHashtableWriter* _writer;
2992 ArchiveBuilder* _builder;
2993 public:
2994 CopyAdapterTableToArchive(CompactHashtableWriter* writer) : _writer(writer),
2995 _builder(ArchiveBuilder::current())
2996 {}
2997
2998 bool do_entry(AdapterFingerPrint* fp, AdapterHandlerEntry* entry) {
2999 LogStreamHandle(Trace, cds) lsh;
3000 if (ArchiveBuilder::current()->has_been_archived((address)entry)) {
3001 assert(ArchiveBuilder::current()->has_been_archived((address)fp), "must be");
3002 AdapterFingerPrint* buffered_fp = ArchiveBuilder::current()->get_buffered_addr(fp);
3003 assert(buffered_fp != nullptr,"sanity check");
3004 AdapterHandlerEntry* buffered_entry = ArchiveBuilder::current()->get_buffered_addr(entry);
3005 assert(buffered_entry != nullptr,"sanity check");
3006
3007 uint hash = fp->compute_hash();
3008 u4 delta = _builder->buffer_to_offset_u4((address)buffered_entry);
3009 _writer->add(hash, delta);
3010 if (lsh.is_enabled()) {
3011 address fp_runtime_addr = (address)buffered_fp + ArchiveBuilder::current()->buffer_to_requested_delta();
3012 address entry_runtime_addr = (address)buffered_entry + ArchiveBuilder::current()->buffer_to_requested_delta();
3013 log_trace(cds)("Added fp=%p (%s), entry=%p to the archived adater table", buffered_fp, buffered_fp->as_basic_args_string(), buffered_entry);
3014 }
3015 } else {
3016 if (lsh.is_enabled()) {
3017 log_trace(cds)("Skipping adapter handler %p (fp=%s) as it is not archived", entry, fp->as_basic_args_string());
3018 }
3019 }
3020 return true;
3021 }
3022 };
3023
3024 size_t AdapterHandlerLibrary::estimate_size_for_archive() {
3025 return CompactHashtableWriter::estimate_size(_adapter_handler_table->number_of_entries());
3026 }
3027
3028 void AdapterHandlerLibrary::archive_adapter_table() {
3029 CompactHashtableStats stats;
3030 CompactHashtableWriter writer(_adapter_handler_table->number_of_entries(), &stats);
3031 CopyAdapterTableToArchive copy(&writer);
3032 _adapter_handler_table->iterate(&copy);
3033 writer.dump(&_archived_adapter_handler_table, "archived adapter table");
3034 }
3035
3036 void AdapterHandlerLibrary::serialize_shared_table_header(SerializeClosure* soc) {
3037 _archived_adapter_handler_table.serialize_header(soc);
3038 }
3039 #endif // INCLUDE_CDS
3040
3041 address AdapterHandlerEntry::base_address() {
3042 address base = _i2c_entry;
3043 if (base == nullptr) base = _c2i_entry;
3044 assert(base <= _c2i_entry || _c2i_entry == nullptr, "");
3045 assert(base <= _c2i_unverified_entry || _c2i_unverified_entry == nullptr, "");
3046 assert(base <= _c2i_no_clinit_check_entry || _c2i_no_clinit_check_entry == nullptr, "");
3047 return base;
3048 }
3049
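// Shift every non-null entry point by (new_base - base_address()). For example
// (hypothetical addresses): if the adapter was generated with base_address() at
// 0x1000 and its code is copied into a blob whose content begins at 0x5000, each
// entry point moves by delta == 0x4000, as in
//
//   handler->relocate(adapter_blob->content_begin());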
3050 void AdapterHandlerEntry::relocate(address new_base) {
3051 address old_base = base_address();
3052 assert(old_base != nullptr, "");
3053 ptrdiff_t delta = new_base - old_base;
3054 if (_i2c_entry != nullptr)
3055 _i2c_entry += delta;
3056 if (_c2i_entry != nullptr)
3057 _c2i_entry += delta;
3058 if (_c2i_unverified_entry != nullptr)
3059 _c2i_unverified_entry += delta;
3060 if (_c2i_no_clinit_check_entry != nullptr)
3061 _c2i_no_clinit_check_entry += delta;
3062 assert(base_address() == new_base, "");
3063 }
3064
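// Report the embedded metaspace pointer(s) of this entry to the archiving code;
// pushing _fingerprint lets the ArchiveBuilder copy and relocate the fingerprint
// together with the entry.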
3065 void AdapterHandlerEntry::metaspace_pointers_do(MetaspaceClosure* it) {
3066 LogStreamHandle(Trace, cds) lsh;
3067 if (lsh.is_enabled()) {
3068 lsh.print("Iter(AdapterHandlerEntry): %p(%s)", this, _fingerprint->as_basic_args_string());
3069 lsh.cr();
3070 }
3071 it->push(&_fingerprint);
3072 }
3073
3074 #if INCLUDE_CDS
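// Archiving support for AdapterHandlerEntry: remove_unshareable_info() wipes the
// entry points before the entry is written to the archive (the trailing 'false'
// presumably clears the linked state), and restore_unshareable_info() re-links
// the entry at runtime, first from the AOT code cache and, failing that, by
// generating the adapter code anew.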
3075 void AdapterHandlerEntry::remove_unshareable_info() {
3076 set_entry_points(nullptr, nullptr, nullptr, nullptr, false);
3077 }
3078
3079 void AdapterHandlerEntry::restore_unshareable_info(TRAPS) {
3080 PerfTraceElapsedTime timer(ClassLoader::perf_method_adapters_time());
3081 // A fixed set of simple adapters is eagerly linked during JVM initialization
3082 // in AdapterHandlerLibrary::initialize().
3083 // Others may already have been linked because they are shared with other methods.
3084 if (is_linked()) {
3085 return;
3086 }
3087 AdapterBlob* adapter_blob = nullptr;
3088 {
3089 MutexLocker mu(AdapterHandlerLibrary_lock);
3090 assert(_fingerprint != nullptr, "_fingerprint must not be null");
3091 #ifdef ASSERT
3092 AdapterHandlerEntry* entry = AdapterHandlerLibrary::lookup(_fingerprint);
3093 assert(entry == this, "sanity check");
3094 #endif
3095 if (!AdapterHandlerLibrary::link_adapter_handler(this, adapter_blob)) {
3096 ResourceMark rm;
3097 log_warning(cds)("Failed to link AdapterHandlerEntry to its code in the AOT code cache");
3098 int nargs;
3099 BasicType* bt = _fingerprint->as_basic_type(nargs);
3100 if (!AdapterHandlerLibrary::generate_adapter_code(adapter_blob, this, nargs, bt, /* is_transient */ false)) {
3101 if (!is_init_completed()) {
3102 // Don't throw exceptions during VM initialization because java.lang.* classes
3103 // might not have been initialized, causing problems when constructing the
3104 // Java exception object.
3105 vm_exit_during_initialization("Out of space in CodeCache for adapters");
3106 } else {
3107 THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Out of space in CodeCache for adapters");
3108 }
3109 }
3110 }
3111 }
3112 // Outside of the lock
3113 if (adapter_blob != nullptr) {
3114 post_adapter_creation(adapter_blob, this);
3115 }
3116 assert(_linked, "AdapterHandlerEntry must now be linked");
3117 }
3118 #endif // INCLUDE_CDS
3119
3120 AdapterHandlerEntry::~AdapterHandlerEntry() {
3121 #ifdef ASSERT
3122 FREE_C_HEAP_ARRAY(unsigned char, _saved_code);
3123 #endif
3124 FreeHeap(this);
3125 }
3126
3127
3128 #ifdef ASSERT
3129 // Capture the code before relocation so that it can be compared
3130 // against other versions. If the code is captured after relocation
3131 // then relative instructions won't be equivalent.
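// The saved copy is meant for the VerifyAdapterSharing debug check, where
// compare_code() verifies that two methods mapping to the same fingerprint
// really produce byte-identical adapter code.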
3132 void AdapterHandlerEntry::save_code(unsigned char* buffer, int length) {
3133 _saved_code = NEW_C_HEAP_ARRAY(unsigned char, length, mtCode);
3134 _saved_code_length = length;
3135 memcpy(_saved_code, buffer, length);
3136 }
3137
3138
3139 bool AdapterHandlerEntry::compare_code(AdapterHandlerEntry* other) {
3140 assert(_saved_code != nullptr && other->_saved_code != nullptr, "code not saved");
3141
3142 if (other->_saved_code_length != _saved_code_length) {
3143 return false;
3144 }
3409 assert(i - max_locals == active_monitor_count*2, "found the expected number of monitors");
3410
3411 RegisterMap map(current,
3412 RegisterMap::UpdateMap::skip,
3413 RegisterMap::ProcessFrames::include,
3414 RegisterMap::WalkContinuation::skip);
3415 frame sender = fr.sender(&map);
3416 if (sender.is_interpreted_frame()) {
3417 current->push_cont_fastpath(sender.sp());
3418 }
3419
3420 return buf;
3421 JRT_END
3422
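// Release the temporary C-heap buffer that OSR_migration_begin() allocated to
// carry the interpreter frame's locals and monitors; by the time we get here the
// compiled OSR frame has already consumed its contents.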
3423 JRT_LEAF(void, SharedRuntime::OSR_migration_end( intptr_t* buf) )
3424 FREE_C_HEAP_ARRAY(intptr_t, buf);
3425 JRT_END
3426
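// Does blob 'b' back one of the adapters we know about? Check the archived table
// first (read-only at runtime, so no lock is taken), then the runtime table,
// which requires the AdapterHandlerLibrary_lock or a safepoint.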
3427 bool AdapterHandlerLibrary::contains(const CodeBlob* b) {
3428 bool found = false;
3429 #if INCLUDE_CDS
3430 auto findblob_archived_table = [&] (AdapterHandlerEntry* handler) {
3431 return (found = (b == CodeCache::find_blob(handler->get_i2c_entry())));
3432 };
3433 _archived_adapter_handler_table.iterate(findblob_archived_table);
3434 #endif // INCLUDE_CDS
3435 if (!found) {
3436 auto findblob_runtime_table = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
3437 return (found = (b == CodeCache::find_blob(a->get_i2c_entry())));
3438 };
3439 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3440 _adapter_handler_table->iterate(findblob_runtime_table);
3441 }
3442 return found;
3443 }
3444
3445 const char* AdapterHandlerLibrary::name(AdapterFingerPrint* fingerprint) {
3446 return fingerprint->as_basic_args_string();
3447 }
3448
3449 uint32_t AdapterHandlerLibrary::id(AdapterFingerPrint* fingerprint) {
3450 unsigned int hash = fingerprint->compute_hash();
3451 return hash;
3452 }
3453
3454 void AdapterHandlerLibrary::print_handler_on(outputStream* st, const CodeBlob* b) {
3455 bool found = false;
3456 #if INCLUDE_CDS
3457 auto findblob_archived_table = [&] (AdapterHandlerEntry* handler) {
3458 if (b == CodeCache::find_blob(handler->get_i2c_entry())) {
3459 found = true;
3460 st->print("Adapter for signature: ");
3461 handler->print_adapter_on(st);
3462 return true;
3463 } else {
3464 return false; // keep looking
3465
3466 }
3467 };
3468 _archived_adapter_handler_table.iterate(findblob_archived_table);
3469 #endif // INCLUDE_CDS
3470 if (!found) {
3471 auto findblob_runtime_table = [&] (AdapterFingerPrint* key, AdapterHandlerEntry* a) {
3472 if (b == CodeCache::find_blob(a->get_i2c_entry())) {
3473 found = true;
3474 st->print("Adapter for signature: ");
3475 a->print_adapter_on(st);
3476 return true;
3477 } else {
3478 return false; // keep looking
3479 }
3480 };
3481 assert_locked_or_safepoint(AdapterHandlerLibrary_lock);
3482 _adapter_handler_table->iterate(findblob_runtime_table);
3483 }
3484 assert(found, "Should have found handler");
3485 }
3486
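// One line per entry: the AHE address, the fingerprint, and whichever entry
// points are set. Illustrative output (made-up addresses, fingerprint shown
// schematically):
//
//   AHE@0x00007f3a1c012345: <fingerprint> i2c: 0x00007f3a1d000000 c2i: 0x00007f3a1d000040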
3487 void AdapterHandlerEntry::print_adapter_on(outputStream* st) const {
3488 st->print("AHE@" INTPTR_FORMAT ": %s", p2i(this), fingerprint()->as_string());
3489 if (get_i2c_entry() != nullptr) {
3490 st->print(" i2c: " INTPTR_FORMAT, p2i(get_i2c_entry()));
3491 }
3492 if (get_c2i_entry() != nullptr) {
3493 st->print(" c2i: " INTPTR_FORMAT, p2i(get_c2i_entry()));
3494 }
3495 if (get_c2i_unverified_entry() != nullptr) {
3496 st->print(" c2iUV: " INTPTR_FORMAT, p2i(get_c2i_unverified_entry()));
3497 }
3498 if (get_c2i_no_clinit_check_entry() != nullptr) {
3499 st->print(" c2iNCI: " INTPTR_FORMAT, p2i(get_c2i_no_clinit_check_entry()));
3500 }
3501 st->cr();
3502 }
3503
3504 bool AdapterHandlerLibrary::is_abstract_method_adapter(AdapterHandlerEntry* entry) {
3505 if (entry == _abstract_method_handler) {
3506 return true;
3507 }
3508 return false;
3509 }
3510
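// Called after the reserved stack zone has been used: re-arm its guard and reset
// the reserved-stack activation watermark to the stack base so that a later
// @ReservedStackAccess method can claim the zone again.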
3511 JRT_LEAF(void, SharedRuntime::enable_stack_reserved_zone(JavaThread* current))
3512 assert(current == JavaThread::current(), "pre-condition");
3513 StackOverflow* overflow_state = current->stack_overflow_state();
3514 overflow_state->enable_stack_reserved_zone(/*check_if_disabled*/true);
3515 overflow_state->set_reserved_stack_activation(current->stack_base());
3516 JRT_END
3517
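// Walk the physical Java frames starting at 'fr', looking for the closest
// activation of a method annotated with @ReservedStackAccess (continuations are
// deliberately not walked). Presumably returns that frame, or an empty frame if
// none is found, so the caller can decide whether to hand out the reserved zone.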
3518 frame SharedRuntime::look_for_reserved_stack_annotated_method(JavaThread* current, frame fr) {
3519 ResourceMark rm(current);
3520 frame activation;
3521 nmethod* nm = nullptr;
3522 int count = 1;
3523
3524 assert(fr.is_java_frame(), "Must start on Java frame");
3525
3526 RegisterMap map(JavaThread::current(),
3527 RegisterMap::UpdateMap::skip,
3528 RegisterMap::ProcessFrames::skip,
3529 RegisterMap::WalkContinuation::skip); // don't walk continuations
3530 for (; !fr.is_first_frame(); fr = fr.sender(&map)) {