7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/assembler.inline.hpp"
26 #include "cds/cdsConfig.hpp"
27 #include "code/codeCache.hpp"
28 #include "code/compiledIC.hpp"
29 #include "code/dependencies.hpp"
30 #include "code/nativeInst.hpp"
31 #include "code/nmethod.inline.hpp"
32 #include "code/scopeDesc.hpp"
33 #include "compiler/abstractCompiler.hpp"
34 #include "compiler/compilationLog.hpp"
35 #include "compiler/compileBroker.hpp"
36 #include "compiler/compileLog.hpp"
37 #include "compiler/compilerDirectives.hpp"
38 #include "compiler/compilerOracle.hpp"
39 #include "compiler/compileTask.hpp"
40 #include "compiler/directivesParser.hpp"
41 #include "compiler/disassembler.hpp"
42 #include "compiler/oopMap.inline.hpp"
43 #include "gc/shared/barrierSet.hpp"
44 #include "gc/shared/barrierSetNMethod.hpp"
45 #include "gc/shared/classUnloadingContext.hpp"
46 #include "gc/shared/collectedHeap.hpp"
47 #include "interpreter/bytecode.inline.hpp"
48 #include "jvm.h"
49 #include "logging/log.hpp"
50 #include "logging/logStream.hpp"
51 #include "memory/allocation.inline.hpp"
52 #include "memory/resourceArea.hpp"
53 #include "memory/universe.hpp"
54 #include "oops/access.inline.hpp"
55 #include "oops/klass.inline.hpp"
56 #include "oops/method.inline.hpp"
57 #include "oops/methodData.hpp"
58 #include "oops/oop.inline.hpp"
59 #include "oops/weakHandle.inline.hpp"
60 #include "prims/jvmtiImpl.hpp"
61 #include "prims/jvmtiThreadState.hpp"
62 #include "prims/methodHandles.hpp"
63 #include "runtime/atomicAccess.hpp"
64 #include "runtime/continuation.hpp"
65 #include "runtime/deoptimization.hpp"
66 #include "runtime/flags/flagSetting.hpp"
67 #include "runtime/frame.inline.hpp"
68 #include "runtime/handles.inline.hpp"
69 #include "runtime/jniHandles.inline.hpp"
70 #include "runtime/orderAccess.hpp"
71 #include "runtime/os.hpp"
72 #include "runtime/safepointVerifiers.hpp"
73 #include "runtime/serviceThread.hpp"
74 #include "runtime/sharedRuntime.hpp"
75 #include "runtime/signature.hpp"
76 #include "runtime/threadWXSetters.inline.hpp"
77 #include "runtime/vmThread.hpp"
78 #include "utilities/align.hpp"
991 _method->method_holder()->external_name(),
992 _method->name()->as_C_string(),
993 _method->signature()->as_C_string(),
994 compile_id());
995 }
996 return check_evol.has_evol_dependency();
997 }
998
999 int nmethod::total_size() const {
1000 return
1001 consts_size() +
1002 insts_size() +
1003 stub_size() +
1004 scopes_data_size() +
1005 scopes_pcs_size() +
1006 handler_table_size() +
1007 nul_chk_table_size();
1008 }
1009
1010 const char* nmethod::compile_kind() const {
1011 if (is_osr_method()) return "osr";
1012 if (method() != nullptr && is_native_method()) {
1013 if (method()->is_continuation_native_intrinsic()) {
1014 return "cnt";
1015 }
1016 return "c2n";
1017 }
1018 return nullptr;
1019 }
1020
// Human-readable name of the compiler that produced this nmethod,
// derived from the recorded compiler type.
const char* nmethod::compiler_name() const {
  return compilertype2name(_compiler_type);
}
1024
1025 #ifdef ASSERT
1026 class CheckForOopsClosure : public OopClosure {
1027 bool _found_oop = false;
1028 public:
1029 virtual void do_oop(oop* o) { _found_oop = true; }
1030 virtual void do_oop(narrowOop* o) { _found_oop = true; }
1031 bool found_oop() { return _found_oop; }
1032 };
1033 class CheckForMetadataClosure : public MetadataClosure {
1034 bool _found_metadata = false;
1035 Metadata* _ignore = nullptr;
1036 public:
1037 CheckForMetadataClosure(Metadata* ignore) : _ignore(ignore) {}
1038 virtual void do_metadata(Metadata* md) { if (md != _ignore) _found_metadata = true; }
1097 nm = new (native_nmethod_size, allow_NonNMethod_space)
1098 nmethod(method(), compiler_none, native_nmethod_size,
1099 compile_id, &offsets,
1100 code_buffer, frame_size,
1101 basic_lock_owner_sp_offset,
1102 basic_lock_sp_offset,
1103 oop_maps, mutable_data_size);
1104 DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1105 NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1106 }
1107
1108 if (nm != nullptr) {
1109 // verify nmethod
1110 DEBUG_ONLY(nm->verify();) // might block
1111
1112 nm->log_new_nmethod();
1113 }
1114 return nm;
1115 }
1116
1117 nmethod* nmethod::new_nmethod(const methodHandle& method,
1118 int compile_id,
1119 int entry_bci,
1120 CodeOffsets* offsets,
1121 int orig_pc_offset,
1122 DebugInformationRecorder* debug_info,
1123 Dependencies* dependencies,
1124 CodeBuffer* code_buffer, int frame_size,
1125 OopMapSet* oop_maps,
1126 ExceptionHandlerTable* handler_table,
1127 ImplicitExceptionTable* nul_chk_table,
1128 AbstractCompiler* compiler,
1129 CompLevel comp_level
1130 #if INCLUDE_JVMCI
1131 , char* speculations,
1132 int speculations_len,
1133 JVMCINMethodData* jvmci_data
1134 #endif
1135 )
1136 {
1163
1164 int mutable_data_size = required_mutable_data_size(code_buffer
1165 JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1166
1167 {
1168 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1169
1170 nm = new (nmethod_size, comp_level)
1171 nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1172 compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1173 debug_info, dependencies, code_buffer, frame_size, oop_maps,
1174 handler_table, nul_chk_table, compiler, comp_level
1175 #if INCLUDE_JVMCI
1176 , speculations,
1177 speculations_len,
1178 jvmci_data
1179 #endif
1180 );
1181
1182 if (nm != nullptr) {
1183 // To make dependency checking during class loading fast, record
1184 // the nmethod dependencies in the classes it is dependent on.
1185 // This allows the dependency checking code to simply walk the
1186 // class hierarchy above the loaded class, checking only nmethods
1187 // which are dependent on those classes. The slow way is to
1188 // check every nmethod for dependencies which makes it linear in
1189 // the number of methods compiled. For applications with a lot
1190 // classes the slow way is too slow.
1191 for (Dependencies::DepStream deps(nm); deps.next(); ) {
1192 if (deps.type() == Dependencies::call_site_target_value) {
1193 // CallSite dependencies are managed on per-CallSite instance basis.
1194 oop call_site = deps.argument_oop(0);
1195 MethodHandles::add_dependent_nmethod(call_site, nm);
1196 } else {
1197 InstanceKlass* ik = deps.context_type();
1198 if (ik == nullptr) {
1199 continue; // ignore things like evol_method
1200 }
1201 // record this nmethod as dependent on this klass
1202 ik->add_dependent_nmethod(nm);
1203 }
1204 }
1205 NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
1206 }
1207 }
1208 // Do verification and logging outside CodeCache_lock.
1209 if (nm != nullptr) {
1210 // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1211 DEBUG_ONLY(nm->verify();)
1212 nm->log_new_nmethod();
1213 }
1214 return nm;
1215 }
1216
// Fill in default values for various fields so that none of them is
// left uninitialized, even briefly, before the constructor finishes.
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;

  _is_unloading_state = 0;
  _state = not_installed;

  // All of the one-bit flags start out cleared.
  _has_unsafe_access = 0;
  _has_wide_vectors = 0;
  _has_monitors = 0;
  _has_scoped_access = 0;
  _has_flushed_dependencies = 0;
  _is_unlinked = 0;
  _load_reported = 0; // jvmti state

  _deoptimization_status = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  // Record where the stub section starts relative to this blob.
  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry-point offsets are stored as uint16_t; CHECKED_CAST guards the narrowing.
  CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1249
// Post initialization: final steps performed after the nmethod's contents
// have been filled in.
void nmethod::post_init() {
  clear_unloading_state();

  // Patch post-call nops etc. before the nmethod becomes visible.
  finalize_relocations();

  // Make the new nmethod known to the GC.
  Universe::heap()->register_nmethod(this);
  DEBUG_ONLY(Universe::heap()->verify_nmethod(this));

  // Publish the nmethod in the code cache.
  CodeCache::commit(this);
}
1261
1262 // For native wrappers
1263 nmethod::nmethod(
1264 Method* method,
1265 CompilerType type,
1266 int nmethod_size,
1267 int compile_id,
1268 CodeOffsets* offsets,
1269 CodeBuffer* code_buffer,
1270 int frame_size,
1271 ByteSize basic_lock_owner_sp_offset,
1272 ByteSize basic_lock_sp_offset,
1273 OopMapSet* oop_maps,
1274 int mutable_data_size)
1275 : CodeBlob("native nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1287 init_defaults(code_buffer, offsets);
1288
1289 _osr_entry_point = nullptr;
1290 _pc_desc_container = nullptr;
1291 _entry_bci = InvocationEntryBci;
1292 _compile_id = compile_id;
1293 _comp_level = CompLevel_none;
1294 _compiler_type = type;
1295 _orig_pc_offset = 0;
1296 _num_stack_arg_slots = 0;
1297
1298 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1299 // Continuation enter intrinsic
1300 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1301 } else {
1302 _exception_offset = 0;
1303 }
1304 // Native wrappers do not have deopt handlers. Make the values
1305 // something that will never match a pc like the nmethod vtable entry
1306 _deopt_handler_entry_offset = 0;
1307 _unwind_handler_offset = 0;
1308
1309 CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1310 uint16_t metadata_size;
1311 CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1312 JVMCI_ONLY( _metadata_size = metadata_size; )
1313 assert(_mutable_data_size == _relocation_size + metadata_size,
1314 "wrong mutable data size: %d != %d + %d",
1315 _mutable_data_size, _relocation_size, metadata_size);
1316
1317 // native wrapper does not have read-only data but we need unique not null address
1318 _immutable_data = blob_end();
1319 _immutable_data_size = 0;
1320 _nul_chk_table_offset = 0;
1321 _handler_table_offset = 0;
1322 _scopes_pcs_offset = 0;
1323 _scopes_data_offset = 0;
1324 #if INCLUDE_JVMCI
1325 _speculations_offset = 0;
1326 #endif
1347 // This is both handled in decode2(), called via print_code() -> decode()
1348 if (PrintNativeNMethods) {
1349 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1350 print_code();
1351 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1352 #if defined(SUPPORT_DATA_STRUCTS)
1353 if (AbstractDisassembler::show_structs()) {
1354 if (oop_maps != nullptr) {
1355 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1356 oop_maps->print_on(tty);
1357 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1358 }
1359 }
1360 #endif
1361 } else {
1362 print(); // print the header part only.
1363 }
1364 #if defined(SUPPORT_DATA_STRUCTS)
1365 if (AbstractDisassembler::show_structs()) {
1366 if (PrintRelocations) {
1367 print_relocations();
1368 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1369 }
1370 }
1371 #endif
1372 if (xtty != nullptr) {
1373 xtty->tail("print_native_nmethod");
1374 }
1375 }
1376 }
1377
1378
1379 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1380 {
1381
1382 if (nm._oop_maps != nullptr) {
1383 _oop_maps = nm._oop_maps->clone();
1384 } else {
1385 _oop_maps = nullptr;
1386 }
1387
1411 if (_mutable_data_size > 0) {
1412 _mutable_data = (address)os::malloc(_mutable_data_size, mtCode);
1413 if (_mutable_data == nullptr) {
1414 vm_exit_out_of_memory(_mutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for mutable data");
1415 }
1416 memcpy(mutable_data_begin(), nm.mutable_data_begin(), nm.mutable_data_size());
1417 } else {
1418 _mutable_data = nullptr;
1419 }
1420
1421 _deoptimization_generation = 0;
1422 _gc_epoch = CodeCache::gc_epoch();
1423 _method = nm._method;
1424 _osr_link = nullptr;
1425
1426 _exception_cache = nullptr;
1427 _gc_data = nullptr;
1428 _oops_do_mark_nmethods = nullptr;
1429 _oops_do_mark_link = nullptr;
1430 _compiled_ic_data = nullptr;
1431
1432 if (nm._osr_entry_point != nullptr) {
1433 _osr_entry_point = (nm._osr_entry_point - (address) &nm) + (address) this;
1434 } else {
1435 _osr_entry_point = nullptr;
1436 }
1437
1438 _entry_offset = nm._entry_offset;
1439 _verified_entry_offset = nm._verified_entry_offset;
1440 _entry_bci = nm._entry_bci;
1441 _immutable_data_size = nm._immutable_data_size;
1442
1443 _skipped_instructions_size = nm._skipped_instructions_size;
1444 _stub_offset = nm._stub_offset;
1445 _exception_offset = nm._exception_offset;
1446 _deopt_handler_entry_offset = nm._deopt_handler_entry_offset;
1447 _unwind_handler_offset = nm._unwind_handler_offset;
1448 _num_stack_arg_slots = nm._num_stack_arg_slots;
1449 _oops_size = nm._oops_size;
1450 #if INCLUDE_JVMCI
1464 _immutable_data = nm._immutable_data;
1465 inc_immutable_data_ref_count();
1466 } else {
1467 _immutable_data = blob_end();
1468 }
1469
1470 _orig_pc_offset = nm._orig_pc_offset;
1471 _compile_id = nm._compile_id;
1472 _comp_level = nm._comp_level;
1473 _compiler_type = nm._compiler_type;
1474 _is_unloading_state = nm._is_unloading_state;
1475 _state = not_installed;
1476
1477 _has_unsafe_access = nm._has_unsafe_access;
1478 _has_wide_vectors = nm._has_wide_vectors;
1479 _has_monitors = nm._has_monitors;
1480 _has_scoped_access = nm._has_scoped_access;
1481 _has_flushed_dependencies = nm._has_flushed_dependencies;
1482 _is_unlinked = nm._is_unlinked;
1483 _load_reported = nm._load_reported;
1484
1485 _deoptimization_status = nm._deoptimization_status;
1486
1487 if (nm._pc_desc_container != nullptr) {
1488 _pc_desc_container = new PcDescContainer(scopes_pcs_begin());
1489 } else {
1490 _pc_desc_container = nullptr;
1491 }
1492
1493 // Copy nmethod contents excluding header
1494 // - Constant part (doubles, longs and floats used in nmethod)
1495 // - Code part:
1496 // - Code body
1497 // - Exception handler
1498 // - Stub code
1499 // - OOP table
1500 memcpy(consts_begin(), nm.consts_begin(), nm.data_end() - nm.consts_begin());
1501
1502 // Fix relocation
1503 RelocIterator iter(this);
1680 CompLevel comp_level
1681 #if INCLUDE_JVMCI
1682 , char* speculations,
1683 int speculations_len,
1684 JVMCINMethodData* jvmci_data
1685 #endif
1686 )
1687 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1688 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1689 _deoptimization_generation(0),
1690 _gc_epoch(CodeCache::gc_epoch()),
1691 _method(method),
1692 _osr_link(nullptr)
1693 {
1694 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1695 {
1696 DEBUG_ONLY(NoSafepointVerifier nsv;)
1697 assert_locked_or_safepoint(CodeCache_lock);
1698
1699 init_defaults(code_buffer, offsets);
1700
1701 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1702 _entry_bci = entry_bci;
1703 _compile_id = compile_id;
1704 _comp_level = comp_level;
1705 _compiler_type = type;
1706 _orig_pc_offset = orig_pc_offset;
1707
1708 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1709
1710 set_ctable_begin(header_begin() + content_offset());
1711
1712 #if INCLUDE_JVMCI
1713 if (compiler->is_jvmci()) {
1714 // JVMCI might not produce any stub sections
1715 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1716 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1717 } else {
1718 _exception_offset = -1;
1719 }
1809 // Copy speculations to nmethod
1810 if (speculations_size() != 0) {
1811 memcpy(speculations_begin(), speculations, speculations_len);
1812 }
1813 #endif
1814 init_immutable_data_ref_count();
1815
1816 post_init();
1817
1818 // we use the information of entry points to find out if a method is
1819 // static or non static
1820 assert(compiler->is_c2() || compiler->is_jvmci() ||
1821 _method->is_static() == (entry_point() == verified_entry_point()),
1822 " entry points must be same for static methods and vice versa");
1823 }
1824 }
1825
// Print a short set of xml attributes to identify this nmethod. The
// output should be embedded in some other element.
void nmethod::log_identity(xmlStream* log) const {
  log->print(" compile_id='%d'", compile_id());
  const char* nm_kind = compile_kind();
  // Only special kinds (osr, native wrappers) have a tag; ordinary compiles omit it.
  if (nm_kind != nullptr) log->print(" compile_kind='%s'", nm_kind);
  log->print(" compiler='%s'", compiler_name());
  if (TieredCompilation) {
    log->print(" level='%d'", comp_level());
  }
#if INCLUDE_JVMCI
  if (jvmci_nmethod_data() != nullptr) {
    const char* jvmci_name = jvmci_nmethod_data()->name();
    if (jvmci_name != nullptr) {
      // The name goes through text() rather than print() — presumably so the
      // stream can escape it for XML; confirm against xmlStream semantics.
      log->print(" jvmci_mirror_name='");
      log->text("%s", jvmci_name);
      log->print("'");
    }
  }
#endif
}
1847
1848
1849 #define LOG_OFFSET(log, name) \
1850 if (p2i(name##_end()) - p2i(name##_begin())) \
1851 log->print(" " XSTR(name) "_offset='%zd'" , \
1852 p2i(name##_begin()) - p2i(this))
1853
1854
1969 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1970 if (oop_maps() != nullptr) {
1971 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1972 oop_maps()->print_on(tty);
1973 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1974 }
1975 }
1976 #endif
1977 } else {
1978 print(); // print the header part only.
1979 }
1980
1981 #if defined(SUPPORT_DATA_STRUCTS)
1982 if (AbstractDisassembler::show_structs()) {
1983 methodHandle mh(Thread::current(), _method);
1984 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1985 print_scopes();
1986 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1987 }
1988 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1989 print_relocations();
1990 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1991 }
1992 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1993 print_dependencies_on(tty);
1994 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1995 }
1996 if (printmethod || PrintExceptionHandlers) {
1997 print_handler_table();
1998 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1999 print_nul_chk_table();
2000 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2001 }
2002
2003 if (printmethod) {
2004 print_recorded_oops();
2005 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2006 print_recorded_metadata();
2007 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2008 }
2009 }
2010 #endif
2011
2012 if (xtty != nullptr) {
2013 xtty->tail("print_nmethod");
2014 }
2015 }
2016
2017
2018 // Promote one word from an assembly-time handle to a live embedded oop.
2019 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2020 if (handle == nullptr ||
2021 // As a special case, IC oops are initialized to 1 or -1.
2022 handle == (jobject) Universe::non_oop_word()) {
2023 *(void**)dest = handle;
2024 } else {
2025 *dest = JNIHandles::resolve_non_null(handle);
2026 }
2027 }
2028
2029
2030 // Have to have the same name because it's called by a template
2031 void nmethod::copy_values(GrowableArray<jobject>* array) {
2032 int length = array->length();
2033 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2034 oop* dest = oops_begin();
2035 for (int index = 0 ; index < length; index++) {
2036 initialize_immediate_oop(&dest[index], array->at(index));
2037 }
2038
2039 // Now we can fix up all the oops in the code. We need to do this
2040 // in the code because the assembler uses jobjects as placeholders.
2041 // The code and relocations have already been initialized by the
2042 // CodeBlob constructor, so it is valid even at this early point to
2043 // iterate over relocations and patch the code.
2044 fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2045 }
2046
2047 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2048 int length = array->length();
2056 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2057 // re-patch all oop-bearing instructions, just in case some oops moved
2058 RelocIterator iter(this, begin, end);
2059 while (iter.next()) {
2060 if (iter.type() == relocInfo::oop_type) {
2061 oop_Relocation* reloc = iter.oop_reloc();
2062 if (initialize_immediates && reloc->oop_is_immediate()) {
2063 oop* dest = reloc->oop_addr();
2064 jobject obj = *reinterpret_cast<jobject*>(dest);
2065 initialize_immediate_oop(dest, obj);
2066 }
2067 // Refresh the oop-related bits of this instruction.
2068 reloc->fix_oop_relocation();
2069 } else if (iter.type() == relocInfo::metadata_type) {
2070 metadata_Relocation* reloc = iter.metadata_reloc();
2071 reloc->fix_metadata_relocation();
2072 }
2073 }
2074 }
2075
2076 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2077 NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2078 intptr_t cbaddr = (intptr_t) nm;
2079 intptr_t offset = ((intptr_t) pc) - cbaddr;
2080
2081 int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2082 if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
2083 log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2084 } else if (!nop->patch(oopmap_slot, offset)) {
2085 log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2086 }
2087 }
2088
2089 void nmethod::finalize_relocations() {
2090 NoSafepointVerifier nsv;
2091
2092 GrowableArray<NativeMovConstReg*> virtual_call_data;
2093
2094 // Make sure that post call nops fill in nmethod offsets eagerly so
2095 // we don't have to race with deoptimization
2226 // be alive the previous completed marking cycle.
2227 return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2228 }
2229
2230 void nmethod::inc_decompile_count() {
2231 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2232 // Could be gated by ProfileTraps, but do not bother...
2233 #if INCLUDE_JVMCI
2234 if (jvmci_skip_profile_deopt()) {
2235 return;
2236 }
2237 #endif
2238 Method* m = method();
2239 if (m == nullptr) return;
2240 MethodData* mdo = m->method_data();
2241 if (mdo == nullptr) return;
2242 // There is a benign race here. See comments in methodData.hpp.
2243 mdo->inc_decompile_count();
2244 }
2245
2246 bool nmethod::try_transition(signed char new_state_int) {
2247 signed char new_state = new_state_int;
2248 assert_lock_strong(NMethodState_lock);
2249 signed char old_state = _state;
2250 if (old_state >= new_state) {
2251 // Ensure monotonicity of transitions.
2252 return false;
2253 }
2254 AtomicAccess::store(&_state, new_state);
2255 return true;
2256 }
2257
2258 void nmethod::invalidate_osr_method() {
2259 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2260 // Remove from list of active nmethods
2261 if (method() != nullptr) {
2262 method()->method_holder()->remove_osr_nmethod(this);
2263 }
2264 }
2265
2275 }
2276 }
2277
2278 ResourceMark rm;
2279 stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2280 ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2281
2282 CompileTask::print_ul(this, ss.freeze());
2283 if (PrintCompilation) {
2284 print_on_with_msg(tty, ss.freeze());
2285 }
2286 }
2287
2288 void nmethod::unlink_from_method() {
2289 if (method() != nullptr) {
2290 method()->unlink_code(this);
2291 }
2292 }
2293
2294 // Invalidate code
2295 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2296 // This can be called while the system is already at a safepoint which is ok
2297 NoSafepointVerifier nsv;
2298
2299 if (is_unloading()) {
2300 // If the nmethod is unloading, then it is already not entrant through
2301 // the nmethod entry barriers. No need to do anything; GC will unload it.
2302 return false;
2303 }
2304
2305 if (AtomicAccess::load(&_state) == not_entrant) {
2306 // Avoid taking the lock if already in required state.
2307 // This is safe from races because the state is an end-state,
2308 // which the nmethod cannot back out of once entered.
2309 // No need for fencing either.
2310 return false;
2311 }
2312
2313 MACOS_AARCH64_ONLY(os::thread_wx_enable_write());
2314
2315 {
2339 }
2340
2341 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2342 if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2343 // If nmethod entry barriers are not supported, we won't mark
2344 // nmethods as on-stack when they become on-stack. So we
2345 // degrade to a less accurate flushing strategy, for now.
2346 mark_as_maybe_on_stack();
2347 }
2348
2349 // Change state
2350 bool success = try_transition(not_entrant);
2351 assert(success, "Transition can't fail");
2352
2353 // Log the transition once
2354 log_state_change(invalidation_reason);
2355
2356 // Remove nmethod from method.
2357 unlink_from_method();
2358
2359 } // leave critical region under NMethodState_lock
2360
2361 #if INCLUDE_JVMCI
2362 // Invalidate can't occur while holding the NMethodState_lock
2363 JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2364 if (nmethod_data != nullptr) {
2365 nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2366 }
2367 #endif
2368
2369 #ifdef ASSERT
2370 if (is_osr_method() && method() != nullptr) {
2371 // Make sure osr nmethod is invalidated, i.e. not on the list
2372 bool found = method()->method_holder()->remove_osr_nmethod(this);
2373 assert(!found, "osr nmethod should have been invalidated");
2374 }
2375 #endif
2376
2377 return true;
2378 }
2403 nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2404 nmethod::InvalidationReason::UNLOADING_COLD :
2405 nmethod::InvalidationReason::UNLOADING);
2406 }
2407 #endif
2408
2409 // Post before flushing as jmethodID is being used
2410 post_compiled_method_unload();
2411
2412 // Register for flushing when it is safe. For concurrent class unloading,
2413 // that would be after the unloading handshake, and for STW class unloading
2414 // that would be when getting back to the VM thread.
2415 ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2416 }
2417
// Release all side data owned by this nmethod and return its storage to
// the code cache.
void nmethod::purge(bool unregister_nmethod) {

  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);

  // completely deallocate this method
  Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));

  // Optional debug logging of what is being flushed and code-cache occupancy.
  LogTarget(Debug, codecache) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    const char* method_name = method()->name()->as_C_string();
    const size_t codecache_capacity = CodeCache::capacity()/1024;
    const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
    ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
             "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
             _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
             codecache_capacity, codecache_free_space, method_name, compiler_name());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  while(ec != nullptr) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }
  // PcDesc cache and inline-cache data are owned exclusively by this nmethod.
  if (_pc_desc_container != nullptr) {
    delete _pc_desc_container;
  }
  delete[] _compiled_ic_data;

  if (_immutable_data != blob_end()) {
    // Free memory if this was the last nmethod referencing immutable data
    if (dec_immutable_data_ref_count() == 0) {
      os::free(_immutable_data);
    }

    _immutable_data = blob_end(); // Valid not null address
  }

  // The caller decides whether the heap still knows about this nmethod.
  if (unregister_nmethod) {
    Universe::heap()->unregister_nmethod(this);
  }
  CodeCache::unregister_old_nmethod(this);

  // Finally release the blob storage itself.
  JVMCI_ONLY( _metadata_size = 0; )
  CodeBlob::purge();
}
2469
2470 oop nmethod::oop_at(int index) const {
2471 if (index == 0) {
2472 return nullptr;
2499 MethodHandles::clean_dependency_context(call_site);
2500 } else {
2501 InstanceKlass* ik = deps.context_type();
2502 if (ik == nullptr) {
2503 continue; // ignore things like evol_method
2504 }
2505 // During GC liveness of dependee determines class that needs to be updated.
2506 // The GC may clean dependency contexts concurrently and in parallel.
2507 ik->clean_dependency_context();
2508 }
2509 }
2510 }
2511 }
2512
2513 void nmethod::post_compiled_method(CompileTask* task) {
2514 task->mark_success();
2515 task->set_nm_content_size(content_size());
2516 task->set_nm_insts_size(insts_size());
2517 task->set_nm_total_size(total_size());
2518
2519 // JVMTI -- compiled method notification (must be done outside lock)
2520 post_compiled_method_load_event();
2521
2522 if (CompilationLog::log() != nullptr) {
2523 CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2524 }
2525
2526 const DirectiveSet* directive = task->directive();
2527 maybe_print_nmethod(directive);
2528 }
2529
2530 #if INCLUDE_CDS
2531 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2532
2533 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2534 precond(CDSConfig::is_using_aot_linked_classes());
2535 precond(!ServiceThread::has_started());
2536
2537 // We are still in single threaded stage of VM bootstrap. No need to lock.
2538 if (_delayed_compiled_method_load_events == nullptr) {
3257 void nmethod::verify() {
3258 if (is_not_entrant())
3259 return;
3260
3261 // assert(oopDesc::is_oop(method()), "must be valid");
3262
3263 ResourceMark rm;
3264
3265 if (!CodeCache::contains(this)) {
3266 fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3267 }
3268
3269 if(is_native_method() )
3270 return;
3271
3272 nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3273 if (nm != this) {
3274 fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3275 }
3276
3277 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3278 if (! p->verify(this)) {
3279 tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3280 }
3281 }
3282
3283 #ifdef ASSERT
3284 #if INCLUDE_JVMCI
3285 {
3286 // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3287 ImmutableOopMapSet* oms = oop_maps();
3288 ImplicitExceptionTable implicit_table(this);
3289 for (uint i = 0; i < implicit_table.len(); i++) {
3290 int exec_offset = (int) implicit_table.get_exec_offset(i);
3291 if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3292 assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3293 bool found = false;
3294 for (int i = 0, imax = oms->count(); i < imax; i++) {
3295 if (oms->pair_at(i)->pc_offset() == exec_offset) {
3296 found = true;
3297 break;
3298 }
3299 }
3300 assert(found, "missing oopmap");
3301 }
3302 }
3303 }
3304 #endif
3305 #endif
3306
3307 VerifyOopsClosure voc(this);
3308 oops_do(&voc);
3309 assert(voc.ok(), "embedded oops must be OK");
3310 Universe::heap()->verify_nmethod(this);
3311
3312 assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3313 nm->method()->external_name(), p2i(_oops_do_mark_link));
3314 verify_scopes();
3315
3316 CompiledICLocker nm_verify(this);
3317 VerifyMetadataClosure vmc;
3318 metadata_do(&vmc);
3319 }
3320
3321
3322 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3323
3324 // Verify IC only when nmethod installation is finished.
3325 if (!is_not_installed()) {
3326 if (CompiledICLocker::is_safe(this)) {
3327 if (is_inline_cache) {
3328 CompiledIC_at(this, call_site);
3329 } else {
3330 CompiledDirectCall::at(call_site);
3331 }
3332 } else {
3333 CompiledICLocker ml_verify(this);
3334 if (is_inline_cache) {
3463 p2i(nul_chk_table_end()),
3464 nul_chk_table_size());
3465 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3466 p2i(handler_table_begin()),
3467 p2i(handler_table_end()),
3468 handler_table_size());
3469 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3470 p2i(scopes_pcs_begin()),
3471 p2i(scopes_pcs_end()),
3472 scopes_pcs_size());
3473 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3474 p2i(scopes_data_begin()),
3475 p2i(scopes_data_end()),
3476 scopes_data_size());
3477 #if INCLUDE_JVMCI
3478 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3479 p2i(speculations_begin()),
3480 p2i(speculations_end()),
3481 speculations_size());
3482 #endif
3483 }
3484
3485 void nmethod::print_code() {
3486 ResourceMark m;
3487 ttyLocker ttyl;
3488 // Call the specialized decode method of this class.
3489 decode(tty);
3490 }
3491
3492 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3493
3494 void nmethod::print_dependencies_on(outputStream* out) {
3495 ResourceMark rm;
3496 stringStream st;
3497 st.print_cr("Dependencies:");
3498 for (Dependencies::DepStream deps(this); deps.next(); ) {
3499 deps.print_dependency(&st);
3500 InstanceKlass* ctxk = deps.context_type();
3501 if (ctxk != nullptr) {
3502 if (ctxk->is_dependent_nmethod(this)) {
3562 st->print("scopes:");
3563 if (scopes_pcs_begin() < scopes_pcs_end()) {
3564 st->cr();
3565 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3566 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3567 continue;
3568
3569 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3570 while (sd != nullptr) {
3571 sd->print_on(st, p); // print output ends with a newline
3572 sd = sd->sender();
3573 }
3574 }
3575 } else {
3576 st->print_cr(" <list empty>");
3577 }
3578 }
3579 #endif
3580
3581 #ifndef PRODUCT // RelocIterator does support printing only then.
3582 void nmethod::print_relocations() {
3583 ResourceMark m; // in case methods get printed via the debugger
3584 tty->print_cr("relocations:");
3585 RelocIterator iter(this);
3586 iter.print_on(tty);
3587 }
3588 #endif
3589
3590 void nmethod::print_pcs_on(outputStream* st) {
3591 ResourceMark m; // in case methods get printed via debugger
3592 st->print("pc-bytecode offsets:");
3593 if (scopes_pcs_begin() < scopes_pcs_end()) {
3594 st->cr();
3595 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3596 p->print_on(st, this); // print output ends with a newline
3597 }
3598 } else {
3599 st->print_cr(" <list empty>");
3600 }
3601 }
3602
3603 void nmethod::print_handler_table() {
3604 ExceptionHandlerTable(this).print(code_begin());
3605 }
3606
4421 void nmethod::update_speculation(JavaThread* thread) {
4422 jlong speculation = thread->pending_failed_speculation();
4423 if (speculation != 0) {
4424 guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4425 jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4426 thread->set_pending_failed_speculation(0);
4427 }
4428 }
4429
4430 const char* nmethod::jvmci_name() {
4431 if (jvmci_nmethod_data() != nullptr) {
4432 return jvmci_nmethod_data()->name();
4433 }
4434 return nullptr;
4435 }
4436
4437 bool nmethod::jvmci_skip_profile_deopt() const {
4438 return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4439 }
4440 #endif
|
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/assembler.inline.hpp"
26 #include "cds/cdsConfig.hpp"
27 #include "code/aotCodeCache.hpp"
28 #include "code/codeCache.hpp"
29 #include "code/compiledIC.hpp"
30 #include "code/dependencies.hpp"
31 #include "code/nativeInst.hpp"
32 #include "code/nmethod.inline.hpp"
33 #include "code/scopeDesc.hpp"
34 #include "compiler/abstractCompiler.hpp"
35 #include "compiler/compilationLog.hpp"
36 #include "compiler/compileBroker.hpp"
37 #include "compiler/compileLog.hpp"
38 #include "compiler/compilerDirectives.hpp"
39 #include "compiler/compilerOracle.hpp"
40 #include "compiler/compileTask.hpp"
41 #include "compiler/directivesParser.hpp"
42 #include "compiler/disassembler.hpp"
43 #include "compiler/oopMap.inline.hpp"
44 #include "gc/shared/barrierSet.hpp"
45 #include "gc/shared/barrierSetNMethod.hpp"
46 #include "gc/shared/classUnloadingContext.hpp"
47 #include "gc/shared/collectedHeap.hpp"
48 #include "interpreter/bytecode.inline.hpp"
49 #include "jvm.h"
50 #include "logging/log.hpp"
51 #include "logging/logStream.hpp"
52 #include "memory/allocation.inline.hpp"
53 #include "memory/resourceArea.hpp"
54 #include "memory/universe.hpp"
55 #include "oops/access.inline.hpp"
56 #include "oops/klass.inline.hpp"
57 #include "oops/method.inline.hpp"
58 #include "oops/methodData.hpp"
59 #include "oops/oop.inline.hpp"
60 #include "oops/trainingData.hpp"
61 #include "oops/weakHandle.inline.hpp"
62 #include "prims/jvmtiImpl.hpp"
63 #include "prims/jvmtiThreadState.hpp"
64 #include "prims/methodHandles.hpp"
65 #include "runtime/atomicAccess.hpp"
66 #include "runtime/continuation.hpp"
67 #include "runtime/deoptimization.hpp"
68 #include "runtime/flags/flagSetting.hpp"
69 #include "runtime/frame.inline.hpp"
70 #include "runtime/handles.inline.hpp"
71 #include "runtime/jniHandles.inline.hpp"
72 #include "runtime/orderAccess.hpp"
73 #include "runtime/os.hpp"
74 #include "runtime/safepointVerifiers.hpp"
75 #include "runtime/serviceThread.hpp"
76 #include "runtime/sharedRuntime.hpp"
77 #include "runtime/signature.hpp"
78 #include "runtime/threadWXSetters.inline.hpp"
79 #include "runtime/vmThread.hpp"
80 #include "utilities/align.hpp"
993 _method->method_holder()->external_name(),
994 _method->name()->as_C_string(),
995 _method->signature()->as_C_string(),
996 compile_id());
997 }
998 return check_evol.has_evol_dependency();
999 }
1000
1001 int nmethod::total_size() const {
1002 return
1003 consts_size() +
1004 insts_size() +
1005 stub_size() +
1006 scopes_data_size() +
1007 scopes_pcs_size() +
1008 handler_table_size() +
1009 nul_chk_table_size();
1010 }
1011
1012 const char* nmethod::compile_kind() const {
1013 if (is_osr_method()) return "osr";
1014 if (preloaded()) return "AP";
1015 if (is_aot()) return "A";
1016
1017 if (method() != nullptr && is_native_method()) {
1018 if (method()->is_continuation_native_intrinsic()) {
1019 return "cnt";
1020 }
1021 return "c2n";
1022 }
1023 return "";
1024 }
1025
// Human-readable name of the compiler that produced this nmethod
// (mapping provided by compilertype2name).
const char* nmethod::compiler_name() const {
  return compilertype2name(_compiler_type);
}
1029
1030 #ifdef ASSERT
// Closure that records whether an oops_do walk visited any oop slot at
// all; used by debug-only checks that certain nmethods embed no oops.
class CheckForOopsClosure : public OopClosure {
  bool _found_oop = false;  // set as soon as any oop slot is visited
public:
  virtual void do_oop(oop* o) { _found_oop = true; }
  virtual void do_oop(narrowOop* o) { _found_oop = true; }
  bool found_oop() { return _found_oop; }
};
1038 class CheckForMetadataClosure : public MetadataClosure {
1039 bool _found_metadata = false;
1040 Metadata* _ignore = nullptr;
1041 public:
1042 CheckForMetadataClosure(Metadata* ignore) : _ignore(ignore) {}
1043 virtual void do_metadata(Metadata* md) { if (md != _ignore) _found_metadata = true; }
1102 nm = new (native_nmethod_size, allow_NonNMethod_space)
1103 nmethod(method(), compiler_none, native_nmethod_size,
1104 compile_id, &offsets,
1105 code_buffer, frame_size,
1106 basic_lock_owner_sp_offset,
1107 basic_lock_sp_offset,
1108 oop_maps, mutable_data_size);
1109 DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1110 NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1111 }
1112
1113 if (nm != nullptr) {
1114 // verify nmethod
1115 DEBUG_ONLY(nm->verify();) // might block
1116
1117 nm->log_new_nmethod();
1118 }
1119 return nm;
1120 }
1121
// Register this nmethod with every class/CallSite it depends on, so that
// dependency invalidation can find it without scanning all nmethods.
void nmethod::record_nmethod_dependency() {
  // To make dependency checking during class loading fast, record
  // the nmethod dependencies in the classes it is dependent on.
  // This allows the dependency checking code to simply walk the
  // class hierarchy above the loaded class, checking only nmethods
  // which are dependent on those classes. The slow way is to
  // check every nmethod for dependencies which makes it linear in
  // the number of methods compiled. For applications with a lot of
  // classes the slow way is too slow.
  for (Dependencies::DepStream deps(this); deps.next(); ) {
    if (deps.type() == Dependencies::call_site_target_value) {
      // CallSite dependencies are managed on per-CallSite instance basis.
      oop call_site = deps.argument_oop(0);
      MethodHandles::add_dependent_nmethod(call_site, this);
    } else {
      InstanceKlass* ik = deps.context_type();
      if (ik == nullptr) {
        continue; // ignore things like evol_method
      }
      // record this nmethod as dependent on this klass
      ik->add_dependent_nmethod(this);
    }
  }
}
1146
1147 nmethod* nmethod::new_nmethod(const methodHandle& method,
1148 int compile_id,
1149 int entry_bci,
1150 CodeOffsets* offsets,
1151 int orig_pc_offset,
1152 DebugInformationRecorder* debug_info,
1153 Dependencies* dependencies,
1154 CodeBuffer* code_buffer, int frame_size,
1155 OopMapSet* oop_maps,
1156 ExceptionHandlerTable* handler_table,
1157 ImplicitExceptionTable* nul_chk_table,
1158 AbstractCompiler* compiler,
1159 CompLevel comp_level
1160 #if INCLUDE_JVMCI
1161 , char* speculations,
1162 int speculations_len,
1163 JVMCINMethodData* jvmci_data
1164 #endif
1165 )
1166 {
1193
1194 int mutable_data_size = required_mutable_data_size(code_buffer
1195 JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1196
1197 {
1198 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1199
1200 nm = new (nmethod_size, comp_level)
1201 nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1202 compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1203 debug_info, dependencies, code_buffer, frame_size, oop_maps,
1204 handler_table, nul_chk_table, compiler, comp_level
1205 #if INCLUDE_JVMCI
1206 , speculations,
1207 speculations_len,
1208 jvmci_data
1209 #endif
1210 );
1211
1212 if (nm != nullptr) {
1213 nm->record_nmethod_dependency();
1214 NOT_PRODUCT(note_java_nmethod(nm));
1215 }
1216 }
1217 // Do verification and logging outside CodeCache_lock.
1218 if (nm != nullptr) {
1219
1220 #ifdef ASSERT
1221 LogTarget(Debug, aot, codecache, nmethod) log;
1222 if (log.is_enabled()) {
1223 LogStream out(log);
1224 out.print_cr("== new_nmethod 2");
1225 FlagSetting fs(PrintRelocations, true);
1226 nm->print_on_impl(&out);
1227 nm->decode(&out);
1228 }
1229 #endif
1230
1231 // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1232 DEBUG_ONLY(nm->verify();)
1233 nm->log_new_nmethod();
1234 }
1235 return nm;
1236 }
1237
// Re-populate a pre-allocated code cache buffer that already holds a byte
// copy of an archived (AOT) nmethod: wire up the method, oop/metadata
// tables, and relocations so the copy becomes a live nmethod.
// NOTE(review): statement order matters — relocations are fixed before the
// ICache flush, and the PcDescContainer is built after pcs data is in place.
nmethod* nmethod::restore(address code_cache_buffer,
                          const methodHandle& method,
                          int compile_id,
                          address reloc_data,
                          GrowableArray<Handle>& oop_list,
                          GrowableArray<Metadata*>& metadata_list,
                          ImmutableOopMapSet* oop_maps,
                          address immutable_data,
                          GrowableArray<Handle>& reloc_imm_oop_list,
                          GrowableArray<Metadata*>& reloc_imm_metadata_list,
                          AOTCodeReader* aot_code_reader)
{
  // Restore the CodeBlob portion (relocation data, oop maps) first.
  CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
  nmethod* nm = (nmethod*)code_cache_buffer;
  nm->set_method(method());
  nm->_compile_id = compile_id;
  nm->_gc_epoch = CodeCache::gc_epoch();
  nm->set_immutable_data(immutable_data);
  // Fill the oop and metadata tables from the archived lists.
  nm->copy_values(&oop_list);
  nm->copy_values(&metadata_list);

  // Patch immediate oop/metadata relocations to current-process values.
  aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);

#ifndef PRODUCT
  nm->asm_remarks().init();
  aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
  nm->dbg_strings().init();
  aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
#endif

  // Flush the code block
  ICache::invalidate_range(nm->code_begin(), nm->code_size());

  // Create cache after PcDesc data is copied - it will be used to initialize cache
  nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());

  nm->set_aot_code_entry(aot_code_reader->aot_code_entry());

  nm->post_init();
  return nm;
}
1279
// Create a live nmethod in the code cache from an archived (AOT) image.
// Returns nullptr if code cache allocation fails.
nmethod* nmethod::new_nmethod(nmethod* archived_nm,
                              const methodHandle& method,
                              AbstractCompiler* compiler,
                              int compile_id,
                              address reloc_data,
                              GrowableArray<Handle>& oop_list,
                              GrowableArray<Metadata*>& metadata_list,
                              ImmutableOopMapSet* oop_maps,
                              address immutable_data,
                              GrowableArray<Handle>& reloc_imm_oop_list,
                              GrowableArray<Metadata*>& reloc_imm_metadata_list,
                              AOTCodeReader* aot_code_reader)
{
  nmethod* nm = nullptr;
  int nmethod_size = archived_nm->size();
  // create nmethod
  {
    // Allocation and dependency recording happen under CodeCache_lock.
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
    if (code_cache_buffer != nullptr) {
      // restore() fixes up the archived image in place in the new buffer.
      nm = archived_nm->restore(code_cache_buffer,
                                method,
                                compile_id,
                                reloc_data,
                                oop_list,
                                metadata_list,
                                oop_maps,
                                immutable_data,
                                reloc_imm_oop_list,
                                reloc_imm_metadata_list,
                                aot_code_reader);
      nm->record_nmethod_dependency();
      NOT_PRODUCT(note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {
#ifdef ASSERT
    LogTarget(Debug, aot, codecache, nmethod) log;
    if (log.is_enabled()) {
      LogStream out(log);
      out.print_cr("== new_nmethod 2");
      FlagSetting fs(PrintRelocations, true);
      nm->print_on_impl(&out);
      nm->decode(&out);
    }
#endif
    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1333
// Fill in default values for various fields shared by all nmethod
// construction paths; called early so no field is ever read uninitialized.
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;
  _aot_code_entry = nullptr;

  // Freshly created: not yet installed into the code cache.
  _is_unloading_state = 0;
  _state = not_installed;

  // Feature/lifecycle bits stay cleared until the compiler sets them.
  _has_unsafe_access = 0;
  _has_wide_vectors = 0;
  _has_monitors = 0;
  _has_scoped_access = 0;
  _has_flushed_dependencies = 0;
  _is_unlinked = 0;
  _load_reported = 0; // jvmti state
  _preloaded = 0;
  _has_clinit_barriers = 0;

  _used = false;
  _deoptimization_status = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry offsets are stored as uint16_t; CHECKED_CAST guards overflow.
  CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1370
// Post initialization: final steps shared by all construction paths once
// the nmethod's memory image is complete.
void nmethod::post_init() {
  clear_unloading_state();

  finalize_relocations();

  // This will disarm entry barrier.
  Universe::heap()->register_nmethod(this);
  DEBUG_ONLY(Universe::heap()->verify_nmethod(this));

  // Publish the nmethod to the code cache's accounting.
  CodeCache::commit(this);
}
1383
1384 // For native wrappers
1385 nmethod::nmethod(
1386 Method* method,
1387 CompilerType type,
1388 int nmethod_size,
1389 int compile_id,
1390 CodeOffsets* offsets,
1391 CodeBuffer* code_buffer,
1392 int frame_size,
1393 ByteSize basic_lock_owner_sp_offset,
1394 ByteSize basic_lock_sp_offset,
1395 OopMapSet* oop_maps,
1396 int mutable_data_size)
1397 : CodeBlob("native nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1409 init_defaults(code_buffer, offsets);
1410
1411 _osr_entry_point = nullptr;
1412 _pc_desc_container = nullptr;
1413 _entry_bci = InvocationEntryBci;
1414 _compile_id = compile_id;
1415 _comp_level = CompLevel_none;
1416 _compiler_type = type;
1417 _orig_pc_offset = 0;
1418 _num_stack_arg_slots = 0;
1419
1420 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1421 // Continuation enter intrinsic
1422 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1423 } else {
1424 _exception_offset = 0;
1425 }
1426 // Native wrappers do not have deopt handlers. Make the values
1427 // something that will never match a pc like the nmethod vtable entry
1428 _deopt_handler_entry_offset = 0;
1429 _method_profiling_count = 0;
1430 _unwind_handler_offset = 0;
1431
1432 CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1433 uint16_t metadata_size;
1434 CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1435 JVMCI_ONLY( _metadata_size = metadata_size; )
1436 assert(_mutable_data_size == _relocation_size + metadata_size,
1437 "wrong mutable data size: %d != %d + %d",
1438 _mutable_data_size, _relocation_size, metadata_size);
1439
1440 // native wrapper does not have read-only data but we need unique not null address
1441 _immutable_data = blob_end();
1442 _immutable_data_size = 0;
1443 _nul_chk_table_offset = 0;
1444 _handler_table_offset = 0;
1445 _scopes_pcs_offset = 0;
1446 _scopes_data_offset = 0;
1447 #if INCLUDE_JVMCI
1448 _speculations_offset = 0;
1449 #endif
1470 // This is both handled in decode2(), called via print_code() -> decode()
1471 if (PrintNativeNMethods) {
1472 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1473 print_code();
1474 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1475 #if defined(SUPPORT_DATA_STRUCTS)
1476 if (AbstractDisassembler::show_structs()) {
1477 if (oop_maps != nullptr) {
1478 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1479 oop_maps->print_on(tty);
1480 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1481 }
1482 }
1483 #endif
1484 } else {
1485 print(); // print the header part only.
1486 }
1487 #if defined(SUPPORT_DATA_STRUCTS)
1488 if (AbstractDisassembler::show_structs()) {
1489 if (PrintRelocations) {
1490 print_relocations_on(tty);
1491 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1492 }
1493 }
1494 #endif
1495 if (xtty != nullptr) {
1496 xtty->tail("print_native_nmethod");
1497 }
1498 }
1499 }
1500
1501
1502 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1503 {
1504
1505 if (nm._oop_maps != nullptr) {
1506 _oop_maps = nm._oop_maps->clone();
1507 } else {
1508 _oop_maps = nullptr;
1509 }
1510
1534 if (_mutable_data_size > 0) {
1535 _mutable_data = (address)os::malloc(_mutable_data_size, mtCode);
1536 if (_mutable_data == nullptr) {
1537 vm_exit_out_of_memory(_mutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for mutable data");
1538 }
1539 memcpy(mutable_data_begin(), nm.mutable_data_begin(), nm.mutable_data_size());
1540 } else {
1541 _mutable_data = nullptr;
1542 }
1543
1544 _deoptimization_generation = 0;
1545 _gc_epoch = CodeCache::gc_epoch();
1546 _method = nm._method;
1547 _osr_link = nullptr;
1548
1549 _exception_cache = nullptr;
1550 _gc_data = nullptr;
1551 _oops_do_mark_nmethods = nullptr;
1552 _oops_do_mark_link = nullptr;
1553 _compiled_ic_data = nullptr;
1554 _aot_code_entry = nm._aot_code_entry;
1555
1556 if (nm._osr_entry_point != nullptr) {
1557 _osr_entry_point = (nm._osr_entry_point - (address) &nm) + (address) this;
1558 } else {
1559 _osr_entry_point = nullptr;
1560 }
1561
1562 _entry_offset = nm._entry_offset;
1563 _verified_entry_offset = nm._verified_entry_offset;
1564 _entry_bci = nm._entry_bci;
1565 _immutable_data_size = nm._immutable_data_size;
1566
1567 _skipped_instructions_size = nm._skipped_instructions_size;
1568 _stub_offset = nm._stub_offset;
1569 _exception_offset = nm._exception_offset;
1570 _deopt_handler_entry_offset = nm._deopt_handler_entry_offset;
1571 _unwind_handler_offset = nm._unwind_handler_offset;
1572 _num_stack_arg_slots = nm._num_stack_arg_slots;
1573 _oops_size = nm._oops_size;
1574 #if INCLUDE_JVMCI
1588 _immutable_data = nm._immutable_data;
1589 inc_immutable_data_ref_count();
1590 } else {
1591 _immutable_data = blob_end();
1592 }
1593
1594 _orig_pc_offset = nm._orig_pc_offset;
1595 _compile_id = nm._compile_id;
1596 _comp_level = nm._comp_level;
1597 _compiler_type = nm._compiler_type;
1598 _is_unloading_state = nm._is_unloading_state;
1599 _state = not_installed;
1600
1601 _has_unsafe_access = nm._has_unsafe_access;
1602 _has_wide_vectors = nm._has_wide_vectors;
1603 _has_monitors = nm._has_monitors;
1604 _has_scoped_access = nm._has_scoped_access;
1605 _has_flushed_dependencies = nm._has_flushed_dependencies;
1606 _is_unlinked = nm._is_unlinked;
1607 _load_reported = nm._load_reported;
1608 _preloaded = nm._preloaded;
1609 _has_clinit_barriers = nm._has_clinit_barriers;
1610
1611 _deoptimization_status = nm._deoptimization_status;
1612
1613 if (nm._pc_desc_container != nullptr) {
1614 _pc_desc_container = new PcDescContainer(scopes_pcs_begin());
1615 } else {
1616 _pc_desc_container = nullptr;
1617 }
1618
1619 // Copy nmethod contents excluding header
1620 // - Constant part (doubles, longs and floats used in nmethod)
1621 // - Code part:
1622 // - Code body
1623 // - Exception handler
1624 // - Stub code
1625 // - OOP table
1626 memcpy(consts_begin(), nm.consts_begin(), nm.data_end() - nm.consts_begin());
1627
1628 // Fix relocation
1629 RelocIterator iter(this);
1806 CompLevel comp_level
1807 #if INCLUDE_JVMCI
1808 , char* speculations,
1809 int speculations_len,
1810 JVMCINMethodData* jvmci_data
1811 #endif
1812 )
1813 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1814 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1815 _deoptimization_generation(0),
1816 _gc_epoch(CodeCache::gc_epoch()),
1817 _method(method),
1818 _osr_link(nullptr)
1819 {
1820 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1821 {
1822 DEBUG_ONLY(NoSafepointVerifier nsv;)
1823 assert_locked_or_safepoint(CodeCache_lock);
1824
1825 init_defaults(code_buffer, offsets);
1826 _method_profiling_count = 0;
1827
1828 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1829 _entry_bci = entry_bci;
1830 _compile_id = compile_id;
1831 _comp_level = comp_level;
1832 _compiler_type = type;
1833 _orig_pc_offset = orig_pc_offset;
1834
1835 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1836
1837 set_ctable_begin(header_begin() + content_offset());
1838
1839 #if INCLUDE_JVMCI
1840 if (compiler->is_jvmci()) {
1841 // JVMCI might not produce any stub sections
1842 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1843 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1844 } else {
1845 _exception_offset = -1;
1846 }
1936 // Copy speculations to nmethod
1937 if (speculations_size() != 0) {
1938 memcpy(speculations_begin(), speculations, speculations_len);
1939 }
1940 #endif
1941 init_immutable_data_ref_count();
1942
1943 post_init();
1944
1945 // we use the information of entry points to find out if a method is
1946 // static or non static
1947 assert(compiler->is_c2() || compiler->is_jvmci() ||
1948 _method->is_static() == (entry_point() == verified_entry_point()),
1949 " entry points must be same for static methods and vice versa");
1950 }
1951 }
1952
1953 // Print a short set of xml attributes to identify this nmethod. The
1954 // output should be embedded in some other element.
1955 void nmethod::log_identity(xmlStream* log) const {
1956 assert(log->inside_attrs_or_error(), "printing attributes");
1957 log->print(" compile_id='%d'", compile_id());
1958 const char* nm_kind = compile_kind();
1959 log->print(" compile_kind='%s'", nm_kind);
1960 log->print(" compiler='%s'", compiler_name());
1961 if (TieredCompilation) {
1962 log->print(" compile_level='%d'", comp_level());
1963 }
1964 #if INCLUDE_JVMCI
1965 if (jvmci_nmethod_data() != nullptr) {
1966 const char* jvmci_name = jvmci_nmethod_data()->name();
1967 if (jvmci_name != nullptr) {
1968 log->print(" jvmci_mirror_name='");
1969 log->text("%s", jvmci_name);
1970 log->print("'");
1971 }
1972 }
1973 #endif
1974 }
1975
1976
// Emit " <name>_offset='<n>'" for a section, but only when the section
// delimited by name##_begin()/name##_end() is non-empty.
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin()))   \
    log->print(" " XSTR(name) "_offset='%zd'" ,  \
               p2i(name##_begin()) - p2i(this))
1981
1982
2097 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2098 if (oop_maps() != nullptr) {
2099 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
2100 oop_maps()->print_on(tty);
2101 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2102 }
2103 }
2104 #endif
2105 } else {
2106 print(); // print the header part only.
2107 }
2108
2109 #if defined(SUPPORT_DATA_STRUCTS)
2110 if (AbstractDisassembler::show_structs()) {
2111 methodHandle mh(Thread::current(), _method);
2112 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
2113 print_scopes();
2114 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2115 }
2116 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
2117 print_relocations_on(tty);
2118 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2119 }
2120 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
2121 print_dependencies_on(tty);
2122 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2123 }
2124 if (printmethod || PrintExceptionHandlers) {
2125 print_handler_table();
2126 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2127 print_nul_chk_table();
2128 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2129 }
2130
2131 if (printmethod) {
2132 print_recorded_oops();
2133 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2134 print_recorded_metadata();
2135 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2136 }
2137 }
2138 #endif
2139
2140 if (xtty != nullptr) {
2141 xtty->tail("print_nmethod");
2142 }
2143 }
2144
2145
// Promote one word from an assembly-time handle to a live embedded oop.
inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
  if (handle == nullptr ||
      // As a special case, IC oops are initialized to 1 or -1.
      handle == (jobject) Universe::non_oop_word()) {
    // Store the null/sentinel bit pattern unchanged (not a real oop).
    *(void**)dest = handle;
  } else {
    *dest = JNIHandles::resolve_non_null(handle);
  }
}
2156
2157 void nmethod::copy_values(GrowableArray<Handle>* array) {
2158 int length = array->length();
2159 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2160 oop* dest = oops_begin();
2161 for (int index = 0 ; index < length; index++) {
2162 dest[index] = array->at(index)();
2163 }
2164 }
2165
2166 // Have to have the same name because it's called by a template
2167 void nmethod::copy_values(GrowableArray<jobject>* array) {
2168 int length = array->length();
2169 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2170 oop* dest = oops_begin();
2171 for (int index = 0 ; index < length; index++) {
2172 initialize_immediate_oop(&dest[index], array->at(index));
2173 }
2174
2175 // Now we can fix up all the oops in the code. We need to do this
2176 // in the code because the assembler uses jobjects as placeholders.
2177 // The code and relocations have already been initialized by the
2178 // CodeBlob constructor, so it is valid even at this early point to
2179 // iterate over relocations and patch the code.
2180 fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2181 }
2182
2183 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2184 int length = array->length();
2192 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2193 // re-patch all oop-bearing instructions, just in case some oops moved
2194 RelocIterator iter(this, begin, end);
2195 while (iter.next()) {
2196 if (iter.type() == relocInfo::oop_type) {
2197 oop_Relocation* reloc = iter.oop_reloc();
2198 if (initialize_immediates && reloc->oop_is_immediate()) {
2199 oop* dest = reloc->oop_addr();
2200 jobject obj = *reinterpret_cast<jobject*>(dest);
2201 initialize_immediate_oop(dest, obj);
2202 }
2203 // Refresh the oop-related bits of this instruction.
2204 reloc->fix_oop_relocation();
2205 } else if (iter.type() == relocInfo::metadata_type) {
2206 metadata_Relocation* reloc = iter.metadata_reloc();
2207 reloc->fix_metadata_relocation();
2208 }
2209 }
2210 }
2211
2212 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
2213 RelocIterator iter(this);
2214 while (iter.next()) {
2215 if (iter.type() == relocInfo::oop_type) {
2216 oop_Relocation* reloc = iter.oop_reloc();
2217 if (reloc->oop_is_immediate()) {
2218 oop dest = reloc->oop_value();
2219 Handle h(thread, dest);
2220 oop_list.append(h);
2221 }
2222 } else if (iter.type() == relocInfo::metadata_type) {
2223 metadata_Relocation* reloc = iter.metadata_reloc();
2224 if (reloc->metadata_is_immediate()) {
2225 Metadata* m = reloc->metadata_value();
2226 metadata_list.append(m);
2227 }
2228 }
2229 }
2230 }
2231
2232 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2233 NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2234 intptr_t cbaddr = (intptr_t) nm;
2235 intptr_t offset = ((intptr_t) pc) - cbaddr;
2236
2237 int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2238 if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
2239 log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2240 } else if (!nop->patch(oopmap_slot, offset)) {
2241 log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2242 }
2243 }
2244
2245 void nmethod::finalize_relocations() {
2246 NoSafepointVerifier nsv;
2247
2248 GrowableArray<NativeMovConstReg*> virtual_call_data;
2249
2250 // Make sure that post call nops fill in nmethod offsets eagerly so
2251 // we don't have to race with deoptimization
2382 // be alive the previous completed marking cycle.
2383 return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2384 }
2385
2386 void nmethod::inc_decompile_count() {
2387 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2388 // Could be gated by ProfileTraps, but do not bother...
2389 #if INCLUDE_JVMCI
2390 if (jvmci_skip_profile_deopt()) {
2391 return;
2392 }
2393 #endif
2394 Method* m = method();
2395 if (m == nullptr) return;
2396 MethodData* mdo = m->method_data();
2397 if (mdo == nullptr) return;
2398 // There is a benign race here. See comments in methodData.hpp.
2399 mdo->inc_decompile_count();
2400 }
2401
// Atomically bump this nmethod's profiling counter; safe for concurrent callers.
void nmethod::inc_method_profiling_count() {
  AtomicAccess::inc(&_method_profiling_count);
}
2405
2406 uint64_t nmethod::method_profiling_count() {
2407 return _method_profiling_count;
2408 }
2409
2410 bool nmethod::try_transition(signed char new_state_int) {
2411 signed char new_state = new_state_int;
2412 assert_lock_strong(NMethodState_lock);
2413 signed char old_state = _state;
2414 if (old_state >= new_state) {
2415 // Ensure monotonicity of transitions.
2416 return false;
2417 }
2418 AtomicAccess::store(&_state, new_state);
2419 return true;
2420 }
2421
2422 void nmethod::invalidate_osr_method() {
2423 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2424 // Remove from list of active nmethods
2425 if (method() != nullptr) {
2426 method()->method_holder()->remove_osr_nmethod(this);
2427 }
2428 }
2429
2439 }
2440 }
2441
2442 ResourceMark rm;
2443 stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2444 ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2445
2446 CompileTask::print_ul(this, ss.freeze());
2447 if (PrintCompilation) {
2448 print_on_with_msg(tty, ss.freeze());
2449 }
2450 }
2451
2452 void nmethod::unlink_from_method() {
2453 if (method() != nullptr) {
2454 method()->unlink_code(this);
2455 }
2456 }
2457
2458 // Invalidate code
2459 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2460 // This can be called while the system is already at a safepoint which is ok
2461 NoSafepointVerifier nsv;
2462
2463 if (is_unloading()) {
2464 // If the nmethod is unloading, then it is already not entrant through
2465 // the nmethod entry barriers. No need to do anything; GC will unload it.
2466 return false;
2467 }
2468
2469 if (AtomicAccess::load(&_state) == not_entrant) {
2470 // Avoid taking the lock if already in required state.
2471 // This is safe from races because the state is an end-state,
2472 // which the nmethod cannot back out of once entered.
2473 // No need for fencing either.
2474 return false;
2475 }
2476
2477 MACOS_AARCH64_ONLY(os::thread_wx_enable_write());
2478
2479 {
2503 }
2504
2505 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2506 if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2507 // If nmethod entry barriers are not supported, we won't mark
2508 // nmethods as on-stack when they become on-stack. So we
2509 // degrade to a less accurate flushing strategy, for now.
2510 mark_as_maybe_on_stack();
2511 }
2512
2513 // Change state
2514 bool success = try_transition(not_entrant);
2515 assert(success, "Transition can't fail");
2516
2517 // Log the transition once
2518 log_state_change(invalidation_reason);
2519
2520 // Remove nmethod from method.
2521 unlink_from_method();
2522
2523 if (!keep_aot_entry) {
2524 // Keep AOT code if it was simply replaced
2525 // otherwise make it not entrant too.
2526 AOTCodeCache::invalidate(_aot_code_entry);
2527 }
2528
2529 CompileBroker::log_not_entrant(this);
2530 } // leave critical region under NMethodState_lock
2531
2532 #if INCLUDE_JVMCI
2533 // Invalidate can't occur while holding the NMethodState_lock
2534 JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2535 if (nmethod_data != nullptr) {
2536 nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2537 }
2538 #endif
2539
2540 #ifdef ASSERT
2541 if (is_osr_method() && method() != nullptr) {
2542 // Make sure osr nmethod is invalidated, i.e. not on the list
2543 bool found = method()->method_holder()->remove_osr_nmethod(this);
2544 assert(!found, "osr nmethod should have been invalidated");
2545 }
2546 #endif
2547
2548 return true;
2549 }
2574 nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2575 nmethod::InvalidationReason::UNLOADING_COLD :
2576 nmethod::InvalidationReason::UNLOADING);
2577 }
2578 #endif
2579
2580 // Post before flushing as jmethodID is being used
2581 post_compiled_method_unload();
2582
2583 // Register for flushing when it is safe. For concurrent class unloading,
2584 // that would be after the unloading handshake, and for STW class unloading
2585 // that would be when getting back to the VM thread.
2586 ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2587 }
2588
// Deallocate this nmethod's side structures and release the blob itself.
// Called once the nmethod is unlinked and safe to free.
void nmethod::purge(bool unregister_nmethod) {

  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);

  // completely deallocate this method
  Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));

  LogTarget(Debug, codecache) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    const char* method_name = method()->name()->as_C_string();
    const size_t codecache_capacity = CodeCache::capacity()/1024;
    const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
    ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
             "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
             compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
             codecache_capacity, codecache_free_space, method_name, compiler_name());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  while(ec != nullptr) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }
  if (_pc_desc_container != nullptr) {
    delete _pc_desc_container;
  }
  if (_compiled_ic_data != nullptr) {
    delete[] _compiled_ic_data;
  }

  // Immutable data may be shared (reference-counted) and may live in the AOT
  // cache; only os::free() it when we hold the last reference and it is not
  // AOT-cache memory.
  if (_immutable_data != blob_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
    // Free memory if this was the last nmethod referencing immutable data
    if (dec_immutable_data_ref_count() == 0) {
      os::free(_immutable_data);
    }

    _immutable_data = blob_end(); // Valid not null address
  }

  if (unregister_nmethod) {
    Universe::heap()->unregister_nmethod(this);
  }
  CodeCache::unregister_old_nmethod(this);

  JVMCI_ONLY( _metadata_size = 0; )
  CodeBlob::purge();
}
2642
2643 oop nmethod::oop_at(int index) const {
2644 if (index == 0) {
2645 return nullptr;
2672 MethodHandles::clean_dependency_context(call_site);
2673 } else {
2674 InstanceKlass* ik = deps.context_type();
2675 if (ik == nullptr) {
2676 continue; // ignore things like evol_method
2677 }
2678 // During GC liveness of dependee determines class that needs to be updated.
2679 // The GC may clean dependency contexts concurrently and in parallel.
2680 ik->clean_dependency_context();
2681 }
2682 }
2683 }
2684 }
2685
2686 void nmethod::post_compiled_method(CompileTask* task) {
2687 task->mark_success();
2688 task->set_nm_content_size(content_size());
2689 task->set_nm_insts_size(insts_size());
2690 task->set_nm_total_size(total_size());
2691
2692 CompileTrainingData* ctd = task->training_data();
2693 if (ctd != nullptr) {
2694 // Record inline code size during training to help inlining during production run
2695 precond(TrainingData::need_data()); // training run
2696 int inline_size = inline_instructions_size();
2697 if (inline_size < 0) inline_size = 0;
2698 ctd->set_inline_instructions_size(inline_size);
2699 }
2700
2701 // task->is_aot_load() is true only for loaded AOT code.
2702 // nmethod::_aot_code_entry is set for loaded and stored AOT code
2703 // to invalidate the entry when nmethod is deoptimized.
2704 // VerifyAOTCode is option to not store in archive AOT code.
2705 guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");
2706
2707 // JVMTI -- compiled method notification (must be done outside lock)
2708 post_compiled_method_load_event();
2709
2710 if (CompilationLog::log() != nullptr) {
2711 CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2712 }
2713
2714 const DirectiveSet* directive = task->directive();
2715 maybe_print_nmethod(directive);
2716 }
2717
2718 #if INCLUDE_CDS
2719 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2720
2721 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2722 precond(CDSConfig::is_using_aot_linked_classes());
2723 precond(!ServiceThread::has_started());
2724
2725 // We are still in single threaded stage of VM bootstrap. No need to lock.
2726 if (_delayed_compiled_method_load_events == nullptr) {
3445 void nmethod::verify() {
3446 if (is_not_entrant())
3447 return;
3448
3449 // assert(oopDesc::is_oop(method()), "must be valid");
3450
3451 ResourceMark rm;
3452
3453 if (!CodeCache::contains(this)) {
3454 fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3455 }
3456
3457 if(is_native_method() )
3458 return;
3459
3460 nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3461 if (nm != this) {
3462 fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3463 }
3464
3465 // Verification can triggered during shutdown after AOTCodeCache is closed.
3466 // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3467 if (!is_aot() || AOTCodeCache::is_on()) {
3468 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3469 if (! p->verify(this)) {
3470 tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3471 }
3472 }
3473
3474 #ifdef ASSERT
3475 #if INCLUDE_JVMCI
3476 {
3477 // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3478 ImmutableOopMapSet* oms = oop_maps();
3479 ImplicitExceptionTable implicit_table(this);
3480 for (uint i = 0; i < implicit_table.len(); i++) {
3481 int exec_offset = (int) implicit_table.get_exec_offset(i);
3482 if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3483 assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3484 bool found = false;
3485 for (int i = 0, imax = oms->count(); i < imax; i++) {
3486 if (oms->pair_at(i)->pc_offset() == exec_offset) {
3487 found = true;
3488 break;
3489 }
3490 }
3491 assert(found, "missing oopmap");
3492 }
3493 }
3494 }
3495 #endif
3496 #endif
3497 }
3498
3499 VerifyOopsClosure voc(this);
3500 oops_do(&voc);
3501 assert(voc.ok(), "embedded oops must be OK");
3502 Universe::heap()->verify_nmethod(this);
3503
3504 assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3505 nm->method()->external_name(), p2i(_oops_do_mark_link));
3506 if (!is_aot() || AOTCodeCache::is_on()) {
3507 verify_scopes();
3508 }
3509
3510 CompiledICLocker nm_verify(this);
3511 VerifyMetadataClosure vmc;
3512 metadata_do(&vmc);
3513 }
3514
3515
3516 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3517
3518 // Verify IC only when nmethod installation is finished.
3519 if (!is_not_installed()) {
3520 if (CompiledICLocker::is_safe(this)) {
3521 if (is_inline_cache) {
3522 CompiledIC_at(this, call_site);
3523 } else {
3524 CompiledDirectCall::at(call_site);
3525 }
3526 } else {
3527 CompiledICLocker ml_verify(this);
3528 if (is_inline_cache) {
3657 p2i(nul_chk_table_end()),
3658 nul_chk_table_size());
3659 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3660 p2i(handler_table_begin()),
3661 p2i(handler_table_end()),
3662 handler_table_size());
3663 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3664 p2i(scopes_pcs_begin()),
3665 p2i(scopes_pcs_end()),
3666 scopes_pcs_size());
3667 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3668 p2i(scopes_data_begin()),
3669 p2i(scopes_data_end()),
3670 scopes_data_size());
3671 #if INCLUDE_JVMCI
3672 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3673 p2i(speculations_begin()),
3674 p2i(speculations_end()),
3675 speculations_size());
3676 #endif
3677 if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3678 _aot_code_entry->print(st);
3679 }
3680 }
3681
// Disassemble this nmethod's code to tty (holds the tty lock for the duration).
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3688
3689 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3690
3691 void nmethod::print_dependencies_on(outputStream* out) {
3692 ResourceMark rm;
3693 stringStream st;
3694 st.print_cr("Dependencies:");
3695 for (Dependencies::DepStream deps(this); deps.next(); ) {
3696 deps.print_dependency(&st);
3697 InstanceKlass* ctxk = deps.context_type();
3698 if (ctxk != nullptr) {
3699 if (ctxk->is_dependent_nmethod(this)) {
3759 st->print("scopes:");
3760 if (scopes_pcs_begin() < scopes_pcs_end()) {
3761 st->cr();
3762 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3763 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3764 continue;
3765
3766 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3767 while (sd != nullptr) {
3768 sd->print_on(st, p); // print output ends with a newline
3769 sd = sd->sender();
3770 }
3771 }
3772 } else {
3773 st->print_cr(" <list empty>");
3774 }
3775 }
3776 #endif
3777
3778 #ifndef PRODUCT // RelocIterator does support printing only then.
3779 void nmethod::print_relocations_on(outputStream* st) {
3780 ResourceMark m; // in case methods get printed via the debugger
3781 st->print_cr("relocations:");
3782 RelocIterator iter(this);
3783 iter.print_on(st);
3784 }
3785 #endif
3786
3787 void nmethod::print_pcs_on(outputStream* st) {
3788 ResourceMark m; // in case methods get printed via debugger
3789 st->print("pc-bytecode offsets:");
3790 if (scopes_pcs_begin() < scopes_pcs_end()) {
3791 st->cr();
3792 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3793 p->print_on(st, this); // print output ends with a newline
3794 }
3795 } else {
3796 st->print_cr(" <list empty>");
3797 }
3798 }
3799
// Print the exception handler table, passing code_begin() as the base address.
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3803
4618 void nmethod::update_speculation(JavaThread* thread) {
4619 jlong speculation = thread->pending_failed_speculation();
4620 if (speculation != 0) {
4621 guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4622 jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4623 thread->set_pending_failed_speculation(0);
4624 }
4625 }
4626
4627 const char* nmethod::jvmci_name() {
4628 if (jvmci_nmethod_data() != nullptr) {
4629 return jvmci_nmethod_data()->name();
4630 }
4631 return nullptr;
4632 }
4633
4634 bool nmethod::jvmci_skip_profile_deopt() const {
4635 return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4636 }
4637 #endif
4638
// Reset process-specific state before this nmethod is archived; the cleared
// fields are re-established when the archived nmethod is loaded.
void nmethod::prepare_for_archiving_impl() {
  CodeBlob::prepare_for_archiving_impl();
  // Clear GC/profiling bookkeeping counters.
  _deoptimization_generation = 0;
  _gc_epoch = 0;
  _method_profiling_count = 0;
  // Null out all pointers into the live process; they are meaningless in the archive.
  _osr_link = nullptr;
  _method = nullptr;
  _immutable_data = nullptr;
  _pc_desc_container = nullptr;
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;
  _osr_entry_point = nullptr;
  // Reset identity/state flags to their pre-installation values.
  _compile_id = -1;
  _deoptimization_status = not_marked;
  _is_unloading_state = 0;
  _state = not_installed;
}
|