12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/assembler.inline.hpp"
26 #include "code/codeCache.hpp"
27 #include "code/compiledIC.hpp"
28 #include "code/dependencies.hpp"
29 #include "code/nativeInst.hpp"
30 #include "code/nmethod.inline.hpp"
31 #include "code/scopeDesc.hpp"
32 #include "compiler/abstractCompiler.hpp"
33 #include "compiler/compilationLog.hpp"
34 #include "compiler/compileBroker.hpp"
35 #include "compiler/compileLog.hpp"
36 #include "compiler/compileTask.hpp"
37 #include "compiler/compilerDirectives.hpp"
38 #include "compiler/compilerOracle.hpp"
39 #include "compiler/directivesParser.hpp"
40 #include "compiler/disassembler.hpp"
41 #include "compiler/oopMap.inline.hpp"
42 #include "gc/shared/barrierSet.hpp"
43 #include "gc/shared/barrierSetNMethod.hpp"
44 #include "gc/shared/classUnloadingContext.hpp"
45 #include "gc/shared/collectedHeap.hpp"
46 #include "interpreter/bytecode.inline.hpp"
47 #include "jvm.h"
48 #include "logging/log.hpp"
49 #include "logging/logStream.hpp"
50 #include "memory/allocation.inline.hpp"
51 #include "memory/resourceArea.hpp"
768
769 void nmethod::clear_inline_caches() {
770 assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
771 RelocIterator iter(this);
772 while (iter.next()) {
773 iter.reloc()->clear_inline_cache();
774 }
775 }
776
#ifdef ASSERT
// Debug-only closure: for each piece of recorded metadata, locate the
// Klass that owns it and assert its class loader is still alive.
class CheckClass : public MetadataClosure {
  void do_metadata(Metadata* md) {
    Klass* holder = nullptr;
    if (md->is_klass()) {
      holder = (Klass*)md;
    } else if (md->is_method()) {
      holder = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      holder = ((MethodData*)md)->method()->method_holder();
    } else {
      // Unexpected metadata kind — dump it and fail.
      md->print();
      ShouldNotReachHere();
    }
    assert(holder->is_loader_alive(), "must be alive");
  }
};
#endif // ASSERT
796
797
// Helper used while cleaning an nmethod's call sites: clears the metadata
// cached in a CompiledIC (see CompiledIC::clean_metadata for the details).
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
801
802 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
803 template <typename CallsiteT>
804 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
805 bool clean_all) {
806 CodeBlob* cb = CodeCache::find_blob(callsite->destination());
807 if (!cb->is_nmethod()) {
1112 nm = new (native_nmethod_size, allow_NonNMethod_space)
1113 nmethod(method(), compiler_none, native_nmethod_size,
1114 compile_id, &offsets,
1115 code_buffer, frame_size,
1116 basic_lock_owner_sp_offset,
1117 basic_lock_sp_offset,
1118 oop_maps, mutable_data_size);
1119 DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1120 NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1121 }
1122
1123 if (nm != nullptr) {
1124 // verify nmethod
1125 debug_only(nm->verify();) // might block
1126
1127 nm->log_new_nmethod();
1128 }
1129 return nm;
1130 }
1131
1132 nmethod* nmethod::new_nmethod(const methodHandle& method,
1133 int compile_id,
1134 int entry_bci,
1135 CodeOffsets* offsets,
1136 int orig_pc_offset,
1137 DebugInformationRecorder* debug_info,
1138 Dependencies* dependencies,
1139 CodeBuffer* code_buffer, int frame_size,
1140 OopMapSet* oop_maps,
1141 ExceptionHandlerTable* handler_table,
1142 ImplicitExceptionTable* nul_chk_table,
1143 AbstractCompiler* compiler,
1144 CompLevel comp_level
1145 #if INCLUDE_JVMCI
1146 , char* speculations,
1147 int speculations_len,
1148 JVMCINMethodData* jvmci_data
1149 #endif
1150 )
1151 {
1152 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1153 code_buffer->finalize_oop_references(method);
1154 // create nmethod
1155 nmethod* nm = nullptr;
1156 int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1157
1158 int immutable_data_size =
1159 adjust_pcs_size(debug_info->pcs_size())
1160 + align_up((int)dependencies->size_in_bytes(), oopSize)
1161 + align_up(handler_table->size_in_bytes() , oopSize)
1162 + align_up(nul_chk_table->size_in_bytes() , oopSize)
1163 #if INCLUDE_JVMCI
1164 + align_up(speculations_len , oopSize)
1168 // First, allocate space for immutable data in C heap.
1169 address immutable_data = nullptr;
1170 if (immutable_data_size > 0) {
1171 immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1172 if (immutable_data == nullptr) {
1173 vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1174 return nullptr;
1175 }
1176 }
1177
1178 int mutable_data_size = required_mutable_data_size(code_buffer
1179 JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1180
1181 {
1182 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1183
1184 nm = new (nmethod_size, comp_level)
1185 nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1186 compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1187 debug_info, dependencies, code_buffer, frame_size, oop_maps,
1188 handler_table, nul_chk_table, compiler, comp_level
1189 #if INCLUDE_JVMCI
1190 , speculations,
1191 speculations_len,
1192 jvmci_data
1193 #endif
1194 );
1195
1196 if (nm != nullptr) {
1197 // To make dependency checking during class loading fast, record
1198 // the nmethod dependencies in the classes it is dependent on.
1199 // This allows the dependency checking code to simply walk the
1200 // class hierarchy above the loaded class, checking only nmethods
1201 // which are dependent on those classes. The slow way is to
1202 // check every nmethod for dependencies which makes it linear in
1203 // the number of methods compiled. For applications with a lot
1204 // classes the slow way is too slow.
1205 for (Dependencies::DepStream deps(nm); deps.next(); ) {
1206 if (deps.type() == Dependencies::call_site_target_value) {
1207 // CallSite dependencies are managed on per-CallSite instance basis.
1208 oop call_site = deps.argument_oop(0);
1209 MethodHandles::add_dependent_nmethod(call_site, nm);
1210 } else {
1211 InstanceKlass* ik = deps.context_type();
1212 if (ik == nullptr) {
1213 continue; // ignore things like evol_method
1214 }
1215 // record this nmethod as dependent on this klass
1216 ik->add_dependent_nmethod(nm);
1217 }
1218 }
1219 NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
1220 }
1221 }
1222 // Do verification and logging outside CodeCache_lock.
1223 if (nm != nullptr) {
1224 // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1225 DEBUG_ONLY(nm->verify();)
1226 nm->log_new_nmethod();
1227 }
1228 return nm;
1229 }
1230
1231 // Fill in default values for various fields
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;

  // Fresh nmethods start out neither unloading nor installed.
  _is_unloading_state = 0;
  _state = not_installed;

  // Single-bit property flags; set later when the corresponding property
  // is discovered during compilation or installation.
  _has_unsafe_access = 0;
  _has_method_handle_invokes = 0;
  _has_wide_vectors = 0;
  _has_monitors = 0;
  _has_scoped_access = 0;
  _has_flushed_dependencies = 0;
  _is_unlinked = 0;
  _load_reported = 0; // jvmti state

  _deoptimization_status = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry offsets are narrowed to uint16_t; CHECKED_CAST checks the narrowing.
  CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1264
1265 // Post initialization
1266 void nmethod::post_init() {
1267 clear_unloading_state();
1268
1269 finalize_relocations();
1270
1303
1304 _osr_entry_point = nullptr;
1305 _pc_desc_container = nullptr;
1306 _entry_bci = InvocationEntryBci;
1307 _compile_id = compile_id;
1308 _comp_level = CompLevel_none;
1309 _compiler_type = type;
1310 _orig_pc_offset = 0;
1311 _num_stack_arg_slots = 0;
1312
1313 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1314 // Continuation enter intrinsic
1315 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1316 } else {
1317 _exception_offset = 0;
1318 }
1319 // Native wrappers do not have deopt handlers. Make the values
1320 // something that will never match a pc like the nmethod vtable entry
1321 _deopt_handler_offset = 0;
1322 _deopt_mh_handler_offset = 0;
1323 _unwind_handler_offset = 0;
1324
1325 CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1326 int metadata_size = align_up(code_buffer->total_metadata_size(), wordSize);
1327 JVMCI_ONLY( _jvmci_data_size = 0; )
1328 assert(_mutable_data_size == _relocation_size + metadata_size,
1329 "wrong mutable data size: %d != %d + %d",
1330 _mutable_data_size, _relocation_size, metadata_size);
1331
1332 // native wrapper does not have read-only data but we need unique not null address
1333 _immutable_data = blob_end();
1334 _immutable_data_size = 0;
1335 _nul_chk_table_offset = 0;
1336 _handler_table_offset = 0;
1337 _scopes_pcs_offset = 0;
1338 _scopes_data_offset = 0;
1339 #if INCLUDE_JVMCI
1340 _speculations_offset = 0;
1341 #endif
1342
1361 // This is both handled in decode2(), called via print_code() -> decode()
1362 if (PrintNativeNMethods) {
1363 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1364 print_code();
1365 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1366 #if defined(SUPPORT_DATA_STRUCTS)
1367 if (AbstractDisassembler::show_structs()) {
1368 if (oop_maps != nullptr) {
1369 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1370 oop_maps->print_on(tty);
1371 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1372 }
1373 }
1374 #endif
1375 } else {
1376 print(); // print the header part only.
1377 }
1378 #if defined(SUPPORT_DATA_STRUCTS)
1379 if (AbstractDisassembler::show_structs()) {
1380 if (PrintRelocations) {
1381 print_relocations();
1382 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1383 }
1384 }
1385 #endif
1386 if (xtty != nullptr) {
1387 xtty->tail("print_native_nmethod");
1388 }
1389 }
1390 }
1391
// Allocate nmethod storage from the CodeCache heap selected by the
// compilation level.
void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
}
1395
// Allocate nmethod storage, optionally falling back to the NonNMethod part
// of the code cache when the method heaps are exhausted. Returns nullptr on
// failure (this operator is declared throw()).
void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
  // Try MethodNonProfiled and MethodProfiled.
  void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
  if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
  // Try NonNMethod or give up.
  return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
1405 nmethod::nmethod(
1406 Method* method,
1407 CompilerType type,
1408 int nmethod_size,
1409 int immutable_data_size,
1410 int mutable_data_size,
1411 int compile_id,
1412 int entry_bci,
1413 address immutable_data,
1414 CodeOffsets* offsets,
1415 int orig_pc_offset,
1416 DebugInformationRecorder* debug_info,
1417 Dependencies* dependencies,
1418 CodeBuffer *code_buffer,
1419 int frame_size,
1420 OopMapSet* oop_maps,
1421 ExceptionHandlerTable* handler_table,
1422 ImplicitExceptionTable* nul_chk_table,
1423 AbstractCompiler* compiler,
1424 CompLevel comp_level
1425 #if INCLUDE_JVMCI
1426 , char* speculations,
1427 int speculations_len,
1428 JVMCINMethodData* jvmci_data
1429 #endif
1430 )
1431 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1432 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1433 _deoptimization_generation(0),
1434 _gc_epoch(CodeCache::gc_epoch()),
1435 _method(method),
1436 _osr_link(nullptr)
1437 {
1438 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1439 {
1440 debug_only(NoSafepointVerifier nsv;)
1441 assert_locked_or_safepoint(CodeCache_lock);
1442
1443 init_defaults(code_buffer, offsets);
1444
1445 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1446 _entry_bci = entry_bci;
1447 _compile_id = compile_id;
1448 _comp_level = comp_level;
1449 _compiler_type = type;
1450 _orig_pc_offset = orig_pc_offset;
1451
1452 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1453
1454 set_ctable_begin(header_begin() + content_offset());
1455
1456 #if INCLUDE_JVMCI
1457 if (compiler->is_jvmci()) {
1458 // JVMCI might not produce any stub sections
1459 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1460 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1461 } else {
1462 _exception_offset = -1;
1463 }
1553 #if INCLUDE_JVMCI
1554 // Copy speculations to nmethod
1555 if (speculations_size() != 0) {
1556 memcpy(speculations_begin(), speculations, speculations_len);
1557 }
1558 #endif
1559
1560 post_init();
1561
1562 // we use the information of entry points to find out if a method is
1563 // static or non static
1564 assert(compiler->is_c2() || compiler->is_jvmci() ||
1565 _method->is_static() == (entry_point() == verified_entry_point()),
1566 " entry points must be same for static methods and vice versa");
1567 }
1568 }
1569
1570 // Print a short set of xml attributes to identify this nmethod. The
1571 // output should be embedded in some other element.
1572 void nmethod::log_identity(xmlStream* log) const {
1573 log->print(" compile_id='%d'", compile_id());
1574 const char* nm_kind = compile_kind();
1575 if (nm_kind != nullptr) log->print(" compile_kind='%s'", nm_kind);
1576 log->print(" compiler='%s'", compiler_name());
1577 if (TieredCompilation) {
1578 log->print(" level='%d'", comp_level());
1579 }
1580 #if INCLUDE_JVMCI
1581 if (jvmci_nmethod_data() != nullptr) {
1582 const char* jvmci_name = jvmci_nmethod_data()->name();
1583 if (jvmci_name != nullptr) {
1584 log->print(" jvmci_mirror_name='");
1585 log->text("%s", jvmci_name);
1586 log->print("'");
1587 }
1588 }
1589 #endif
1590 }
1591
1592
1593 #define LOG_OFFSET(log, name) \
1594 if (p2i(name##_end()) - p2i(name##_begin())) \
1595 log->print(" " XSTR(name) "_offset='%zd'" , \
1596 p2i(name##_begin()) - p2i(this))
1597
1598
1679 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1680 if (oop_maps() != nullptr) {
1681 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1682 oop_maps()->print_on(tty);
1683 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1684 }
1685 }
1686 #endif
1687 } else {
1688 print(); // print the header part only.
1689 }
1690
1691 #if defined(SUPPORT_DATA_STRUCTS)
1692 if (AbstractDisassembler::show_structs()) {
1693 methodHandle mh(Thread::current(), _method);
1694 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1695 print_scopes();
1696 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1697 }
1698 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1699 print_relocations();
1700 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1701 }
1702 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1703 print_dependencies_on(tty);
1704 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1705 }
1706 if (printmethod || PrintExceptionHandlers) {
1707 print_handler_table();
1708 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1709 print_nul_chk_table();
1710 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1711 }
1712
1713 if (printmethod) {
1714 print_recorded_oops();
1715 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1716 print_recorded_metadata();
1717 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1718 }
1719 }
1720 #endif
1721
1722 if (xtty != nullptr) {
1723 xtty->tail("print_nmethod");
1724 }
1725 }
1726
1727
1728 // Promote one word from an assembly-time handle to a live embedded oop.
1729 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1730 if (handle == nullptr ||
1731 // As a special case, IC oops are initialized to 1 or -1.
1732 handle == (jobject) Universe::non_oop_word()) {
1733 *(void**)dest = handle;
1734 } else {
1735 *dest = JNIHandles::resolve_non_null(handle);
1736 }
1737 }
1738
1739
1740 // Have to have the same name because it's called by a template
1741 void nmethod::copy_values(GrowableArray<jobject>* array) {
1742 int length = array->length();
1743 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1744 oop* dest = oops_begin();
1745 for (int index = 0 ; index < length; index++) {
1746 initialize_immediate_oop(&dest[index], array->at(index));
1747 }
1748
1749 // Now we can fix up all the oops in the code. We need to do this
1750 // in the code because the assembler uses jobjects as placeholders.
1751 // The code and relocations have already been initialized by the
1752 // CodeBlob constructor, so it is valid even at this early point to
1753 // iterate over relocations and patch the code.
1754 fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
1755 }
1756
1757 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1758 int length = array->length();
1766 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
1767 // re-patch all oop-bearing instructions, just in case some oops moved
1768 RelocIterator iter(this, begin, end);
1769 while (iter.next()) {
1770 if (iter.type() == relocInfo::oop_type) {
1771 oop_Relocation* reloc = iter.oop_reloc();
1772 if (initialize_immediates && reloc->oop_is_immediate()) {
1773 oop* dest = reloc->oop_addr();
1774 jobject obj = *reinterpret_cast<jobject*>(dest);
1775 initialize_immediate_oop(dest, obj);
1776 }
1777 // Refresh the oop-related bits of this instruction.
1778 reloc->fix_oop_relocation();
1779 } else if (iter.type() == relocInfo::metadata_type) {
1780 metadata_Relocation* reloc = iter.metadata_reloc();
1781 reloc->fix_metadata_relocation();
1782 }
1783 }
1784 }
1785
1786 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
1787 NativePostCallNop* nop = nativePostCallNop_at((address) pc);
1788 intptr_t cbaddr = (intptr_t) nm;
1789 intptr_t offset = ((intptr_t) pc) - cbaddr;
1790
1791 int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
1792 if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
1793 log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
1794 } else if (!nop->patch(oopmap_slot, offset)) {
1795 log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
1796 }
1797 }
1798
1799 void nmethod::finalize_relocations() {
1800 NoSafepointVerifier nsv;
1801
1802 GrowableArray<NativeMovConstReg*> virtual_call_data;
1803
1804 // Make sure that post call nops fill in nmethod offsets eagerly so
1805 // we don't have to race with deoptimization
1927 Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1928 }
1929
1930 bool nmethod::is_maybe_on_stack() {
1931 // If the condition below is true, it means that the nmethod was found to
1932 // be alive the previous completed marking cycle.
1933 return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1934 }
1935
1936 void nmethod::inc_decompile_count() {
1937 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1938 // Could be gated by ProfileTraps, but do not bother...
1939 Method* m = method();
1940 if (m == nullptr) return;
1941 MethodData* mdo = m->method_data();
1942 if (mdo == nullptr) return;
1943 // There is a benign race here. See comments in methodData.hpp.
1944 mdo->inc_decompile_count();
1945 }
1946
1947 bool nmethod::try_transition(signed char new_state_int) {
1948 signed char new_state = new_state_int;
1949 assert_lock_strong(NMethodState_lock);
1950 signed char old_state = _state;
1951 if (old_state >= new_state) {
1952 // Ensure monotonicity of transitions.
1953 return false;
1954 }
1955 Atomic::store(&_state, new_state);
1956 return true;
1957 }
1958
1959 void nmethod::invalidate_osr_method() {
1960 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1961 // Remove from list of active nmethods
1962 if (method() != nullptr) {
1963 method()->method_holder()->remove_osr_nmethod(this);
1964 }
1965 }
1966
1978 }
1979 }
1980
1981 ResourceMark rm;
1982 stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
1983 ss.print("made not entrant: %s", reason);
1984
1985 CompileTask::print_ul(this, ss.freeze());
1986 if (PrintCompilation) {
1987 print_on_with_msg(tty, ss.freeze());
1988 }
1989 }
1990
// Detach this nmethod's code from its Method (see Method::unlink_code).
// A method with no Method* (already unloaded) needs no unlinking.
void nmethod::unlink_from_method() {
  if (method() != nullptr) {
    method()->unlink_code(this);
  }
}
1996
1997 // Invalidate code
1998 bool nmethod::make_not_entrant(const char* reason) {
1999 assert(reason != nullptr, "Must provide a reason");
2000
2001 // This can be called while the system is already at a safepoint which is ok
2002 NoSafepointVerifier nsv;
2003
2004 if (is_unloading()) {
2005 // If the nmethod is unloading, then it is already not entrant through
2006 // the nmethod entry barriers. No need to do anything; GC will unload it.
2007 return false;
2008 }
2009
2010 if (Atomic::load(&_state) == not_entrant) {
2011 // Avoid taking the lock if already in required state.
2012 // This is safe from races because the state is an end-state,
2013 // which the nmethod cannot back out of once entered.
2014 // No need for fencing either.
2015 return false;
2016 }
2017
2018 {
2043 }
2044
2045 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2046 if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2047 // If nmethod entry barriers are not supported, we won't mark
2048 // nmethods as on-stack when they become on-stack. So we
2049 // degrade to a less accurate flushing strategy, for now.
2050 mark_as_maybe_on_stack();
2051 }
2052
2053 // Change state
2054 bool success = try_transition(not_entrant);
2055 assert(success, "Transition can't fail");
2056
2057 // Log the transition once
2058 log_state_change(reason);
2059
2060 // Remove nmethod from method.
2061 unlink_from_method();
2062
2063 } // leave critical region under NMethodState_lock
2064
2065 #if INCLUDE_JVMCI
2066 // Invalidate can't occur while holding the NMethodState_lock
2067 JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2068 if (nmethod_data != nullptr) {
2069 nmethod_data->invalidate_nmethod_mirror(this);
2070 }
2071 #endif
2072
2073 #ifdef ASSERT
2074 if (is_osr_method() && method() != nullptr) {
2075 // Make sure osr nmethod is invalidated, i.e. not on the list
2076 bool found = method()->method_holder()->remove_osr_nmethod(this);
2077 assert(!found, "osr nmethod should have been invalidated");
2078 }
2079 #endif
2080
2081 return true;
2082 }
2123
2124 // completely deallocate this method
2125 Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2126 log_debug(codecache)("*flushing %s nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT
2127 "/Free CodeCache:%zuKb",
2128 is_osr_method() ? "osr" : "",_compile_id, p2i(this), CodeCache::blob_count(),
2129 CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024);
2130
2131 // We need to deallocate any ExceptionCache data.
2132 // Note that we do not need to grab the nmethod lock for this, it
2133 // better be thread safe if we're disposing of it!
2134 ExceptionCache* ec = exception_cache();
2135 while(ec != nullptr) {
2136 ExceptionCache* next = ec->next();
2137 delete ec;
2138 ec = next;
2139 }
2140 if (_pc_desc_container != nullptr) {
2141 delete _pc_desc_container;
2142 }
2143 delete[] _compiled_ic_data;
2144
2145 if (_immutable_data != blob_end()) {
2146 os::free(_immutable_data);
2147 _immutable_data = blob_end(); // Valid not null address
2148 }
2149 if (unregister_nmethod) {
2150 Universe::heap()->unregister_nmethod(this);
2151 }
2152 CodeCache::unregister_old_nmethod(this);
2153
2154 CodeBlob::purge();
2155 }
2156
2157 oop nmethod::oop_at(int index) const {
2158 if (index == 0) {
2159 return nullptr;
2160 }
2161
2162 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2163 return bs_nm->oop_load_no_keepalive(this, index);
2164 }
2165
2186 MethodHandles::clean_dependency_context(call_site);
2187 } else {
2188 InstanceKlass* ik = deps.context_type();
2189 if (ik == nullptr) {
2190 continue; // ignore things like evol_method
2191 }
2192 // During GC liveness of dependee determines class that needs to be updated.
2193 // The GC may clean dependency contexts concurrently and in parallel.
2194 ik->clean_dependency_context();
2195 }
2196 }
2197 }
2198 }
2199
2200 void nmethod::post_compiled_method(CompileTask* task) {
2201 task->mark_success();
2202 task->set_nm_content_size(content_size());
2203 task->set_nm_insts_size(insts_size());
2204 task->set_nm_total_size(total_size());
2205
2206 // JVMTI -- compiled method notification (must be done outside lock)
2207 post_compiled_method_load_event();
2208
2209 if (CompilationLog::log() != nullptr) {
2210 CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2211 }
2212
2213 const DirectiveSet* directive = task->directive();
2214 maybe_print_nmethod(directive);
2215 }
2216
2217 // ------------------------------------------------------------------
2218 // post_compiled_method_load_event
2219 // new method for install_code() path
2220 // Transfer information from compilation to jvmti
2221 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2222 // This is a bad time for a safepoint. We don't want
2223 // this nmethod to get unloaded while we're queueing the event.
2224 NoSafepointVerifier nsv;
2225
2917
2918 // Make sure all the entry points are correctly aligned for patching.
2919 NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
2920
2921 // assert(oopDesc::is_oop(method()), "must be valid");
2922
2923 ResourceMark rm;
2924
2925 if (!CodeCache::contains(this)) {
2926 fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
2927 }
2928
2929 if(is_native_method() )
2930 return;
2931
2932 nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
2933 if (nm != this) {
2934 fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
2935 }
2936
2937 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2938 if (! p->verify(this)) {
2939 tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
2940 }
2941 }
2942
2943 #ifdef ASSERT
2944 #if INCLUDE_JVMCI
2945 {
2946 // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
2947 ImmutableOopMapSet* oms = oop_maps();
2948 ImplicitExceptionTable implicit_table(this);
2949 for (uint i = 0; i < implicit_table.len(); i++) {
2950 int exec_offset = (int) implicit_table.get_exec_offset(i);
2951 if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
2952 assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
2953 bool found = false;
2954 for (int i = 0, imax = oms->count(); i < imax; i++) {
2955 if (oms->pair_at(i)->pc_offset() == exec_offset) {
2956 found = true;
2957 break;
2958 }
2959 }
2960 assert(found, "missing oopmap");
2961 }
2962 }
2963 }
2964 #endif
2965 #endif
2966
2967 VerifyOopsClosure voc(this);
2968 oops_do(&voc);
2969 assert(voc.ok(), "embedded oops must be OK");
2970 Universe::heap()->verify_nmethod(this);
2971
2972 assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
2973 nm->method()->external_name(), p2i(_oops_do_mark_link));
2974 verify_scopes();
2975
2976 CompiledICLocker nm_verify(this);
2977 VerifyMetadataClosure vmc;
2978 metadata_do(&vmc);
2979 }
2980
2981
2982 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
2983
2984 // Verify IC only when nmethod installation is finished.
2985 if (!is_not_installed()) {
2986 if (CompiledICLocker::is_safe(this)) {
2987 if (is_inline_cache) {
2988 CompiledIC_at(this, call_site);
2989 } else {
2990 CompiledDirectCall::at(call_site);
2991 }
2992 } else {
2993 CompiledICLocker ml_verify(this);
2994 if (is_inline_cache) {
3123 p2i(nul_chk_table_end()),
3124 nul_chk_table_size());
3125 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3126 p2i(handler_table_begin()),
3127 p2i(handler_table_end()),
3128 handler_table_size());
3129 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3130 p2i(scopes_pcs_begin()),
3131 p2i(scopes_pcs_end()),
3132 scopes_pcs_size());
3133 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3134 p2i(scopes_data_begin()),
3135 p2i(scopes_data_end()),
3136 scopes_data_size());
3137 #if INCLUDE_JVMCI
3138 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3139 p2i(speculations_begin()),
3140 p2i(speculations_end()),
3141 speculations_size());
3142 #endif
3143 }
3144
// Disassemble and print this nmethod's code to tty.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3151
3152 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3153
3154 void nmethod::print_dependencies_on(outputStream* out) {
3155 ResourceMark rm;
3156 stringStream st;
3157 st.print_cr("Dependencies:");
3158 for (Dependencies::DepStream deps(this); deps.next(); ) {
3159 deps.print_dependency(&st);
3160 InstanceKlass* ctxk = deps.context_type();
3161 if (ctxk != nullptr) {
3162 if (ctxk->is_dependent_nmethod(this)) {
3222 st->print("scopes:");
3223 if (scopes_pcs_begin() < scopes_pcs_end()) {
3224 st->cr();
3225 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3226 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3227 continue;
3228
3229 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3230 while (sd != nullptr) {
3231 sd->print_on(st, p); // print output ends with a newline
3232 sd = sd->sender();
3233 }
3234 }
3235 } else {
3236 st->print_cr(" <list empty>");
3237 }
3238 }
3239 #endif
3240
3241 #ifndef PRODUCT // RelocIterator does support printing only then.
3242 void nmethod::print_relocations() {
3243 ResourceMark m; // in case methods get printed via the debugger
3244 tty->print_cr("relocations:");
3245 RelocIterator iter(this);
3246 iter.print();
3247 }
3248 #endif
3249
3250 void nmethod::print_pcs_on(outputStream* st) {
3251 ResourceMark m; // in case methods get printed via debugger
3252 st->print("pc-bytecode offsets:");
3253 if (scopes_pcs_begin() < scopes_pcs_end()) {
3254 st->cr();
3255 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3256 p->print_on(st, this); // print output ends with a newline
3257 }
3258 } else {
3259 st->print_cr(" <list empty>");
3260 }
3261 }
3262
3263 void nmethod::print_handler_table() {
3264 ExceptionHandlerTable(this).print(code_begin());
3265 }
3266
4032
4033 #endif // !PRODUCT
4034
4035 #if INCLUDE_JVMCI
4036 void nmethod::update_speculation(JavaThread* thread) {
4037 jlong speculation = thread->pending_failed_speculation();
4038 if (speculation != 0) {
4039 guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4040 jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4041 thread->set_pending_failed_speculation(0);
4042 }
4043 }
4044
4045 const char* nmethod::jvmci_name() {
4046 if (jvmci_nmethod_data() != nullptr) {
4047 return jvmci_nmethod_data()->name();
4048 }
4049 return nullptr;
4050 }
4051 #endif
|
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/assembler.inline.hpp"
26 #include "code/codeCache.hpp"
27 #include "code/compiledIC.hpp"
28 #include "code/dependencies.hpp"
29 #include "code/nativeInst.hpp"
30 #include "code/nmethod.inline.hpp"
31 #include "code/scopeDesc.hpp"
32 #include "code/SCCache.hpp"
33 #include "compiler/abstractCompiler.hpp"
34 #include "compiler/compilationLog.hpp"
35 #include "compiler/compileBroker.hpp"
36 #include "compiler/compileLog.hpp"
37 #include "compiler/compileTask.hpp"
38 #include "compiler/compilerDirectives.hpp"
39 #include "compiler/compilerOracle.hpp"
40 #include "compiler/directivesParser.hpp"
41 #include "compiler/disassembler.hpp"
42 #include "compiler/oopMap.inline.hpp"
43 #include "gc/shared/barrierSet.hpp"
44 #include "gc/shared/barrierSetNMethod.hpp"
45 #include "gc/shared/classUnloadingContext.hpp"
46 #include "gc/shared/collectedHeap.hpp"
47 #include "interpreter/bytecode.inline.hpp"
48 #include "jvm.h"
49 #include "logging/log.hpp"
50 #include "logging/logStream.hpp"
51 #include "memory/allocation.inline.hpp"
52 #include "memory/resourceArea.hpp"
769
770 void nmethod::clear_inline_caches() {
771 assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
772 RelocIterator iter(this);
773 while (iter.next()) {
774 iter.reloc()->clear_inline_cache();
775 }
776 }
777
778 #ifdef ASSERT
779 // Check class_loader is alive for this bit of metadata.
780 class CheckClass : public MetadataClosure {
781 void do_metadata(Metadata* md) {
782 Klass* klass = nullptr;
783 if (md->is_klass()) {
784 klass = ((Klass*)md);
785 } else if (md->is_method()) {
786 klass = ((Method*)md)->method_holder();
787 } else if (md->is_methodData()) {
788 klass = ((MethodData*)md)->method()->method_holder();
789 } else if (md->is_methodCounters()) {
790 klass = ((MethodCounters*)md)->method()->method_holder();
791 } else {
792 md->print();
793 ShouldNotReachHere();
794 }
795 assert(klass->is_loader_alive(), "must be alive");
796 }
797 };
798 #endif // ASSERT
799
800
// Helper applied to inline caches during cleaning: delegates the
// dead-metadata check and cleanup to CompiledIC::clean_metadata().
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
804
805 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
806 template <typename CallsiteT>
807 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
808 bool clean_all) {
809 CodeBlob* cb = CodeCache::find_blob(callsite->destination());
810 if (!cb->is_nmethod()) {
1115 nm = new (native_nmethod_size, allow_NonNMethod_space)
1116 nmethod(method(), compiler_none, native_nmethod_size,
1117 compile_id, &offsets,
1118 code_buffer, frame_size,
1119 basic_lock_owner_sp_offset,
1120 basic_lock_sp_offset,
1121 oop_maps, mutable_data_size);
1122 DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1123 NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1124 }
1125
1126 if (nm != nullptr) {
1127 // verify nmethod
1128 debug_only(nm->verify();) // might block
1129
1130 nm->log_new_nmethod();
1131 }
1132 return nm;
1133 }
1134
1135 void nmethod::record_nmethod_dependency() {
1136 // To make dependency checking during class loading fast, record
1137 // the nmethod dependencies in the classes it is dependent on.
1138 // This allows the dependency checking code to simply walk the
1139 // class hierarchy above the loaded class, checking only nmethods
1140 // which are dependent on those classes. The slow way is to
1141 // check every nmethod for dependencies which makes it linear in
1142 // the number of methods compiled. For applications with a lot
1143 // classes the slow way is too slow.
1144 for (Dependencies::DepStream deps(this); deps.next(); ) {
1145 if (deps.type() == Dependencies::call_site_target_value) {
1146 // CallSite dependencies are managed on per-CallSite instance basis.
1147 oop call_site = deps.argument_oop(0);
1148 MethodHandles::add_dependent_nmethod(call_site, this);
1149 } else {
1150 InstanceKlass* ik = deps.context_type();
1151 if (ik == nullptr) {
1152 continue; // ignore things like evol_method
1153 }
1154 // record this nmethod as dependent on this klass
1155 ik->add_dependent_nmethod(this);
1156 }
1157 }
1158 }
1159
1160 nmethod* nmethod::new_nmethod(const methodHandle& method,
1161 int compile_id,
1162 int entry_bci,
1163 CodeOffsets* offsets,
1164 int orig_pc_offset,
1165 DebugInformationRecorder* debug_info,
1166 Dependencies* dependencies,
1167 CodeBuffer* code_buffer, int frame_size,
1168 OopMapSet* oop_maps,
1169 ExceptionHandlerTable* handler_table,
1170 ImplicitExceptionTable* nul_chk_table,
1171 AbstractCompiler* compiler,
1172 CompLevel comp_level
1173 , SCCEntry* scc_entry
1174 #if INCLUDE_JVMCI
1175 , char* speculations,
1176 int speculations_len,
1177 JVMCINMethodData* jvmci_data
1178 #endif
1179 )
1180 {
1181 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1182 code_buffer->finalize_oop_references(method);
1183 // create nmethod
1184 nmethod* nm = nullptr;
1185 int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1186
1187 int immutable_data_size =
1188 adjust_pcs_size(debug_info->pcs_size())
1189 + align_up((int)dependencies->size_in_bytes(), oopSize)
1190 + align_up(handler_table->size_in_bytes() , oopSize)
1191 + align_up(nul_chk_table->size_in_bytes() , oopSize)
1192 #if INCLUDE_JVMCI
1193 + align_up(speculations_len , oopSize)
1197 // First, allocate space for immutable data in C heap.
1198 address immutable_data = nullptr;
1199 if (immutable_data_size > 0) {
1200 immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1201 if (immutable_data == nullptr) {
1202 vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1203 return nullptr;
1204 }
1205 }
1206
1207 int mutable_data_size = required_mutable_data_size(code_buffer
1208 JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1209
1210 {
1211 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1212
1213 nm = new (nmethod_size, comp_level)
1214 nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1215 compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1216 debug_info, dependencies, code_buffer, frame_size, oop_maps,
1217 handler_table, nul_chk_table, compiler, comp_level, scc_entry
1218 #if INCLUDE_JVMCI
1219 , speculations,
1220 speculations_len,
1221 jvmci_data
1222 #endif
1223 );
1224
1225 if (nm != nullptr) {
1226 nm->record_nmethod_dependency();
1227 NOT_PRODUCT(note_java_nmethod(nm));
1228 }
1229 }
1230 // Do verification and logging outside CodeCache_lock.
1231 if (nm != nullptr) {
1232
1233 #ifdef ASSERT
1234 LogTarget(Debug, scc, nmethod) log;
1235 if (log.is_enabled()) {
1236 LogStream out(log);
1237 out.print_cr("== new_nmethod 2");
1238 FlagSetting fs(PrintRelocations, true);
1239 nm->print_on_impl(&out);
1240 nm->decode(&out);
1241 }
1242 #endif
1243
1244 // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1245 DEBUG_ONLY(nm->verify();)
1246 nm->log_new_nmethod();
1247 }
1248 return nm;
1249 }
1250
// Reconstitute this freshly code-cache-allocated nmethod from an archived
// copy (SCC / AOT code cache). Copies the archived image over this blob,
// then re-establishes everything the archive cannot store positionally:
// mutable data, relocations, oops/metadata, immutable data, asm remarks /
// debug strings, the PcDesc cache, and the SCC entry link.
// The statement order below matters (e.g. mutable data must exist before
// relocations are copied into it).
void nmethod::restore_from_archive(nmethod* archived_nm,
                                   const methodHandle& method,
                                   int compile_id,
                                   address reloc_data,
                                   GrowableArray<Handle>& oop_list,
                                   GrowableArray<Metadata*>& metadata_list,
                                   ImmutableOopMapSet* oop_maps,
                                   address immutable_data,
                                   GrowableArray<Handle>& reloc_imm_oop_list,
                                   GrowableArray<Metadata*>& reloc_imm_metadata_list,
#ifndef PRODUCT
                                   AsmRemarks& archived_asm_remarks,
                                   DbgStrings& archived_dbg_strings,
#endif /* PRODUCT */
                                   SCCReader* scc_reader)
{
  // Bulk-copy the archived image into this blob, then patch it up.
  archived_nm->copy_to((address)this);
  set_name("nmethod");
  set_method(method());

  _compile_id = compile_id;
  // allocate _mutable_data before copying relocation data because relocation data is now stored as part of mutable data area
  if (archived_nm->mutable_data_size() > 0) {
    _mutable_data = (address)os::malloc(archived_nm->mutable_data_size(), mtCode);
    if (_mutable_data == nullptr) {
      vm_exit_out_of_memory(archived_nm->mutable_data_size(), OOM_MALLOC_ERROR, "codebuffer: no space for mutable data");
    }
  }
  memcpy((address)relocation_begin(), reloc_data, archived_nm->relocation_size());
  set_oop_maps(oop_maps);
  set_immutable_data(immutable_data);
  // Install the recorded oops and metadata into the embedded sections.
  copy_values(&oop_list);
  copy_values(&metadata_list);

  // Re-apply relocations that reference immediate oops/metadata.
  scc_reader->apply_relocations(this, reloc_imm_oop_list, reloc_imm_metadata_list);

#ifndef PRODUCT
  // Take ownership of the archived assembly remarks and debug strings;
  // clear the archive-side holders so they are not freed twice.
  AsmRemarks::init(asm_remarks());
  use_remarks(archived_asm_remarks);
  archived_asm_remarks.clear();
  DbgStrings::init(dbg_strings());
  use_strings(archived_dbg_strings);
  archived_dbg_strings.clear();
#endif /* PRODUCT */

  // Flush the code block
  ICache::invalidate_range(code_begin(), code_size());

  // Create cache after PcDesc data is copied - it will be used to initialize cache
  _pc_desc_container = new PcDescContainer(scopes_pcs_begin());

  set_scc_entry(scc_reader->scc_entry());

  post_init();
}
1306
// Materialize an nmethod from an archived image (SCC / AOT code cache):
// allocate a blob of the archived size in the appropriate code heap,
// restore the archived contents into it, then perform the same dependency
// recording, verification and logging as for a freshly compiled nmethod.
// Returns nullptr if code cache allocation fails.
nmethod* nmethod::new_nmethod(nmethod* archived_nm,
                              const methodHandle& method,
                              AbstractCompiler* compiler,
                              int compile_id,
                              address reloc_data,
                              GrowableArray<Handle>& oop_list,
                              GrowableArray<Metadata*>& metadata_list,
                              ImmutableOopMapSet* oop_maps,
                              address immutable_data,
                              GrowableArray<Handle>& reloc_imm_oop_list,
                              GrowableArray<Metadata*>& reloc_imm_metadata_list,
#ifndef PRODUCT
                              AsmRemarks& asm_remarks,
                              DbgStrings& dbg_strings,
#endif /* PRODUCT */
                              SCCReader* scc_reader)
{
  nmethod* nm = nullptr;
  int nmethod_size = archived_nm->size();
  // create nmethod
  {
    // Allocation and restoration happen under the CodeCache_lock.
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    nm = (nmethod *)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
    if (nm != nullptr) {
      nm->restore_from_archive(archived_nm,
                               method,
                               compile_id,
                               reloc_data,
                               oop_list,
                               metadata_list,
                               oop_maps,
                               immutable_data,
                               reloc_imm_oop_list,
                               reloc_imm_metadata_list,
                               NOT_PRODUCT_ARG(asm_remarks)
                               NOT_PRODUCT_ARG(dbg_strings)
                               scc_reader);
      nm->record_nmethod_dependency();
      NOT_PRODUCT(note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {
#ifdef ASSERT
    // Optional debug dump of the restored nmethod (relocations forced on).
    LogTarget(Debug, scc, nmethod) log;
    if (log.is_enabled()) {
      LogStream out(log);
      out.print_cr("== new_nmethod 2");
      FlagSetting fs(PrintRelocations, true);
      nm->print_on_impl(&out);
      nm->decode(&out);
    }
#endif
    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1366
// Fill in default values for various fields shared by all nmethod
// constructors, so that no field is observed uninitialized even briefly.
// Derives the stub/entry offsets from the code buffer and CodeOffsets.
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;

  // Lifecycle state: freshly created, not yet installed.
  _is_unloading_state = 0;
  _state = not_installed;

  // Property flag bits, all initially clear.
  _has_unsafe_access = 0;
  _has_method_handle_invokes = 0;
  _has_wide_vectors = 0;
  _has_monitors = 0;
  _has_scoped_access = 0;
  _has_flushed_dependencies = 0;
  _is_unlinked = 0;
  _load_reported = 0; // jvmti state
  _preloaded = 0;
  _has_clinit_barriers = 0;

  _used = false;
  _deoptimization_status = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  // Stub section offset, relative to the blob header.
  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry points are stored as narrow (uint16_t) offsets; CHECKED_CAST
  // asserts they fit.
  CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1403
1404 // Post initialization
1405 void nmethod::post_init() {
1406 clear_unloading_state();
1407
1408 finalize_relocations();
1409
1442
1443 _osr_entry_point = nullptr;
1444 _pc_desc_container = nullptr;
1445 _entry_bci = InvocationEntryBci;
1446 _compile_id = compile_id;
1447 _comp_level = CompLevel_none;
1448 _compiler_type = type;
1449 _orig_pc_offset = 0;
1450 _num_stack_arg_slots = 0;
1451
1452 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1453 // Continuation enter intrinsic
1454 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1455 } else {
1456 _exception_offset = 0;
1457 }
1458 // Native wrappers do not have deopt handlers. Make the values
1459 // something that will never match a pc like the nmethod vtable entry
1460 _deopt_handler_offset = 0;
1461 _deopt_mh_handler_offset = 0;
1462 _scc_entry = nullptr;
1463 _method_profiling_count = 0;
1464 _unwind_handler_offset = 0;
1465
1466 CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1467 int metadata_size = align_up(code_buffer->total_metadata_size(), wordSize);
1468 JVMCI_ONLY( _jvmci_data_size = 0; )
1469 assert(_mutable_data_size == _relocation_size + metadata_size,
1470 "wrong mutable data size: %d != %d + %d",
1471 _mutable_data_size, _relocation_size, metadata_size);
1472
1473 // native wrapper does not have read-only data but we need unique not null address
1474 _immutable_data = blob_end();
1475 _immutable_data_size = 0;
1476 _nul_chk_table_offset = 0;
1477 _handler_table_offset = 0;
1478 _scopes_pcs_offset = 0;
1479 _scopes_data_offset = 0;
1480 #if INCLUDE_JVMCI
1481 _speculations_offset = 0;
1482 #endif
1483
1502 // This is both handled in decode2(), called via print_code() -> decode()
1503 if (PrintNativeNMethods) {
1504 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1505 print_code();
1506 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1507 #if defined(SUPPORT_DATA_STRUCTS)
1508 if (AbstractDisassembler::show_structs()) {
1509 if (oop_maps != nullptr) {
1510 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1511 oop_maps->print_on(tty);
1512 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1513 }
1514 }
1515 #endif
1516 } else {
1517 print(); // print the header part only.
1518 }
1519 #if defined(SUPPORT_DATA_STRUCTS)
1520 if (AbstractDisassembler::show_structs()) {
1521 if (PrintRelocations) {
1522 print_relocations_on(tty);
1523 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1524 }
1525 }
1526 #endif
1527 if (xtty != nullptr) {
1528 xtty->tail("print_native_nmethod");
1529 }
1530 }
1531 }
1532
// Placement allocator for ordinary nmethods: carves nmethod_size bytes out
// of the code heap that matches the given compilation level. Returns
// nullptr if allocation fails (callers null-check the result).
void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
}
1536
1537 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1538 // Try MethodNonProfiled and MethodProfiled.
1539 void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1540 if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1541 // Try NonNMethod or give up.
1542 return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
1546 nmethod::nmethod(
1547 Method* method,
1548 CompilerType type,
1549 int nmethod_size,
1550 int immutable_data_size,
1551 int mutable_data_size,
1552 int compile_id,
1553 int entry_bci,
1554 address immutable_data,
1555 CodeOffsets* offsets,
1556 int orig_pc_offset,
1557 DebugInformationRecorder* debug_info,
1558 Dependencies* dependencies,
1559 CodeBuffer *code_buffer,
1560 int frame_size,
1561 OopMapSet* oop_maps,
1562 ExceptionHandlerTable* handler_table,
1563 ImplicitExceptionTable* nul_chk_table,
1564 AbstractCompiler* compiler,
1565 CompLevel comp_level
1566 , SCCEntry* scc_entry
1567 #if INCLUDE_JVMCI
1568 , char* speculations,
1569 int speculations_len,
1570 JVMCINMethodData* jvmci_data
1571 #endif
1572 )
1573 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1574 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1575 _deoptimization_generation(0),
1576 _gc_epoch(CodeCache::gc_epoch()),
1577 _method(method),
1578 _osr_link(nullptr)
1579 {
1580 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1581 {
1582 debug_only(NoSafepointVerifier nsv;)
1583 assert_locked_or_safepoint(CodeCache_lock);
1584
1585 init_defaults(code_buffer, offsets);
1586 _scc_entry = scc_entry;
1587 _method_profiling_count = 0;
1588
1589 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1590 _entry_bci = entry_bci;
1591 _compile_id = compile_id;
1592 _comp_level = comp_level;
1593 _compiler_type = type;
1594 _orig_pc_offset = orig_pc_offset;
1595
1596 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1597
1598 set_ctable_begin(header_begin() + content_offset());
1599
1600 #if INCLUDE_JVMCI
1601 if (compiler->is_jvmci()) {
1602 // JVMCI might not produce any stub sections
1603 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1604 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1605 } else {
1606 _exception_offset = -1;
1607 }
1697 #if INCLUDE_JVMCI
1698 // Copy speculations to nmethod
1699 if (speculations_size() != 0) {
1700 memcpy(speculations_begin(), speculations, speculations_len);
1701 }
1702 #endif
1703
1704 post_init();
1705
1706 // we use the information of entry points to find out if a method is
1707 // static or non static
1708 assert(compiler->is_c2() || compiler->is_jvmci() ||
1709 _method->is_static() == (entry_point() == verified_entry_point()),
1710 " entry points must be same for static methods and vice versa");
1711 }
1712 }
1713
1714 // Print a short set of xml attributes to identify this nmethod. The
1715 // output should be embedded in some other element.
1716 void nmethod::log_identity(xmlStream* log) const {
1717 assert(log->inside_attrs_or_error(), "printing attributes");
1718 log->print(" code_compile_id='%d'", compile_id());
1719 const char* nm_kind = compile_kind();
1720 if (nm_kind != nullptr) log->print(" code_compile_kind='%s'", nm_kind);
1721 log->print(" code_compiler='%s'", compiler_name());
1722 if (TieredCompilation) {
1723 log->print(" code_compile_level='%d'", comp_level());
1724 }
1725 #if INCLUDE_JVMCI
1726 if (jvmci_nmethod_data() != nullptr) {
1727 const char* jvmci_name = jvmci_nmethod_data()->name();
1728 if (jvmci_name != nullptr) {
1729 log->print(" jvmci_mirror_name='");
1730 log->text("%s", jvmci_name);
1731 log->print("'");
1732 }
1733 }
1734 #endif
1735 }
1736
1737
// Emit " <name>_offset='<delta>'" to the xml log, but only when the named
// section is non-empty (begin != end). The offset is relative to the start
// of this nmethod.
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'" , \
               p2i(name##_begin()) - p2i(this))
1742
1743
1824 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1825 if (oop_maps() != nullptr) {
1826 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1827 oop_maps()->print_on(tty);
1828 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1829 }
1830 }
1831 #endif
1832 } else {
1833 print(); // print the header part only.
1834 }
1835
1836 #if defined(SUPPORT_DATA_STRUCTS)
1837 if (AbstractDisassembler::show_structs()) {
1838 methodHandle mh(Thread::current(), _method);
1839 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1840 print_scopes();
1841 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1842 }
1843 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1844 print_relocations_on(tty);
1845 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1846 }
1847 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1848 print_dependencies_on(tty);
1849 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1850 }
1851 if (printmethod || PrintExceptionHandlers) {
1852 print_handler_table();
1853 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1854 print_nul_chk_table();
1855 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1856 }
1857
1858 if (printmethod) {
1859 print_recorded_oops();
1860 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1861 print_recorded_metadata();
1862 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1863 }
1864 }
1865 #endif
1866
1867 if (xtty != nullptr) {
1868 xtty->tail("print_nmethod");
1869 }
1870 }
1871
1872
1873 // Promote one word from an assembly-time handle to a live embedded oop.
1874 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1875 if (handle == nullptr ||
1876 // As a special case, IC oops are initialized to 1 or -1.
1877 handle == (jobject) Universe::non_oop_word()) {
1878 *(void**)dest = handle;
1879 } else {
1880 *dest = JNIHandles::resolve_non_null(handle);
1881 }
1882 }
1883
1884 void nmethod::copy_values(GrowableArray<Handle>* array) {
1885 int length = array->length();
1886 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1887 oop* dest = oops_begin();
1888 for (int index = 0 ; index < length; index++) {
1889 dest[index] = array->at(index)();
1890 }
1891 }
1892
1893 // Have to have the same name because it's called by a template
1894 void nmethod::copy_values(GrowableArray<jobject>* array) {
1895 int length = array->length();
1896 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1897 oop* dest = oops_begin();
1898 for (int index = 0 ; index < length; index++) {
1899 initialize_immediate_oop(&dest[index], array->at(index));
1900 }
1901
1902 // Now we can fix up all the oops in the code. We need to do this
1903 // in the code because the assembler uses jobjects as placeholders.
1904 // The code and relocations have already been initialized by the
1905 // CodeBlob constructor, so it is valid even at this early point to
1906 // iterate over relocations and patch the code.
1907 fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
1908 }
1909
1910 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1911 int length = array->length();
1919 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
1920 // re-patch all oop-bearing instructions, just in case some oops moved
1921 RelocIterator iter(this, begin, end);
1922 while (iter.next()) {
1923 if (iter.type() == relocInfo::oop_type) {
1924 oop_Relocation* reloc = iter.oop_reloc();
1925 if (initialize_immediates && reloc->oop_is_immediate()) {
1926 oop* dest = reloc->oop_addr();
1927 jobject obj = *reinterpret_cast<jobject*>(dest);
1928 initialize_immediate_oop(dest, obj);
1929 }
1930 // Refresh the oop-related bits of this instruction.
1931 reloc->fix_oop_relocation();
1932 } else if (iter.type() == relocInfo::metadata_type) {
1933 metadata_Relocation* reloc = iter.metadata_reloc();
1934 reloc->fix_metadata_relocation();
1935 }
1936 }
1937 }
1938
1939 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
1940 RelocIterator iter(this);
1941 while (iter.next()) {
1942 if (iter.type() == relocInfo::oop_type) {
1943 oop_Relocation* reloc = iter.oop_reloc();
1944 if (reloc->oop_is_immediate()) {
1945 oop dest = reloc->oop_value();
1946 Handle h(thread, dest);
1947 oop_list.append(h);
1948 }
1949 } else if (iter.type() == relocInfo::metadata_type) {
1950 metadata_Relocation* reloc = iter.metadata_reloc();
1951 if (reloc->metadata_is_immediate()) {
1952 Metadata* m = reloc->metadata_value();
1953 metadata_list.append(m);
1954 }
1955 }
1956 }
1957 }
1958
1959 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
1960 NativePostCallNop* nop = nativePostCallNop_at((address) pc);
1961 intptr_t cbaddr = (intptr_t) nm;
1962 intptr_t offset = ((intptr_t) pc) - cbaddr;
1963
1964 int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
1965 if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
1966 log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
1967 } else if (!nop->patch(oopmap_slot, offset)) {
1968 log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
1969 }
1970 }
1971
1972 void nmethod::finalize_relocations() {
1973 NoSafepointVerifier nsv;
1974
1975 GrowableArray<NativeMovConstReg*> virtual_call_data;
1976
1977 // Make sure that post call nops fill in nmethod offsets eagerly so
1978 // we don't have to race with deoptimization
2100 Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
2101 }
2102
// Conservative query: returns true when this nmethod may still have
// activations on some stack, i.e. its recorded GC epoch is at least the
// previous completed marking cycle.
bool nmethod::is_maybe_on_stack() {
  // If the condition below is true, it means that the nmethod was found to
  // be alive the previous completed marking cycle.
  return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
}
2108
2109 void nmethod::inc_decompile_count() {
2110 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2111 // Could be gated by ProfileTraps, but do not bother...
2112 Method* m = method();
2113 if (m == nullptr) return;
2114 MethodData* mdo = m->method_data();
2115 if (mdo == nullptr) return;
2116 // There is a benign race here. See comments in methodData.hpp.
2117 mdo->inc_decompile_count();
2118 }
2119
// Atomically bump this nmethod's profiling counter.
void nmethod::inc_method_profiling_count() {
  Atomic::inc(&_method_profiling_count);
}
2123
// Read the profiling counter. NOTE(review): plain load while writers use
// Atomic::inc — assumes a non-torn 64-bit read is acceptable here; confirm.
uint64_t nmethod::method_profiling_count() {
  return _method_profiling_count;
}
2127
// Attempt to advance the nmethod state machine to new_state_int. State
// values only ever increase; a request that would keep the state the same
// or move it backwards is rejected. Returns true iff the transition was
// performed. Caller must hold NMethodState_lock.
bool nmethod::try_transition(signed char new_state_int) {
  signed char new_state = new_state_int;
  assert_lock_strong(NMethodState_lock);
  signed char old_state = _state;
  if (old_state >= new_state) {
    // Ensure monotonicity of transitions.
    return false;
  }
  // Atomic store — presumably _state is also read concurrently without the
  // lock (e.g. the Atomic::load fast-path checks in callers); confirm.
  Atomic::store(&_state, new_state);
  return true;
}
2139
2140 void nmethod::invalidate_osr_method() {
2141 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2142 // Remove from list of active nmethods
2143 if (method() != nullptr) {
2144 method()->method_holder()->remove_osr_nmethod(this);
2145 }
2146 }
2147
2159 }
2160 }
2161
2162 ResourceMark rm;
2163 stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2164 ss.print("made not entrant: %s", reason);
2165
2166 CompileTask::print_ul(this, ss.freeze());
2167 if (PrintCompilation) {
2168 print_on_with_msg(tty, ss.freeze());
2169 }
2170 }
2171
// Ask the owning Method (if any) to drop its code reference to this
// nmethod via Method::unlink_code().
void nmethod::unlink_from_method() {
  if (method() != nullptr) {
    method()->unlink_code(this);
  }
}
2177
2178 // Invalidate code
2179 bool nmethod::make_not_entrant(const char* reason, bool make_not_entrant) {
2180 assert(reason != nullptr, "Must provide a reason");
2181
2182 // This can be called while the system is already at a safepoint which is ok
2183 NoSafepointVerifier nsv;
2184
2185 if (is_unloading()) {
2186 // If the nmethod is unloading, then it is already not entrant through
2187 // the nmethod entry barriers. No need to do anything; GC will unload it.
2188 return false;
2189 }
2190
2191 if (Atomic::load(&_state) == not_entrant) {
2192 // Avoid taking the lock if already in required state.
2193 // This is safe from races because the state is an end-state,
2194 // which the nmethod cannot back out of once entered.
2195 // No need for fencing either.
2196 return false;
2197 }
2198
2199 {
2224 }
2225
2226 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2227 if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2228 // If nmethod entry barriers are not supported, we won't mark
2229 // nmethods as on-stack when they become on-stack. So we
2230 // degrade to a less accurate flushing strategy, for now.
2231 mark_as_maybe_on_stack();
2232 }
2233
2234 // Change state
2235 bool success = try_transition(not_entrant);
2236 assert(success, "Transition can't fail");
2237
2238 // Log the transition once
2239 log_state_change(reason);
2240
2241 // Remove nmethod from method.
2242 unlink_from_method();
2243
2244 if (make_not_entrant) {
2245 // Keep cached code if it was simply replaced
2246 // otherwise make it not entrant too.
2247 SCCache::invalidate(_scc_entry);
2248 }
2249
2250 CompileBroker::log_not_entrant(this);
2251 } // leave critical region under NMethodState_lock
2252
2253 #if INCLUDE_JVMCI
2254 // Invalidate can't occur while holding the NMethodState_lock
2255 JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2256 if (nmethod_data != nullptr) {
2257 nmethod_data->invalidate_nmethod_mirror(this);
2258 }
2259 #endif
2260
2261 #ifdef ASSERT
2262 if (is_osr_method() && method() != nullptr) {
2263 // Make sure osr nmethod is invalidated, i.e. not on the list
2264 bool found = method()->method_holder()->remove_osr_nmethod(this);
2265 assert(!found, "osr nmethod should have been invalidated");
2266 }
2267 #endif
2268
2269 return true;
2270 }
2311
2312 // completely deallocate this method
2313 Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2314 log_debug(codecache)("*flushing %s nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT
2315 "/Free CodeCache:%zuKb",
2316 is_osr_method() ? "osr" : "",_compile_id, p2i(this), CodeCache::blob_count(),
2317 CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024);
2318
2319 // We need to deallocate any ExceptionCache data.
2320 // Note that we do not need to grab the nmethod lock for this, it
2321 // better be thread safe if we're disposing of it!
2322 ExceptionCache* ec = exception_cache();
2323 while(ec != nullptr) {
2324 ExceptionCache* next = ec->next();
2325 delete ec;
2326 ec = next;
2327 }
2328 if (_pc_desc_container != nullptr) {
2329 delete _pc_desc_container;
2330 }
2331 if (_compiled_ic_data != nullptr) {
2332 delete[] _compiled_ic_data;
2333 }
2334
2335 if (_immutable_data != data_end() && !SCCache::is_address_in_aot_cache((address)_oop_maps)) {
2336 os::free(_immutable_data);
2337 _immutable_data = blob_end(); // Valid not null address
2338 }
2339 if (unregister_nmethod) {
2340 Universe::heap()->unregister_nmethod(this);
2341 }
2342 CodeCache::unregister_old_nmethod(this);
2343
2344 CodeBlob::purge();
2345 }
2346
2347 oop nmethod::oop_at(int index) const {
2348 if (index == 0) {
2349 return nullptr;
2350 }
2351
2352 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2353 return bs_nm->oop_load_no_keepalive(this, index);
2354 }
2355
2376 MethodHandles::clean_dependency_context(call_site);
2377 } else {
2378 InstanceKlass* ik = deps.context_type();
2379 if (ik == nullptr) {
2380 continue; // ignore things like evol_method
2381 }
2382 // During GC liveness of dependee determines class that needs to be updated.
2383 // The GC may clean dependency contexts concurrently and in parallel.
2384 ik->clean_dependency_context();
2385 }
2386 }
2387 }
2388 }
2389
// Record the results of a finished compilation on its CompileTask and
// publish them: post the JVMTI compiled-method-load event, write to the
// compilation log, and optionally print the nmethod per compiler directives.
void nmethod::post_compiled_method(CompileTask* task) {
  // Mark the task successful and record the generated code sizes on it.
  task->mark_success();
  task->set_nm_content_size(content_size());
  task->set_nm_insts_size(insts_size());
  task->set_nm_total_size(total_size());

  // task->is_scc() is true only for loaded cached code.
  // nmethod::_scc_entry is set for loaded and stored cached code
  // to invalidate the entry when nmethod is deoptimized.
  // There is an option to not store cached code in the archive.
  guarantee((_scc_entry != nullptr) || !task->is_scc() || VerifyCachedCode, "sanity");

  // JVMTI -- compiled method notification (must be done outside lock)
  post_compiled_method_load_event();

  // Record this nmethod in the compilation log, if one is active.
  if (CompilationLog::log() != nullptr) {
    CompilationLog::log()->log_nmethod(JavaThread::current(), this);
  }

  // Honor any PrintCompilation/PrintNMethods-style directives for this task.
  const DirectiveSet* directive = task->directive();
  maybe_print_nmethod(directive);
}
2412
2413 // ------------------------------------------------------------------
2414 // post_compiled_method_load_event
2415 // new method for install_code() path
2416 // Transfer information from compilation to jvmti
2417 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2418 // This is a bad time for a safepoint. We don't want
2419 // this nmethod to get unloaded while we're queueing the event.
2420 NoSafepointVerifier nsv;
2421
3113
3114 // Make sure all the entry points are correctly aligned for patching.
3115 NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
3116
3117 // assert(oopDesc::is_oop(method()), "must be valid");
3118
3119 ResourceMark rm;
3120
3121 if (!CodeCache::contains(this)) {
3122 fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3123 }
3124
3125 if(is_native_method() )
3126 return;
3127
3128 nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3129 if (nm != this) {
3130 fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3131 }
3132
  // Verification can be triggered during shutdown after SCCache is closed.
3134 // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3135 if (!is_scc() || SCCache::is_on()) {
3136 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3137 if (! p->verify(this)) {
3138 tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3139 }
3140 }
3141
3142 #ifdef ASSERT
3143 #if INCLUDE_JVMCI
3144 {
3145 // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3146 ImmutableOopMapSet* oms = oop_maps();
3147 ImplicitExceptionTable implicit_table(this);
3148 for (uint i = 0; i < implicit_table.len(); i++) {
3149 int exec_offset = (int) implicit_table.get_exec_offset(i);
3150 if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3151 assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3152 bool found = false;
3153 for (int i = 0, imax = oms->count(); i < imax; i++) {
3154 if (oms->pair_at(i)->pc_offset() == exec_offset) {
3155 found = true;
3156 break;
3157 }
3158 }
3159 assert(found, "missing oopmap");
3160 }
3161 }
3162 }
3163 #endif
3164 #endif
3165 }
3166
3167 VerifyOopsClosure voc(this);
3168 oops_do(&voc);
3169 assert(voc.ok(), "embedded oops must be OK");
3170 Universe::heap()->verify_nmethod(this);
3171
3172 assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3173 nm->method()->external_name(), p2i(_oops_do_mark_link));
3174 if (!is_scc() || SCCache::is_on()) {
3175 verify_scopes();
3176 }
3177
3178 CompiledICLocker nm_verify(this);
3179 VerifyMetadataClosure vmc;
3180 metadata_do(&vmc);
3181 }
3182
3183
3184 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3185
3186 // Verify IC only when nmethod installation is finished.
3187 if (!is_not_installed()) {
3188 if (CompiledICLocker::is_safe(this)) {
3189 if (is_inline_cache) {
3190 CompiledIC_at(this, call_site);
3191 } else {
3192 CompiledDirectCall::at(call_site);
3193 }
3194 } else {
3195 CompiledICLocker ml_verify(this);
3196 if (is_inline_cache) {
3325 p2i(nul_chk_table_end()),
3326 nul_chk_table_size());
3327 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3328 p2i(handler_table_begin()),
3329 p2i(handler_table_end()),
3330 handler_table_size());
3331 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3332 p2i(scopes_pcs_begin()),
3333 p2i(scopes_pcs_end()),
3334 scopes_pcs_size());
3335 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3336 p2i(scopes_data_begin()),
3337 p2i(scopes_data_end()),
3338 scopes_data_size());
3339 #if INCLUDE_JVMCI
3340 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3341 p2i(speculations_begin()),
3342 p2i(speculations_end()),
3343 speculations_size());
3344 #endif
3345 if (SCCache::is_on() && _scc_entry != nullptr) {
3346 _scc_entry->print(st);
3347 }
3348 }
3349
3350 void nmethod::print_code() {
3351 ResourceMark m;
3352 ttyLocker ttyl;
3353 // Call the specialized decode method of this class.
3354 decode(tty);
3355 }
3356
3357 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3358
3359 void nmethod::print_dependencies_on(outputStream* out) {
3360 ResourceMark rm;
3361 stringStream st;
3362 st.print_cr("Dependencies:");
3363 for (Dependencies::DepStream deps(this); deps.next(); ) {
3364 deps.print_dependency(&st);
3365 InstanceKlass* ctxk = deps.context_type();
3366 if (ctxk != nullptr) {
3367 if (ctxk->is_dependent_nmethod(this)) {
3427 st->print("scopes:");
3428 if (scopes_pcs_begin() < scopes_pcs_end()) {
3429 st->cr();
3430 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3431 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3432 continue;
3433
3434 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3435 while (sd != nullptr) {
3436 sd->print_on(st, p); // print output ends with a newline
3437 sd = sd->sender();
3438 }
3439 }
3440 } else {
3441 st->print_cr(" <list empty>");
3442 }
3443 }
3444 #endif
3445
3446 #ifndef PRODUCT // RelocIterator does support printing only then.
3447 void nmethod::print_relocations_on(outputStream* st) {
3448 ResourceMark m; // in case methods get printed via the debugger
3449 st->print_cr("relocations:");
3450 RelocIterator iter(this);
3451 iter.print_on(st);
3452 }
3453 #endif
3454
3455 void nmethod::print_pcs_on(outputStream* st) {
3456 ResourceMark m; // in case methods get printed via debugger
3457 st->print("pc-bytecode offsets:");
3458 if (scopes_pcs_begin() < scopes_pcs_end()) {
3459 st->cr();
3460 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3461 p->print_on(st, this); // print output ends with a newline
3462 }
3463 } else {
3464 st->print_cr(" <list empty>");
3465 }
3466 }
3467
3468 void nmethod::print_handler_table() {
3469 ExceptionHandlerTable(this).print(code_begin());
3470 }
3471
4237
4238 #endif // !PRODUCT
4239
4240 #if INCLUDE_JVMCI
4241 void nmethod::update_speculation(JavaThread* thread) {
4242 jlong speculation = thread->pending_failed_speculation();
4243 if (speculation != 0) {
4244 guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4245 jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4246 thread->set_pending_failed_speculation(0);
4247 }
4248 }
4249
4250 const char* nmethod::jvmci_name() {
4251 if (jvmci_nmethod_data() != nullptr) {
4252 return jvmci_nmethod_data()->name();
4253 }
4254 return nullptr;
4255 }
4256 #endif
4257
4258 void nmethod::prepare_for_archiving() {
4259 CodeBlob::prepare_for_archiving();
4260 _deoptimization_generation = 0;
4261 _gc_epoch = 0;
4262 _method_profiling_count = 0;
4263 _osr_link = nullptr;
4264 _method = nullptr;
4265 _immutable_data = nullptr;
4266 _pc_desc_container = nullptr;
4267 _exception_cache = nullptr;
4268 _gc_data = nullptr;
4269 _oops_do_mark_link = nullptr;
4270 _compiled_ic_data = nullptr;
4271 _osr_entry_point = nullptr;
4272 _compile_id = -1;
4273 _deoptimization_status = not_marked;
4274 _is_unloading_state = 0;
4275 _state = not_installed;
4276 }
|