13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.inline.hpp"
27 #include "code/codeCache.hpp"
28 #include "code/compiledIC.hpp"
29 #include "code/dependencies.hpp"
30 #include "code/nativeInst.hpp"
31 #include "code/nmethod.inline.hpp"
32 #include "code/scopeDesc.hpp"
33 #include "compiler/abstractCompiler.hpp"
34 #include "compiler/compilationLog.hpp"
35 #include "compiler/compileBroker.hpp"
36 #include "compiler/compileLog.hpp"
37 #include "compiler/compileTask.hpp"
38 #include "compiler/compilerDirectives.hpp"
39 #include "compiler/compilerOracle.hpp"
40 #include "compiler/directivesParser.hpp"
41 #include "compiler/disassembler.hpp"
42 #include "compiler/oopMap.inline.hpp"
43 #include "gc/shared/barrierSet.hpp"
44 #include "gc/shared/barrierSetNMethod.hpp"
45 #include "gc/shared/classUnloadingContext.hpp"
46 #include "gc/shared/collectedHeap.hpp"
47 #include "interpreter/bytecode.inline.hpp"
48 #include "jvm.h"
49 #include "logging/log.hpp"
50 #include "logging/logStream.hpp"
51 #include "memory/allocation.inline.hpp"
52 #include "memory/resourceArea.hpp"
768
769 void nmethod::clear_inline_caches() {
770 assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
771 RelocIterator iter(this);
772 while (iter.next()) {
773 iter.reloc()->clear_inline_cache();
774 }
775 }
776
777 #ifdef ASSERT
778 // Check class_loader is alive for this bit of metadata.
779 class CheckClass : public MetadataClosure {
780 void do_metadata(Metadata* md) {
781 Klass* klass = nullptr;
782 if (md->is_klass()) {
783 klass = ((Klass*)md);
784 } else if (md->is_method()) {
785 klass = ((Method*)md)->method_holder();
786 } else if (md->is_methodData()) {
787 klass = ((MethodData*)md)->method()->method_holder();
788 } else {
789 md->print();
790 ShouldNotReachHere();
791 }
792 assert(klass->is_loader_alive(), "must be alive");
793 }
794 };
795 #endif // ASSERT
796
797
// Drop the metadata recorded in a compiled inline-cache call site.
// Invoked from IC cleaning passes; the IC itself decides what "clean"
// means for its current state.
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
801
802 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
803 template <typename CallsiteT>
804 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
805 bool clean_all) {
806 CodeBlob* cb = CodeCache::find_blob(callsite->destination());
807 if (!cb->is_nmethod()) {
1116 debug_only(nm->verify();) // might block
1117
1118 nm->log_new_nmethod();
1119 }
1120 return nm;
1121 }
1122
// Allocate and construct a normal (JIT-compiled) nmethod in the code cache
// from the artifacts produced by a compilation (code buffer, debug info,
// dependencies, exception tables). Returns nullptr if code cache allocation
// fails; exits the VM if the C-heap immutable-data allocation fails.
nmethod* nmethod::new_nmethod(const methodHandle& method,
  int compile_id,
  int entry_bci,
  CodeOffsets* offsets,
  int orig_pc_offset,
  DebugInformationRecorder* debug_info,
  Dependencies* dependencies,
  CodeBuffer* code_buffer, int frame_size,
  OopMapSet* oop_maps,
  ExceptionHandlerTable* handler_table,
  ImplicitExceptionTable* nul_chk_table,
  AbstractCompiler* compiler,
  CompLevel comp_level
#if INCLUDE_JVMCI
  , char* speculations,
  int speculations_len,
  JVMCINMethodData* jvmci_data
#endif
)
{
  assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
  code_buffer->finalize_oop_references(method);
  // create nmethod
  nmethod* nm = nullptr;
  int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
#if INCLUDE_JVMCI
  if (compiler->is_jvmci()) {
    // JVMCI keeps its per-nmethod data inside the nmethod allocation itself.
    nmethod_size += align_up(jvmci_data->size(), oopSize);
  }
#endif

  // Size of the immutable side data (pc descs, debug info, speculations)
  // that lives in C heap rather than in the code cache.
  int immutable_data_size =
      adjust_pcs_size(debug_info->pcs_size())
    + align_up(speculations_len          , oopSize)
#endif
    + align_up(debug_info->data_size()   , oopSize);

  // First, allocate space for immutable data in C heap.
  address immutable_data = nullptr;
  if (immutable_data_size > 0) {
    immutable_data = (address)os::malloc(immutable_data_size, mtCode);
    if (immutable_data == nullptr) {
      vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
      return nullptr;
    }
  }
  {
    // CodeCache allocation and dependency recording happen under the lock.
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);

    nm = new (nmethod_size, comp_level)
    nmethod(method(), compiler->type(), nmethod_size, immutable_data_size,
            compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
            debug_info, dependencies, code_buffer, frame_size, oop_maps,
            handler_table, nul_chk_table, compiler, comp_level
#if INCLUDE_JVMCI
            , speculations,
            speculations_len,
            jvmci_data
#endif
            );

    if (nm != nullptr) {
      // To make dependency checking during class loading fast, record
      // the nmethod dependencies in the classes it is dependent on.
      // This allows the dependency checking code to simply walk the
      // class hierarchy above the loaded class, checking only nmethods
      // which are dependent on those classes. The slow way is to
      // check every nmethod for dependencies which makes it linear in
      // the number of methods compiled. For applications with a lot
      // classes the slow way is too slow.
      for (Dependencies::DepStream deps(nm); deps.next(); ) {
        if (deps.type() == Dependencies::call_site_target_value) {
          // CallSite dependencies are managed on per-CallSite instance basis.
          oop call_site = deps.argument_oop(0);
          MethodHandles::add_dependent_nmethod(call_site, nm);
        } else {
          InstanceKlass* ik = deps.context_type();
          if (ik == nullptr) {
            continue; // ignore things like evol_method
          }
          // record this nmethod as dependent on this klass
          ik->add_dependent_nmethod(nm);
        }
      }
      NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {
    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1222
// Fill in default values for various fields so that no field is ever
// observed uninitialized, even briefly, while the nmethod is being built.
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;

  _is_unloading_state = 0;
  _state = not_installed;

  // Flag bits; all start cleared and are set later as the compiled code
  // is inspected/installed.
  _has_unsafe_access = 0;
  _has_method_handle_invokes = 0;
  _has_wide_vectors = 0;
  _has_monitors = 0;
  _has_scoped_access = 0;
  _has_flushed_dependencies = 0;
  _is_unlinked = 0;
  _load_reported = 0; // jvmti state

  _deoptimization_status = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry offsets must fit in 16 bits; CHECKED_CAST verifies that in debug builds.
  CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1256
1257 // Post initialization
1258 void nmethod::post_init() {
1259 clear_unloading_state();
1260
1261 finalize_relocations();
1262
1294
1295 _osr_entry_point = nullptr;
1296 _pc_desc_container = nullptr;
1297 _entry_bci = InvocationEntryBci;
1298 _compile_id = compile_id;
1299 _comp_level = CompLevel_none;
1300 _compiler_type = type;
1301 _orig_pc_offset = 0;
1302 _num_stack_arg_slots = 0;
1303
1304 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1305 // Continuation enter intrinsic
1306 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1307 } else {
1308 _exception_offset = 0;
1309 }
1310 // Native wrappers do not have deopt handlers. Make the values
1311 // something that will never match a pc like the nmethod vtable entry
1312 _deopt_handler_offset = 0;
1313 _deopt_mh_handler_offset = 0;
1314 _unwind_handler_offset = 0;
1315
1316 CHECKED_CAST(_metadata_offset, uint16_t, (align_up(code_buffer->total_oop_size(), oopSize)));
1317 int data_end_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
1318 #if INCLUDE_JVMCI
1319 // jvmci_data_size is 0 in native wrapper but we need to set offset
1320 // to correctly calculate metadata_end address
1321 CHECKED_CAST(_jvmci_data_offset, uint16_t, data_end_offset);
1322 #endif
1323 assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
1324
1325 // native wrapper does not have read-only data but we need unique not null address
1326 _immutable_data = data_end();
1327 _immutable_data_size = 0;
1328 _nul_chk_table_offset = 0;
1329 _handler_table_offset = 0;
1330 _scopes_pcs_offset = 0;
1331 _scopes_data_offset = 0;
1332 #if INCLUDE_JVMCI
1333 _speculations_offset = 0;
1354 // This is both handled in decode2(), called via print_code() -> decode()
1355 if (PrintNativeNMethods) {
1356 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1357 print_code();
1358 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1359 #if defined(SUPPORT_DATA_STRUCTS)
1360 if (AbstractDisassembler::show_structs()) {
1361 if (oop_maps != nullptr) {
1362 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1363 oop_maps->print_on(tty);
1364 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1365 }
1366 }
1367 #endif
1368 } else {
1369 print(); // print the header part only.
1370 }
1371 #if defined(SUPPORT_DATA_STRUCTS)
1372 if (AbstractDisassembler::show_structs()) {
1373 if (PrintRelocations) {
1374 print_relocations();
1375 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1376 }
1377 }
1378 #endif
1379 if (xtty != nullptr) {
1380 xtty->tail("print_native_nmethod");
1381 }
1382 }
1383 }
1384
// Allocate space for an nmethod from the code heap matching the given
// compilation level. Returns nullptr on allocation failure (throw() —
// never raises a C++ exception).
void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
}
1388
// Allocate space for an nmethod, preferring the method code heaps and
// optionally falling back to the NonNMethod space when allowed.
// Returns nullptr if every permitted heap is exhausted.
void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
  // Try MethodNonProfiled and MethodProfiled.
  void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
  if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
  // Try NonNMethod or give up.
  return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
1397 // For normal JIT compiled code
1398 nmethod::nmethod(
1399 Method* method,
1400 CompilerType type,
1401 int nmethod_size,
1402 int immutable_data_size,
1403 int compile_id,
1404 int entry_bci,
1405 address immutable_data,
1406 CodeOffsets* offsets,
1407 int orig_pc_offset,
1408 DebugInformationRecorder* debug_info,
1409 Dependencies* dependencies,
1410 CodeBuffer *code_buffer,
1411 int frame_size,
1412 OopMapSet* oop_maps,
1413 ExceptionHandlerTable* handler_table,
1414 ImplicitExceptionTable* nul_chk_table,
1415 AbstractCompiler* compiler,
1416 CompLevel comp_level
1417 #if INCLUDE_JVMCI
1418 , char* speculations,
1419 int speculations_len,
1420 JVMCINMethodData* jvmci_data
1421 #endif
1422 )
1423 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1424 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
1425 _deoptimization_generation(0),
1426 _gc_epoch(CodeCache::gc_epoch()),
1427 _method(method),
1428 _osr_link(nullptr)
1429 {
1430 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1431 {
1432 debug_only(NoSafepointVerifier nsv;)
1433 assert_locked_or_safepoint(CodeCache_lock);
1434
1435 init_defaults(code_buffer, offsets);
1436
1437 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1438 _entry_bci = entry_bci;
1439 _compile_id = compile_id;
1440 _comp_level = comp_level;
1441 _compiler_type = type;
1442 _orig_pc_offset = orig_pc_offset;
1443
1444 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1445
1446 set_ctable_begin(header_begin() + content_offset());
1447
1448 #if INCLUDE_JVMCI
1449 if (compiler->is_jvmci()) {
1450 // JVMCI might not produce any stub sections
1451 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1452 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1453 } else {
1454 _exception_offset = -1;
1455 }
1547 #if INCLUDE_JVMCI
1548 // Copy speculations to nmethod
1549 if (speculations_size() != 0) {
1550 memcpy(speculations_begin(), speculations, speculations_len);
1551 }
1552 #endif
1553
1554 post_init();
1555
1556 // we use the information of entry points to find out if a method is
1557 // static or non static
1558 assert(compiler->is_c2() || compiler->is_jvmci() ||
1559 _method->is_static() == (entry_point() == verified_entry_point()),
1560 " entry points must be same for static methods and vice versa");
1561 }
1562 }
1563
1564 // Print a short set of xml attributes to identify this nmethod. The
1565 // output should be embedded in some other element.
// Emit identifying xml attributes (compile id, kind, compiler, level,
// and JVMCI mirror name when present) on the given log stream.
void nmethod::log_identity(xmlStream* log) const {
  log->print(" compile_id='%d'", compile_id());
  const char* nm_kind = compile_kind();
  if (nm_kind != nullptr) log->print(" compile_kind='%s'", nm_kind);
  log->print(" compiler='%s'", compiler_name());
  if (TieredCompilation) {
    log->print(" level='%d'", comp_level());
  }
#if INCLUDE_JVMCI
  if (jvmci_nmethod_data() != nullptr) {
    const char* jvmci_name = jvmci_nmethod_data()->name();
    if (jvmci_name != nullptr) {
      // NOTE(review): text() is presumably used (vs print()) so the name is
      // XML-escaped, since it can contain arbitrary characters — confirm.
      log->print(" jvmci_mirror_name='");
      log->text("%s", jvmci_name);
      log->print("'");
    }
  }
#endif
}
1585
1586
1587 #define LOG_OFFSET(log, name) \
1588 if (p2i(name##_end()) - p2i(name##_begin())) \
1589 log->print(" " XSTR(name) "_offset='" INTX_FORMAT "'" , \
1590 p2i(name##_begin()) - p2i(this))
1591
1592
1673 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1674 if (oop_maps() != nullptr) {
1675 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1676 oop_maps()->print_on(tty);
1677 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1678 }
1679 }
1680 #endif
1681 } else {
1682 print(); // print the header part only.
1683 }
1684
1685 #if defined(SUPPORT_DATA_STRUCTS)
1686 if (AbstractDisassembler::show_structs()) {
1687 methodHandle mh(Thread::current(), _method);
1688 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1689 print_scopes();
1690 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1691 }
1692 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1693 print_relocations();
1694 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1695 }
1696 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1697 print_dependencies_on(tty);
1698 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1699 }
1700 if (printmethod || PrintExceptionHandlers) {
1701 print_handler_table();
1702 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1703 print_nul_chk_table();
1704 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1705 }
1706
1707 if (printmethod) {
1708 print_recorded_oops();
1709 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1710 print_recorded_metadata();
1711 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1712 }
1713 }
1921 Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1922 }
1923
// Conservative query: may this nmethod still have activations on a stack?
// Answers true whenever the nmethod's recorded GC epoch is at least as
// recent as the previous completed marking cycle.
bool nmethod::is_maybe_on_stack() {
  // If the condition below is true, it means that the nmethod was found to
  // be alive the previous completed marking cycle.
  return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
}
1929
1930 void nmethod::inc_decompile_count() {
1931 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1932 // Could be gated by ProfileTraps, but do not bother...
1933 Method* m = method();
1934 if (m == nullptr) return;
1935 MethodData* mdo = m->method_data();
1936 if (mdo == nullptr) return;
1937 // There is a benign race here. See comments in methodData.hpp.
1938 mdo->inc_decompile_count();
1939 }
1940
// Attempt a monotonic state transition. Returns false (and changes nothing)
// if the current state is already at or past the requested state; otherwise
// stores the new state and returns true. Caller must hold NMethodState_lock.
bool nmethod::try_transition(signed char new_state_int) {
  signed char new_state = new_state_int;
  assert_lock_strong(NMethodState_lock);
  signed char old_state = _state;
  if (old_state >= new_state) {
    // Ensure monotonicity of transitions.
    return false;
  }
  // Atomic store: _state is also read lock-free (see make_not_entrant's
  // fast-path Atomic::load), so publish it atomically.
  Atomic::store(&_state, new_state);
  return true;
}
1952
1953 void nmethod::invalidate_osr_method() {
1954 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1955 // Remove from list of active nmethods
1956 if (method() != nullptr) {
1957 method()->method_holder()->remove_osr_nmethod(this);
1958 }
1959 }
1960
1966 os::current_thread_id());
1967 log_identity(xtty);
1968 xtty->stamp();
1969 xtty->end_elem();
1970 }
1971 }
1972
1973 CompileTask::print_ul(this, "made not entrant");
1974 if (PrintCompilation) {
1975 print_on(tty, "made not entrant");
1976 }
1977 }
1978
1979 void nmethod::unlink_from_method() {
1980 if (method() != nullptr) {
1981 method()->unlink_code(this);
1982 }
1983 }
1984
// Invalidate code: transition this nmethod to not_entrant so no new
// callers can enter it. Returns true if this call performed the
// transition, false if the nmethod is unloading or was already
// not entrant.
bool nmethod::make_not_entrant() {
  // This can be called while the system is already at a safepoint which is ok
  NoSafepointVerifier nsv;

  if (is_unloading()) {
    // If the nmethod is unloading, then it is already not entrant through
    // the nmethod entry barriers. No need to do anything; GC will unload it.
    return false;
  }

  if (Atomic::load(&_state) == not_entrant) {
    // Avoid taking the lock if already in required state.
    // This is safe from races because the state is an end-state,
    // which the nmethod cannot back out of once entered.
    // No need for fencing either.
    return false;
  }

  {
    // Enter critical section. Does not block for safepoint.
    ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
    }

    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
      // If nmethod entry barriers are not supported, we won't mark
      // nmethods as on-stack when they become on-stack. So we
      // degrade to a less accurate flushing strategy, for now.
      mark_as_maybe_on_stack();
    }

    // Change state
    bool success = try_transition(not_entrant);
    assert(success, "Transition can't fail");

    // Log the transition once
    log_state_change();

    // Remove nmethod from method.
    unlink_from_method();

  } // leave critical region under NMethodState_lock

#if INCLUDE_JVMCI
  // Invalidate can't occur while holding the NMethodState_lock
  JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
  if (nmethod_data != nullptr) {
    nmethod_data->invalidate_nmethod_mirror(this);
  }
#endif

#ifdef ASSERT
  if (is_osr_method() && method() != nullptr) {
    // Make sure osr nmethod is invalidated, i.e. not on the list
    bool found = method()->method_holder()->remove_osr_nmethod(this);
    assert(!found, "osr nmethod should have been invalidated");
  }
#endif

  return true;
}
2168 MethodHandles::clean_dependency_context(call_site);
2169 } else {
2170 InstanceKlass* ik = deps.context_type();
2171 if (ik == nullptr) {
2172 continue; // ignore things like evol_method
2173 }
2174 // During GC liveness of dependee determines class that needs to be updated.
2175 // The GC may clean dependency contexts concurrently and in parallel.
2176 ik->clean_dependency_context();
2177 }
2178 }
2179 }
2180 }
2181
// Record size statistics of the installed code on the compile task, then
// post the JVMTI load event and log/print the new nmethod.
void nmethod::post_compiled_method(CompileTask* task) {
  task->mark_success();
  task->set_nm_content_size(content_size());
  task->set_nm_insts_size(insts_size());
  task->set_nm_total_size(total_size());

  // JVMTI -- compiled method notification (must be done outside lock)
  post_compiled_method_load_event();

  if (CompilationLog::log() != nullptr) {
    CompilationLog::log()->log_nmethod(JavaThread::current(), this);
  }

  // Printing is controlled by the task's compiler directives.
  const DirectiveSet* directive = task->directive();
  maybe_print_nmethod(directive);
}
2198
2199 // ------------------------------------------------------------------
2200 // post_compiled_method_load_event
2201 // new method for install_code() path
2202 // Transfer information from compilation to jvmti
2203 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2204 // This is a bad time for a safepoint. We don't want
2205 // this nmethod to get unloaded while we're queueing the event.
2206 NoSafepointVerifier nsv;
2207
3106 p2i(nul_chk_table_end()),
3107 nul_chk_table_size());
3108 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3109 p2i(handler_table_begin()),
3110 p2i(handler_table_end()),
3111 handler_table_size());
3112 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3113 p2i(scopes_pcs_begin()),
3114 p2i(scopes_pcs_end()),
3115 scopes_pcs_size());
3116 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3117 p2i(scopes_data_begin()),
3118 p2i(scopes_data_end()),
3119 scopes_data_size());
3120 #if INCLUDE_JVMCI
3121 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3122 p2i(speculations_begin()),
3123 p2i(speculations_end()),
3124 speculations_size());
3125 #endif
3126 }
3127
// Disassemble this nmethod's code to tty, holding the tty lock for the
// duration so output is not interleaved with other threads.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3134
3135 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3136
3137 void nmethod::print_dependencies_on(outputStream* out) {
3138 ResourceMark rm;
3139 stringStream st;
3140 st.print_cr("Dependencies:");
3141 for (Dependencies::DepStream deps(this); deps.next(); ) {
3142 deps.print_dependency(&st);
3143 InstanceKlass* ctxk = deps.context_type();
3144 if (ctxk != nullptr) {
3145 if (ctxk->is_dependent_nmethod(this)) {
3205 st->print("scopes:");
3206 if (scopes_pcs_begin() < scopes_pcs_end()) {
3207 st->cr();
3208 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3209 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3210 continue;
3211
3212 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3213 while (sd != nullptr) {
3214 sd->print_on(st, p); // print output ends with a newline
3215 sd = sd->sender();
3216 }
3217 }
3218 } else {
3219 st->print_cr(" <list empty>");
3220 }
3221 }
3222 #endif
3223
3224 #ifndef PRODUCT // RelocIterator does support printing only then.
// Print all relocation entries of this nmethod to tty (non-PRODUCT only;
// RelocIterator::print is only available then).
void nmethod::print_relocations() {
  ResourceMark m; // in case methods get printed via the debugger
  tty->print_cr("relocations:");
  RelocIterator iter(this);
  iter.print();
}
3231 #endif
3232
3233 void nmethod::print_pcs_on(outputStream* st) {
3234 ResourceMark m; // in case methods get printed via debugger
3235 st->print("pc-bytecode offsets:");
3236 if (scopes_pcs_begin() < scopes_pcs_end()) {
3237 st->cr();
3238 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3239 p->print_on(st, this); // print output ends with a newline
3240 }
3241 } else {
3242 st->print_cr(" <list empty>");
3243 }
3244 }
3245
// Print this nmethod's exception handler table, with addresses shown
// relative to code_begin().
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3249
3564 else obj->print_value_on(&st);
3565 st.print(")");
3566 return st.as_string();
3567 }
3568 case relocInfo::metadata_type: {
3569 stringStream st;
3570 metadata_Relocation* r = iter.metadata_reloc();
3571 Metadata* obj = r->metadata_value();
3572 st.print("metadata(");
3573 if (obj == nullptr) st.print("nullptr");
3574 else obj->print_value_on(&st);
3575 st.print(")");
3576 return st.as_string();
3577 }
3578 case relocInfo::runtime_call_type:
3579 case relocInfo::runtime_call_w_cp_type: {
3580 stringStream st;
3581 st.print("runtime_call");
3582 CallRelocation* r = (CallRelocation*)iter.reloc();
3583 address dest = r->destination();
3584 CodeBlob* cb = CodeCache::find_blob(dest);
3585 if (cb != nullptr) {
3586 st.print(" %s", cb->name());
3587 } else {
3588 ResourceMark rm;
3589 const int buflen = 1024;
3590 char* buf = NEW_RESOURCE_ARRAY(char, buflen);
3591 int offset;
3592 if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
3593 st.print(" %s", buf);
3594 if (offset != 0) {
3595 st.print("+%d", offset);
3596 }
3597 }
3598 }
3599 return st.as_string();
3600 }
3601 case relocInfo::virtual_call_type: {
3602 stringStream st;
3603 st.print_raw("virtual_call");
|
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.inline.hpp"
27 #include "code/codeCache.hpp"
28 #include "code/compiledIC.hpp"
29 #include "code/dependencies.hpp"
30 #include "code/nativeInst.hpp"
31 #include "code/nmethod.inline.hpp"
32 #include "code/scopeDesc.hpp"
33 #include "code/SCCache.hpp"
34 #include "compiler/abstractCompiler.hpp"
35 #include "compiler/compilationLog.hpp"
36 #include "compiler/compileBroker.hpp"
37 #include "compiler/compileLog.hpp"
38 #include "compiler/compileTask.hpp"
39 #include "compiler/compilerDirectives.hpp"
40 #include "compiler/compilerOracle.hpp"
41 #include "compiler/directivesParser.hpp"
42 #include "compiler/disassembler.hpp"
43 #include "compiler/oopMap.inline.hpp"
44 #include "gc/shared/barrierSet.hpp"
45 #include "gc/shared/barrierSetNMethod.hpp"
46 #include "gc/shared/classUnloadingContext.hpp"
47 #include "gc/shared/collectedHeap.hpp"
48 #include "interpreter/bytecode.inline.hpp"
49 #include "jvm.h"
50 #include "logging/log.hpp"
51 #include "logging/logStream.hpp"
52 #include "memory/allocation.inline.hpp"
53 #include "memory/resourceArea.hpp"
769
// Reset every inline cache embedded in this nmethod by walking all
// relocation entries. Must run at a safepoint so no thread can race
// through a cache while it is being cleared.
void nmethod::clear_inline_caches() {
  assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
  RelocIterator iter(this);
  while (iter.next()) {
    iter.reloc()->clear_inline_cache();
  }
}
777
778 #ifdef ASSERT
779 // Check class_loader is alive for this bit of metadata.
class CheckClass : public MetadataClosure {
  // Resolve the Klass that owns this piece of metadata (directly for a
  // Klass; via the holder Method for Method/MethodData/MethodCounters)
  // and assert its class loader is still alive. Any other metadata kind
  // is unexpected and trips ShouldNotReachHere.
  void do_metadata(Metadata* md) {
    Klass* klass = nullptr;
    if (md->is_klass()) {
      klass = ((Klass*)md);
    } else if (md->is_method()) {
      klass = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      klass = ((MethodData*)md)->method()->method_holder();
    } else if (md->is_methodCounters()) {
      klass = ((MethodCounters*)md)->method()->method_holder();
    } else {
      md->print();
      ShouldNotReachHere();
    }
    assert(klass->is_loader_alive(), "must be alive");
  }
};
798 #endif // ASSERT
799
800
// Drop the metadata recorded in a compiled inline-cache call site;
// invoked from IC cleaning passes.
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
804
805 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
806 template <typename CallsiteT>
807 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
808 bool clean_all) {
809 CodeBlob* cb = CodeCache::find_blob(callsite->destination());
810 if (!cb->is_nmethod()) {
1119 debug_only(nm->verify();) // might block
1120
1121 nm->log_new_nmethod();
1122 }
1123 return nm;
1124 }
1125
// Allocate and construct a normal (JIT-compiled) nmethod in the code cache
// from the artifacts produced by a compilation. scc_entry links the nmethod
// to its Startup Code Cache entry (may be null). Returns nullptr if code
// cache allocation fails; exits the VM if the C-heap immutable-data
// allocation fails.
nmethod* nmethod::new_nmethod(const methodHandle& method,
  int compile_id,
  int entry_bci,
  CodeOffsets* offsets,
  int orig_pc_offset,
  DebugInformationRecorder* debug_info,
  Dependencies* dependencies,
  CodeBuffer* code_buffer, int frame_size,
  OopMapSet* oop_maps,
  ExceptionHandlerTable* handler_table,
  ImplicitExceptionTable* nul_chk_table,
  AbstractCompiler* compiler,
  CompLevel comp_level
  , SCCEntry* scc_entry
#if INCLUDE_JVMCI
  , char* speculations,
  int speculations_len,
  JVMCINMethodData* jvmci_data
#endif
)
{
  assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
  code_buffer->finalize_oop_references(method);
  // create nmethod
  nmethod* nm = nullptr;
  int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
#if INCLUDE_JVMCI
  if (compiler->is_jvmci()) {
    // JVMCI keeps its per-nmethod data inside the nmethod allocation itself.
    nmethod_size += align_up(jvmci_data->size(), oopSize);
  }
#endif

  // Size of the immutable side data (pc descs, debug info, speculations)
  // that lives in C heap rather than in the code cache.
  int immutable_data_size =
      adjust_pcs_size(debug_info->pcs_size())
    + align_up(speculations_len          , oopSize)
#endif
    + align_up(debug_info->data_size()   , oopSize);

  // First, allocate space for immutable data in C heap.
  address immutable_data = nullptr;
  if (immutable_data_size > 0) {
    immutable_data = (address)os::malloc(immutable_data_size, mtCode);
    if (immutable_data == nullptr) {
      vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
      return nullptr;
    }
  }
  {
    // CodeCache allocation and dependency recording happen under the lock.
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);

    nm = new (nmethod_size, comp_level)
    nmethod(method(), compiler->type(), nmethod_size, immutable_data_size,
            compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
            debug_info, dependencies, code_buffer, frame_size, oop_maps,
            handler_table, nul_chk_table, compiler, comp_level, scc_entry
#if INCLUDE_JVMCI
            , speculations,
            speculations_len,
            jvmci_data
#endif
            );

    if (nm != nullptr) {
      // To make dependency checking during class loading fast, record
      // the nmethod dependencies in the classes it is dependent on.
      // This allows the dependency checking code to simply walk the
      // class hierarchy above the loaded class, checking only nmethods
      // which are dependent on those classes. The slow way is to
      // check every nmethod for dependencies which makes it linear in
      // the number of methods compiled. For applications with a lot
      // classes the slow way is too slow.
      for (Dependencies::DepStream deps(nm); deps.next(); ) {
        if (deps.type() == Dependencies::call_site_target_value) {
          // CallSite dependencies are managed on per-CallSite instance basis.
          oop call_site = deps.argument_oop(0);
          MethodHandles::add_dependent_nmethod(call_site, nm);
        } else {
          InstanceKlass* ik = deps.context_type();
          if (ik == nullptr) {
            continue; // ignore things like evol_method
          }
          // record this nmethod as dependent on this klass
          ik->add_dependent_nmethod(nm);
        }
      }
      NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {

#ifdef ASSERT
    // Optional debug dump of the freshly created nmethod for SCC diagnostics.
    LogTarget(Debug, scc, nmethod) log;
    if (log.is_enabled()) {
      LogStream out(log);
      out.print_cr("== new_nmethod 2");
      FlagSetting fs(PrintRelocations, true);
      nm->print(&out);
      nm->decode(&out);
    }
#endif

    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1238
1239 // Fill in default values for various fields
1240 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1241 // avoid uninitialized fields, even for short time periods
1242 _exception_cache = nullptr;
1243 _gc_data = nullptr;
1244 _oops_do_mark_link = nullptr;
1245 _compiled_ic_data = nullptr;
1246
1247 _is_unloading_state = 0;
1248 _state = not_installed;
1249
1250 _has_unsafe_access = 0;
1251 _has_method_handle_invokes = 0;
1252 _has_wide_vectors = 0;
1253 _has_monitors = 0;
1254 _has_scoped_access = 0;
1255 _has_flushed_dependencies = 0;
1256 _is_unlinked = 0;
1257 _load_reported = 0; // jvmti state
1258 _preloaded = 0;
1259 _has_clinit_barriers = 0;
1260
1261 _used = false;
1262 _deoptimization_status = not_marked;
1263
1264 // SECT_CONSTS is first in code buffer so the offset should be 0.
1265 int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1266 assert(consts_offset == 0, "const_offset: %d", consts_offset);
1267
1268 _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1269
1270 CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
1271 CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1272
1273 _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1274 }
1275
1276 // Post initialization
1277 void nmethod::post_init() {
1278 clear_unloading_state();
1279
1280 finalize_relocations();
1281
1313
1314 _osr_entry_point = nullptr;
1315 _pc_desc_container = nullptr;
1316 _entry_bci = InvocationEntryBci;
1317 _compile_id = compile_id;
1318 _comp_level = CompLevel_none;
1319 _compiler_type = type;
1320 _orig_pc_offset = 0;
1321 _num_stack_arg_slots = 0;
1322
1323 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1324 // Continuation enter intrinsic
1325 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1326 } else {
1327 _exception_offset = 0;
1328 }
1329 // Native wrappers do not have deopt handlers. Make the values
1330 // something that will never match a pc like the nmethod vtable entry
1331 _deopt_handler_offset = 0;
1332 _deopt_mh_handler_offset = 0;
1333 _scc_entry = nullptr;
1334 _method_profiling_count = 0;
1335 _unwind_handler_offset = 0;
1336
1337 CHECKED_CAST(_metadata_offset, uint16_t, (align_up(code_buffer->total_oop_size(), oopSize)));
1338 int data_end_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
1339 #if INCLUDE_JVMCI
1340 // jvmci_data_size is 0 in native wrapper but we need to set offset
1341 // to correctly calculate metadata_end address
1342 CHECKED_CAST(_jvmci_data_offset, uint16_t, data_end_offset);
1343 #endif
1344 assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
1345
1346 // native wrapper does not have read-only data but we need unique not null address
1347 _immutable_data = data_end();
1348 _immutable_data_size = 0;
1349 _nul_chk_table_offset = 0;
1350 _handler_table_offset = 0;
1351 _scopes_pcs_offset = 0;
1352 _scopes_data_offset = 0;
1353 #if INCLUDE_JVMCI
1354 _speculations_offset = 0;
1375 // This is both handled in decode2(), called via print_code() -> decode()
1376 if (PrintNativeNMethods) {
1377 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1378 print_code();
1379 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1380 #if defined(SUPPORT_DATA_STRUCTS)
1381 if (AbstractDisassembler::show_structs()) {
1382 if (oop_maps != nullptr) {
1383 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1384 oop_maps->print_on(tty);
1385 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1386 }
1387 }
1388 #endif
1389 } else {
1390 print(); // print the header part only.
1391 }
1392 #if defined(SUPPORT_DATA_STRUCTS)
1393 if (AbstractDisassembler::show_structs()) {
1394 if (PrintRelocations) {
1395 print_relocations_on(tty);
1396 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1397 }
1398 }
1399 #endif
1400 if (xtty != nullptr) {
1401 xtty->tail("print_native_nmethod");
1402 }
1403 }
1404 }
1405
1406 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
1407 return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
1408 }
1409
1410 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1411 // Try MethodNonProfiled and MethodProfiled.
1412 void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1413 if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1414 // Try NonNMethod or give up.
1415 return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
1418 // For normal JIT compiled code
1419 nmethod::nmethod(
1420 Method* method,
1421 CompilerType type,
1422 int nmethod_size,
1423 int immutable_data_size,
1424 int compile_id,
1425 int entry_bci,
1426 address immutable_data,
1427 CodeOffsets* offsets,
1428 int orig_pc_offset,
1429 DebugInformationRecorder* debug_info,
1430 Dependencies* dependencies,
1431 CodeBuffer *code_buffer,
1432 int frame_size,
1433 OopMapSet* oop_maps,
1434 ExceptionHandlerTable* handler_table,
1435 ImplicitExceptionTable* nul_chk_table,
1436 AbstractCompiler* compiler,
1437 CompLevel comp_level
1438 , SCCEntry* scc_entry
1439 #if INCLUDE_JVMCI
1440 , char* speculations,
1441 int speculations_len,
1442 JVMCINMethodData* jvmci_data
1443 #endif
1444 )
1445 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1446 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
1447 _deoptimization_generation(0),
1448 _gc_epoch(CodeCache::gc_epoch()),
1449 _method(method),
1450 _osr_link(nullptr)
1451 {
1452 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1453 {
1454 debug_only(NoSafepointVerifier nsv;)
1455 assert_locked_or_safepoint(CodeCache_lock);
1456
1457 init_defaults(code_buffer, offsets);
1458 _scc_entry = scc_entry;
1459 _method_profiling_count = 0;
1460
1461 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1462 _entry_bci = entry_bci;
1463 _compile_id = compile_id;
1464 _comp_level = comp_level;
1465 _compiler_type = type;
1466 _orig_pc_offset = orig_pc_offset;
1467
1468 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1469
1470 set_ctable_begin(header_begin() + content_offset());
1471
1472 #if INCLUDE_JVMCI
1473 if (compiler->is_jvmci()) {
1474 // JVMCI might not produce any stub sections
1475 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1476 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1477 } else {
1478 _exception_offset = -1;
1479 }
1571 #if INCLUDE_JVMCI
1572 // Copy speculations to nmethod
1573 if (speculations_size() != 0) {
1574 memcpy(speculations_begin(), speculations, speculations_len);
1575 }
1576 #endif
1577
1578 post_init();
1579
1580 // we use the information of entry points to find out if a method is
1581 // static or non static
1582 assert(compiler->is_c2() || compiler->is_jvmci() ||
1583 _method->is_static() == (entry_point() == verified_entry_point()),
1584 " entry points must be same for static methods and vice versa");
1585 }
1586 }
1587
1588 // Print a short set of xml attributes to identify this nmethod. The
1589 // output should be embedded in some other element.
1590 void nmethod::log_identity(xmlStream* log) const {
1591 assert(log->inside_attrs_or_error(), "printing attributes");
1592 log->print(" code_compile_id='%d'", compile_id());
1593 const char* nm_kind = compile_kind();
1594 if (nm_kind != nullptr) log->print(" code_compile_kind='%s'", nm_kind);
1595 log->print(" code_compiler='%s'", compiler_name());
1596 if (TieredCompilation) {
1597 log->print(" code_compile_level='%d'", comp_level());
1598 }
1599 #if INCLUDE_JVMCI
1600 if (jvmci_nmethod_data() != nullptr) {
1601 const char* jvmci_name = jvmci_nmethod_data()->name();
1602 if (jvmci_name != nullptr) {
1603 log->print(" jvmci_mirror_name='");
1604 log->text("%s", jvmci_name);
1605 log->print("'");
1606 }
1607 }
1608 #endif
1609 }
1610
1611
// Emit an xml attribute "<name>_offset='<n>'" for the named nmethod section,
// but only when that section is non-empty (end != begin). The offset is
// computed relative to the start of this nmethod (`this`).
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='" INTX_FORMAT "'" , \
    p2i(name##_begin()) - p2i(this))
1616
1617
1698 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1699 if (oop_maps() != nullptr) {
1700 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1701 oop_maps()->print_on(tty);
1702 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1703 }
1704 }
1705 #endif
1706 } else {
1707 print(); // print the header part only.
1708 }
1709
1710 #if defined(SUPPORT_DATA_STRUCTS)
1711 if (AbstractDisassembler::show_structs()) {
1712 methodHandle mh(Thread::current(), _method);
1713 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1714 print_scopes();
1715 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1716 }
1717 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1718 print_relocations_on(tty);
1719 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1720 }
1721 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1722 print_dependencies_on(tty);
1723 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1724 }
1725 if (printmethod || PrintExceptionHandlers) {
1726 print_handler_table();
1727 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1728 print_nul_chk_table();
1729 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1730 }
1731
1732 if (printmethod) {
1733 print_recorded_oops();
1734 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1735 print_recorded_metadata();
1736 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1737 }
1738 }
1946 Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1947 }
1948
1949 bool nmethod::is_maybe_on_stack() {
1950 // If the condition below is true, it means that the nmethod was found to
1951 // be alive the previous completed marking cycle.
1952 return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1953 }
1954
1955 void nmethod::inc_decompile_count() {
1956 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1957 // Could be gated by ProfileTraps, but do not bother...
1958 Method* m = method();
1959 if (m == nullptr) return;
1960 MethodData* mdo = m->method_data();
1961 if (mdo == nullptr) return;
1962 // There is a benign race here. See comments in methodData.hpp.
1963 mdo->inc_decompile_count();
1964 }
1965
// Atomically bump this nmethod's method-profiling counter.
void nmethod::inc_method_profiling_count() {
  Atomic::inc(&_method_profiling_count);
}
1969
// Return the current value of the method-profiling counter.
// NOTE(review): the counter is updated via Atomic::inc but read here with a
// plain load — presumably acceptable (a torn read of a 64-bit value can only
// occur on 32-bit platforms and the value is advisory); confirm.
uint64_t nmethod::method_profiling_count() {
  return _method_profiling_count;
}
1973
// Attempt to advance this nmethod's state to new_state_int. States only move
// forward (toward numerically larger values); a request for an equal or
// earlier state is refused. Caller must hold NMethodState_lock.
// Returns true iff the transition was performed.
bool nmethod::try_transition(signed char new_state_int) {
  signed char new_state = new_state_int;
  assert_lock_strong(NMethodState_lock);
  signed char old_state = _state;
  if (old_state >= new_state) {
    // Ensure monotonicity of transitions.
    return false;
  }
  // Atomic store publishes the new state to threads reading _state
  // without the lock.
  Atomic::store(&_state, new_state);
  return true;
}
1985
1986 void nmethod::invalidate_osr_method() {
1987 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1988 // Remove from list of active nmethods
1989 if (method() != nullptr) {
1990 method()->method_holder()->remove_osr_nmethod(this);
1991 }
1992 }
1993
1999 os::current_thread_id());
2000 log_identity(xtty);
2001 xtty->stamp();
2002 xtty->end_elem();
2003 }
2004 }
2005
2006 CompileTask::print_ul(this, "made not entrant");
2007 if (PrintCompilation) {
2008 print_on(tty, "made not entrant");
2009 }
2010 }
2011
2012 void nmethod::unlink_from_method() {
2013 if (method() != nullptr) {
2014 method()->unlink_code(this);
2015 }
2016 }
2017
2018 // Invalidate code
2019 bool nmethod::make_not_entrant(bool make_not_entrant) {
2020 // This can be called while the system is already at a safepoint which is ok
2021 NoSafepointVerifier nsv;
2022
2023 if (is_unloading()) {
2024 // If the nmethod is unloading, then it is already not entrant through
2025 // the nmethod entry barriers. No need to do anything; GC will unload it.
2026 return false;
2027 }
2028
2029 if (Atomic::load(&_state) == not_entrant) {
2030 // Avoid taking the lock if already in required state.
2031 // This is safe from races because the state is an end-state,
2032 // which the nmethod cannot back out of once entered.
2033 // No need for fencing either.
2034 return false;
2035 }
2036
2037 {
2038 // Enter critical section. Does not block for safepoint.
2039 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
2062 }
2063
2064 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2065 if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2066 // If nmethod entry barriers are not supported, we won't mark
2067 // nmethods as on-stack when they become on-stack. So we
2068 // degrade to a less accurate flushing strategy, for now.
2069 mark_as_maybe_on_stack();
2070 }
2071
2072 // Change state
2073 bool success = try_transition(not_entrant);
2074 assert(success, "Transition can't fail");
2075
2076 // Log the transition once
2077 log_state_change();
2078
2079 // Remove nmethod from method.
2080 unlink_from_method();
2081
2082 if (make_not_entrant) {
2083 // Keep cached code if it was simply replaced
2084 // otherwise make it not entrant too.
2085 SCCache::invalidate(_scc_entry);
2086 }
2087
2088 CompileBroker::log_not_entrant(this);
2089 } // leave critical region under NMethodState_lock
2090
2091 #if INCLUDE_JVMCI
2092 // Invalidate can't occur while holding the NMethodState_lock
2093 JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2094 if (nmethod_data != nullptr) {
2095 nmethod_data->invalidate_nmethod_mirror(this);
2096 }
2097 #endif
2098
2099 #ifdef ASSERT
2100 if (is_osr_method() && method() != nullptr) {
2101 // Make sure osr nmethod is invalidated, i.e. not on the list
2102 bool found = method()->method_holder()->remove_osr_nmethod(this);
2103 assert(!found, "osr nmethod should have been invalidated");
2104 }
2105 #endif
2106
2107 return true;
2108 }
2208 MethodHandles::clean_dependency_context(call_site);
2209 } else {
2210 InstanceKlass* ik = deps.context_type();
2211 if (ik == nullptr) {
2212 continue; // ignore things like evol_method
2213 }
2214 // During GC liveness of dependee determines class that needs to be updated.
2215 // The GC may clean dependency contexts concurrently and in parallel.
2216 ik->clean_dependency_context();
2217 }
2218 }
2219 }
2220 }
2221
2222 void nmethod::post_compiled_method(CompileTask* task) {
2223 task->mark_success();
2224 task->set_nm_content_size(content_size());
2225 task->set_nm_insts_size(insts_size());
2226 task->set_nm_total_size(total_size());
2227
2228 // task->is_scc() is true only for loaded cached code.
2229 // nmethod::_scc_entry is set for loaded and stored cached code
2230 // to invalidate the entry when nmethod is deoptimized.
2231 // There is option to not store in archive cached code.
2232 guarantee((_scc_entry != nullptr) || !task->is_scc() || VerifyCachedCode, "sanity");
2233
2234 // JVMTI -- compiled method notification (must be done outside lock)
2235 post_compiled_method_load_event();
2236
2237 if (CompilationLog::log() != nullptr) {
2238 CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2239 }
2240
2241 const DirectiveSet* directive = task->directive();
2242 maybe_print_nmethod(directive);
2243 }
2244
2245 // ------------------------------------------------------------------
2246 // post_compiled_method_load_event
2247 // new method for install_code() path
2248 // Transfer information from compilation to jvmti
2249 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2250 // This is a bad time for a safepoint. We don't want
2251 // this nmethod to get unloaded while we're queueing the event.
2252 NoSafepointVerifier nsv;
2253
3152 p2i(nul_chk_table_end()),
3153 nul_chk_table_size());
3154 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3155 p2i(handler_table_begin()),
3156 p2i(handler_table_end()),
3157 handler_table_size());
3158 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3159 p2i(scopes_pcs_begin()),
3160 p2i(scopes_pcs_end()),
3161 scopes_pcs_size());
3162 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3163 p2i(scopes_data_begin()),
3164 p2i(scopes_data_end()),
3165 scopes_data_size());
3166 #if INCLUDE_JVMCI
3167 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3168 p2i(speculations_begin()),
3169 p2i(speculations_end()),
3170 speculations_size());
3171 #endif
3172 if (SCCache::is_on() && _scc_entry != nullptr) {
3173 _scc_entry->print(st);
3174 }
3175 }
3176
// Disassemble this nmethod's code to the tty.
void nmethod::print_code() {
  ResourceMark m;  // disassembly allocates in the resource area
  ttyLocker ttyl;  // keep output from interleaving with other threads
  // Call the specialized decode method of this class.
  decode(tty);
}
3183
3184 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3185
3186 void nmethod::print_dependencies_on(outputStream* out) {
3187 ResourceMark rm;
3188 stringStream st;
3189 st.print_cr("Dependencies:");
3190 for (Dependencies::DepStream deps(this); deps.next(); ) {
3191 deps.print_dependency(&st);
3192 InstanceKlass* ctxk = deps.context_type();
3193 if (ctxk != nullptr) {
3194 if (ctxk->is_dependent_nmethod(this)) {
3254 st->print("scopes:");
3255 if (scopes_pcs_begin() < scopes_pcs_end()) {
3256 st->cr();
3257 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3258 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3259 continue;
3260
3261 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3262 while (sd != nullptr) {
3263 sd->print_on(st, p); // print output ends with a newline
3264 sd = sd->sender();
3265 }
3266 }
3267 } else {
3268 st->print_cr(" <list empty>");
3269 }
3270 }
3271 #endif
3272
3273 #ifndef PRODUCT // RelocIterator does support printing only then.
3274 void nmethod::print_relocations_on(outputStream* st) {
3275 ResourceMark m; // in case methods get printed via the debugger
3276 st->print_cr("relocations:");
3277 RelocIterator iter(this);
3278 iter.print_on(st);
3279 }
3280 #endif
3281
3282 void nmethod::print_pcs_on(outputStream* st) {
3283 ResourceMark m; // in case methods get printed via debugger
3284 st->print("pc-bytecode offsets:");
3285 if (scopes_pcs_begin() < scopes_pcs_end()) {
3286 st->cr();
3287 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3288 p->print_on(st, this); // print output ends with a newline
3289 }
3290 } else {
3291 st->print_cr(" <list empty>");
3292 }
3293 }
3294
3295 void nmethod::print_handler_table() {
3296 ExceptionHandlerTable(this).print(code_begin());
3297 }
3298
3613 else obj->print_value_on(&st);
3614 st.print(")");
3615 return st.as_string();
3616 }
3617 case relocInfo::metadata_type: {
3618 stringStream st;
3619 metadata_Relocation* r = iter.metadata_reloc();
3620 Metadata* obj = r->metadata_value();
3621 st.print("metadata(");
3622 if (obj == nullptr) st.print("nullptr");
3623 else obj->print_value_on(&st);
3624 st.print(")");
3625 return st.as_string();
3626 }
3627 case relocInfo::runtime_call_type:
3628 case relocInfo::runtime_call_w_cp_type: {
3629 stringStream st;
3630 st.print("runtime_call");
3631 CallRelocation* r = (CallRelocation*)iter.reloc();
3632 address dest = r->destination();
3633 if (StubRoutines::contains(dest)) {
3634 StubCodeDesc* desc = StubCodeDesc::desc_for(dest);
3635 if (desc == nullptr) {
3636 desc = StubCodeDesc::desc_for(dest + frame::pc_return_offset);
3637 }
3638 if (desc != nullptr) {
3639 st.print(" Stub::%s", desc->name());
3640 return st.as_string();
3641 }
3642 }
3643 CodeBlob* cb = CodeCache::find_blob(dest);
3644 if (cb != nullptr) {
3645 st.print(" %s", cb->name());
3646 } else {
3647 ResourceMark rm;
3648 const int buflen = 1024;
3649 char* buf = NEW_RESOURCE_ARRAY(char, buflen);
3650 int offset;
3651 if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
3652 st.print(" %s", buf);
3653 if (offset != 0) {
3654 st.print("+%d", offset);
3655 }
3656 }
3657 }
3658 return st.as_string();
3659 }
3660 case relocInfo::virtual_call_type: {
3661 stringStream st;
3662 st.print_raw("virtual_call");
|