12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
#include "asm/assembler.inline.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/dependencies.hpp"
#include "code/nativeInst.hpp"
#include "code/nmethod.inline.hpp"
#include "code/scopeDesc.hpp"
#include "compiler/abstractCompiler.hpp"
#include "compiler/compilationLog.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
#include "compiler/compileTask.hpp"
#include "compiler/compilerDirectives.hpp"
#include "compiler/compilerOracle.hpp"
#include "compiler/directivesParser.hpp"
#include "compiler/disassembler.hpp"
#include "compiler/oopMap.inline.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/classUnloadingContext.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "interpreter/bytecode.inline.hpp"
#include "jvm.h"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/methodCounters.hpp"
767
768 void nmethod::clear_inline_caches() {
769 assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
770 RelocIterator iter(this);
771 while (iter.next()) {
772 iter.reloc()->clear_inline_cache();
773 }
774 }
775
#ifdef ASSERT
// Check class_loader is alive for this bit of metadata.
class CheckClass : public MetadataClosure {
  // Resolve the Klass that holds this piece of metadata and assert that
  // its class loader is still alive; used to verify that an nmethod does
  // not keep references to metadata of unloaded classes.
  void do_metadata(Metadata* md) {
    Klass* klass = nullptr;
    if (md->is_klass()) {
      klass = ((Klass*)md);
    } else if (md->is_method()) {
      klass = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      klass = ((MethodData*)md)->method()->method_holder();
    } else if (md->is_methodCounters()) {
      // MethodCounters can also be recorded as nmethod metadata; resolve
      // the holder class through the owning method. Previously this fell
      // into ShouldNotReachHere() and aborted the VM in debug builds.
      klass = ((MethodCounters*)md)->method()->method_holder();
    } else {
      md->print();
      ShouldNotReachHere();
    }
    assert(klass->is_loader_alive(), "must be alive");
  }
};
#endif // ASSERT
795
796
797 static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
798 ic->clean_metadata();
799 }
800
801 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
802 template <typename CallsiteT>
803 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
804 bool clean_all) {
805 CodeBlob* cb = CodeCache::find_blob(callsite->destination());
806 if (!cb->is_nmethod()) {
1115 debug_only(nm->verify();) // might block
1116
1117 nm->log_new_nmethod();
1118 }
1119 return nm;
1120 }
1121
// Allocate and construct a complete nmethod for a regular JIT-compiled
// method. Immutable data (debug info, speculations) is allocated in the
// C heap; the nmethod itself is allocated in the code cache while holding
// CodeCache_lock. Returns nullptr when code cache allocation fails; exits
// the VM when the C-heap allocation for immutable data fails.
nmethod* nmethod::new_nmethod(const methodHandle& method,
                              int compile_id,
                              int entry_bci,
                              CodeOffsets* offsets,
                              int orig_pc_offset,
                              DebugInformationRecorder* debug_info,
                              Dependencies* dependencies,
                              CodeBuffer* code_buffer, int frame_size,
                              OopMapSet* oop_maps,
                              ExceptionHandlerTable* handler_table,
                              ImplicitExceptionTable* nul_chk_table,
                              AbstractCompiler* compiler,
                              CompLevel comp_level
#if INCLUDE_JVMCI
                              , char* speculations,
                              int speculations_len,
                              JVMCINMethodData* jvmci_data
#endif
)
{
  assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
  code_buffer->finalize_oop_references(method);
  // create nmethod
  nmethod* nm = nullptr;
  int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
#if INCLUDE_JVMCI
  if (compiler->is_jvmci()) {
    // JVMCI-specific data is laid out inside the nmethod allocation itself.
    nmethod_size += align_up(jvmci_data->size(), oopSize);
  }
#endif

  int immutable_data_size =
      adjust_pcs_size(debug_info->pcs_size())
    + align_up(speculations_len              , oopSize)
#endif
    + align_up(debug_info->data_size()       , oopSize);

  // First, allocate space for immutable data in C heap.
  address immutable_data = nullptr;
  if (immutable_data_size > 0) {
    immutable_data = (address)os::malloc(immutable_data_size, mtCode);
    if (immutable_data == nullptr) {
      vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
      return nullptr;
    }
  }
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);

    // nm may be nullptr here if allocation in the code cache failed.
    nm = new (nmethod_size, comp_level)
    nmethod(method(), compiler->type(), nmethod_size, immutable_data_size,
            compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
            debug_info, dependencies, code_buffer, frame_size, oop_maps,
            handler_table, nul_chk_table, compiler, comp_level
#if INCLUDE_JVMCI
            , speculations,
            speculations_len,
            jvmci_data
#endif
            );

    if (nm != nullptr) {
      // To make dependency checking during class loading fast, record
      // the nmethod dependencies in the classes it is dependent on.
      // This allows the dependency checking code to simply walk the
      // class hierarchy above the loaded class, checking only nmethods
      // which are dependent on those classes. The slow way is to
      // check every nmethod for dependencies which makes it linear in
      // the number of methods compiled. For applications with a lot
      // classes the slow way is too slow.
      for (Dependencies::DepStream deps(nm); deps.next(); ) {
        if (deps.type() == Dependencies::call_site_target_value) {
          // CallSite dependencies are managed on per-CallSite instance basis.
          oop call_site = deps.argument_oop(0);
          MethodHandles::add_dependent_nmethod(call_site, nm);
        } else {
          InstanceKlass* ik = deps.context_type();
          if (ik == nullptr) {
            continue; // ignore things like evol_method
          }
          // record this nmethod as dependent on this klass
          ik->add_dependent_nmethod(nm);
        }
      }
      NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {
    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1221
// Fill in default values for various fields
// Called early during construction so that no field is ever observed
// uninitialized, and derives the basic code-section offsets from the
// CodeBuffer/CodeOffsets.
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;

  _is_unloading_state = 0;
  _state = not_installed;

  // All flag bits start cleared; they are set later as the nmethod is
  // installed and used.
  _has_unsafe_access = 0;
  _has_method_handle_invokes = 0;
  _has_wide_vectors = 0;
  _has_monitors = 0;
  _has_scoped_access = 0;
  _has_flushed_dependencies = 0;
  _is_unlinked = 0;
  _load_reported = 0; // jvmti state

  _deoptimization_status = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry offsets are narrowed to 16-bit fields via checked casts.
  CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1255
1256 // Post initialization
1257 void nmethod::post_init() {
1258 clear_unloading_state();
1259
1260 finalize_relocations();
1261
1293
1294 _osr_entry_point = nullptr;
1295 _pc_desc_container = nullptr;
1296 _entry_bci = InvocationEntryBci;
1297 _compile_id = compile_id;
1298 _comp_level = CompLevel_none;
1299 _compiler_type = type;
1300 _orig_pc_offset = 0;
1301 _num_stack_arg_slots = 0;
1302
1303 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1304 // Continuation enter intrinsic
1305 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1306 } else {
1307 _exception_offset = 0;
1308 }
1309 // Native wrappers do not have deopt handlers. Make the values
1310 // something that will never match a pc like the nmethod vtable entry
1311 _deopt_handler_offset = 0;
1312 _deopt_mh_handler_offset = 0;
1313 _unwind_handler_offset = 0;
1314
1315 CHECKED_CAST(_metadata_offset, uint16_t, (align_up(code_buffer->total_oop_size(), oopSize)));
1316 int data_end_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
1317 #if INCLUDE_JVMCI
1318 // jvmci_data_size is 0 in native wrapper but we need to set offset
1319 // to correctly calculate metadata_end address
1320 CHECKED_CAST(_jvmci_data_offset, uint16_t, data_end_offset);
1321 #endif
1322 assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
1323
1324 // native wrapper does not have read-only data but we need unique not null address
1325 _immutable_data = data_end();
1326 _immutable_data_size = 0;
1327 _nul_chk_table_offset = 0;
1328 _handler_table_offset = 0;
1329 _scopes_pcs_offset = 0;
1330 _scopes_data_offset = 0;
1331 #if INCLUDE_JVMCI
1332 _speculations_offset = 0;
1353 // This is both handled in decode2(), called via print_code() -> decode()
1354 if (PrintNativeNMethods) {
1355 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1356 print_code();
1357 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1358 #if defined(SUPPORT_DATA_STRUCTS)
1359 if (AbstractDisassembler::show_structs()) {
1360 if (oop_maps != nullptr) {
1361 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1362 oop_maps->print_on(tty);
1363 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1364 }
1365 }
1366 #endif
1367 } else {
1368 print(); // print the header part only.
1369 }
1370 #if defined(SUPPORT_DATA_STRUCTS)
1371 if (AbstractDisassembler::show_structs()) {
1372 if (PrintRelocations) {
1373 print_relocations();
1374 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1375 }
1376 }
1377 #endif
1378 if (xtty != nullptr) {
1379 xtty->tail("print_native_nmethod");
1380 }
1381 }
1382 }
1383
1384 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
1385 return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
1386 }
1387
// Allocate from the non-profiled method heap, optionally falling back to
// the NonNMethod heap when 'allow_NonNMethod_space' is set. Returns
// nullptr when no space is available anywhere.
void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
  // Try MethodNonProfiled and MethodProfiled.
  void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
  if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
  // Try NonNMethod or give up.
  return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
1396 // For normal JIT compiled code
1397 nmethod::nmethod(
1398 Method* method,
1399 CompilerType type,
1400 int nmethod_size,
1401 int immutable_data_size,
1402 int compile_id,
1403 int entry_bci,
1404 address immutable_data,
1405 CodeOffsets* offsets,
1406 int orig_pc_offset,
1407 DebugInformationRecorder* debug_info,
1408 Dependencies* dependencies,
1409 CodeBuffer *code_buffer,
1410 int frame_size,
1411 OopMapSet* oop_maps,
1412 ExceptionHandlerTable* handler_table,
1413 ImplicitExceptionTable* nul_chk_table,
1414 AbstractCompiler* compiler,
1415 CompLevel comp_level
1416 #if INCLUDE_JVMCI
1417 , char* speculations,
1418 int speculations_len,
1419 JVMCINMethodData* jvmci_data
1420 #endif
1421 )
1422 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1423 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
1424 _deoptimization_generation(0),
1425 _gc_epoch(CodeCache::gc_epoch()),
1426 _method(method),
1427 _osr_link(nullptr)
1428 {
1429 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1430 {
1431 debug_only(NoSafepointVerifier nsv;)
1432 assert_locked_or_safepoint(CodeCache_lock);
1433
1434 init_defaults(code_buffer, offsets);
1435
1436 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1437 _entry_bci = entry_bci;
1438 _compile_id = compile_id;
1439 _comp_level = comp_level;
1440 _compiler_type = type;
1441 _orig_pc_offset = orig_pc_offset;
1442
1443 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1444
1445 set_ctable_begin(header_begin() + content_offset());
1446
1447 #if INCLUDE_JVMCI
1448 if (compiler->is_jvmci()) {
1449 // JVMCI might not produce any stub sections
1450 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1451 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1452 } else {
1453 _exception_offset = -1;
1454 }
1546 #if INCLUDE_JVMCI
1547 // Copy speculations to nmethod
1548 if (speculations_size() != 0) {
1549 memcpy(speculations_begin(), speculations, speculations_len);
1550 }
1551 #endif
1552
1553 post_init();
1554
1555 // we use the information of entry points to find out if a method is
1556 // static or non static
1557 assert(compiler->is_c2() || compiler->is_jvmci() ||
1558 _method->is_static() == (entry_point() == verified_entry_point()),
1559 " entry points must be same for static methods and vice versa");
1560 }
1561 }
1562
1563 // Print a short set of xml attributes to identify this nmethod. The
1564 // output should be embedded in some other element.
1565 void nmethod::log_identity(xmlStream* log) const {
1566 log->print(" compile_id='%d'", compile_id());
1567 const char* nm_kind = compile_kind();
1568 if (nm_kind != nullptr) log->print(" compile_kind='%s'", nm_kind);
1569 log->print(" compiler='%s'", compiler_name());
1570 if (TieredCompilation) {
1571 log->print(" level='%d'", comp_level());
1572 }
1573 #if INCLUDE_JVMCI
1574 if (jvmci_nmethod_data() != nullptr) {
1575 const char* jvmci_name = jvmci_nmethod_data()->name();
1576 if (jvmci_name != nullptr) {
1577 log->print(" jvmci_mirror_name='");
1578 log->text("%s", jvmci_name);
1579 log->print("'");
1580 }
1581 }
1582 #endif
1583 }
1584
1585
// Logging helper: emits "<name>_offset='...'" (offset of the section's
// begin relative to this nmethod) only when the named section is
// non-empty, i.e. its begin() and end() addresses differ.
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'" , \
               p2i(name##_begin()) - p2i(this))
1590
1591
1672 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1673 if (oop_maps() != nullptr) {
1674 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1675 oop_maps()->print_on(tty);
1676 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1677 }
1678 }
1679 #endif
1680 } else {
1681 print(); // print the header part only.
1682 }
1683
1684 #if defined(SUPPORT_DATA_STRUCTS)
1685 if (AbstractDisassembler::show_structs()) {
1686 methodHandle mh(Thread::current(), _method);
1687 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1688 print_scopes();
1689 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1690 }
1691 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1692 print_relocations();
1693 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1694 }
1695 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1696 print_dependencies_on(tty);
1697 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1698 }
1699 if (printmethod || PrintExceptionHandlers) {
1700 print_handler_table();
1701 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1702 print_nul_chk_table();
1703 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1704 }
1705
1706 if (printmethod) {
1707 print_recorded_oops();
1708 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1709 print_recorded_metadata();
1710 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1711 }
1712 }
1920 Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1921 }
1922
1923 bool nmethod::is_maybe_on_stack() {
1924 // If the condition below is true, it means that the nmethod was found to
1925 // be alive the previous completed marking cycle.
1926 return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1927 }
1928
1929 void nmethod::inc_decompile_count() {
1930 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1931 // Could be gated by ProfileTraps, but do not bother...
1932 Method* m = method();
1933 if (m == nullptr) return;
1934 MethodData* mdo = m->method_data();
1935 if (mdo == nullptr) return;
1936 // There is a benign race here. See comments in methodData.hpp.
1937 mdo->inc_decompile_count();
1938 }
1939
1940 bool nmethod::try_transition(signed char new_state_int) {
1941 signed char new_state = new_state_int;
1942 assert_lock_strong(NMethodState_lock);
1943 signed char old_state = _state;
1944 if (old_state >= new_state) {
1945 // Ensure monotonicity of transitions.
1946 return false;
1947 }
1948 Atomic::store(&_state, new_state);
1949 return true;
1950 }
1951
1952 void nmethod::invalidate_osr_method() {
1953 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1954 // Remove from list of active nmethods
1955 if (method() != nullptr) {
1956 method()->method_holder()->remove_osr_nmethod(this);
1957 }
1958 }
1959
1965 os::current_thread_id());
1966 log_identity(xtty);
1967 xtty->stamp();
1968 xtty->end_elem();
1969 }
1970 }
1971
1972 CompileTask::print_ul(this, "made not entrant");
1973 if (PrintCompilation) {
1974 print_on_with_msg(tty, "made not entrant");
1975 }
1976 }
1977
1978 void nmethod::unlink_from_method() {
1979 if (method() != nullptr) {
1980 method()->unlink_code(this);
1981 }
1982 }
1983
// Invalidate code
// Transition this nmethod to not_entrant so that no new activations can
// enter it. Returns false when the nmethod is unloading (the GC will
// handle it) or already not entrant; returns true when this call
// performed the transition.
bool nmethod::make_not_entrant() {
  // This can be called while the system is already at a safepoint which is ok
  NoSafepointVerifier nsv;

  if (is_unloading()) {
    // If the nmethod is unloading, then it is already not entrant through
    // the nmethod entry barriers. No need to do anything; GC will unload it.
    return false;
  }

  if (Atomic::load(&_state) == not_entrant) {
    // Avoid taking the lock if already in required state.
    // This is safe from races because the state is an end-state,
    // which the nmethod cannot back out of once entered.
    // No need for fencing either.
    return false;
  }

  {
    // Enter critical section. Does not block for safepoint.
    ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
    }

    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
      // If nmethod entry barriers are not supported, we won't mark
      // nmethods as on-stack when they become on-stack. So we
      // degrade to a less accurate flushing strategy, for now.
      mark_as_maybe_on_stack();
    }

    // Change state
    bool success = try_transition(not_entrant);
    assert(success, "Transition can't fail");

    // Log the transition once
    log_state_change();

    // Remove nmethod from method.
    unlink_from_method();

  } // leave critical region under NMethodState_lock

#if INCLUDE_JVMCI
  // Invalidate can't occur while holding the NMethodState_lock
  JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
  if (nmethod_data != nullptr) {
    nmethod_data->invalidate_nmethod_mirror(this);
  }
#endif

#ifdef ASSERT
  if (is_osr_method() && method() != nullptr) {
    // Make sure osr nmethod is invalidated, i.e. not on the list
    bool found = method()->method_holder()->remove_osr_nmethod(this);
    assert(!found, "osr nmethod should have been invalidated");
  }
#endif

  return true;
}
2171 MethodHandles::clean_dependency_context(call_site);
2172 } else {
2173 InstanceKlass* ik = deps.context_type();
2174 if (ik == nullptr) {
2175 continue; // ignore things like evol_method
2176 }
2177 // During GC liveness of dependee determines class that needs to be updated.
2178 // The GC may clean dependency contexts concurrently and in parallel.
2179 ik->clean_dependency_context();
2180 }
2181 }
2182 }
2183 }
2184
2185 void nmethod::post_compiled_method(CompileTask* task) {
2186 task->mark_success();
2187 task->set_nm_content_size(content_size());
2188 task->set_nm_insts_size(insts_size());
2189 task->set_nm_total_size(total_size());
2190
2191 // JVMTI -- compiled method notification (must be done outside lock)
2192 post_compiled_method_load_event();
2193
2194 if (CompilationLog::log() != nullptr) {
2195 CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2196 }
2197
2198 const DirectiveSet* directive = task->directive();
2199 maybe_print_nmethod(directive);
2200 }
2201
2202 // ------------------------------------------------------------------
2203 // post_compiled_method_load_event
2204 // new method for install_code() path
2205 // Transfer information from compilation to jvmti
2206 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2207 // This is a bad time for a safepoint. We don't want
2208 // this nmethod to get unloaded while we're queueing the event.
2209 NoSafepointVerifier nsv;
2210
3104 p2i(nul_chk_table_end()),
3105 nul_chk_table_size());
3106 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3107 p2i(handler_table_begin()),
3108 p2i(handler_table_end()),
3109 handler_table_size());
3110 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3111 p2i(scopes_pcs_begin()),
3112 p2i(scopes_pcs_end()),
3113 scopes_pcs_size());
3114 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3115 p2i(scopes_data_begin()),
3116 p2i(scopes_data_end()),
3117 scopes_data_size());
3118 #if INCLUDE_JVMCI
3119 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3120 p2i(speculations_begin()),
3121 p2i(speculations_end()),
3122 speculations_size());
3123 #endif
3124 }
3125
3126 void nmethod::print_code() {
3127 ResourceMark m;
3128 ttyLocker ttyl;
3129 // Call the specialized decode method of this class.
3130 decode(tty);
3131 }
3132
3133 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3134
3135 void nmethod::print_dependencies_on(outputStream* out) {
3136 ResourceMark rm;
3137 stringStream st;
3138 st.print_cr("Dependencies:");
3139 for (Dependencies::DepStream deps(this); deps.next(); ) {
3140 deps.print_dependency(&st);
3141 InstanceKlass* ctxk = deps.context_type();
3142 if (ctxk != nullptr) {
3143 if (ctxk->is_dependent_nmethod(this)) {
3203 st->print("scopes:");
3204 if (scopes_pcs_begin() < scopes_pcs_end()) {
3205 st->cr();
3206 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3207 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3208 continue;
3209
3210 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3211 while (sd != nullptr) {
3212 sd->print_on(st, p); // print output ends with a newline
3213 sd = sd->sender();
3214 }
3215 }
3216 } else {
3217 st->print_cr(" <list empty>");
3218 }
3219 }
3220 #endif
3221
3222 #ifndef PRODUCT // RelocIterator does support printing only then.
3223 void nmethod::print_relocations() {
3224 ResourceMark m; // in case methods get printed via the debugger
3225 tty->print_cr("relocations:");
3226 RelocIterator iter(this);
3227 iter.print();
3228 }
3229 #endif
3230
3231 void nmethod::print_pcs_on(outputStream* st) {
3232 ResourceMark m; // in case methods get printed via debugger
3233 st->print("pc-bytecode offsets:");
3234 if (scopes_pcs_begin() < scopes_pcs_end()) {
3235 st->cr();
3236 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3237 p->print_on(st, this); // print output ends with a newline
3238 }
3239 } else {
3240 st->print_cr(" <list empty>");
3241 }
3242 }
3243
// Print this nmethod's exception handler table, using code_begin() as the
// base address for the printed pc values.
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3247
3562 else obj->print_value_on(&st);
3563 st.print(")");
3564 return st.as_string();
3565 }
3566 case relocInfo::metadata_type: {
3567 stringStream st;
3568 metadata_Relocation* r = iter.metadata_reloc();
3569 Metadata* obj = r->metadata_value();
3570 st.print("metadata(");
3571 if (obj == nullptr) st.print("nullptr");
3572 else obj->print_value_on(&st);
3573 st.print(")");
3574 return st.as_string();
3575 }
3576 case relocInfo::runtime_call_type:
3577 case relocInfo::runtime_call_w_cp_type: {
3578 stringStream st;
3579 st.print("runtime_call");
3580 CallRelocation* r = (CallRelocation*)iter.reloc();
3581 address dest = r->destination();
3582 CodeBlob* cb = CodeCache::find_blob(dest);
3583 if (cb != nullptr) {
3584 st.print(" %s", cb->name());
3585 } else {
3586 ResourceMark rm;
3587 const int buflen = 1024;
3588 char* buf = NEW_RESOURCE_ARRAY(char, buflen);
3589 int offset;
3590 if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
3591 st.print(" %s", buf);
3592 if (offset != 0) {
3593 st.print("+%d", offset);
3594 }
3595 }
3596 }
3597 return st.as_string();
3598 }
3599 case relocInfo::virtual_call_type: {
3600 stringStream st;
3601 st.print_raw("virtual_call");
|
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/assembler.inline.hpp"
26 #include "code/codeCache.hpp"
27 #include "code/compiledIC.hpp"
28 #include "code/dependencies.hpp"
29 #include "code/nativeInst.hpp"
30 #include "code/nmethod.inline.hpp"
31 #include "code/scopeDesc.hpp"
32 #include "code/SCCache.hpp"
33 #include "compiler/abstractCompiler.hpp"
34 #include "compiler/compilationLog.hpp"
35 #include "compiler/compileBroker.hpp"
36 #include "compiler/compileLog.hpp"
37 #include "compiler/compileTask.hpp"
38 #include "compiler/compilerDirectives.hpp"
39 #include "compiler/compilerOracle.hpp"
40 #include "compiler/directivesParser.hpp"
41 #include "compiler/disassembler.hpp"
42 #include "compiler/oopMap.inline.hpp"
43 #include "gc/shared/barrierSet.hpp"
44 #include "gc/shared/barrierSetNMethod.hpp"
45 #include "gc/shared/classUnloadingContext.hpp"
46 #include "gc/shared/collectedHeap.hpp"
47 #include "interpreter/bytecode.inline.hpp"
48 #include "jvm.h"
49 #include "logging/log.hpp"
50 #include "logging/logStream.hpp"
51 #include "memory/allocation.inline.hpp"
52 #include "memory/resourceArea.hpp"
768
// Clear every inline cache in this nmethod by walking its relocation
// entries. Must run at a safepoint (asserted) so that no thread is
// executing through an IC while it is being reset.
void nmethod::clear_inline_caches() {
  assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
  RelocIterator iter(this);
  while (iter.next()) {
    iter.reloc()->clear_inline_cache();
  }
}
776
#ifdef ASSERT
// Check class_loader is alive for this bit of metadata.
class CheckClass : public MetadataClosure {
  // Map each supported Metadata kind (Klass, Method, MethodData,
  // MethodCounters) to its holder Klass and assert that the Klass's
  // class loader has not been unloaded. Any other metadata kind is
  // printed and treated as a fatal error.
  void do_metadata(Metadata* md) {
    Klass* klass = nullptr;
    if (md->is_klass()) {
      klass = ((Klass*)md);
    } else if (md->is_method()) {
      klass = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      klass = ((MethodData*)md)->method()->method_holder();
    } else if (md->is_methodCounters()) {
      klass = ((MethodCounters*)md)->method()->method_holder();
    } else {
      md->print();
      ShouldNotReachHere();
    }
    assert(klass->is_loader_alive(), "must be alive");
  }
};
#endif // ASSERT
798
799
// Clean the given inline cache's metadata; the dead-metadata handling
// itself lives in CompiledIC::clean_metadata().
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
803
804 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
805 template <typename CallsiteT>
806 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
807 bool clean_all) {
808 CodeBlob* cb = CodeCache::find_blob(callsite->destination());
809 if (!cb->is_nmethod()) {
1118 debug_only(nm->verify();) // might block
1119
1120 nm->log_new_nmethod();
1121 }
1122 return nm;
1123 }
1124
// Allocate and construct a complete nmethod for a regular JIT-compiled
// method. Immutable data (debug info, speculations) is allocated in the
// C heap; the nmethod itself is allocated in the code cache while holding
// CodeCache_lock. 'scc_entry' associates the nmethod with its startup
// code cache entry (may be nullptr). Returns nullptr when code cache
// allocation fails; exits the VM when the C-heap allocation for
// immutable data fails.
nmethod* nmethod::new_nmethod(const methodHandle& method,
                              int compile_id,
                              int entry_bci,
                              CodeOffsets* offsets,
                              int orig_pc_offset,
                              DebugInformationRecorder* debug_info,
                              Dependencies* dependencies,
                              CodeBuffer* code_buffer, int frame_size,
                              OopMapSet* oop_maps,
                              ExceptionHandlerTable* handler_table,
                              ImplicitExceptionTable* nul_chk_table,
                              AbstractCompiler* compiler,
                              CompLevel comp_level
                              , SCCEntry* scc_entry
#if INCLUDE_JVMCI
                              , char* speculations,
                              int speculations_len,
                              JVMCINMethodData* jvmci_data
#endif
)
{
  assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
  code_buffer->finalize_oop_references(method);
  // create nmethod
  nmethod* nm = nullptr;
  int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
#if INCLUDE_JVMCI
  if (compiler->is_jvmci()) {
    // JVMCI-specific data is laid out inside the nmethod allocation itself.
    nmethod_size += align_up(jvmci_data->size(), oopSize);
  }
#endif

  int immutable_data_size =
      adjust_pcs_size(debug_info->pcs_size())
    + align_up(speculations_len              , oopSize)
#endif
    + align_up(debug_info->data_size()       , oopSize);

  // First, allocate space for immutable data in C heap.
  address immutable_data = nullptr;
  if (immutable_data_size > 0) {
    immutable_data = (address)os::malloc(immutable_data_size, mtCode);
    if (immutable_data == nullptr) {
      vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
      return nullptr;
    }
  }
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);

    // nm may be nullptr here if allocation in the code cache failed.
    nm = new (nmethod_size, comp_level)
    nmethod(method(), compiler->type(), nmethod_size, immutable_data_size,
            compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
            debug_info, dependencies, code_buffer, frame_size, oop_maps,
            handler_table, nul_chk_table, compiler, comp_level, scc_entry
#if INCLUDE_JVMCI
            , speculations,
            speculations_len,
            jvmci_data
#endif
            );

    if (nm != nullptr) {
      // To make dependency checking during class loading fast, record
      // the nmethod dependencies in the classes it is dependent on.
      // This allows the dependency checking code to simply walk the
      // class hierarchy above the loaded class, checking only nmethods
      // which are dependent on those classes. The slow way is to
      // check every nmethod for dependencies which makes it linear in
      // the number of methods compiled. For applications with a lot
      // classes the slow way is too slow.
      for (Dependencies::DepStream deps(nm); deps.next(); ) {
        if (deps.type() == Dependencies::call_site_target_value) {
          // CallSite dependencies are managed on per-CallSite instance basis.
          oop call_site = deps.argument_oop(0);
          MethodHandles::add_dependent_nmethod(call_site, nm);
        } else {
          InstanceKlass* ik = deps.context_type();
          if (ik == nullptr) {
            continue; // ignore things like evol_method
          }
          // record this nmethod as dependent on this klass
          ik->add_dependent_nmethod(nm);
        }
      }
      NOT_PRODUCT(if (nm != nullptr) note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {

#ifdef ASSERT
    // Optional debug dump of the freshly created nmethod for SCC tracing.
    LogTarget(Debug, scc, nmethod) log;
    if (log.is_enabled()) {
      LogStream out(log);
      out.print_cr("== new_nmethod 2");
      FlagSetting fs(PrintRelocations, true);
      nm->print_on_impl(&out);
      nm->decode(&out);
    }
#endif

    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1237
1238 // Fill in default values for various fields
1239 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1240 // avoid uninitialized fields, even for short time periods
1241 _exception_cache = nullptr;
1242 _gc_data = nullptr;
1243 _oops_do_mark_link = nullptr;
1244 _compiled_ic_data = nullptr;
1245
1246 _is_unloading_state = 0;
1247 _state = not_installed;
1248
1249 _has_unsafe_access = 0;
1250 _has_method_handle_invokes = 0;
1251 _has_wide_vectors = 0;
1252 _has_monitors = 0;
1253 _has_scoped_access = 0;
1254 _has_flushed_dependencies = 0;
1255 _is_unlinked = 0;
1256 _load_reported = 0; // jvmti state
1257 _preloaded = 0;
1258 _has_clinit_barriers = 0;
1259
1260 _used = false;
1261 _deoptimization_status = not_marked;
1262
1263 // SECT_CONSTS is first in code buffer so the offset should be 0.
1264 int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1265 assert(consts_offset == 0, "const_offset: %d", consts_offset);
1266
1267 _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1268
1269 CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
1270 CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1271
1272 _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1273 }
1274
1275 // Post initialization
1276 void nmethod::post_init() {
1277 clear_unloading_state();
1278
1279 finalize_relocations();
1280
1312
1313 _osr_entry_point = nullptr;
1314 _pc_desc_container = nullptr;
1315 _entry_bci = InvocationEntryBci;
1316 _compile_id = compile_id;
1317 _comp_level = CompLevel_none;
1318 _compiler_type = type;
1319 _orig_pc_offset = 0;
1320 _num_stack_arg_slots = 0;
1321
1322 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1323 // Continuation enter intrinsic
1324 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1325 } else {
1326 _exception_offset = 0;
1327 }
1328 // Native wrappers do not have deopt handlers. Make the values
1329 // something that will never match a pc like the nmethod vtable entry
1330 _deopt_handler_offset = 0;
1331 _deopt_mh_handler_offset = 0;
1332 _scc_entry = nullptr;
1333 _method_profiling_count = 0;
1334 _unwind_handler_offset = 0;
1335
1336 CHECKED_CAST(_metadata_offset, uint16_t, (align_up(code_buffer->total_oop_size(), oopSize)));
1337 int data_end_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
1338 #if INCLUDE_JVMCI
1339 // jvmci_data_size is 0 in native wrapper but we need to set offset
1340 // to correctly calculate metadata_end address
1341 CHECKED_CAST(_jvmci_data_offset, uint16_t, data_end_offset);
1342 #endif
1343 assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
1344
1345 // native wrapper does not have read-only data but we need unique not null address
1346 _immutable_data = data_end();
1347 _immutable_data_size = 0;
1348 _nul_chk_table_offset = 0;
1349 _handler_table_offset = 0;
1350 _scopes_pcs_offset = 0;
1351 _scopes_data_offset = 0;
1352 #if INCLUDE_JVMCI
1353 _speculations_offset = 0;
1374 // This is both handled in decode2(), called via print_code() -> decode()
1375 if (PrintNativeNMethods) {
1376 tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1377 print_code();
1378 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1379 #if defined(SUPPORT_DATA_STRUCTS)
1380 if (AbstractDisassembler::show_structs()) {
1381 if (oop_maps != nullptr) {
1382 tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1383 oop_maps->print_on(tty);
1384 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1385 }
1386 }
1387 #endif
1388 } else {
1389 print(); // print the header part only.
1390 }
1391 #if defined(SUPPORT_DATA_STRUCTS)
1392 if (AbstractDisassembler::show_structs()) {
1393 if (PrintRelocations) {
1394 print_relocations_on(tty);
1395 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1396 }
1397 }
1398 #endif
1399 if (xtty != nullptr) {
1400 xtty->tail("print_native_nmethod");
1401 }
1402 }
1403 }
1404
// Allocate space for a new nmethod from the code cache.
// 'size' (the C++ object header size) is already accounted for inside
// 'nmethod_size', which covers the whole blob; the code heap is chosen
// to match the compilation level.
void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
}
1408
1409 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1410 // Try MethodNonProfiled and MethodProfiled.
1411 void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1412 if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1413 // Try NonNMethod or give up.
1414 return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
1417 // For normal JIT compiled code
1418 nmethod::nmethod(
1419 Method* method,
1420 CompilerType type,
1421 int nmethod_size,
1422 int immutable_data_size,
1423 int compile_id,
1424 int entry_bci,
1425 address immutable_data,
1426 CodeOffsets* offsets,
1427 int orig_pc_offset,
1428 DebugInformationRecorder* debug_info,
1429 Dependencies* dependencies,
1430 CodeBuffer *code_buffer,
1431 int frame_size,
1432 OopMapSet* oop_maps,
1433 ExceptionHandlerTable* handler_table,
1434 ImplicitExceptionTable* nul_chk_table,
1435 AbstractCompiler* compiler,
1436 CompLevel comp_level
1437 , SCCEntry* scc_entry
1438 #if INCLUDE_JVMCI
1439 , char* speculations,
1440 int speculations_len,
1441 JVMCINMethodData* jvmci_data
1442 #endif
1443 )
1444 : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1445 offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
1446 _deoptimization_generation(0),
1447 _gc_epoch(CodeCache::gc_epoch()),
1448 _method(method),
1449 _osr_link(nullptr)
1450 {
1451 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1452 {
1453 debug_only(NoSafepointVerifier nsv;)
1454 assert_locked_or_safepoint(CodeCache_lock);
1455
1456 init_defaults(code_buffer, offsets);
1457 _scc_entry = scc_entry;
1458 _method_profiling_count = 0;
1459
1460 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1461 _entry_bci = entry_bci;
1462 _compile_id = compile_id;
1463 _comp_level = comp_level;
1464 _compiler_type = type;
1465 _orig_pc_offset = orig_pc_offset;
1466
1467 _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1468
1469 set_ctable_begin(header_begin() + content_offset());
1470
1471 #if INCLUDE_JVMCI
1472 if (compiler->is_jvmci()) {
1473 // JVMCI might not produce any stub sections
1474 if (offsets->value(CodeOffsets::Exceptions) != -1) {
1475 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
1476 } else {
1477 _exception_offset = -1;
1478 }
1570 #if INCLUDE_JVMCI
1571 // Copy speculations to nmethod
1572 if (speculations_size() != 0) {
1573 memcpy(speculations_begin(), speculations, speculations_len);
1574 }
1575 #endif
1576
1577 post_init();
1578
1579 // we use the information of entry points to find out if a method is
1580 // static or non static
1581 assert(compiler->is_c2() || compiler->is_jvmci() ||
1582 _method->is_static() == (entry_point() == verified_entry_point()),
1583 " entry points must be same for static methods and vice versa");
1584 }
1585 }
1586
1587 // Print a short set of xml attributes to identify this nmethod. The
1588 // output should be embedded in some other element.
1589 void nmethod::log_identity(xmlStream* log) const {
1590 assert(log->inside_attrs_or_error(), "printing attributes");
1591 log->print(" code_compile_id='%d'", compile_id());
1592 const char* nm_kind = compile_kind();
1593 if (nm_kind != nullptr) log->print(" code_compile_kind='%s'", nm_kind);
1594 log->print(" code_compiler='%s'", compiler_name());
1595 if (TieredCompilation) {
1596 log->print(" code_compile_level='%d'", comp_level());
1597 }
1598 #if INCLUDE_JVMCI
1599 if (jvmci_nmethod_data() != nullptr) {
1600 const char* jvmci_name = jvmci_nmethod_data()->name();
1601 if (jvmci_name != nullptr) {
1602 log->print(" jvmci_mirror_name='");
1603 log->text("%s", jvmci_name);
1604 log->print("'");
1605 }
1606 }
1607 #endif
1608 }
1609
1610
// Emit " <name>_offset='<byte offset of the section from the nmethod
// start>'" to the log, but only when the section is non-empty
// (name_end() != name_begin()).
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'" , \
               p2i(name##_begin()) - p2i(this))
1615
1616
1697 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1698 if (oop_maps() != nullptr) {
1699 tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1700 oop_maps()->print_on(tty);
1701 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1702 }
1703 }
1704 #endif
1705 } else {
1706 print(); // print the header part only.
1707 }
1708
1709 #if defined(SUPPORT_DATA_STRUCTS)
1710 if (AbstractDisassembler::show_structs()) {
1711 methodHandle mh(Thread::current(), _method);
1712 if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1713 print_scopes();
1714 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1715 }
1716 if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1717 print_relocations_on(tty);
1718 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1719 }
1720 if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1721 print_dependencies_on(tty);
1722 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1723 }
1724 if (printmethod || PrintExceptionHandlers) {
1725 print_handler_table();
1726 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1727 print_nul_chk_table();
1728 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1729 }
1730
1731 if (printmethod) {
1732 print_recorded_oops();
1733 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1734 print_recorded_metadata();
1735 tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1736 }
1737 }
1945 Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1946 }
1947
1948 bool nmethod::is_maybe_on_stack() {
1949 // If the condition below is true, it means that the nmethod was found to
1950 // be alive the previous completed marking cycle.
1951 return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1952 }
1953
1954 void nmethod::inc_decompile_count() {
1955 if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1956 // Could be gated by ProfileTraps, but do not bother...
1957 Method* m = method();
1958 if (m == nullptr) return;
1959 MethodData* mdo = m->method_data();
1960 if (mdo == nullptr) return;
1961 // There is a benign race here. See comments in methodData.hpp.
1962 mdo->inc_decompile_count();
1963 }
1964
// Atomically bump the profiling counter associated with this nmethod.
void nmethod::inc_method_profiling_count() {
  Atomic::inc(&_method_profiling_count);
}
1968
1969 uint64_t nmethod::method_profiling_count() {
1970 return _method_profiling_count;
1971 }
1972
1973 bool nmethod::try_transition(signed char new_state_int) {
1974 signed char new_state = new_state_int;
1975 assert_lock_strong(NMethodState_lock);
1976 signed char old_state = _state;
1977 if (old_state >= new_state) {
1978 // Ensure monotonicity of transitions.
1979 return false;
1980 }
1981 Atomic::store(&_state, new_state);
1982 return true;
1983 }
1984
1985 void nmethod::invalidate_osr_method() {
1986 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1987 // Remove from list of active nmethods
1988 if (method() != nullptr) {
1989 method()->method_holder()->remove_osr_nmethod(this);
1990 }
1991 }
1992
1998 os::current_thread_id());
1999 log_identity(xtty);
2000 xtty->stamp();
2001 xtty->end_elem();
2002 }
2003 }
2004
2005 CompileTask::print_ul(this, "made not entrant");
2006 if (PrintCompilation) {
2007 print_on_with_msg(tty, "made not entrant");
2008 }
2009 }
2010
2011 void nmethod::unlink_from_method() {
2012 if (method() != nullptr) {
2013 method()->unlink_code(this);
2014 }
2015 }
2016
2017 // Invalidate code
2018 bool nmethod::make_not_entrant(bool make_not_entrant) {
2019 // This can be called while the system is already at a safepoint which is ok
2020 NoSafepointVerifier nsv;
2021
2022 if (is_unloading()) {
2023 // If the nmethod is unloading, then it is already not entrant through
2024 // the nmethod entry barriers. No need to do anything; GC will unload it.
2025 return false;
2026 }
2027
2028 if (Atomic::load(&_state) == not_entrant) {
2029 // Avoid taking the lock if already in required state.
2030 // This is safe from races because the state is an end-state,
2031 // which the nmethod cannot back out of once entered.
2032 // No need for fencing either.
2033 return false;
2034 }
2035
2036 {
2037 // Enter critical section. Does not block for safepoint.
2038 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
2061 }
2062
2063 BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2064 if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2065 // If nmethod entry barriers are not supported, we won't mark
2066 // nmethods as on-stack when they become on-stack. So we
2067 // degrade to a less accurate flushing strategy, for now.
2068 mark_as_maybe_on_stack();
2069 }
2070
2071 // Change state
2072 bool success = try_transition(not_entrant);
2073 assert(success, "Transition can't fail");
2074
2075 // Log the transition once
2076 log_state_change();
2077
2078 // Remove nmethod from method.
2079 unlink_from_method();
2080
2081 if (make_not_entrant) {
2082 // Keep cached code if it was simply replaced
2083 // otherwise make it not entrant too.
2084 SCCache::invalidate(_scc_entry);
2085 }
2086
2087 CompileBroker::log_not_entrant(this);
2088 } // leave critical region under NMethodState_lock
2089
2090 #if INCLUDE_JVMCI
2091 // Invalidate can't occur while holding the NMethodState_lock
2092 JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2093 if (nmethod_data != nullptr) {
2094 nmethod_data->invalidate_nmethod_mirror(this);
2095 }
2096 #endif
2097
2098 #ifdef ASSERT
2099 if (is_osr_method() && method() != nullptr) {
2100 // Make sure osr nmethod is invalidated, i.e. not on the list
2101 bool found = method()->method_holder()->remove_osr_nmethod(this);
2102 assert(!found, "osr nmethod should have been invalidated");
2103 }
2104 #endif
2105
2106 return true;
2107 }
2211 MethodHandles::clean_dependency_context(call_site);
2212 } else {
2213 InstanceKlass* ik = deps.context_type();
2214 if (ik == nullptr) {
2215 continue; // ignore things like evol_method
2216 }
2217 // During GC liveness of dependee determines class that needs to be updated.
2218 // The GC may clean dependency contexts concurrently and in parallel.
2219 ik->clean_dependency_context();
2220 }
2221 }
2222 }
2223 }
2224
2225 void nmethod::post_compiled_method(CompileTask* task) {
2226 task->mark_success();
2227 task->set_nm_content_size(content_size());
2228 task->set_nm_insts_size(insts_size());
2229 task->set_nm_total_size(total_size());
2230
2231 // task->is_scc() is true only for loaded cached code.
2232 // nmethod::_scc_entry is set for loaded and stored cached code
2233 // to invalidate the entry when nmethod is deoptimized.
2234 // There is option to not store in archive cached code.
2235 guarantee((_scc_entry != nullptr) || !task->is_scc() || VerifyCachedCode, "sanity");
2236
2237 // JVMTI -- compiled method notification (must be done outside lock)
2238 post_compiled_method_load_event();
2239
2240 if (CompilationLog::log() != nullptr) {
2241 CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2242 }
2243
2244 const DirectiveSet* directive = task->directive();
2245 maybe_print_nmethod(directive);
2246 }
2247
2248 // ------------------------------------------------------------------
2249 // post_compiled_method_load_event
2250 // new method for install_code() path
2251 // Transfer information from compilation to jvmti
2252 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2253 // This is a bad time for a safepoint. We don't want
2254 // this nmethod to get unloaded while we're queueing the event.
2255 NoSafepointVerifier nsv;
2256
3150 p2i(nul_chk_table_end()),
3151 nul_chk_table_size());
3152 if (handler_table_size() > 0) st->print_cr(" handler table [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3153 p2i(handler_table_begin()),
3154 p2i(handler_table_end()),
3155 handler_table_size());
3156 if (scopes_pcs_size () > 0) st->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3157 p2i(scopes_pcs_begin()),
3158 p2i(scopes_pcs_end()),
3159 scopes_pcs_size());
3160 if (scopes_data_size () > 0) st->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3161 p2i(scopes_data_begin()),
3162 p2i(scopes_data_end()),
3163 scopes_data_size());
3164 #if INCLUDE_JVMCI
3165 if (speculations_size () > 0) st->print_cr(" speculations [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3166 p2i(speculations_begin()),
3167 p2i(speculations_end()),
3168 speculations_size());
3169 #endif
3170 if (SCCache::is_on() && _scc_entry != nullptr) {
3171 _scc_entry->print(st);
3172 }
3173 }
3174
// Disassemble this nmethod's code to tty.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;   // keep the whole disassembly together in the output
  // Call the specialized decode method of this class.
  decode(tty);
}
3181
3182 #ifndef PRODUCT // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3183
3184 void nmethod::print_dependencies_on(outputStream* out) {
3185 ResourceMark rm;
3186 stringStream st;
3187 st.print_cr("Dependencies:");
3188 for (Dependencies::DepStream deps(this); deps.next(); ) {
3189 deps.print_dependency(&st);
3190 InstanceKlass* ctxk = deps.context_type();
3191 if (ctxk != nullptr) {
3192 if (ctxk->is_dependent_nmethod(this)) {
3252 st->print("scopes:");
3253 if (scopes_pcs_begin() < scopes_pcs_end()) {
3254 st->cr();
3255 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3256 if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3257 continue;
3258
3259 ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3260 while (sd != nullptr) {
3261 sd->print_on(st, p); // print output ends with a newline
3262 sd = sd->sender();
3263 }
3264 }
3265 } else {
3266 st->print_cr(" <list empty>");
3267 }
3268 }
3269 #endif
3270
3271 #ifndef PRODUCT // RelocIterator does support printing only then.
3272 void nmethod::print_relocations_on(outputStream* st) {
3273 ResourceMark m; // in case methods get printed via the debugger
3274 st->print_cr("relocations:");
3275 RelocIterator iter(this);
3276 iter.print_on(st);
3277 }
3278 #endif
3279
3280 void nmethod::print_pcs_on(outputStream* st) {
3281 ResourceMark m; // in case methods get printed via debugger
3282 st->print("pc-bytecode offsets:");
3283 if (scopes_pcs_begin() < scopes_pcs_end()) {
3284 st->cr();
3285 for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3286 p->print_on(st, this); // print output ends with a newline
3287 }
3288 } else {
3289 st->print_cr(" <list empty>");
3290 }
3291 }
3292
// Print this nmethod's exception handler table, with addresses decoded
// relative to the start of this nmethod's code.
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3296
3611 else obj->print_value_on(&st);
3612 st.print(")");
3613 return st.as_string();
3614 }
3615 case relocInfo::metadata_type: {
3616 stringStream st;
3617 metadata_Relocation* r = iter.metadata_reloc();
3618 Metadata* obj = r->metadata_value();
3619 st.print("metadata(");
3620 if (obj == nullptr) st.print("nullptr");
3621 else obj->print_value_on(&st);
3622 st.print(")");
3623 return st.as_string();
3624 }
3625 case relocInfo::runtime_call_type:
3626 case relocInfo::runtime_call_w_cp_type: {
3627 stringStream st;
3628 st.print("runtime_call");
3629 CallRelocation* r = (CallRelocation*)iter.reloc();
3630 address dest = r->destination();
3631 if (StubRoutines::contains(dest)) {
3632 StubCodeDesc* desc = StubCodeDesc::desc_for(dest);
3633 if (desc == nullptr) {
3634 desc = StubCodeDesc::desc_for(dest + frame::pc_return_offset);
3635 }
3636 if (desc != nullptr) {
3637 st.print(" Stub::%s", desc->name());
3638 return st.as_string();
3639 }
3640 }
3641 CodeBlob* cb = CodeCache::find_blob(dest);
3642 if (cb != nullptr) {
3643 st.print(" %s", cb->name());
3644 } else {
3645 ResourceMark rm;
3646 const int buflen = 1024;
3647 char* buf = NEW_RESOURCE_ARRAY(char, buflen);
3648 int offset;
3649 if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
3650 st.print(" %s", buf);
3651 if (offset != 0) {
3652 st.print("+%d", offset);
3653 }
3654 }
3655 }
3656 return st.as_string();
3657 }
3658 case relocInfo::virtual_call_type: {
3659 stringStream st;
3660 st.print_raw("virtual_call");
|