< prev index next >

src/hotspot/share/code/nmethod.cpp

Print this page

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.inline.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"

  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compileTask.hpp"
  38 #include "compiler/compilerDirectives.hpp"
  39 #include "compiler/compilerOracle.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"
  47 #include "interpreter/bytecode.inline.hpp"
  48 #include "jvm.h"
  49 #include "logging/log.hpp"
  50 #include "logging/logStream.hpp"
  51 #include "memory/allocation.inline.hpp"
  52 #include "memory/resourceArea.hpp"

 772 
 773 void nmethod::clear_inline_caches() {
 774   assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
 775   RelocIterator iter(this);
 776   while (iter.next()) {
 777     iter.reloc()->clear_inline_cache();
 778   }
 779 }
 780 
 781 #ifdef ASSERT
 782 // Check class_loader is alive for this bit of metadata.
 783 class CheckClass : public MetadataClosure {
 784   void do_metadata(Metadata* md) {
 785     Klass* klass = nullptr;
 786     if (md->is_klass()) {
 787       klass = ((Klass*)md);
 788     } else if (md->is_method()) {
 789       klass = ((Method*)md)->method_holder();
 790     } else if (md->is_methodData()) {
 791       klass = ((MethodData*)md)->method()->method_holder();


 792     } else {
 793       md->print();
 794       ShouldNotReachHere();
 795     }
 796     assert(klass->is_loader_alive(), "must be alive");
 797   }
 798 };
 799 #endif // ASSERT
 800 
 801 
// NOTE(review): despite the name, this helper unconditionally calls
// clean_metadata(); the dead-metadata check presumably happens inside
// CompiledIC::clean_metadata() -- confirm against compiledIC.cpp.
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
 805 
 806 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
 807 template <typename CallsiteT>
 808 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
 809                                          bool clean_all) {
 810   CodeBlob* cb = CodeCache::find_blob(callsite->destination());
 811   if (!cb->is_nmethod()) {

1122     debug_only(nm->verify();) // might block
1123 
1124     nm->log_new_nmethod();
1125   }
1126   return nm;
1127 }
1128 
1129 nmethod* nmethod::new_nmethod(const methodHandle& method,
1130   int compile_id,
1131   int entry_bci,
1132   CodeOffsets* offsets,
1133   int orig_pc_offset,
1134   DebugInformationRecorder* debug_info,
1135   Dependencies* dependencies,
1136   CodeBuffer* code_buffer, int frame_size,
1137   OopMapSet* oop_maps,
1138   ExceptionHandlerTable* handler_table,
1139   ImplicitExceptionTable* nul_chk_table,
1140   AbstractCompiler* compiler,
1141   CompLevel comp_level

1142 #if INCLUDE_JVMCI
1143   , char* speculations,
1144   int speculations_len,
1145   JVMCINMethodData* jvmci_data
1146 #endif
1147 )
1148 {
1149   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1150   code_buffer->finalize_oop_references(method);
1151   // create nmethod
1152   nmethod* nm = nullptr;
1153   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1154 #if INCLUDE_JVMCI
1155     if (compiler->is_jvmci()) {
1156       nmethod_size += align_up(jvmci_data->size(), oopSize);
1157     }
1158 #endif
1159 
1160   int immutable_data_size =
1161       adjust_pcs_size(debug_info->pcs_size())

1166     + align_up(speculations_len                  , oopSize)
1167 #endif
1168     + align_up(debug_info->data_size()           , oopSize);
1169 
1170   // First, allocate space for immutable data in C heap.
1171   address immutable_data = nullptr;
1172   if (immutable_data_size > 0) {
1173     immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1174     if (immutable_data == nullptr) {
1175       vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1176       return nullptr;
1177     }
1178   }
1179   {
1180     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1181 
1182     nm = new (nmethod_size, comp_level)
1183     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size,
1184             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1185             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1186             handler_table, nul_chk_table, compiler, comp_level
1187 #if INCLUDE_JVMCI
1188             , speculations,
1189             speculations_len,
1190             jvmci_data
1191 #endif
1192             );
1193 
1194     if (nm != nullptr) {
1195       // To make dependency checking during class loading fast, record
1196       // the nmethod dependencies in the classes it is dependent on.
1197       // This allows the dependency checking code to simply walk the
1198       // class hierarchy above the loaded class, checking only nmethods
1199       // which are dependent on those classes.  The slow way is to
1200       // check every nmethod for dependencies which makes it linear in
1201       // the number of methods compiled.  For applications with a lot
1202       // classes the slow way is too slow.
1203       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1204         if (deps.type() == Dependencies::call_site_target_value) {
1205           // CallSite dependencies are managed on per-CallSite instance basis.
1206           oop call_site = deps.argument_oop(0);
1207           MethodHandles::add_dependent_nmethod(call_site, nm);
1208         } else {
1209           InstanceKlass* ik = deps.context_type();
1210           if (ik == nullptr) {
1211             continue;  // ignore things like evol_method
1212           }
1213           // record this nmethod as dependent on this klass
1214           ik->add_dependent_nmethod(nm);
1215         }
1216       }
1217       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1218     }
1219   }
1220   // Do verification and logging outside CodeCache_lock.
1221   if (nm != nullptr) {












1222     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1223     DEBUG_ONLY(nm->verify();)
1224     nm->log_new_nmethod();
1225   }
1226   return nm;
1227 }
1228 
// Fill in default values for various fields
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache            = nullptr;
  _gc_data                    = nullptr;
  _oops_do_mark_link          = nullptr;
  _compiled_ic_data           = nullptr;

#if INCLUDE_RTM_OPT
  _rtm_state                  = NoRTM;
#endif
  _is_unloading_state         = 0;
  _state                      = not_installed;

  // Flag bits start cleared; they are set later as the nmethod is filled in.
  _has_unsafe_access          = 0;
  _has_method_handle_invokes  = 0;
  _has_wide_vectors           = 0;
  _has_monitors               = 0;
  _has_flushed_dependencies   = 0;
  _is_unlinked                = 0;
  _load_reported              = 0; // jvmti state

  _deoptimization_status      = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry offsets must fit in 16 bits; CHECKED_CAST asserts on overflow.
  CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  int size = code_buffer->main_code_size();
  assert(size >= 0, "should be initialized");
  // Use instructions section size if it is 0 (e.g. native wrapper)
  if (size == 0) size = code_size(); // requires _stub_offset to be set
  assert(size <= code_size(), "incorrect size: %d > %d", size, code_size());
  _inline_insts_size = size - _verified_entry_offset
                     - code_buffer->total_skipped_instructions_size();
  assert(_inline_insts_size >= 0, "sanity");
}

1309 
1310     _osr_entry_point         = nullptr;
1311     _pc_desc_container       = nullptr;
1312     _entry_bci               = InvocationEntryBci;
1313     _compile_id              = compile_id;
1314     _comp_level              = CompLevel_none;
1315     _compiler_type           = type;
1316     _orig_pc_offset          = 0;
1317     _num_stack_arg_slots     = _method->constMethod()->num_stack_arg_slots();
1318 
1319     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1320       // Continuation enter intrinsic
1321       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1322     } else {
1323       _exception_offset      = 0;
1324     }
1325     // Native wrappers do not have deopt handlers. Make the values
1326     // something that will never match a pc like the nmethod vtable entry
1327     _deopt_handler_offset    = 0;
1328     _deopt_mh_handler_offset = 0;


1329     _unwind_handler_offset   = 0;
1330 
1331     CHECKED_CAST(_metadata_offset, uint16_t, (align_up(code_buffer->total_oop_size(), oopSize)));
1332     int data_end_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
1333 #if INCLUDE_JVMCI
1334     // jvmci_data_size is 0 in native wrapper but we need to set offset
1335     // to correctly calculate metadata_end address
1336     CHECKED_CAST(_jvmci_data_offset, uint16_t, data_end_offset);
1337 #endif
1338     assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
1339 
1340     // native wrapper does not have read-only data but we need unique not null address
1341     _immutable_data          = data_end();
1342     _immutable_data_size     = 0;
1343     _nul_chk_table_offset    = 0;
1344     _handler_table_offset    = 0;
1345     _scopes_pcs_offset       = 0;
1346     _scopes_data_offset      = 0;
1347 #if INCLUDE_JVMCI
1348     _speculations_offset     = 0;

1369     // This is both handled in decode2(), called via print_code() -> decode()
1370     if (PrintNativeNMethods) {
1371       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1372       print_code();
1373       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1374 #if defined(SUPPORT_DATA_STRUCTS)
1375       if (AbstractDisassembler::show_structs()) {
1376         if (oop_maps != nullptr) {
1377           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1378           oop_maps->print_on(tty);
1379           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1380         }
1381       }
1382 #endif
1383     } else {
1384       print(); // print the header part only.
1385     }
1386 #if defined(SUPPORT_DATA_STRUCTS)
1387     if (AbstractDisassembler::show_structs()) {
1388       if (PrintRelocations) {
1389         print_relocations();
1390         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1391       }
1392     }
1393 #endif
1394     if (xtty != nullptr) {
1395       xtty->tail("print_native_nmethod");
1396     }
1397   }
1398 }
1399 
// Allocate nmethod storage from the CodeCache; comp_level selects the code
// heap via CodeCache::get_code_blob_type().  The C++ 'size' argument is
// ignored -- the caller-computed nmethod_size is used instead.
void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
}
1403 
1404 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1405   // Try MethodNonProfiled and MethodProfiled.
1406   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1407   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1408   // Try NonNMethod or give up.
1409   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);

1412 // For normal JIT compiled code
1413 nmethod::nmethod(
1414   Method* method,
1415   CompilerType type,
1416   int nmethod_size,
1417   int immutable_data_size,
1418   int compile_id,
1419   int entry_bci,
1420   address immutable_data,
1421   CodeOffsets* offsets,
1422   int orig_pc_offset,
1423   DebugInformationRecorder* debug_info,
1424   Dependencies* dependencies,
1425   CodeBuffer *code_buffer,
1426   int frame_size,
1427   OopMapSet* oop_maps,
1428   ExceptionHandlerTable* handler_table,
1429   ImplicitExceptionTable* nul_chk_table,
1430   AbstractCompiler* compiler,
1431   CompLevel comp_level

1432 #if INCLUDE_JVMCI
1433   , char* speculations,
1434   int speculations_len,
1435   JVMCINMethodData* jvmci_data
1436 #endif
1437   )
1438   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1439              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
1440   _deoptimization_generation(0),
1441   _gc_epoch(CodeCache::gc_epoch()),
1442   _method(method),
1443   _osr_link(nullptr)
1444 {
1445   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1446   {
1447     debug_only(NoSafepointVerifier nsv;)
1448     assert_locked_or_safepoint(CodeCache_lock);
1449 
1450     init_defaults(code_buffer, offsets);


1451 
1452     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1453     _entry_bci       = entry_bci;
1454     _compile_id      = compile_id;
1455     _comp_level      = comp_level;
1456     _compiler_type   = type;
1457     _orig_pc_offset  = orig_pc_offset;
1458 
1459     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1460 
1461     set_ctable_begin(header_begin() + content_offset());
1462 
1463 #if INCLUDE_JVMCI
1464     if (compiler->is_jvmci()) {
1465       // JVMCI might not produce any stub sections
1466       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1467         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1468       } else {
1469         _exception_offset        = -1;
1470       }

1562 #if INCLUDE_JVMCI
1563     // Copy speculations to nmethod
1564     if (speculations_size() != 0) {
1565       memcpy(speculations_begin(), speculations, speculations_len);
1566     }
1567 #endif
1568 
1569     post_init();
1570 
1571     // we use the information of entry points to find out if a method is
1572     // static or non static
1573     assert(compiler->is_c2() || compiler->is_jvmci() ||
1574            _method->is_static() == (entry_point() == verified_entry_point()),
1575            " entry points must be same for static methods and vice versa");
1576   }
1577 }
1578 
1579 // Print a short set of xml attributes to identify this nmethod.  The
1580 // output should be embedded in some other element.
1581 void nmethod::log_identity(xmlStream* log) const {
1582   log->print(" compile_id='%d'", compile_id());

1583   const char* nm_kind = compile_kind();
1584   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1585   log->print(" compiler='%s'", compiler_name());
1586   if (TieredCompilation) {
1587     log->print(" level='%d'", comp_level());
1588   }
1589 #if INCLUDE_JVMCI
1590   if (jvmci_nmethod_data() != nullptr) {
1591     const char* jvmci_name = jvmci_nmethod_data()->name();
1592     if (jvmci_name != nullptr) {
1593       log->print(" jvmci_mirror_name='");
1594       log->text("%s", jvmci_name);
1595       log->print("'");
1596     }
1597   }
1598 #endif
1599 }
1600 
1601 
// Emit " <name>_offset='<n>'" on 'log', but only when the <name> section is
// non-empty (name_begin() != name_end()).  Offsets are relative to 'this'.
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='" INTX_FORMAT "'"    , \
               p2i(name##_begin()) - p2i(this))
1607 

1688       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1689       if (oop_maps() != nullptr) {
1690         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1691         oop_maps()->print_on(tty);
1692         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1693       }
1694     }
1695 #endif
1696   } else {
1697     print(); // print the header part only.
1698   }
1699 
1700 #if defined(SUPPORT_DATA_STRUCTS)
1701   if (AbstractDisassembler::show_structs()) {
1702     methodHandle mh(Thread::current(), _method);
1703     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1704       print_scopes();
1705       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1706     }
1707     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1708       print_relocations();
1709       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1710     }
1711     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1712       print_dependencies_on(tty);
1713       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1714     }
1715     if (printmethod || PrintExceptionHandlers) {
1716       print_handler_table();
1717       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1718       print_nul_chk_table();
1719       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1720     }
1721 
1722     if (printmethod) {
1723       print_recorded_oops();
1724       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1725       print_recorded_metadata();
1726       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1727     }
1728   }

1936   Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1937 }
1938 
1939 bool nmethod::is_maybe_on_stack() {
1940   // If the condition below is true, it means that the nmethod was found to
1941   // be alive the previous completed marking cycle.
1942   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1943 }
1944 
1945 void nmethod::inc_decompile_count() {
1946   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1947   // Could be gated by ProfileTraps, but do not bother...
1948   Method* m = method();
1949   if (m == nullptr)  return;
1950   MethodData* mdo = m->method_data();
1951   if (mdo == nullptr)  return;
1952   // There is a benign race here.  See comments in methodData.hpp.
1953   mdo->inc_decompile_count();
1954 }
1955 








1956 bool nmethod::try_transition(signed char new_state_int) {
1957   signed char new_state = new_state_int;
1958   assert_lock_strong(NMethodState_lock);
1959   signed char old_state = _state;
1960   if (old_state >= new_state) {
1961     // Ensure monotonicity of transitions.
1962     return false;
1963   }
1964   Atomic::store(&_state, new_state);
1965   return true;
1966 }
1967 
1968 void nmethod::invalidate_osr_method() {
1969   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1970   // Remove from list of active nmethods
1971   if (method() != nullptr) {
1972     method()->method_holder()->remove_osr_nmethod(this);
1973   }
1974 }
1975 

1981                        os::current_thread_id());
1982       log_identity(xtty);
1983       xtty->stamp();
1984       xtty->end_elem();
1985     }
1986   }
1987 
1988   CompileTask::print_ul(this, "made not entrant");
1989   if (PrintCompilation) {
1990     print_on(tty, "made not entrant");
1991   }
1992 }
1993 
1994 void nmethod::unlink_from_method() {
1995   if (method() != nullptr) {
1996     method()->unlink_code(this);
1997   }
1998 }
1999 
2000 // Invalidate code
2001 bool nmethod::make_not_entrant() {
2002   // This can be called while the system is already at a safepoint which is ok
2003   NoSafepointVerifier nsv;
2004 
2005   if (is_unloading()) {
2006     // If the nmethod is unloading, then it is already not entrant through
2007     // the nmethod entry barriers. No need to do anything; GC will unload it.
2008     return false;
2009   }
2010 
2011   if (Atomic::load(&_state) == not_entrant) {
2012     // Avoid taking the lock if already in required state.
2013     // This is safe from races because the state is an end-state,
2014     // which the nmethod cannot back out of once entered.
2015     // No need for fencing either.
2016     return false;
2017   }
2018 
2019   {
2020     // Enter critical section.  Does not block for safepoint.
2021     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2044     }
2045 
2046     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2047     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2048       // If nmethod entry barriers are not supported, we won't mark
2049       // nmethods as on-stack when they become on-stack. So we
2050       // degrade to a less accurate flushing strategy, for now.
2051       mark_as_maybe_on_stack();
2052     }
2053 
2054     // Change state
2055     bool success = try_transition(not_entrant);
2056     assert(success, "Transition can't fail");
2057 
2058     // Log the transition once
2059     log_state_change();
2060 
2061     // Remove nmethod from method.
2062     unlink_from_method();
2063 







2064   } // leave critical region under NMethodState_lock
2065 
2066 #if INCLUDE_JVMCI
2067   // Invalidate can't occur while holding the Patching lock
2068   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2069   if (nmethod_data != nullptr) {
2070     nmethod_data->invalidate_nmethod_mirror(this);
2071   }
2072 #endif
2073 
2074 #ifdef ASSERT
2075   if (is_osr_method() && method() != nullptr) {
2076     // Make sure osr nmethod is invalidated, i.e. not on the list
2077     bool found = method()->method_holder()->remove_osr_nmethod(this);
2078     assert(!found, "osr nmethod should have been invalidated");
2079   }
2080 #endif
2081 
2082   return true;
2083 }

2183         MethodHandles::clean_dependency_context(call_site);
2184       } else {
2185         InstanceKlass* ik = deps.context_type();
2186         if (ik == nullptr) {
2187           continue;  // ignore things like evol_method
2188         }
2189         // During GC liveness of dependee determines class that needs to be updated.
2190         // The GC may clean dependency contexts concurrently and in parallel.
2191         ik->clean_dependency_context();
2192       }
2193     }
2194   }
2195 }
2196 
2197 void nmethod::post_compiled_method(CompileTask* task) {
2198   task->mark_success();
2199   task->set_nm_content_size(content_size());
2200   task->set_nm_insts_size(insts_size());
2201   task->set_nm_total_size(total_size());
2202 






2203   // JVMTI -- compiled method notification (must be done outside lock)
2204   post_compiled_method_load_event();
2205 
2206   if (CompilationLog::log() != nullptr) {
2207     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2208   }
2209 
2210   const DirectiveSet* directive = task->directive();
2211   maybe_print_nmethod(directive);
2212 }
2213 
2214 // ------------------------------------------------------------------
2215 // post_compiled_method_load_event
2216 // new method for install_code() path
2217 // Transfer information from compilation to jvmti
2218 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2219   // This is a bad time for a safepoint.  We don't want
2220   // this nmethod to get unloaded while we're queueing the event.
2221   NoSafepointVerifier nsv;
2222 

3121                                              p2i(nul_chk_table_end()),
3122                                              nul_chk_table_size());
3123   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3124                                              p2i(handler_table_begin()),
3125                                              p2i(handler_table_end()),
3126                                              handler_table_size());
3127   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3128                                              p2i(scopes_pcs_begin()),
3129                                              p2i(scopes_pcs_end()),
3130                                              scopes_pcs_size());
3131   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3132                                              p2i(scopes_data_begin()),
3133                                              p2i(scopes_data_end()),
3134                                              scopes_data_size());
3135 #if INCLUDE_JVMCI
3136   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3137                                              p2i(speculations_begin()),
3138                                              p2i(speculations_end()),
3139                                              speculations_size());
3140 #endif



3141 }
3142 
// Disassemble this nmethod's code to tty, holding the tty lock for the
// duration so the output is not interleaved with other threads.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3149 
3150 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3151 
3152 void nmethod::print_dependencies_on(outputStream* out) {
3153   ResourceMark rm;
3154   stringStream st;
3155   st.print_cr("Dependencies:");
3156   for (Dependencies::DepStream deps(this); deps.next(); ) {
3157     deps.print_dependency(&st);
3158     InstanceKlass* ctxk = deps.context_type();
3159     if (ctxk != nullptr) {
3160       if (ctxk->is_dependent_nmethod(this)) {

3220   st->print("scopes:");
3221   if (scopes_pcs_begin() < scopes_pcs_end()) {
3222     st->cr();
3223     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3224       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3225         continue;
3226 
3227       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3228       while (sd != nullptr) {
3229         sd->print_on(st, p);  // print output ends with a newline
3230         sd = sd->sender();
3231       }
3232     }
3233   } else {
3234     st->print_cr(" <list empty>");
3235   }
3236 }
3237 #endif
3238 
#ifndef PRODUCT  // RelocIterator does support printing only then.
// Dump all relocation entries of this nmethod to tty.
void nmethod::print_relocations() {
  ResourceMark m;       // in case methods get printed via the debugger
  tty->print_cr("relocations:");
  RelocIterator iter(this);
  iter.print();
}
#endif
3247 
3248 void nmethod::print_pcs_on(outputStream* st) {
3249   ResourceMark m;       // in case methods get printed via debugger
3250   st->print("pc-bytecode offsets:");
3251   if (scopes_pcs_begin() < scopes_pcs_end()) {
3252     st->cr();
3253     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3254       p->print_on(st, this);  // print output ends with a newline
3255     }
3256   } else {
3257     st->print_cr(" <list empty>");
3258   }
3259 }
3260 
// Reconstruct this nmethod's exception handler table and print it, with
// pc values shown relative to code_begin().
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3264 

3579           else obj->print_value_on(&st);
3580           st.print(")");
3581           return st.as_string();
3582         }
3583         case relocInfo::metadata_type: {
3584           stringStream st;
3585           metadata_Relocation* r = iter.metadata_reloc();
3586           Metadata* obj = r->metadata_value();
3587           st.print("metadata(");
3588           if (obj == nullptr) st.print("nullptr");
3589           else obj->print_value_on(&st);
3590           st.print(")");
3591           return st.as_string();
3592         }
3593         case relocInfo::runtime_call_type:
3594         case relocInfo::runtime_call_w_cp_type: {
3595           stringStream st;
3596           st.print("runtime_call");
3597           CallRelocation* r = (CallRelocation*)iter.reloc();
3598           address dest = r->destination();










3599           CodeBlob* cb = CodeCache::find_blob(dest);
3600           if (cb != nullptr) {
3601             st.print(" %s", cb->name());
3602           } else {
3603             ResourceMark rm;
3604             const int buflen = 1024;
3605             char* buf = NEW_RESOURCE_ARRAY(char, buflen);
3606             int offset;
3607             if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
3608               st.print(" %s", buf);
3609               if (offset != 0) {
3610                 st.print("+%d", offset);
3611               }
3612             }
3613           }
3614           return st.as_string();
3615         }
3616         case relocInfo::virtual_call_type: {
3617           stringStream st;
3618           st.print_raw("virtual_call");

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.inline.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "code/SCCache.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compileTask.hpp"
  39 #include "compiler/compilerDirectives.hpp"
  40 #include "compiler/compilerOracle.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"
  47 #include "gc/shared/collectedHeap.hpp"
  48 #include "interpreter/bytecode.inline.hpp"
  49 #include "jvm.h"
  50 #include "logging/log.hpp"
  51 #include "logging/logStream.hpp"
  52 #include "memory/allocation.inline.hpp"
  53 #include "memory/resourceArea.hpp"

 773 
// Clear every inline cache recorded in this nmethod's relocation info.
// Safepoint-only: no thread may concurrently execute through or patch
// the inline caches while they are being cleared.
void nmethod::clear_inline_caches() {
  assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
  RelocIterator iter(this);
  while (iter.next()) {
    iter.reloc()->clear_inline_cache();
  }
}
 781 
 782 #ifdef ASSERT
 783 // Check class_loader is alive for this bit of metadata.
 784 class CheckClass : public MetadataClosure {
 785   void do_metadata(Metadata* md) {
 786     Klass* klass = nullptr;
 787     if (md->is_klass()) {
 788       klass = ((Klass*)md);
 789     } else if (md->is_method()) {
 790       klass = ((Method*)md)->method_holder();
 791     } else if (md->is_methodData()) {
 792       klass = ((MethodData*)md)->method()->method_holder();
 793     } else if (md->is_methodCounters()) {
 794       klass = ((MethodCounters*)md)->method()->method_holder();
 795     } else {
 796       md->print();
 797       ShouldNotReachHere();
 798     }
 799     assert(klass->is_loader_alive(), "must be alive");
 800   }
 801 };
 802 #endif // ASSERT
 803 
 804 
// Helper used during nmethod cleaning: delegates to the inline cache, which
// drops its cached metadata. (Despite the name, the liveness decision is
// made inside CompiledIC::clean_metadata, not here.)
static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  ic->clean_metadata();
}
 808 
 809 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
 810 template <typename CallsiteT>
 811 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
 812                                          bool clean_all) {
 813   CodeBlob* cb = CodeCache::find_blob(callsite->destination());
 814   if (!cb->is_nmethod()) {

1125     debug_only(nm->verify();) // might block
1126 
1127     nm->log_new_nmethod();
1128   }
1129   return nm;
1130 }
1131 
1132 nmethod* nmethod::new_nmethod(const methodHandle& method,
1133   int compile_id,
1134   int entry_bci,
1135   CodeOffsets* offsets,
1136   int orig_pc_offset,
1137   DebugInformationRecorder* debug_info,
1138   Dependencies* dependencies,
1139   CodeBuffer* code_buffer, int frame_size,
1140   OopMapSet* oop_maps,
1141   ExceptionHandlerTable* handler_table,
1142   ImplicitExceptionTable* nul_chk_table,
1143   AbstractCompiler* compiler,
1144   CompLevel comp_level
1145   , SCCEntry* scc_entry
1146 #if INCLUDE_JVMCI
1147   , char* speculations,
1148   int speculations_len,
1149   JVMCINMethodData* jvmci_data
1150 #endif
1151 )
1152 {
1153   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1154   code_buffer->finalize_oop_references(method);
1155   // create nmethod
1156   nmethod* nm = nullptr;
1157   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1158 #if INCLUDE_JVMCI
1159     if (compiler->is_jvmci()) {
1160       nmethod_size += align_up(jvmci_data->size(), oopSize);
1161     }
1162 #endif
1163 
1164   int immutable_data_size =
1165       adjust_pcs_size(debug_info->pcs_size())

1170     + align_up(speculations_len                  , oopSize)
1171 #endif
1172     + align_up(debug_info->data_size()           , oopSize);
1173 
1174   // First, allocate space for immutable data in C heap.
1175   address immutable_data = nullptr;
1176   if (immutable_data_size > 0) {
1177     immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1178     if (immutable_data == nullptr) {
1179       vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1180       return nullptr;
1181     }
1182   }
1183   {
1184     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1185 
1186     nm = new (nmethod_size, comp_level)
1187     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size,
1188             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1189             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1190             handler_table, nul_chk_table, compiler, comp_level, scc_entry
1191 #if INCLUDE_JVMCI
1192             , speculations,
1193             speculations_len,
1194             jvmci_data
1195 #endif
1196             );
1197 
1198     if (nm != nullptr) {
1199       // To make dependency checking during class loading fast, record
1200       // the nmethod dependencies in the classes it is dependent on.
1201       // This allows the dependency checking code to simply walk the
1202       // class hierarchy above the loaded class, checking only nmethods
1203       // which are dependent on those classes.  The slow way is to
1204       // check every nmethod for dependencies which makes it linear in
1205       // the number of methods compiled.  For applications with a lot
1206       // classes the slow way is too slow.
1207       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1208         if (deps.type() == Dependencies::call_site_target_value) {
1209           // CallSite dependencies are managed on per-CallSite instance basis.
1210           oop call_site = deps.argument_oop(0);
1211           MethodHandles::add_dependent_nmethod(call_site, nm);
1212         } else {
1213           InstanceKlass* ik = deps.context_type();
1214           if (ik == nullptr) {
1215             continue;  // ignore things like evol_method
1216           }
1217           // record this nmethod as dependent on this klass
1218           ik->add_dependent_nmethod(nm);
1219         }
1220       }
1221       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1222     }
1223   }
1224   // Do verification and logging outside CodeCache_lock.
1225   if (nm != nullptr) {
1226 
1227 #ifdef ASSERT
1228     LogTarget(Debug, scc, nmethod) log;
1229     if (log.is_enabled()) {
1230       LogStream out(log);
1231       out.print_cr("== new_nmethod 2");
1232       FlagSetting fs(PrintRelocations, true);
1233       nm->print(&out);
1234       nm->decode(&out);
1235     }
1236 #endif
1237 
1238     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1239     DEBUG_ONLY(nm->verify();)
1240     nm->log_new_nmethod();
1241   }
1242   return nm;
1243 }
1244 
1245 // Fill in default values for various fields
1246 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1247   // avoid uninitialized fields, even for short time periods
1248   _exception_cache            = nullptr;
1249   _gc_data                    = nullptr;
1250   _oops_do_mark_link          = nullptr;
1251   _compiled_ic_data           = nullptr;
1252 
1253 #if INCLUDE_RTM_OPT
1254   _rtm_state                  = NoRTM;
1255 #endif
1256   _is_unloading_state         = 0;
1257   _state                      = not_installed;
1258 
1259   _has_unsafe_access          = 0;
1260   _has_method_handle_invokes  = 0;
1261   _has_wide_vectors           = 0;
1262   _has_monitors               = 0;
1263   _has_flushed_dependencies   = 0;
1264   _is_unlinked                = 0;
1265   _load_reported              = 0; // jvmti state
1266   _preloaded                  = 0;
1267   _has_clinit_barriers        = 0;
1268 
1269   _used                       = false;
1270   _deoptimization_status      = not_marked;
1271 
1272   // SECT_CONSTS is first in code buffer so the offset should be 0.
1273   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1274   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1275 
1276   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1277 
1278   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1279   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1280 
1281   int size = code_buffer->main_code_size();
1282   assert(size >= 0, "should be initialized");
1283   // Use instructions section size if it is 0 (e.g. native wrapper)
1284   if (size == 0) size = code_size(); // requires _stub_offset to be set
1285   assert(size <= code_size(), "incorrect size: %d > %d", size, code_size());
1286   _inline_insts_size = size - _verified_entry_offset
1287                      - code_buffer->total_skipped_instructions_size();
1288   assert(_inline_insts_size >= 0, "sanity");
1289 }

1328 
1329     _osr_entry_point         = nullptr;
1330     _pc_desc_container       = nullptr;
1331     _entry_bci               = InvocationEntryBci;
1332     _compile_id              = compile_id;
1333     _comp_level              = CompLevel_none;
1334     _compiler_type           = type;
1335     _orig_pc_offset          = 0;
1336     _num_stack_arg_slots     = _method->constMethod()->num_stack_arg_slots();
1337 
1338     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1339       // Continuation enter intrinsic
1340       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1341     } else {
1342       _exception_offset      = 0;
1343     }
1344     // Native wrappers do not have deopt handlers. Make the values
1345     // something that will never match a pc like the nmethod vtable entry
1346     _deopt_handler_offset    = 0;
1347     _deopt_mh_handler_offset = 0;
1348     _scc_entry               = nullptr;
1349     _method_profiling_count  = 0;
1350     _unwind_handler_offset   = 0;
1351 
1352     CHECKED_CAST(_metadata_offset, uint16_t, (align_up(code_buffer->total_oop_size(), oopSize)));
1353     int data_end_offset = _metadata_offset + align_up(code_buffer->total_metadata_size(), wordSize);
1354 #if INCLUDE_JVMCI
1355     // jvmci_data_size is 0 in native wrapper but we need to set offset
1356     // to correctly calculate metadata_end address
1357     CHECKED_CAST(_jvmci_data_offset, uint16_t, data_end_offset);
1358 #endif
1359     assert((data_offset() + data_end_offset) <= nmethod_size, "wrong nmethod's size: %d < %d", nmethod_size, (data_offset() + data_end_offset));
1360 
1361     // native wrapper does not have read-only data but we need unique not null address
1362     _immutable_data          = data_end();
1363     _immutable_data_size     = 0;
1364     _nul_chk_table_offset    = 0;
1365     _handler_table_offset    = 0;
1366     _scopes_pcs_offset       = 0;
1367     _scopes_data_offset      = 0;
1368 #if INCLUDE_JVMCI
1369     _speculations_offset     = 0;

1390     // This is both handled in decode2(), called via print_code() -> decode()
1391     if (PrintNativeNMethods) {
1392       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1393       print_code();
1394       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1395 #if defined(SUPPORT_DATA_STRUCTS)
1396       if (AbstractDisassembler::show_structs()) {
1397         if (oop_maps != nullptr) {
1398           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1399           oop_maps->print_on(tty);
1400           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1401         }
1402       }
1403 #endif
1404     } else {
1405       print(); // print the header part only.
1406     }
1407 #if defined(SUPPORT_DATA_STRUCTS)
1408     if (AbstractDisassembler::show_structs()) {
1409       if (PrintRelocations) {
1410         print_relocations_on(tty);
1411         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1412       }
1413     }
1414 #endif
1415     if (xtty != nullptr) {
1416       xtty->tail("print_native_nmethod");
1417     }
1418   }
1419 }
1420 
// Placement allocator: carve nmethod_size bytes out of the code cache,
// choosing the code heap (profiled/non-profiled) that matches comp_level.
// May return nullptr if the code cache is full; callers must check.
void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
  return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
}
1424 
1425 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1426   // Try MethodNonProfiled and MethodProfiled.
1427   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1428   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1429   // Try NonNMethod or give up.
1430   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);

1433 // For normal JIT compiled code
1434 nmethod::nmethod(
1435   Method* method,
1436   CompilerType type,
1437   int nmethod_size,
1438   int immutable_data_size,
1439   int compile_id,
1440   int entry_bci,
1441   address immutable_data,
1442   CodeOffsets* offsets,
1443   int orig_pc_offset,
1444   DebugInformationRecorder* debug_info,
1445   Dependencies* dependencies,
1446   CodeBuffer *code_buffer,
1447   int frame_size,
1448   OopMapSet* oop_maps,
1449   ExceptionHandlerTable* handler_table,
1450   ImplicitExceptionTable* nul_chk_table,
1451   AbstractCompiler* compiler,
1452   CompLevel comp_level
1453   , SCCEntry* scc_entry
1454 #if INCLUDE_JVMCI
1455   , char* speculations,
1456   int speculations_len,
1457   JVMCINMethodData* jvmci_data
1458 #endif
1459   )
1460   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1461              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
1462   _deoptimization_generation(0),
1463   _gc_epoch(CodeCache::gc_epoch()),
1464   _method(method),
1465   _osr_link(nullptr)
1466 {
1467   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1468   {
1469     debug_only(NoSafepointVerifier nsv;)
1470     assert_locked_or_safepoint(CodeCache_lock);
1471 
1472     init_defaults(code_buffer, offsets);
1473     _scc_entry      = scc_entry;
1474     _method_profiling_count  = 0;
1475 
1476     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1477     _entry_bci       = entry_bci;
1478     _compile_id      = compile_id;
1479     _comp_level      = comp_level;
1480     _compiler_type   = type;
1481     _orig_pc_offset  = orig_pc_offset;
1482 
1483     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1484 
1485     set_ctable_begin(header_begin() + content_offset());
1486 
1487 #if INCLUDE_JVMCI
1488     if (compiler->is_jvmci()) {
1489       // JVMCI might not produce any stub sections
1490       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1491         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1492       } else {
1493         _exception_offset        = -1;
1494       }

1586 #if INCLUDE_JVMCI
1587     // Copy speculations to nmethod
1588     if (speculations_size() != 0) {
1589       memcpy(speculations_begin(), speculations, speculations_len);
1590     }
1591 #endif
1592 
1593     post_init();
1594 
1595     // we use the information of entry points to find out if a method is
1596     // static or non static
1597     assert(compiler->is_c2() || compiler->is_jvmci() ||
1598            _method->is_static() == (entry_point() == verified_entry_point()),
1599            " entry points must be same for static methods and vice versa");
1600   }
1601 }
1602 
1603 // Print a short set of xml attributes to identify this nmethod.  The
1604 // output should be embedded in some other element.
1605 void nmethod::log_identity(xmlStream* log) const {
1606   assert(log->inside_attrs_or_error(), "printing attributes");
1607   log->print(" code_compile_id='%d'", compile_id());
1608   const char* nm_kind = compile_kind();
1609   if (nm_kind != nullptr)  log->print(" code_compile_kind='%s'", nm_kind);
1610   log->print(" code_compiler='%s'", compiler_name());
1611   if (TieredCompilation) {
1612     log->print(" code_compile_level='%d'", comp_level());
1613   }
1614 #if INCLUDE_JVMCI
1615   if (jvmci_nmethod_data() != nullptr) {
1616     const char* jvmci_name = jvmci_nmethod_data()->name();
1617     if (jvmci_name != nullptr) {
1618       log->print(" jvmci_mirror_name='");
1619       log->text("%s", jvmci_name);
1620       log->print("'");
1621     }
1622   }
1623 #endif
1624 }
1625 
1626 
1627 #define LOG_OFFSET(log, name)                    \
1628   if (p2i(name##_end()) - p2i(name##_begin())) \
1629     log->print(" " XSTR(name) "_offset='" INTX_FORMAT "'"    , \
1630                p2i(name##_begin()) - p2i(this))
1631 
1632 

1713       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1714       if (oop_maps() != nullptr) {
1715         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1716         oop_maps()->print_on(tty);
1717         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1718       }
1719     }
1720 #endif
1721   } else {
1722     print(); // print the header part only.
1723   }
1724 
1725 #if defined(SUPPORT_DATA_STRUCTS)
1726   if (AbstractDisassembler::show_structs()) {
1727     methodHandle mh(Thread::current(), _method);
1728     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1729       print_scopes();
1730       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1731     }
1732     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1733       print_relocations_on(tty);
1734       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1735     }
1736     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1737       print_dependencies_on(tty);
1738       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1739     }
1740     if (printmethod || PrintExceptionHandlers) {
1741       print_handler_table();
1742       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1743       print_nul_chk_table();
1744       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1745     }
1746 
1747     if (printmethod) {
1748       print_recorded_oops();
1749       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1750       print_recorded_metadata();
1751       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1752     }
1753   }

1961   Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1962 }
1963 
// Conservative liveness query: may this nmethod still have an activation
// on some thread's stack?
bool nmethod::is_maybe_on_stack() {
  // If the condition below is true, it means that the nmethod was found to
  // be alive the previous completed marking cycle.
  return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
}
1969 
1970 void nmethod::inc_decompile_count() {
1971   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1972   // Could be gated by ProfileTraps, but do not bother...
1973   Method* m = method();
1974   if (m == nullptr)  return;
1975   MethodData* mdo = m->method_data();
1976   if (mdo == nullptr)  return;
1977   // There is a benign race here.  See comments in methodData.hpp.
1978   mdo->inc_decompile_count();
1979 }
1980 
// Atomically bump the per-nmethod profiling counter; safe to call from
// multiple threads concurrently.
void nmethod::inc_method_profiling_count() {
  Atomic::inc(&_method_profiling_count);
}
1984 
// Current value of the profiling counter. Plain (non-atomic) read;
// concurrent increments may not be reflected immediately.
uint64_t nmethod::method_profiling_count() {
  return _method_profiling_count;
}
1988 
// Attempt to move _state forward to new_state_int. State transitions are
// strictly monotonic; returns false (and changes nothing) if the nmethod
// has already reached or passed the requested state. Caller must hold
// NMethodState_lock.
bool nmethod::try_transition(signed char new_state_int) {
  signed char new_state = new_state_int;
  assert_lock_strong(NMethodState_lock);
  signed char old_state = _state;
  if (old_state >= new_state) {
    // Ensure monotonicity of transitions.
    return false;
  }
  // Atomic store: _state is read lock-free elsewhere (e.g. make_not_entrant's
  // fast path), so publish the new value atomically.
  Atomic::store(&_state, new_state);
  return true;
}
2000 
2001 void nmethod::invalidate_osr_method() {
2002   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2003   // Remove from list of active nmethods
2004   if (method() != nullptr) {
2005     method()->method_holder()->remove_osr_nmethod(this);
2006   }
2007 }
2008 

2014                        os::current_thread_id());
2015       log_identity(xtty);
2016       xtty->stamp();
2017       xtty->end_elem();
2018     }
2019   }
2020 
2021   CompileTask::print_ul(this, "made not entrant");
2022   if (PrintCompilation) {
2023     print_on(tty, "made not entrant");
2024   }
2025 }
2026 
2027 void nmethod::unlink_from_method() {
2028   if (method() != nullptr) {
2029     method()->unlink_code(this);
2030   }
2031 }
2032 
2033 // Invalidate code
2034 bool nmethod::make_not_entrant(bool make_not_entrant) {
2035   // This can be called while the system is already at a safepoint which is ok
2036   NoSafepointVerifier nsv;
2037 
2038   if (is_unloading()) {
2039     // If the nmethod is unloading, then it is already not entrant through
2040     // the nmethod entry barriers. No need to do anything; GC will unload it.
2041     return false;
2042   }
2043 
2044   if (Atomic::load(&_state) == not_entrant) {
2045     // Avoid taking the lock if already in required state.
2046     // This is safe from races because the state is an end-state,
2047     // which the nmethod cannot back out of once entered.
2048     // No need for fencing either.
2049     return false;
2050   }
2051 
2052   {
2053     // Enter critical section.  Does not block for safepoint.
2054     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2077     }
2078 
2079     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2080     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2081       // If nmethod entry barriers are not supported, we won't mark
2082       // nmethods as on-stack when they become on-stack. So we
2083       // degrade to a less accurate flushing strategy, for now.
2084       mark_as_maybe_on_stack();
2085     }
2086 
2087     // Change state
2088     bool success = try_transition(not_entrant);
2089     assert(success, "Transition can't fail");
2090 
2091     // Log the transition once
2092     log_state_change();
2093 
2094     // Remove nmethod from method.
2095     unlink_from_method();
2096 
2097     if (make_not_entrant) {
2098       // Keep cached code if it was simply replaced
2099       // otherwise make it not entrant too.
2100       SCCache::invalidate(_scc_entry);
2101     }
2102 
2103     CompileBroker::log_not_entrant(this);
2104   } // leave critical region under NMethodState_lock
2105 
2106 #if INCLUDE_JVMCI
2107   // Invalidate can't occur while holding the Patching lock
2108   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2109   if (nmethod_data != nullptr) {
2110     nmethod_data->invalidate_nmethod_mirror(this);
2111   }
2112 #endif
2113 
2114 #ifdef ASSERT
2115   if (is_osr_method() && method() != nullptr) {
2116     // Make sure osr nmethod is invalidated, i.e. not on the list
2117     bool found = method()->method_holder()->remove_osr_nmethod(this);
2118     assert(!found, "osr nmethod should have been invalidated");
2119   }
2120 #endif
2121 
2122   return true;
2123 }

2223         MethodHandles::clean_dependency_context(call_site);
2224       } else {
2225         InstanceKlass* ik = deps.context_type();
2226         if (ik == nullptr) {
2227           continue;  // ignore things like evol_method
2228         }
2229         // During GC liveness of dependee determines class that needs to be updated.
2230         // The GC may clean dependency contexts concurrently and in parallel.
2231         ik->clean_dependency_context();
2232       }
2233     }
2234   }
2235 }
2236 
// Finalize bookkeeping after a successful compilation: record size
// statistics on the CompileTask, post the JVMTI compiled-method-load
// event, log the nmethod, and honor any print directives.
void nmethod::post_compiled_method(CompileTask* task) {
  task->mark_success();
  task->set_nm_content_size(content_size());
  task->set_nm_insts_size(insts_size());
  task->set_nm_total_size(total_size());

  // task->is_scc() is true only for loaded cached code.
  // nmethod::_scc_entry is set for loaded and stored cached code
  // to invalidate the entry when nmethod is deoptimized.
  // There is option to not store in archive cached code.
  guarantee((_scc_entry != nullptr) || !task->is_scc() || VerifyCachedCode, "sanity");

  // JVMTI -- compiled method notification (must be done outside lock)
  post_compiled_method_load_event();

  if (CompilationLog::log() != nullptr) {
    CompilationLog::log()->log_nmethod(JavaThread::current(), this);
  }

  // Compiler directives (e.g. PrintNMethods) may request printing.
  const DirectiveSet* directive = task->directive();
  maybe_print_nmethod(directive);
}
2259 
2260 // ------------------------------------------------------------------
2261 // post_compiled_method_load_event
2262 // new method for install_code() path
2263 // Transfer information from compilation to jvmti
2264 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2265   // This is a bad time for a safepoint.  We don't want
2266   // this nmethod to get unloaded while we're queueing the event.
2267   NoSafepointVerifier nsv;
2268 

3167                                              p2i(nul_chk_table_end()),
3168                                              nul_chk_table_size());
3169   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3170                                              p2i(handler_table_begin()),
3171                                              p2i(handler_table_end()),
3172                                              handler_table_size());
3173   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3174                                              p2i(scopes_pcs_begin()),
3175                                              p2i(scopes_pcs_end()),
3176                                              scopes_pcs_size());
3177   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3178                                              p2i(scopes_data_begin()),
3179                                              p2i(scopes_data_end()),
3180                                              scopes_data_size());
3181 #if INCLUDE_JVMCI
3182   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3183                                              p2i(speculations_begin()),
3184                                              p2i(speculations_end()),
3185                                              speculations_size());
3186 #endif
3187   if (SCCache::is_on() && _scc_entry != nullptr) {
3188     _scc_entry->print(st);
3189   }
3190 }
3191 
// Disassemble this nmethod's code to tty, holding the tty lock so the
// output is not interleaved with other threads' printing.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3198 
3199 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3200 
3201 void nmethod::print_dependencies_on(outputStream* out) {
3202   ResourceMark rm;
3203   stringStream st;
3204   st.print_cr("Dependencies:");
3205   for (Dependencies::DepStream deps(this); deps.next(); ) {
3206     deps.print_dependency(&st);
3207     InstanceKlass* ctxk = deps.context_type();
3208     if (ctxk != nullptr) {
3209       if (ctxk->is_dependent_nmethod(this)) {

3269   st->print("scopes:");
3270   if (scopes_pcs_begin() < scopes_pcs_end()) {
3271     st->cr();
3272     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3273       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3274         continue;
3275 
3276       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3277       while (sd != nullptr) {
3278         sd->print_on(st, p);  // print output ends with a newline
3279         sd = sd->sender();
3280       }
3281     }
3282   } else {
3283     st->print_cr(" <list empty>");
3284   }
3285 }
3286 #endif
3287 
3288 #ifndef PRODUCT  // RelocIterator does support printing only then.
3289 void nmethod::print_relocations_on(outputStream* st) {
3290   ResourceMark m;       // in case methods get printed via the debugger
3291   st->print_cr("relocations:");
3292   RelocIterator iter(this);
3293   iter.print_on(st);
3294 }
3295 #endif
3296 
3297 void nmethod::print_pcs_on(outputStream* st) {
3298   ResourceMark m;       // in case methods get printed via debugger
3299   st->print("pc-bytecode offsets:");
3300   if (scopes_pcs_begin() < scopes_pcs_end()) {
3301     st->cr();
3302     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3303       p->print_on(st, this);  // print output ends with a newline
3304     }
3305   } else {
3306     st->print_cr(" <list empty>");
3307   }
3308 }
3309 
// Build and print this nmethod's exception handler table, with entries
// resolved relative to code_begin().
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3313 

3628           else obj->print_value_on(&st);
3629           st.print(")");
3630           return st.as_string();
3631         }
3632         case relocInfo::metadata_type: {
3633           stringStream st;
3634           metadata_Relocation* r = iter.metadata_reloc();
3635           Metadata* obj = r->metadata_value();
3636           st.print("metadata(");
3637           if (obj == nullptr) st.print("nullptr");
3638           else obj->print_value_on(&st);
3639           st.print(")");
3640           return st.as_string();
3641         }
3642         case relocInfo::runtime_call_type:
3643         case relocInfo::runtime_call_w_cp_type: {
3644           stringStream st;
3645           st.print("runtime_call");
3646           CallRelocation* r = (CallRelocation*)iter.reloc();
3647           address dest = r->destination();
3648           if (StubRoutines::contains(dest)) {
3649             StubCodeDesc* desc = StubCodeDesc::desc_for(dest);
3650             if (desc == nullptr) {
3651               desc = StubCodeDesc::desc_for(dest + frame::pc_return_offset);
3652             }
3653             if (desc != nullptr) {
3654               st.print(" Stub::%s", desc->name());
3655               return st.as_string();
3656             }
3657           }
3658           CodeBlob* cb = CodeCache::find_blob(dest);
3659           if (cb != nullptr) {
3660             st.print(" %s", cb->name());
3661           } else {
3662             ResourceMark rm;
3663             const int buflen = 1024;
3664             char* buf = NEW_RESOURCE_ARRAY(char, buflen);
3665             int offset;
3666             if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
3667               st.print(" %s", buf);
3668               if (offset != 0) {
3669                 st.print("+%d", offset);
3670               }
3671             }
3672           }
3673           return st.as_string();
3674         }
3675         case relocInfo::virtual_call_type: {
3676           stringStream st;
3677           st.print_raw("virtual_call");
< prev index next >