< prev index next >

src/hotspot/share/code/nmethod.cpp

Print this page

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"

  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compilerDirectives.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"

 991              _method->method_holder()->external_name(),
 992              _method->name()->as_C_string(),
 993              _method->signature()->as_C_string(),
 994              compile_id());
 995   }
 996   return check_evol.has_evol_dependency();
 997 }
 998 
 999 int nmethod::total_size() const {
1000   return
1001     consts_size()        +
1002     insts_size()         +
1003     stub_size()          +
1004     scopes_data_size()   +
1005     scopes_pcs_size()    +
1006     handler_table_size() +
1007     nul_chk_table_size();
1008 }
1009 
1010 const char* nmethod::compile_kind() const {
1011   if (is_osr_method())     return "osr";



1012   if (method() != nullptr && is_native_method()) {
1013     if (method()->is_continuation_native_intrinsic()) {
1014       return "cnt";
1015     }
1016     return "c2n";
1017   }
1018   return nullptr;
1019 }
1020 
// Human-readable name of the compiler that produced this nmethod,
// as mapped from _compiler_type by compilertype2name().
const char* nmethod::compiler_name() const {
  return compilertype2name(_compiler_type);
}
1024 
1025 #ifdef ASSERT
1026 class CheckForOopsClosure : public OopClosure {
1027   bool _found_oop = false;
1028  public:
1029   virtual void do_oop(oop* o) { _found_oop = true; }
1030   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1031   bool found_oop() { return _found_oop; }

1097     nm = new (native_nmethod_size, allow_NonNMethod_space)
1098     nmethod(method(), compiler_none, native_nmethod_size,
1099             compile_id, &offsets,
1100             code_buffer, frame_size,
1101             basic_lock_owner_sp_offset,
1102             basic_lock_sp_offset,
1103             oop_maps, mutable_data_size);
1104     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1105     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1106   }
1107 
1108   if (nm != nullptr) {
1109     // verify nmethod
1110     DEBUG_ONLY(nm->verify();) // might block
1111 
1112     nm->log_new_nmethod();
1113   }
1114   return nm;
1115 }
1116 

























1117 nmethod* nmethod::new_nmethod(const methodHandle& method,
1118   int compile_id,
1119   int entry_bci,
1120   CodeOffsets* offsets,
1121   int orig_pc_offset,
1122   DebugInformationRecorder* debug_info,
1123   Dependencies* dependencies,
1124   CodeBuffer* code_buffer, int frame_size,
1125   OopMapSet* oop_maps,
1126   ExceptionHandlerTable* handler_table,
1127   ImplicitExceptionTable* nul_chk_table,
1128   AbstractCompiler* compiler,
1129   CompLevel comp_level
1130 #if INCLUDE_JVMCI
1131   , char* speculations,
1132   int speculations_len,
1133   JVMCINMethodData* jvmci_data
1134 #endif
1135 )
1136 {

1163 
1164   int mutable_data_size = required_mutable_data_size(code_buffer
1165     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1166 
1167   {
1168     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1169 
1170     nm = new (nmethod_size, comp_level)
1171     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1172             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1173             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1174             handler_table, nul_chk_table, compiler, comp_level
1175 #if INCLUDE_JVMCI
1176             , speculations,
1177             speculations_len,
1178             jvmci_data
1179 #endif
1180             );
1181 
1182     if (nm != nullptr) {
1183       // To make dependency checking during class loading fast, record
1184       // the nmethod dependencies in the classes it is dependent on.
1185       // This allows the dependency checking code to simply walk the
1186       // class hierarchy above the loaded class, checking only nmethods
1187       // which are dependent on those classes.  The slow way is to
1188       // check every nmethod for dependencies which makes it linear in
1189       // the number of methods compiled.  For applications with a lot
1190       // classes the slow way is too slow.
1191       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1192         if (deps.type() == Dependencies::call_site_target_value) {
1193           // CallSite dependencies are managed on per-CallSite instance basis.
1194           oop call_site = deps.argument_oop(0);
1195           MethodHandles::add_dependent_nmethod(call_site, nm);
1196         } else {
1197           InstanceKlass* ik = deps.context_type();
1198           if (ik == nullptr) {
1199             continue;  // ignore things like evol_method
1200           }
1201           // record this nmethod as dependent on this klass
1202           ik->add_dependent_nmethod(nm);
1203         }
1204       }
1205       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));












































































1206     }
1207   }
1208   // Do verification and logging outside CodeCache_lock.
1209   if (nm != nullptr) {










1210     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1211     DEBUG_ONLY(nm->verify();)
1212     nm->log_new_nmethod();
1213   }
1214   return nm;
1215 }
1216 
// Fill in default values for various fields
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache            = nullptr;
  _gc_data                    = nullptr;
  _oops_do_mark_link          = nullptr;
  _compiled_ic_data           = nullptr;

  _is_unloading_state         = 0;
  _state                      = not_installed;

  // Property flag bits all start clear; they are set later as the
  // corresponding properties are recorded for this nmethod.
  _has_unsafe_access          = 0;
  _has_wide_vectors           = 0;
  _has_monitors               = 0;
  _has_scoped_access          = 0;
  _has_flushed_dependencies   = 0;
  _is_unlinked                = 0;
  _load_reported              = 0; // jvmti state

  _deoptimization_status      = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  // Stub section offset is relative to the blob header (content_offset()
  // plus the stub section's position within the code buffer).
  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry points are stored as narrow 16-bit offsets; CHECKED_CAST
  // presumably verifies the value fits — confirm in the macro definition.
  CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1249 
1250 // Post initialization
1251 void nmethod::post_init() {
1252   clear_unloading_state();
1253 
1254   finalize_relocations();
1255 

1287     init_defaults(code_buffer, offsets);
1288 
1289     _osr_entry_point         = nullptr;
1290     _pc_desc_container       = nullptr;
1291     _entry_bci               = InvocationEntryBci;
1292     _compile_id              = compile_id;
1293     _comp_level              = CompLevel_none;
1294     _compiler_type           = type;
1295     _orig_pc_offset          = 0;
1296     _num_stack_arg_slots     = 0;
1297 
1298     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1299       // Continuation enter intrinsic
1300       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1301     } else {
1302       _exception_offset      = 0;
1303     }
1304     // Native wrappers do not have deopt handlers. Make the values
1305     // something that will never match a pc like the nmethod vtable entry
1306     _deopt_handler_entry_offset    = 0;


1307     _unwind_handler_offset   = 0;
1308 
1309     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1310     uint16_t metadata_size;
1311     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1312     JVMCI_ONLY( _metadata_size = metadata_size; )
1313     assert(_mutable_data_size == _relocation_size + metadata_size,
1314            "wrong mutable data size: %d != %d + %d",
1315            _mutable_data_size, _relocation_size, metadata_size);
1316 
1317     // native wrapper does not have read-only data but we need unique not null address
1318     _immutable_data          = blob_end();
1319     _immutable_data_size     = 0;
1320     _nul_chk_table_offset    = 0;
1321     _handler_table_offset    = 0;
1322     _scopes_pcs_offset       = 0;
1323     _scopes_data_offset      = 0;
1324 #if INCLUDE_JVMCI
1325     _speculations_offset     = 0;
1326 #endif

1347     // This is both handled in decode2(), called via print_code() -> decode()
1348     if (PrintNativeNMethods) {
1349       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1350       print_code();
1351       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1352 #if defined(SUPPORT_DATA_STRUCTS)
1353       if (AbstractDisassembler::show_structs()) {
1354         if (oop_maps != nullptr) {
1355           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1356           oop_maps->print_on(tty);
1357           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1358         }
1359       }
1360 #endif
1361     } else {
1362       print(); // print the header part only.
1363     }
1364 #if defined(SUPPORT_DATA_STRUCTS)
1365     if (AbstractDisassembler::show_structs()) {
1366       if (PrintRelocations) {
1367         print_relocations();
1368         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1369       }
1370     }
1371 #endif
1372     if (xtty != nullptr) {
1373       xtty->tail("print_native_nmethod");
1374     }
1375   }
1376 }
1377 
1378 
1379 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1380 {
1381 
1382   if (nm._oop_maps != nullptr) {
1383     _oop_maps                   = nm._oop_maps->clone();
1384   } else {
1385     _oop_maps                   = nullptr;
1386   }
1387 

1680   CompLevel comp_level
1681 #if INCLUDE_JVMCI
1682   , char* speculations,
1683   int speculations_len,
1684   JVMCINMethodData* jvmci_data
1685 #endif
1686   )
1687   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1688              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1689   _deoptimization_generation(0),
1690   _gc_epoch(CodeCache::gc_epoch()),
1691   _method(method),
1692   _osr_link(nullptr)
1693 {
1694   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1695   {
1696     DEBUG_ONLY(NoSafepointVerifier nsv;)
1697     assert_locked_or_safepoint(CodeCache_lock);
1698 
1699     init_defaults(code_buffer, offsets);


1700 
1701     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1702     _entry_bci       = entry_bci;
1703     _compile_id      = compile_id;
1704     _comp_level      = comp_level;
1705     _compiler_type   = type;
1706     _orig_pc_offset  = orig_pc_offset;
1707 
1708     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1709 
1710     set_ctable_begin(header_begin() + content_offset());
1711 
1712 #if INCLUDE_JVMCI
1713     if (compiler->is_jvmci()) {
1714       // JVMCI might not produce any stub sections
1715       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1716         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1717       } else {
1718         _exception_offset        = -1;
1719       }

1809     // Copy speculations to nmethod
1810     if (speculations_size() != 0) {
1811       memcpy(speculations_begin(), speculations, speculations_len);
1812     }
1813 #endif
1814     init_immutable_data_ref_count();
1815 
1816     post_init();
1817 
1818     // we use the information of entry points to find out if a method is
1819     // static or non static
1820     assert(compiler->is_c2() || compiler->is_jvmci() ||
1821            _method->is_static() == (entry_point() == verified_entry_point()),
1822            " entry points must be same for static methods and vice versa");
1823   }
1824 }
1825 
// Print a short set of xml attributes to identify this nmethod.  The
// output should be embedded in some other element.
void nmethod::log_identity(xmlStream* log) const {
  log->print(" compile_id='%d'", compile_id());
  const char* nm_kind = compile_kind();
  // compile_kind() returns nullptr for ordinary compiles; the attribute is
  // only emitted for special kinds (osr / native wrappers).
  if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
  log->print(" compiler='%s'", compiler_name());
  if (TieredCompilation) {
    log->print(" level='%d'", comp_level());
  }
#if INCLUDE_JVMCI
  if (jvmci_nmethod_data() != nullptr) {
    const char* jvmci_name = jvmci_nmethod_data()->name();
    if (jvmci_name != nullptr) {
      // NOTE(review): the name goes through text() rather than print() —
      // presumably so the stream can escape XML-special characters; confirm
      // against xmlStream.
      log->print(" jvmci_mirror_name='");
      log->text("%s", jvmci_name);
      log->print("'");
    }
  }
#endif
}
1847 
1848 
// Emit a " <name>_offset='...'" xml attribute for section 'name', but only
// when the section is non-empty (its begin and end addresses differ).
// The logged offset is the section start relative to this nmethod's base.
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'"    , \
               p2i(name##_begin()) - p2i(this))
1853 
1854 

1969       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1970       if (oop_maps() != nullptr) {
1971         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1972         oop_maps()->print_on(tty);
1973         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1974       }
1975     }
1976 #endif
1977   } else {
1978     print(); // print the header part only.
1979   }
1980 
1981 #if defined(SUPPORT_DATA_STRUCTS)
1982   if (AbstractDisassembler::show_structs()) {
1983     methodHandle mh(Thread::current(), _method);
1984     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1985       print_scopes();
1986       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1987     }
1988     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1989       print_relocations();
1990       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1991     }
1992     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1993       print_dependencies_on(tty);
1994       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1995     }
1996     if (printmethod || PrintExceptionHandlers) {
1997       print_handler_table();
1998       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1999       print_nul_chk_table();
2000       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2001     }
2002 
2003     if (printmethod) {
2004       print_recorded_oops();
2005       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2006       print_recorded_metadata();
2007       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2008     }
2009   }
2010 #endif
2011 
2012   if (xtty != nullptr) {
2013     xtty->tail("print_nmethod");
2014   }
2015 }
2016 
2017 
2018 // Promote one word from an assembly-time handle to a live embedded oop.
2019 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2020   if (handle == nullptr ||
2021       // As a special case, IC oops are initialized to 1 or -1.
2022       handle == (jobject) Universe::non_oop_word()) {
2023     *(void**)dest = handle;
2024   } else {
2025     *dest = JNIHandles::resolve_non_null(handle);
2026   }
2027 }
2028 








2029 
2030 // Have to have the same name because it's called by a template
2031 void nmethod::copy_values(GrowableArray<jobject>* array) {
2032   int length = array->length();
2033   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2034   oop* dest = oops_begin();
2035   for (int index = 0 ; index < length; index++) {
2036     initialize_immediate_oop(&dest[index], array->at(index));
2037   }
2038 
2039   // Now we can fix up all the oops in the code.  We need to do this
2040   // in the code because the assembler uses jobjects as placeholders.
2041   // The code and relocations have already been initialized by the
2042   // CodeBlob constructor, so it is valid even at this early point to
2043   // iterate over relocations and patch the code.
2044   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2045 }
2046 
2047 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2048   int length = array->length();

2056 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2057   // re-patch all oop-bearing instructions, just in case some oops moved
2058   RelocIterator iter(this, begin, end);
2059   while (iter.next()) {
2060     if (iter.type() == relocInfo::oop_type) {
2061       oop_Relocation* reloc = iter.oop_reloc();
2062       if (initialize_immediates && reloc->oop_is_immediate()) {
2063         oop* dest = reloc->oop_addr();
2064         jobject obj = *reinterpret_cast<jobject*>(dest);
2065         initialize_immediate_oop(dest, obj);
2066       }
2067       // Refresh the oop-related bits of this instruction.
2068       reloc->fix_oop_relocation();
2069     } else if (iter.type() == relocInfo::metadata_type) {
2070       metadata_Relocation* reloc = iter.metadata_reloc();
2071       reloc->fix_metadata_relocation();
2072     }
2073   }
2074 }
2075 




















// Patch the post-call nop at 'pc' with a back-reference into its nmethod:
// the oopmap slot matching this pc and the pc's byte offset from the
// nmethod's start.  Failure to patch is tolerated and merely logged.
static void install_post_call_nop_displacement(nmethod* nm, address pc) {
  NativePostCallNop* nop = nativePostCallNop_at((address) pc);
  intptr_t cbaddr = (intptr_t) nm;
  intptr_t offset = ((intptr_t) pc) - cbaddr;  // distance from nmethod base to this pc

  int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
  if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
    log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
  } else if (!nop->patch(oopmap_slot, offset)) {
    // patch() declined — presumably the values do not fit the nop's
    // encoding; confirm against NativePostCallNop::patch.
    log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
  }
}
2088 
2089 void nmethod::finalize_relocations() {
2090   NoSafepointVerifier nsv;
2091 
2092   GrowableArray<NativeMovConstReg*> virtual_call_data;
2093 
2094   // Make sure that post call nops fill in nmethod offsets eagerly so
2095   // we don't have to race with deoptimization

2226   // be alive the previous completed marking cycle.
2227   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2228 }
2229 
2230 void nmethod::inc_decompile_count() {
2231   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2232   // Could be gated by ProfileTraps, but do not bother...
2233 #if INCLUDE_JVMCI
2234   if (jvmci_skip_profile_deopt()) {
2235     return;
2236   }
2237 #endif
2238   Method* m = method();
2239   if (m == nullptr)  return;
2240   MethodData* mdo = m->method_data();
2241   if (mdo == nullptr)  return;
2242   // There is a benign race here.  See comments in methodData.hpp.
2243   mdo->inc_decompile_count();
2244 }
2245 








2246 bool nmethod::try_transition(signed char new_state_int) {
2247   signed char new_state = new_state_int;
2248   assert_lock_strong(NMethodState_lock);
2249   signed char old_state = _state;
2250   if (old_state >= new_state) {
2251     // Ensure monotonicity of transitions.
2252     return false;
2253   }
2254   AtomicAccess::store(&_state, new_state);
2255   return true;
2256 }
2257 
2258 void nmethod::invalidate_osr_method() {
2259   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2260   // Remove from list of active nmethods
2261   if (method() != nullptr) {
2262     method()->method_holder()->remove_osr_nmethod(this);
2263   }
2264 }
2265 

2275     }
2276   }
2277 
2278   ResourceMark rm;
2279   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2280   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2281 
2282   CompileTask::print_ul(this, ss.freeze());
2283   if (PrintCompilation) {
2284     print_on_with_msg(tty, ss.freeze());
2285   }
2286 }
2287 
2288 void nmethod::unlink_from_method() {
2289   if (method() != nullptr) {
2290     method()->unlink_code(this);
2291   }
2292 }
2293 
2294 // Invalidate code
2295 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2296   // This can be called while the system is already at a safepoint which is ok
2297   NoSafepointVerifier nsv;
2298 
2299   if (is_unloading()) {
2300     // If the nmethod is unloading, then it is already not entrant through
2301     // the nmethod entry barriers. No need to do anything; GC will unload it.
2302     return false;
2303   }
2304 
2305   if (AtomicAccess::load(&_state) == not_entrant) {
2306     // Avoid taking the lock if already in required state.
2307     // This is safe from races because the state is an end-state,
2308     // which the nmethod cannot back out of once entered.
2309     // No need for fencing either.
2310     return false;
2311   }
2312 
2313   MACOS_AARCH64_ONLY(os::thread_wx_enable_write());
2314 
2315   {

2339     }
2340 
2341     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2342     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2343       // If nmethod entry barriers are not supported, we won't mark
2344       // nmethods as on-stack when they become on-stack. So we
2345       // degrade to a less accurate flushing strategy, for now.
2346       mark_as_maybe_on_stack();
2347     }
2348 
2349     // Change state
2350     bool success = try_transition(not_entrant);
2351     assert(success, "Transition can't fail");
2352 
2353     // Log the transition once
2354     log_state_change(invalidation_reason);
2355 
2356     // Remove nmethod from method.
2357     unlink_from_method();
2358 







2359   } // leave critical region under NMethodState_lock
2360 
2361 #if INCLUDE_JVMCI
2362   // Invalidate can't occur while holding the NMethodState_lock
2363   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2364   if (nmethod_data != nullptr) {
2365     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2366   }
2367 #endif
2368 
2369 #ifdef ASSERT
2370   if (is_osr_method() && method() != nullptr) {
2371     // Make sure osr nmethod is invalidated, i.e. not on the list
2372     bool found = method()->method_holder()->remove_osr_nmethod(this);
2373     assert(!found, "osr nmethod should have been invalidated");
2374   }
2375 #endif
2376 
2377   return true;
2378 }

2403     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2404             nmethod::InvalidationReason::UNLOADING_COLD :
2405             nmethod::InvalidationReason::UNLOADING);
2406   }
2407 #endif
2408 
2409   // Post before flushing as jmethodID is being used
2410   post_compiled_method_unload();
2411 
2412   // Register for flushing when it is safe. For concurrent class unloading,
2413   // that would be after the unloading handshake, and for STW class unloading
2414   // that would be when getting back to the VM thread.
2415   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2416 }
2417 
// Completely deallocate this nmethod: log the flush, free all side data
// structures (exception cache, pc-desc container, IC data, immutable data),
// unregister it from the heap/code cache as requested, and finally release
// the underlying CodeBlob storage.
void nmethod::purge(bool unregister_nmethod) {

  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);

  // completely deallocate this method
  Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));

  LogTarget(Debug, codecache) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    const char* method_name = method()->name()->as_C_string();
    const size_t codecache_capacity = CodeCache::capacity()/1024;
    const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
    ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
              "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
              _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
              codecache_capacity, codecache_free_space, method_name, compiler_name());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  while(ec != nullptr) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }
  if (_pc_desc_container != nullptr) {
    delete _pc_desc_container;
  }
  delete[] _compiled_ic_data;

  if (_immutable_data != blob_end()) {
    // Free memory if this was the last nmethod referencing immutable data
    if (dec_immutable_data_ref_count() == 0) {
      os::free(_immutable_data);
    }

    _immutable_data = blob_end(); // Valid not null address
  }

  if (unregister_nmethod) {
    Universe::heap()->unregister_nmethod(this);
  }
  CodeCache::unregister_old_nmethod(this);

  JVMCI_ONLY( _metadata_size = 0; )
  CodeBlob::purge();
}
2469 
2470 oop nmethod::oop_at(int index) const {
2471   if (index == 0) {
2472     return nullptr;

2499         MethodHandles::clean_dependency_context(call_site);
2500       } else {
2501         InstanceKlass* ik = deps.context_type();
2502         if (ik == nullptr) {
2503           continue;  // ignore things like evol_method
2504         }
2505         // During GC liveness of dependee determines class that needs to be updated.
2506         // The GC may clean dependency contexts concurrently and in parallel.
2507         ik->clean_dependency_context();
2508       }
2509     }
2510   }
2511 }
2512 
2513 void nmethod::post_compiled_method(CompileTask* task) {
2514   task->mark_success();
2515   task->set_nm_content_size(content_size());
2516   task->set_nm_insts_size(insts_size());
2517   task->set_nm_total_size(total_size());
2518 






2519   // JVMTI -- compiled method notification (must be done outside lock)
2520   post_compiled_method_load_event();
2521 
2522   if (CompilationLog::log() != nullptr) {
2523     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2524   }
2525 
2526   const DirectiveSet* directive = task->directive();
2527   maybe_print_nmethod(directive);
2528 }
2529 
2530 #if INCLUDE_CDS
2531 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2532 
2533 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2534   precond(CDSConfig::is_using_aot_linked_classes());
2535   precond(!ServiceThread::has_started());
2536 
2537   // We are still in single threaded stage of VM bootstrap. No need to lock.
2538   if (_delayed_compiled_method_load_events == nullptr) {

// Self-consistency checks for a live nmethod: code-cache membership,
// entry-point lookup, PcDesc validity, implicit-exception tables (JVMCI),
// embedded oops, scopes and metadata.  Not-entrant and native methods are
// only partially checked.
void nmethod::verify() {
  if (is_not_entrant())
    return;

  // assert(oopDesc::is_oop(method()), "must be valid");

  ResourceMark rm;

  if (!CodeCache::contains(this)) {
    fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
  }

  // Native wrappers have none of the structures checked below.
  if(is_native_method() )
    return;

  // The code cache's lookup by verified entry point must map back to us.
  nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
  if (nm != this) {
    fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
  }

  // Each PcDesc reports its own failures; we add which nmethod it was in.
  for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
    if (! p->verify(this)) {
      tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
    }
  }

#ifdef ASSERT
#if INCLUDE_JVMCI
  {
    // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
    ImmutableOopMapSet* oms = oop_maps();
    ImplicitExceptionTable implicit_table(this);
    for (uint i = 0; i < implicit_table.len(); i++) {
      int exec_offset = (int) implicit_table.get_exec_offset(i);
      // exec_offset == cont_offset marks an implicit exception that
      // deoptimizes rather than continuing at a handler.
      if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
        assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
        bool found = false;
        for (int i = 0, imax = oms->count(); i < imax; i++) {
          if (oms->pair_at(i)->pc_offset() == exec_offset) {
            found = true;
            break;
          }
        }
        assert(found, "missing oopmap");
      }
    }
  }
#endif
#endif

  // All oops embedded in the code must be valid heap oops.
  VerifyOopsClosure voc(this);
  oops_do(&voc);
  assert(voc.ok(), "embedded oops must be OK");
  Universe::heap()->verify_nmethod(this);

  assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
         nm->method()->external_name(), p2i(_oops_do_mark_link));
  verify_scopes();

  // Metadata walk requires the CompiledIC lock to be held.
  CompiledICLocker nm_verify(this);
  VerifyMetadataClosure vmc;
  metadata_do(&vmc);
}
3320 
3321 
3322 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3323 
3324   // Verify IC only when nmethod installation is finished.
3325   if (!is_not_installed()) {
3326     if (CompiledICLocker::is_safe(this)) {
3327       if (is_inline_cache) {
3328         CompiledIC_at(this, call_site);
3329       } else {
3330         CompiledDirectCall::at(call_site);
3331       }
3332     } else {
3333       CompiledICLocker ml_verify(this);
3334       if (is_inline_cache) {

3463                                              p2i(nul_chk_table_end()),
3464                                              nul_chk_table_size());
3465   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3466                                              p2i(handler_table_begin()),
3467                                              p2i(handler_table_end()),
3468                                              handler_table_size());
3469   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3470                                              p2i(scopes_pcs_begin()),
3471                                              p2i(scopes_pcs_end()),
3472                                              scopes_pcs_size());
3473   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3474                                              p2i(scopes_data_begin()),
3475                                              p2i(scopes_data_end()),
3476                                              scopes_data_size());
3477 #if INCLUDE_JVMCI
3478   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3479                                              p2i(speculations_begin()),
3480                                              p2i(speculations_end()),
3481                                              speculations_size());
3482 #endif



3483 }
3484 
3485 void nmethod::print_code() {
3486   ResourceMark m;
3487   ttyLocker ttyl;
3488   // Call the specialized decode method of this class.
3489   decode(tty);
3490 }
3491 
3492 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3493 
3494 void nmethod::print_dependencies_on(outputStream* out) {
3495   ResourceMark rm;
3496   stringStream st;
3497   st.print_cr("Dependencies:");
3498   for (Dependencies::DepStream deps(this); deps.next(); ) {
3499     deps.print_dependency(&st);
3500     InstanceKlass* ctxk = deps.context_type();
3501     if (ctxk != nullptr) {
3502       if (ctxk->is_dependent_nmethod(this)) {

3562   st->print("scopes:");
3563   if (scopes_pcs_begin() < scopes_pcs_end()) {
3564     st->cr();
3565     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3566       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3567         continue;
3568 
3569       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3570       while (sd != nullptr) {
3571         sd->print_on(st, p);  // print output ends with a newline
3572         sd = sd->sender();
3573       }
3574     }
3575   } else {
3576     st->print_cr(" <list empty>");
3577   }
3578 }
3579 #endif
3580 
3581 #ifndef PRODUCT  // RelocIterator does support printing only then.
3582 void nmethod::print_relocations() {
3583   ResourceMark m;       // in case methods get printed via the debugger
3584   tty->print_cr("relocations:");
3585   RelocIterator iter(this);
3586   iter.print_on(tty);
3587 }
3588 #endif
3589 
3590 void nmethod::print_pcs_on(outputStream* st) {
3591   ResourceMark m;       // in case methods get printed via debugger
3592   st->print("pc-bytecode offsets:");
3593   if (scopes_pcs_begin() < scopes_pcs_end()) {
3594     st->cr();
3595     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3596       p->print_on(st, this);  // print output ends with a newline
3597     }
3598   } else {
3599     st->print_cr(" <list empty>");
3600   }
3601 }
3602 
3603 void nmethod::print_handler_table() {
3604   ExceptionHandlerTable(this).print(code_begin());
3605 }
3606 

4421 void nmethod::update_speculation(JavaThread* thread) {
4422   jlong speculation = thread->pending_failed_speculation();
4423   if (speculation != 0) {
4424     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4425     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4426     thread->set_pending_failed_speculation(0);
4427   }
4428 }
4429 
4430 const char* nmethod::jvmci_name() {
4431   if (jvmci_nmethod_data() != nullptr) {
4432     return jvmci_nmethod_data()->name();
4433   }
4434   return nullptr;
4435 }
4436 
4437 bool nmethod::jvmci_skip_profile_deopt() const {
4438   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4439 }
4440 #endif





















   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "code/aotCodeCache.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/compiledIC.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "code/nativeInst.hpp"
  32 #include "code/nmethod.inline.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compilerDirectives.hpp"
  39 #include "compiler/compilerOracle.hpp"
  40 #include "compiler/compileTask.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"
  47 #include "gc/shared/collectedHeap.hpp"

 992              _method->method_holder()->external_name(),
 993              _method->name()->as_C_string(),
 994              _method->signature()->as_C_string(),
 995              compile_id());
 996   }
 997   return check_evol.has_evol_dependency();
 998 }
 999 
1000 int nmethod::total_size() const {
1001   return
1002     consts_size()        +
1003     insts_size()         +
1004     stub_size()          +
1005     scopes_data_size()   +
1006     scopes_pcs_size()    +
1007     handler_table_size() +
1008     nul_chk_table_size();
1009 }
1010 
1011 const char* nmethod::compile_kind() const {
1012   if (is_osr_method()) return "osr";
1013   if (preloaded())     return "AP";
1014   if (is_aot())        return "A";
1015 
1016   if (method() != nullptr && is_native_method()) {
1017     if (method()->is_continuation_native_intrinsic()) {
1018       return "cnt";
1019     }
1020     return "c2n";
1021   }
1022   return nullptr;
1023 }
1024 
// Human-readable name of the compiler that produced this nmethod,
// derived from the recorded compiler type.
const char* nmethod::compiler_name() const {
  return compilertype2name(_compiler_type);
}
1028 
1029 #ifdef ASSERT
1030 class CheckForOopsClosure : public OopClosure {
1031   bool _found_oop = false;
1032  public:
1033   virtual void do_oop(oop* o) { _found_oop = true; }
1034   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1035   bool found_oop() { return _found_oop; }

1101     nm = new (native_nmethod_size, allow_NonNMethod_space)
1102     nmethod(method(), compiler_none, native_nmethod_size,
1103             compile_id, &offsets,
1104             code_buffer, frame_size,
1105             basic_lock_owner_sp_offset,
1106             basic_lock_sp_offset,
1107             oop_maps, mutable_data_size);
1108     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1109     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1110   }
1111 
1112   if (nm != nullptr) {
1113     // verify nmethod
1114     DEBUG_ONLY(nm->verify();) // might block
1115 
1116     nm->log_new_nmethod();
1117   }
1118   return nm;
1119 }
1120 
1121 void nmethod::record_nmethod_dependency() {
1122   // To make dependency checking during class loading fast, record
1123   // the nmethod dependencies in the classes it is dependent on.
1124   // This allows the dependency checking code to simply walk the
1125   // class hierarchy above the loaded class, checking only nmethods
1126   // which are dependent on those classes.  The slow way is to
1127   // check every nmethod for dependencies which makes it linear in
1128   // the number of methods compiled.  For applications with a lot
1129   // classes the slow way is too slow.
1130   for (Dependencies::DepStream deps(this); deps.next(); ) {
1131     if (deps.type() == Dependencies::call_site_target_value) {
1132       // CallSite dependencies are managed on per-CallSite instance basis.
1133       oop call_site = deps.argument_oop(0);
1134       MethodHandles::add_dependent_nmethod(call_site, this);
1135     } else {
1136       InstanceKlass* ik = deps.context_type();
1137       if (ik == nullptr) {
1138         continue;  // ignore things like evol_method
1139       }
1140       // record this nmethod as dependent on this klass
1141       ik->add_dependent_nmethod(this);
1142     }
1143   }
1144 }
1145 
1146 nmethod* nmethod::new_nmethod(const methodHandle& method,
1147   int compile_id,
1148   int entry_bci,
1149   CodeOffsets* offsets,
1150   int orig_pc_offset,
1151   DebugInformationRecorder* debug_info,
1152   Dependencies* dependencies,
1153   CodeBuffer* code_buffer, int frame_size,
1154   OopMapSet* oop_maps,
1155   ExceptionHandlerTable* handler_table,
1156   ImplicitExceptionTable* nul_chk_table,
1157   AbstractCompiler* compiler,
1158   CompLevel comp_level
1159 #if INCLUDE_JVMCI
1160   , char* speculations,
1161   int speculations_len,
1162   JVMCINMethodData* jvmci_data
1163 #endif
1164 )
1165 {

1192 
1193   int mutable_data_size = required_mutable_data_size(code_buffer
1194     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1195 
1196   {
1197     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1198 
1199     nm = new (nmethod_size, comp_level)
1200     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1201             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1202             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1203             handler_table, nul_chk_table, compiler, comp_level
1204 #if INCLUDE_JVMCI
1205             , speculations,
1206             speculations_len,
1207             jvmci_data
1208 #endif
1209             );
1210 
1211     if (nm != nullptr) {
1212       nm->record_nmethod_dependency();
1213       NOT_PRODUCT(note_java_nmethod(nm));
1214     }
1215   }
1216   // Do verification and logging outside CodeCache_lock.
1217   if (nm != nullptr) {
1218 
1219 #ifdef ASSERT
1220     LogTarget(Debug, aot, codecache, nmethod) log;
1221     if (log.is_enabled()) {
1222       LogStream out(log);
1223       out.print_cr("== new_nmethod 2");
1224       FlagSetting fs(PrintRelocations, true);
1225       nm->print_on_impl(&out);
1226       nm->decode(&out);
1227     }
1228 #endif
1229 
1230     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1231     DEBUG_ONLY(nm->verify();)
1232     nm->log_new_nmethod();
1233   }
1234   return nm;
1235 }
1236 
// Reconstitute an nmethod in 'code_cache_buffer' from an archived (AOT) image.
// Returns the same buffer viewed as an initialized nmethod.
// NOTE(review): statement order below is significant — e.g. the PcDesc
// container is created only after the PcDesc data is in place — do not reorder.
nmethod* nmethod::restore(address code_cache_buffer,
                          const methodHandle& method,
                          int compile_id,
                          address reloc_data,
                          GrowableArray<Handle>& oop_list,
                          GrowableArray<Metadata*>& metadata_list,
                          ImmutableOopMapSet* oop_maps,
                          address immutable_data,
                          GrowableArray<Handle>& reloc_imm_oop_list,
                          GrowableArray<Metadata*>& reloc_imm_metadata_list,
                          AOTCodeReader* aot_code_reader)
{
  // Restore the CodeBlob part (relocation data, oop maps) first.
  CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
  nmethod* nm = (nmethod*)code_cache_buffer;
  nm->set_method(method());
  nm->_compile_id = compile_id;
  nm->set_immutable_data(immutable_data);
  // Populate the embedded oop and metadata sections.
  nm->copy_values(&oop_list);
  nm->copy_values(&metadata_list);

  // Patch immediate oops/metadata referenced directly from the code.
  aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);

#ifndef PRODUCT
  nm->asm_remarks().init();
  aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
  nm->dbg_strings().init();
  aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
#endif

  // Flush the code block
  ICache::invalidate_range(nm->code_begin(), nm->code_size());

  // Create cache after PcDesc data is copied - it will be used to initialize cache
  nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());

  nm->set_aot_code_entry(aot_code_reader->aot_code_entry());

  nm->post_init();
  return nm;
}
1277 
// Materialize a runtime nmethod from an archived (AOT) nmethod: allocate
// code-cache space under CodeCache_lock and rebuild the contents via
// nmethod::restore().  Returns nullptr if the code-cache allocation fails.
// NOTE(review): 'compiler' is not used in this body — confirm it is needed.
nmethod* nmethod::new_nmethod(nmethod* archived_nm,
                              const methodHandle& method,
                              AbstractCompiler* compiler,
                              int compile_id,
                              address reloc_data,
                              GrowableArray<Handle>& oop_list,
                              GrowableArray<Metadata*>& metadata_list,
                              ImmutableOopMapSet* oop_maps,
                              address immutable_data,
                              GrowableArray<Handle>& reloc_imm_oop_list,
                              GrowableArray<Metadata*>& reloc_imm_metadata_list,
                              AOTCodeReader* aot_code_reader)
{
  nmethod* nm = nullptr;
  int nmethod_size = archived_nm->size();
  // create nmethod
  {
    // Allocation and dependency recording are done under the code-cache lock.
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
    if (code_cache_buffer != nullptr) {
      nm = archived_nm->restore(code_cache_buffer,
                                method,
                                compile_id,
                                reloc_data,
                                oop_list,
                                metadata_list,
                                oop_maps,
                                immutable_data,
                                reloc_imm_oop_list,
                                reloc_imm_metadata_list,
                                aot_code_reader);
      nm->record_nmethod_dependency();
      NOT_PRODUCT(note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {
#ifdef ASSERT
    // Optional debug dump of the freshly restored nmethod.
    LogTarget(Debug, aot, codecache, nmethod) log;
    if (log.is_enabled()) {
      LogStream out(log);
      out.print_cr("== new_nmethod 2");
      FlagSetting fs(PrintRelocations, true);
      nm->print_on_impl(&out);
      nm->decode(&out);
    }
#endif
    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1331 
// Fill in default values for various fields
// Runs early in nmethod construction so that no field is left uninitialized,
// even briefly (see the comment below).
void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
  // avoid uninitialized fields, even for short time periods
  _exception_cache            = nullptr;
  _gc_data                    = nullptr;
  _oops_do_mark_link          = nullptr;
  _compiled_ic_data           = nullptr;

  _is_unloading_state         = 0;
  _state                      = not_installed;

  // Flag bits: all start cleared.
  _has_unsafe_access          = 0;
  _has_wide_vectors           = 0;
  _has_monitors               = 0;
  _has_scoped_access          = 0;
  _has_flushed_dependencies   = 0;
  _is_unlinked                = 0;
  _load_reported              = 0; // jvmti state
  _preloaded                  = 0;
  _has_clinit_barriers        = 0;

  _used                       = false;
  _deoptimization_status      = not_marked;

  // SECT_CONSTS is first in code buffer so the offset should be 0.
  int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
  assert(consts_offset == 0, "const_offset: %d", consts_offset);

  _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

  // Entry offsets are narrowed to 16 bits; CHECKED_CAST verifies they fit.
  CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
  CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));

  _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
}
1367 
1368 // Post initialization
1369 void nmethod::post_init() {
1370   clear_unloading_state();
1371 
1372   finalize_relocations();
1373 

1405     init_defaults(code_buffer, offsets);
1406 
1407     _osr_entry_point         = nullptr;
1408     _pc_desc_container       = nullptr;
1409     _entry_bci               = InvocationEntryBci;
1410     _compile_id              = compile_id;
1411     _comp_level              = CompLevel_none;
1412     _compiler_type           = type;
1413     _orig_pc_offset          = 0;
1414     _num_stack_arg_slots     = 0;
1415 
1416     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1417       // Continuation enter intrinsic
1418       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1419     } else {
1420       _exception_offset      = 0;
1421     }
1422     // Native wrappers do not have deopt handlers. Make the values
1423     // something that will never match a pc like the nmethod vtable entry
1424     _deopt_handler_entry_offset    = 0;
1425     _aot_code_entry          = nullptr;
1426     _method_profiling_count  = 0;
1427     _unwind_handler_offset   = 0;
1428 
1429     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1430     uint16_t metadata_size;
1431     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1432     JVMCI_ONLY( _metadata_size = metadata_size; )
1433     assert(_mutable_data_size == _relocation_size + metadata_size,
1434            "wrong mutable data size: %d != %d + %d",
1435            _mutable_data_size, _relocation_size, metadata_size);
1436 
1437     // native wrapper does not have read-only data but we need unique not null address
1438     _immutable_data          = blob_end();
1439     _immutable_data_size     = 0;
1440     _nul_chk_table_offset    = 0;
1441     _handler_table_offset    = 0;
1442     _scopes_pcs_offset       = 0;
1443     _scopes_data_offset      = 0;
1444 #if INCLUDE_JVMCI
1445     _speculations_offset     = 0;
1446 #endif

1467     // This is both handled in decode2(), called via print_code() -> decode()
1468     if (PrintNativeNMethods) {
1469       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1470       print_code();
1471       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1472 #if defined(SUPPORT_DATA_STRUCTS)
1473       if (AbstractDisassembler::show_structs()) {
1474         if (oop_maps != nullptr) {
1475           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1476           oop_maps->print_on(tty);
1477           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1478         }
1479       }
1480 #endif
1481     } else {
1482       print(); // print the header part only.
1483     }
1484 #if defined(SUPPORT_DATA_STRUCTS)
1485     if (AbstractDisassembler::show_structs()) {
1486       if (PrintRelocations) {
1487         print_relocations_on(tty);
1488         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1489       }
1490     }
1491 #endif
1492     if (xtty != nullptr) {
1493       xtty->tail("print_native_nmethod");
1494     }
1495   }
1496 }
1497 
1498 
1499 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1500 {
1501 
1502   if (nm._oop_maps != nullptr) {
1503     _oop_maps                   = nm._oop_maps->clone();
1504   } else {
1505     _oop_maps                   = nullptr;
1506   }
1507 

1800   CompLevel comp_level
1801 #if INCLUDE_JVMCI
1802   , char* speculations,
1803   int speculations_len,
1804   JVMCINMethodData* jvmci_data
1805 #endif
1806   )
1807   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1808              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1809   _deoptimization_generation(0),
1810   _gc_epoch(CodeCache::gc_epoch()),
1811   _method(method),
1812   _osr_link(nullptr)
1813 {
1814   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1815   {
1816     DEBUG_ONLY(NoSafepointVerifier nsv;)
1817     assert_locked_or_safepoint(CodeCache_lock);
1818 
1819     init_defaults(code_buffer, offsets);
1820     _aot_code_entry          = nullptr; // runtime compiled nmethod does not have AOTCodeEntry
1821     _method_profiling_count  = 0;
1822 
1823     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1824     _entry_bci       = entry_bci;
1825     _compile_id      = compile_id;
1826     _comp_level      = comp_level;
1827     _compiler_type   = type;
1828     _orig_pc_offset  = orig_pc_offset;
1829 
1830     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1831 
1832     set_ctable_begin(header_begin() + content_offset());
1833 
1834 #if INCLUDE_JVMCI
1835     if (compiler->is_jvmci()) {
1836       // JVMCI might not produce any stub sections
1837       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1838         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1839       } else {
1840         _exception_offset        = -1;
1841       }

1931     // Copy speculations to nmethod
1932     if (speculations_size() != 0) {
1933       memcpy(speculations_begin(), speculations, speculations_len);
1934     }
1935 #endif
1936     init_immutable_data_ref_count();
1937 
1938     post_init();
1939 
1940     // we use the information of entry points to find out if a method is
1941     // static or non static
1942     assert(compiler->is_c2() || compiler->is_jvmci() ||
1943            _method->is_static() == (entry_point() == verified_entry_point()),
1944            " entry points must be same for static methods and vice versa");
1945   }
1946 }
1947 
// Print a short set of xml attributes to identify this nmethod.  The
// output should be embedded in some other element.
void nmethod::log_identity(xmlStream* log) const {
  assert(log->inside_attrs_or_error(), "printing attributes");
  log->print(" compile_id='%d'", compile_id());
  const char* nm_kind = compile_kind();
  if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);  // omitted for ordinary compiles
  log->print(" compiler='%s'", compiler_name());
  if (TieredCompilation) {
    log->print(" compile_level='%d'", comp_level());
  }
#if INCLUDE_JVMCI
  if (jvmci_nmethod_data() != nullptr) {
    const char* jvmci_name = jvmci_nmethod_data()->name();
    if (jvmci_name != nullptr) {
      // NOTE(review): the name goes through text() rather than print(),
      // presumably so XML-special characters are escaped — confirm.
      log->print(" jvmci_mirror_name='");
      log->text("%s", jvmci_name);
      log->print("'");
    }
  }
#endif
}
1970 
1971 
// Emit a " <name>_offset='<d>'" attribute on 'log', but only when the named
// section is non-empty (name##_begin() != name##_end()).  The offset is
// relative to 'this' (the nmethod header address).
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'"    , \
               p2i(name##_begin()) - p2i(this))
1976 
1977 

2092       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2093       if (oop_maps() != nullptr) {
2094         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
2095         oop_maps()->print_on(tty);
2096         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2097       }
2098     }
2099 #endif
2100   } else {
2101     print(); // print the header part only.
2102   }
2103 
2104 #if defined(SUPPORT_DATA_STRUCTS)
2105   if (AbstractDisassembler::show_structs()) {
2106     methodHandle mh(Thread::current(), _method);
2107     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
2108       print_scopes();
2109       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2110     }
2111     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
2112       print_relocations_on(tty);
2113       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2114     }
2115     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
2116       print_dependencies_on(tty);
2117       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2118     }
2119     if (printmethod || PrintExceptionHandlers) {
2120       print_handler_table();
2121       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2122       print_nul_chk_table();
2123       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2124     }
2125 
2126     if (printmethod) {
2127       print_recorded_oops();
2128       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2129       print_recorded_metadata();
2130       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2131     }
2132   }
2133 #endif
2134 
2135   if (xtty != nullptr) {
2136     xtty->tail("print_nmethod");
2137   }
2138 }
2139 
2140 
// Promote one word from an assembly-time handle to a live embedded oop.
// 'handle' may be nullptr or the non-oop sentinel word (used for inline
// caches); those values are stored through unchanged instead of resolved.
inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
  if (handle == nullptr ||
      // As a special case, IC oops are initialized to 1 or -1.
      handle == (jobject) Universe::non_oop_word()) {
    *(void**)dest = handle;
  } else {
    *dest = JNIHandles::resolve_non_null(handle);
  }
}
2151 
2152 void nmethod::copy_values(GrowableArray<Handle>* array) {
2153   int length = array->length();
2154   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2155   oop* dest = oops_begin();
2156   for (int index = 0 ; index < length; index++) {
2157     dest[index] = array->at(index)();
2158   }
2159 }
2160 
// Have to have the same name because it's called by a template
// Variant taking JNI handles: resolves each handle into the embedded oop
// section, then re-patches oop relocations so immediate oops in the code
// are rewritten from their jobject placeholders.
void nmethod::copy_values(GrowableArray<jobject>* array) {
  int length = array->length();
  assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
  oop* dest = oops_begin();
  for (int index = 0 ; index < length; index++) {
    initialize_immediate_oop(&dest[index], array->at(index));
  }

  // Now we can fix up all the oops in the code.  We need to do this
  // in the code because the assembler uses jobjects as placeholders.
  // The code and relocations have already been initialized by the
  // CodeBlob constructor, so it is valid even at this early point to
  // iterate over relocations and patch the code.
  fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
}
2177 
2178 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2179   int length = array->length();

// Re-patch all oop- and metadata-bearing instructions in [begin, end).
// When 'initialize_immediates' is set, immediate oop slots still holding
// jobject placeholders are first resolved via initialize_immediate_oop().
void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
  // re-patch all oop-bearing instructions, just in case some oops moved
  RelocIterator iter(this, begin, end);
  while (iter.next()) {
    if (iter.type() == relocInfo::oop_type) {
      oop_Relocation* reloc = iter.oop_reloc();
      if (initialize_immediates && reloc->oop_is_immediate()) {
        // Slot still holds the assembly-time jobject; resolve it in place.
        oop* dest = reloc->oop_addr();
        jobject obj = *reinterpret_cast<jobject*>(dest);
        initialize_immediate_oop(dest, obj);
      }
      // Refresh the oop-related bits of this instruction.
      reloc->fix_oop_relocation();
    } else if (iter.type() == relocInfo::metadata_type) {
      metadata_Relocation* reloc = iter.metadata_reloc();
      reloc->fix_metadata_relocation();
    }
  }
}
2206 
2207 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
2208   RelocIterator iter(this);
2209   while (iter.next()) {
2210     if (iter.type() == relocInfo::oop_type) {
2211       oop_Relocation* reloc = iter.oop_reloc();
2212       if (reloc->oop_is_immediate()) {
2213         oop dest = reloc->oop_value();
2214         Handle h(thread, dest);
2215         oop_list.append(h);
2216       }
2217     } else if (iter.type() == relocInfo::metadata_type) {
2218       metadata_Relocation* reloc = iter.metadata_reloc();
2219       if (reloc->metadata_is_immediate()) {
2220         Metadata* m = reloc->metadata_value();
2221         metadata_list.append(m);
2222       }
2223     }
2224   }
2225 }
2226 
// Patch the post-call nop at 'pc' with the matching oopmap slot and the pc's
// offset from the nmethod start, so later stack walks can recover both
// cheaply.  Patching failures are tolerated and merely logged.
static void install_post_call_nop_displacement(nmethod* nm, address pc) {
  NativePostCallNop* nop = nativePostCallNop_at((address) pc);
  intptr_t cbaddr = (intptr_t) nm;
  intptr_t offset = ((intptr_t) pc) - cbaddr;   // pc's displacement from the blob start

  int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
  if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
    log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
  } else if (!nop->patch(oopmap_slot, offset)) {
    // Values did not fit in the nop's encoding; leave it unpatched.
    log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
  }
}
2239 
2240 void nmethod::finalize_relocations() {
2241   NoSafepointVerifier nsv;
2242 
2243   GrowableArray<NativeMovConstReg*> virtual_call_data;
2244 
2245   // Make sure that post call nops fill in nmethod offsets eagerly so
2246   // we don't have to race with deoptimization

2377   // be alive the previous completed marking cycle.
2378   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2379 }
2380 
2381 void nmethod::inc_decompile_count() {
2382   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2383   // Could be gated by ProfileTraps, but do not bother...
2384 #if INCLUDE_JVMCI
2385   if (jvmci_skip_profile_deopt()) {
2386     return;
2387   }
2388 #endif
2389   Method* m = method();
2390   if (m == nullptr)  return;
2391   MethodData* mdo = m->method_data();
2392   if (mdo == nullptr)  return;
2393   // There is a benign race here.  See comments in methodData.hpp.
2394   mdo->inc_decompile_count();
2395 }
2396 
// Atomically increment this nmethod's method profiling counter.
void nmethod::inc_method_profiling_count() {
  AtomicAccess::inc(&_method_profiling_count);
}
2400 
2401 uint64_t nmethod::method_profiling_count() {
2402   return _method_profiling_count;
2403 }
2404 
2405 bool nmethod::try_transition(signed char new_state_int) {
2406   signed char new_state = new_state_int;
2407   assert_lock_strong(NMethodState_lock);
2408   signed char old_state = _state;
2409   if (old_state >= new_state) {
2410     // Ensure monotonicity of transitions.
2411     return false;
2412   }
2413   AtomicAccess::store(&_state, new_state);
2414   return true;
2415 }
2416 
2417 void nmethod::invalidate_osr_method() {
2418   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2419   // Remove from list of active nmethods
2420   if (method() != nullptr) {
2421     method()->method_holder()->remove_osr_nmethod(this);
2422   }
2423 }
2424 

2434     }
2435   }
2436 
2437   ResourceMark rm;
2438   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2439   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2440 
2441   CompileTask::print_ul(this, ss.freeze());
2442   if (PrintCompilation) {
2443     print_on_with_msg(tty, ss.freeze());
2444   }
2445 }
2446 
2447 void nmethod::unlink_from_method() {
2448   if (method() != nullptr) {
2449     method()->unlink_code(this);
2450   }
2451 }
2452 
2453 // Invalidate code
2454 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2455   // This can be called while the system is already at a safepoint which is ok
2456   NoSafepointVerifier nsv;
2457 
2458   if (is_unloading()) {
2459     // If the nmethod is unloading, then it is already not entrant through
2460     // the nmethod entry barriers. No need to do anything; GC will unload it.
2461     return false;
2462   }
2463 
2464   if (AtomicAccess::load(&_state) == not_entrant) {
2465     // Avoid taking the lock if already in required state.
2466     // This is safe from races because the state is an end-state,
2467     // which the nmethod cannot back out of once entered.
2468     // No need for fencing either.
2469     return false;
2470   }
2471 
2472   MACOS_AARCH64_ONLY(os::thread_wx_enable_write());
2473 
2474   {

2498     }
2499 
2500     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2501     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2502       // If nmethod entry barriers are not supported, we won't mark
2503       // nmethods as on-stack when they become on-stack. So we
2504       // degrade to a less accurate flushing strategy, for now.
2505       mark_as_maybe_on_stack();
2506     }
2507 
2508     // Change state
2509     bool success = try_transition(not_entrant);
2510     assert(success, "Transition can't fail");
2511 
2512     // Log the transition once
2513     log_state_change(invalidation_reason);
2514 
2515     // Remove nmethod from method.
2516     unlink_from_method();
2517 
2518     if (!keep_aot_entry) {
2519       // Keep AOT code if it was simply replaced
2520       // otherwise make it not entrant too.
2521       AOTCodeCache::invalidate(_aot_code_entry);
2522     }
2523 
2524     CompileBroker::log_not_entrant(this);
2525   } // leave critical region under NMethodState_lock
2526 
2527 #if INCLUDE_JVMCI
2528   // Invalidate can't occur while holding the NMethodState_lock
2529   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2530   if (nmethod_data != nullptr) {
2531     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2532   }
2533 #endif
2534 
2535 #ifdef ASSERT
2536   if (is_osr_method() && method() != nullptr) {
2537     // Make sure osr nmethod is invalidated, i.e. not on the list
2538     bool found = method()->method_holder()->remove_osr_nmethod(this);
2539     assert(!found, "osr nmethod should have been invalidated");
2540   }
2541 #endif
2542 
2543   return true;
2544 }

2569     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2570             nmethod::InvalidationReason::UNLOADING_COLD :
2571             nmethod::InvalidationReason::UNLOADING);
2572   }
2573 #endif
2574 
2575   // Post before flushing as jmethodID is being used
2576   post_compiled_method_unload();
2577 
2578   // Register for flushing when it is safe. For concurrent class unloading,
2579   // that would be after the unloading handshake, and for STW class unloading
2580   // that would be when getting back to the VM thread.
2581   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2582 }
2583 
// Release all side structures owned by this nmethod and return its storage
// to the code cache. 'unregister_nmethod' controls whether the GC heap is
// asked to drop its registration for this nmethod first.
void nmethod::purge(bool unregister_nmethod) {

  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);

  // completely deallocate this method
  Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));

  // Optional debug-level trace of the flush with code cache statistics.
  LogTarget(Debug, codecache) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    const char* method_name = method()->name()->as_C_string();
    const size_t codecache_capacity = CodeCache::capacity()/1024;
    const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
    ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
              "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
              compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
              codecache_capacity, codecache_free_space, method_name, compiler_name());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  while(ec != nullptr) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }
  // Free heap-allocated side structures owned by this nmethod.
  if (_pc_desc_container != nullptr) {
    delete _pc_desc_container;
  }
  if (_compiled_ic_data != nullptr) {
    delete[] _compiled_ic_data;
  }

  // Immutable data may be shared between nmethods (reference counted) and
  // may be backed by the AOT code cache, in which case it is not freed here.
  if (_immutable_data != blob_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
    // Free memory if this was the last nmethod referencing immutable data
    if (dec_immutable_data_ref_count() == 0) {
      os::free(_immutable_data);
    }

    _immutable_data = blob_end(); // Valid not null address
  }

  if (unregister_nmethod) {
    Universe::heap()->unregister_nmethod(this);
  }
  CodeCache::unregister_old_nmethod(this);

  JVMCI_ONLY( _metadata_size = 0; )
  // Finally release the CodeBlob storage itself.
  CodeBlob::purge();
}
2637 
2638 oop nmethod::oop_at(int index) const {
2639   if (index == 0) {
2640     return nullptr;

2667         MethodHandles::clean_dependency_context(call_site);
2668       } else {
2669         InstanceKlass* ik = deps.context_type();
2670         if (ik == nullptr) {
2671           continue;  // ignore things like evol_method
2672         }
2673         // During GC liveness of dependee determines class that needs to be updated.
2674         // The GC may clean dependency contexts concurrently and in parallel.
2675         ik->clean_dependency_context();
2676       }
2677     }
2678   }
2679 }
2680 
// Post-installation bookkeeping for a newly created nmethod: mark the
// compile task successful, record size statistics on it, post the JVMTI
// compiled-method-load event, and log/print the compilation as directed.
void nmethod::post_compiled_method(CompileTask* task) {
  task->mark_success();
  // Record nmethod sizes on the task for compilation statistics.
  task->set_nm_content_size(content_size());
  task->set_nm_insts_size(insts_size());
  task->set_nm_total_size(total_size());

  // task->is_aot_load() is true only for loaded AOT code.
  // nmethod::_aot_code_entry is set for loaded and stored AOT code
  // to invalidate the entry when nmethod is deoptimized.
  // VerifyAOTCode is option to not store in archive AOT code.
  guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");

  // JVMTI -- compiled method notification (must be done outside lock)
  post_compiled_method_load_event();

  if (CompilationLog::log() != nullptr) {
    CompilationLog::log()->log_nmethod(JavaThread::current(), this);
  }

  // Honor per-method print directives (e.g. PrintNMethods).
  const DirectiveSet* directive = task->directive();
  maybe_print_nmethod(directive);
}
2703 
2704 #if INCLUDE_CDS
2705 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2706 
2707 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2708   precond(CDSConfig::is_using_aot_linked_classes());
2709   precond(!ServiceThread::has_started());
2710 
2711   // We are still in single threaded stage of VM bootstrap. No need to lock.
2712   if (_delayed_compiled_method_load_events == nullptr) {

3431 void nmethod::verify() {
3432   if (is_not_entrant())
3433     return;
3434 
3435   // assert(oopDesc::is_oop(method()), "must be valid");
3436 
3437   ResourceMark rm;
3438 
3439   if (!CodeCache::contains(this)) {
3440     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3441   }
3442 
3443   if(is_native_method() )
3444     return;
3445 
3446   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3447   if (nm != this) {
3448     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3449   }
3450 
3451   // Verification can triggered during shutdown after AOTCodeCache is closed.
3452   // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3453   if (!is_aot() || AOTCodeCache::is_on()) {
3454     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3455       if (! p->verify(this)) {
3456         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3457       }
3458     }

3459 
3460 #ifdef ASSERT
3461 #if INCLUDE_JVMCI
3462     {
3463       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3464       ImmutableOopMapSet* oms = oop_maps();
3465       ImplicitExceptionTable implicit_table(this);
3466       for (uint i = 0; i < implicit_table.len(); i++) {
3467         int exec_offset = (int) implicit_table.get_exec_offset(i);
3468         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3469           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3470           bool found = false;
3471           for (int i = 0, imax = oms->count(); i < imax; i++) {
3472             if (oms->pair_at(i)->pc_offset() == exec_offset) {
3473               found = true;
3474               break;
3475             }
3476           }
3477           assert(found, "missing oopmap");
3478         }

3479       }
3480     }

3481 #endif
3482 #endif
3483   }
3484 
3485   VerifyOopsClosure voc(this);
3486   oops_do(&voc);
3487   assert(voc.ok(), "embedded oops must be OK");
3488   Universe::heap()->verify_nmethod(this);
3489 
3490   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3491          nm->method()->external_name(), p2i(_oops_do_mark_link));
3492   if (!is_aot() || AOTCodeCache::is_on()) {
3493     verify_scopes();
3494   }
3495 
3496   CompiledICLocker nm_verify(this);
3497   VerifyMetadataClosure vmc;
3498   metadata_do(&vmc);
3499 }
3500 
3501 
3502 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3503 
3504   // Verify IC only when nmethod installation is finished.
3505   if (!is_not_installed()) {
3506     if (CompiledICLocker::is_safe(this)) {
3507       if (is_inline_cache) {
3508         CompiledIC_at(this, call_site);
3509       } else {
3510         CompiledDirectCall::at(call_site);
3511       }
3512     } else {
3513       CompiledICLocker ml_verify(this);
3514       if (is_inline_cache) {

3643                                              p2i(nul_chk_table_end()),
3644                                              nul_chk_table_size());
3645   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3646                                              p2i(handler_table_begin()),
3647                                              p2i(handler_table_end()),
3648                                              handler_table_size());
3649   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3650                                              p2i(scopes_pcs_begin()),
3651                                              p2i(scopes_pcs_end()),
3652                                              scopes_pcs_size());
3653   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3654                                              p2i(scopes_data_begin()),
3655                                              p2i(scopes_data_end()),
3656                                              scopes_data_size());
3657 #if INCLUDE_JVMCI
3658   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3659                                              p2i(speculations_begin()),
3660                                              p2i(speculations_end()),
3661                                              speculations_size());
3662 #endif
3663   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3664     _aot_code_entry->print(st);
3665   }
3666 }
3667 
// Disassemble and print this nmethod's code to tty.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3674 
3675 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3676 
3677 void nmethod::print_dependencies_on(outputStream* out) {
3678   ResourceMark rm;
3679   stringStream st;
3680   st.print_cr("Dependencies:");
3681   for (Dependencies::DepStream deps(this); deps.next(); ) {
3682     deps.print_dependency(&st);
3683     InstanceKlass* ctxk = deps.context_type();
3684     if (ctxk != nullptr) {
3685       if (ctxk->is_dependent_nmethod(this)) {

3745   st->print("scopes:");
3746   if (scopes_pcs_begin() < scopes_pcs_end()) {
3747     st->cr();
3748     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3749       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3750         continue;
3751 
3752       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3753       while (sd != nullptr) {
3754         sd->print_on(st, p);  // print output ends with a newline
3755         sd = sd->sender();
3756       }
3757     }
3758   } else {
3759     st->print_cr(" <list empty>");
3760   }
3761 }
3762 #endif
3763 
3764 #ifndef PRODUCT  // RelocIterator does support printing only then.
// Print all relocation entries of this nmethod to st.
// Only compiled in non-product builds (RelocIterator printing is
// product-excluded, see the surrounding #ifndef PRODUCT).
void nmethod::print_relocations_on(outputStream* st) {
  ResourceMark m;       // in case methods get printed via the debugger
  st->print_cr("relocations:");
  RelocIterator iter(this);
  iter.print_on(st);
}
3771 #endif
3772 
3773 void nmethod::print_pcs_on(outputStream* st) {
3774   ResourceMark m;       // in case methods get printed via debugger
3775   st->print("pc-bytecode offsets:");
3776   if (scopes_pcs_begin() < scopes_pcs_end()) {
3777     st->cr();
3778     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3779       p->print_on(st, this);  // print output ends with a newline
3780     }
3781   } else {
3782     st->print_cr(" <list empty>");
3783   }
3784 }
3785 
// Print this nmethod's exception handler table; entries are resolved
// against code_begin().
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3789 

4604 void nmethod::update_speculation(JavaThread* thread) {
4605   jlong speculation = thread->pending_failed_speculation();
4606   if (speculation != 0) {
4607     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4608     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4609     thread->set_pending_failed_speculation(0);
4610   }
4611 }
4612 
4613 const char* nmethod::jvmci_name() {
4614   if (jvmci_nmethod_data() != nullptr) {
4615     return jvmci_nmethod_data()->name();
4616   }
4617   return nullptr;
4618 }
4619 
4620 bool nmethod::jvmci_skip_profile_deopt() const {
4621   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4622 }
4623 #endif
4624 
// Reset all runtime-only nmethod state before archiving, after the
// CodeBlob-level preparation has run. The cleared fields are pointers,
// counters and status that are only meaningful in a live VM.
void nmethod::prepare_for_archiving_impl() {
  CodeBlob::prepare_for_archiving_impl();
  // Counters and GC bookkeeping.
  _deoptimization_generation = 0;
  _gc_epoch = 0;
  _method_profiling_count = 0;
  // Links and side structures that point into the live VM.
  _osr_link = nullptr;
  _method = nullptr;
  _immutable_data = nullptr;
  _pc_desc_container = nullptr;
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;
  _osr_entry_point = nullptr;
  _compile_id = -1;
  // Status flags: the archived nmethod starts out unmarked and not installed.
  _deoptimization_status = not_marked;
  _is_unloading_state = 0;
  _state = not_installed;
}
< prev index next >