src/hotspot/share/code/nmethod.cpp

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"

  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compilerDirectives.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"

 990              _method->method_holder()->external_name(),
 991              _method->name()->as_C_string(),
 992              _method->signature()->as_C_string(),
 993              compile_id());
 994   }
 995   return check_evol.has_evol_dependency();
 996 }
 997 
 998 int nmethod::total_size() const {
 999   return
1000     consts_size()        +
1001     insts_size()         +
1002     stub_size()          +
1003     scopes_data_size()   +
1004     scopes_pcs_size()    +
1005     handler_table_size() +
1006     nul_chk_table_size();
1007 }
1008 
1009 const char* nmethod::compile_kind() const {
1010   if (is_osr_method())     return "osr";



1011   if (method() != nullptr && is_native_method()) {
1012     if (method()->is_continuation_native_intrinsic()) {
1013       return "cnt";
1014     }
1015     return "c2n";
1016   }
1017   return nullptr;
1018 }
1019 
1020 const char* nmethod::compiler_name() const {
1021   return compilertype2name(_compiler_type);
1022 }
1023 
1024 #ifdef ASSERT
1025 class CheckForOopsClosure : public OopClosure {
1026   bool _found_oop = false;
1027  public:
1028   virtual void do_oop(oop* o) { _found_oop = true; }
1029   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1030   bool found_oop() { return _found_oop; }

1096     nm = new (native_nmethod_size, allow_NonNMethod_space)
1097     nmethod(method(), compiler_none, native_nmethod_size,
1098             compile_id, &offsets,
1099             code_buffer, frame_size,
1100             basic_lock_owner_sp_offset,
1101             basic_lock_sp_offset,
1102             oop_maps, mutable_data_size);
1103     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1104     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1105   }
1106 
1107   if (nm != nullptr) {
1108     // verify nmethod
1109     DEBUG_ONLY(nm->verify();) // might block
1110 
1111     nm->log_new_nmethod();
1112   }
1113   return nm;
1114 }
1115 

























1116 nmethod* nmethod::new_nmethod(const methodHandle& method,
1117   int compile_id,
1118   int entry_bci,
1119   CodeOffsets* offsets,
1120   int orig_pc_offset,
1121   DebugInformationRecorder* debug_info,
1122   Dependencies* dependencies,
1123   CodeBuffer* code_buffer, int frame_size,
1124   OopMapSet* oop_maps,
1125   ExceptionHandlerTable* handler_table,
1126   ImplicitExceptionTable* nul_chk_table,
1127   AbstractCompiler* compiler,
1128   CompLevel comp_level
1129 #if INCLUDE_JVMCI
1130   , char* speculations,
1131   int speculations_len,
1132   JVMCINMethodData* jvmci_data
1133 #endif
1134 )
1135 {

1162 
1163   int mutable_data_size = required_mutable_data_size(code_buffer
1164     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1165 
1166   {
1167     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1168 
1169     nm = new (nmethod_size, comp_level)
1170     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1171             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1172             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1173             handler_table, nul_chk_table, compiler, comp_level
1174 #if INCLUDE_JVMCI
1175             , speculations,
1176             speculations_len,
1177             jvmci_data
1178 #endif
1179             );
1180 
1181     if (nm != nullptr) {
1182       // To make dependency checking during class loading fast, record
1183       // the nmethod dependencies in the classes it is dependent on.
1184       // This allows the dependency checking code to simply walk the
1185       // class hierarchy above the loaded class, checking only nmethods
1186       // which are dependent on those classes.  The slow way is to
1187       // check every nmethod for dependencies which makes it linear in
1188       // the number of methods compiled.  For applications with a lot
1189       // of classes the slow way is too slow.
1190       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1191         if (deps.type() == Dependencies::call_site_target_value) {
1192           // CallSite dependencies are managed on per-CallSite instance basis.
1193           oop call_site = deps.argument_oop(0);
1194           MethodHandles::add_dependent_nmethod(call_site, nm);
1195         } else {
1196           InstanceKlass* ik = deps.context_type();
1197           if (ik == nullptr) {
1198             continue;  // ignore things like evol_method
1199           }
1200           // record this nmethod as dependent on this klass
1201           ik->add_dependent_nmethod(nm);
1202         }
1203       }
1204       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));












































































1205     }
1206   }
1207   // Do verification and logging outside CodeCache_lock.
1208   if (nm != nullptr) {










1209     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1210     DEBUG_ONLY(nm->verify();)
1211     nm->log_new_nmethod();
1212   }
1213   return nm;
1214 }
1215 
1216 // Fill in default values for various fields
1217 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1218   // avoid uninitialized fields, even for short time periods
1219   _exception_cache            = nullptr;
1220   _gc_data                    = nullptr;
1221   _oops_do_mark_link          = nullptr;
1222   _compiled_ic_data           = nullptr;
1223 
1224   _is_unloading_state         = 0;
1225   _state                      = not_installed;
1226 
1227   _has_unsafe_access          = 0;
1228   _has_wide_vectors           = 0;
1229   _has_monitors               = 0;
1230   _has_scoped_access          = 0;
1231   _has_flushed_dependencies   = 0;
1232   _is_unlinked                = 0;
1233   _load_reported              = 0; // jvmti state


1234 

1235   _deoptimization_status      = not_marked;
1236 
1237   // SECT_CONSTS is first in code buffer so the offset should be 0.
1238   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1239   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1240 
1241   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1242 
1243   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1244   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1245 
1246   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1247 }
1248 
1249 // Post initialization
1250 void nmethod::post_init() {
1251   clear_unloading_state();
1252 
1253   finalize_relocations();
1254 

1286     init_defaults(code_buffer, offsets);
1287 
1288     _osr_entry_point         = nullptr;
1289     _pc_desc_container       = nullptr;
1290     _entry_bci               = InvocationEntryBci;
1291     _compile_id              = compile_id;
1292     _comp_level              = CompLevel_none;
1293     _compiler_type           = type;
1294     _orig_pc_offset          = 0;
1295     _num_stack_arg_slots     = 0;
1296 
1297     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1298       // Continuation enter intrinsic
1299       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1300     } else {
1301       _exception_offset      = 0;
1302     }
1303     // Native wrappers do not have deopt handlers. Make the values
1304     // something that will never match a pc like the nmethod vtable entry
1305     _deopt_handler_offset    = 0;


1306     _unwind_handler_offset   = 0;
1307 
1308     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1309     uint16_t metadata_size;
1310     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1311     JVMCI_ONLY( _metadata_size = metadata_size; )
1312     assert(_mutable_data_size == _relocation_size + metadata_size,
1313            "wrong mutable data size: %d != %d + %d",
1314            _mutable_data_size, _relocation_size, metadata_size);
1315 
1316     // native wrapper does not have read-only data but we need unique not null address
1317     _immutable_data          = blob_end();
1318     _immutable_data_size     = 0;
1319     _nul_chk_table_offset    = 0;
1320     _handler_table_offset    = 0;
1321     _scopes_pcs_offset       = 0;
1322     _scopes_data_offset      = 0;
1323 #if INCLUDE_JVMCI
1324     _speculations_offset     = 0;
1325 #endif

1346     // This is both handled in decode2(), called via print_code() -> decode()
1347     if (PrintNativeNMethods) {
1348       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1349       print_code();
1350       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1351 #if defined(SUPPORT_DATA_STRUCTS)
1352       if (AbstractDisassembler::show_structs()) {
1353         if (oop_maps != nullptr) {
1354           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1355           oop_maps->print_on(tty);
1356           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1357         }
1358       }
1359 #endif
1360     } else {
1361       print(); // print the header part only.
1362     }
1363 #if defined(SUPPORT_DATA_STRUCTS)
1364     if (AbstractDisassembler::show_structs()) {
1365       if (PrintRelocations) {
1366         print_relocations();
1367         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1368       }
1369     }
1370 #endif
1371     if (xtty != nullptr) {
1372       xtty->tail("print_native_nmethod");
1373     }
1374   }
1375 }
1376 
1377 
1378 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1379 {
1380 
1381   if (nm._oop_maps != nullptr) {
1382     _oop_maps                   = nm._oop_maps->clone();
1383   } else {
1384     _oop_maps                   = nullptr;
1385   }
1386 

1666   CompLevel comp_level
1667 #if INCLUDE_JVMCI
1668   , char* speculations,
1669   int speculations_len,
1670   JVMCINMethodData* jvmci_data
1671 #endif
1672   )
1673   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1674              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1675   _deoptimization_generation(0),
1676   _gc_epoch(CodeCache::gc_epoch()),
1677   _method(method),
1678   _osr_link(nullptr)
1679 {
1680   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1681   {
1682     DEBUG_ONLY(NoSafepointVerifier nsv;)
1683     assert_locked_or_safepoint(CodeCache_lock);
1684 
1685     init_defaults(code_buffer, offsets);


1686 
1687     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1688     _entry_bci       = entry_bci;
1689     _compile_id      = compile_id;
1690     _comp_level      = comp_level;
1691     _compiler_type   = type;
1692     _orig_pc_offset  = orig_pc_offset;
1693 
1694     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1695 
1696     set_ctable_begin(header_begin() + content_offset());
1697 
1698 #if INCLUDE_JVMCI
1699     if (compiler->is_jvmci()) {
1700       // JVMCI might not produce any stub sections
1701       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1702         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1703       } else {
1704         _exception_offset        = -1;
1705       }

1788     // Copy speculations to nmethod
1789     if (speculations_size() != 0) {
1790       memcpy(speculations_begin(), speculations, speculations_len);
1791     }
1792 #endif
1793     init_immutable_data_ref_count();
1794 
1795     post_init();
1796 
1797     // we use the information of entry points to find out if a method is
1798     // static or non static
1799     assert(compiler->is_c2() || compiler->is_jvmci() ||
1800            _method->is_static() == (entry_point() == verified_entry_point()),
1801            " entry points must be same for static methods and vice versa");
1802   }
1803 }
1804 
1805 // Print a short set of xml attributes to identify this nmethod.  The
1806 // output should be embedded in some other element.
1807 void nmethod::log_identity(xmlStream* log) const {

1808   log->print(" compile_id='%d'", compile_id());
1809   const char* nm_kind = compile_kind();
1810   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1811   log->print(" compiler='%s'", compiler_name());
1812   if (TieredCompilation) {
1813     log->print(" level='%d'", comp_level());
1814   }
1815 #if INCLUDE_JVMCI
1816   if (jvmci_nmethod_data() != nullptr) {
1817     const char* jvmci_name = jvmci_nmethod_data()->name();
1818     if (jvmci_name != nullptr) {
1819       log->print(" jvmci_mirror_name='");
1820       log->text("%s", jvmci_name);
1821       log->print("'");
1822     }
1823   }
1824 #endif
1825 }
1826 
1827 
1828 #define LOG_OFFSET(log, name)                    \
1829   if (p2i(name##_end()) - p2i(name##_begin())) \
1830     log->print(" " XSTR(name) "_offset='%zd'"    , \
1831                p2i(name##_begin()) - p2i(this))
1832 
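// Illustrative expansion of the macro above: LOG_OFFSET(log, handler_table)
// prints an attribute like
//   handler_table_offset='<p2i(handler_table_begin()) - p2i(this)>'
// i.e. the byte distance of the section from the nmethod header, and prints
// nothing at all when the section is empty (begin == end).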
1833 

1948       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1949       if (oop_maps() != nullptr) {
1950         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1951         oop_maps()->print_on(tty);
1952         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1953       }
1954     }
1955 #endif
1956   } else {
1957     print(); // print the header part only.
1958   }
1959 
1960 #if defined(SUPPORT_DATA_STRUCTS)
1961   if (AbstractDisassembler::show_structs()) {
1962     methodHandle mh(Thread::current(), _method);
1963     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1964       print_scopes();
1965       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1966     }
1967     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1968       print_relocations();
1969       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1970     }
1971     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1972       print_dependencies_on(tty);
1973       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1974     }
1975     if (printmethod || PrintExceptionHandlers) {
1976       print_handler_table();
1977       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1978       print_nul_chk_table();
1979       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1980     }
1981 
1982     if (printmethod) {
1983       print_recorded_oops();
1984       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1985       print_recorded_metadata();
1986       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1987     }
1988   }
1989 #endif
1990 
1991   if (xtty != nullptr) {
1992     xtty->tail("print_nmethod");
1993   }
1994 }
1995 
1996 
1997 // Promote one word from an assembly-time handle to a live embedded oop.
1998 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1999   if (handle == nullptr ||
2000       // As a special case, IC oops are initialized to 1 or -1.
2001       handle == (jobject) Universe::non_oop_word()) {
2002     *(void**)dest = handle;
2003   } else {
2004     *dest = JNIHandles::resolve_non_null(handle);
2005   }
2006 }
2007 








2008 
2009 // Have to have the same name because it's called by a template
2010 void nmethod::copy_values(GrowableArray<jobject>* array) {
2011   int length = array->length();
2012   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2013   oop* dest = oops_begin();
2014   for (int index = 0 ; index < length; index++) {
2015     initialize_immediate_oop(&dest[index], array->at(index));
2016   }
2017 
2018   // Now we can fix up all the oops in the code.  We need to do this
2019   // in the code because the assembler uses jobjects as placeholders.
2020   // The code and relocations have already been initialized by the
2021   // CodeBlob constructor, so it is valid even at this early point to
2022   // iterate over relocations and patch the code.
2023   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2024 }
2025 
2026 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2027   int length = array->length();

2035 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2036   // re-patch all oop-bearing instructions, just in case some oops moved
2037   RelocIterator iter(this, begin, end);
2038   while (iter.next()) {
2039     if (iter.type() == relocInfo::oop_type) {
2040       oop_Relocation* reloc = iter.oop_reloc();
2041       if (initialize_immediates && reloc->oop_is_immediate()) {
2042         oop* dest = reloc->oop_addr();
2043         jobject obj = *reinterpret_cast<jobject*>(dest);
2044         initialize_immediate_oop(dest, obj);
2045       }
2046       // Refresh the oop-related bits of this instruction.
2047       reloc->fix_oop_relocation();
2048     } else if (iter.type() == relocInfo::metadata_type) {
2049       metadata_Relocation* reloc = iter.metadata_reloc();
2050       reloc->fix_metadata_relocation();
2051     }
2052   }
2053 }
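// Note: passing nullptr for begin/end (as copy_values() does above) makes the
// RelocIterator cover the entire method, so every embedded oop and metadata
// reference is refreshed in a single pass.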
2054 




















2055 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2056   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2057   intptr_t cbaddr = (intptr_t) nm;
2058   intptr_t offset = ((intptr_t) pc) - cbaddr;
2059 
2060   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2061   if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
2062     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2063   } else if (!nop->patch(oopmap_slot, offset)) {
2064     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2065   }
2066 }
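// With the nop patched this way, a stack walker that stops at this return pc
// can recover the owning nmethod as (pc - offset) and fetch the frame's oop map
// by slot straight from the instruction stream, without searching the code cache.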
2067 
2068 void nmethod::finalize_relocations() {
2069   NoSafepointVerifier nsv;
2070 
2071   GrowableArray<NativeMovConstReg*> virtual_call_data;
2072 
2073   // Make sure that post call nops fill in nmethod offsets eagerly so
2074   // we don't have to race with deoptimization

2201   // be alive the previous completed marking cycle.
2202   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2203 }
2204 
2205 void nmethod::inc_decompile_count() {
2206   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2207   // Could be gated by ProfileTraps, but do not bother...
2208 #if INCLUDE_JVMCI
2209   if (jvmci_skip_profile_deopt()) {
2210     return;
2211   }
2212 #endif
2213   Method* m = method();
2214   if (m == nullptr)  return;
2215   MethodData* mdo = m->method_data();
2216   if (mdo == nullptr)  return;
2217   // There is a benign race here.  See comments in methodData.hpp.
2218   mdo->inc_decompile_count();
2219 }
2220 








2221 bool nmethod::try_transition(signed char new_state_int) {
2222   signed char new_state = new_state_int;
2223   assert_lock_strong(NMethodState_lock);
2224   signed char old_state = _state;
2225   if (old_state >= new_state) {
2226     // Ensure monotonicity of transitions.
2227     return false;
2228   }
2229   AtomicAccess::store(&_state, new_state);
2230   return true;
2231 }
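// Illustrative transitions, assuming the state ordering declared in nmethod.hpp
// (not_installed < in_use < not_entrant): in_use -> not_entrant succeeds, while a
// later request for in_use or not_installed returns false, so an nmethod can never
// be resurrected once it has been made not entrant.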
2232 
2233 void nmethod::invalidate_osr_method() {
2234   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2235   // Remove from list of active nmethods
2236   if (method() != nullptr) {
2237     method()->method_holder()->remove_osr_nmethod(this);
2238   }
2239 }
2240 

2250     }
2251   }
2252 
2253   ResourceMark rm;
2254   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2255   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2256 
2257   CompileTask::print_ul(this, ss.freeze());
2258   if (PrintCompilation) {
2259     print_on_with_msg(tty, ss.freeze());
2260   }
2261 }
2262 
2263 void nmethod::unlink_from_method() {
2264   if (method() != nullptr) {
2265     method()->unlink_code(this);
2266   }
2267 }
2268 
2269 // Invalidate code
2270 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2271   // This can be called while the system is already at a safepoint which is ok
2272   NoSafepointVerifier nsv;
2273 
2274   if (is_unloading()) {
2275     // If the nmethod is unloading, then it is already not entrant through
2276     // the nmethod entry barriers. No need to do anything; GC will unload it.
2277     return false;
2278   }
2279 
2280   if (AtomicAccess::load(&_state) == not_entrant) {
2281     // Avoid taking the lock if already in required state.
2282     // This is safe from races because the state is an end-state,
2283     // which the nmethod cannot back out of once entered.
2284     // No need for fencing either.
2285     return false;
2286   }
2287 
2288   {
2289     // Enter critical section.  Does not block for safepoint.
2290     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2312     }
2313 
2314     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2315     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2316       // If nmethod entry barriers are not supported, we won't mark
2317       // nmethods as on-stack when they become on-stack. So we
2318       // degrade to a less accurate flushing strategy, for now.
2319       mark_as_maybe_on_stack();
2320     }
2321 
2322     // Change state
2323     bool success = try_transition(not_entrant);
2324     assert(success, "Transition can't fail");
2325 
2326     // Log the transition once
2327     log_state_change(invalidation_reason);
2328 
2329     // Remove nmethod from method.
2330     unlink_from_method();
2331 







2332   } // leave critical region under NMethodState_lock
2333 
2334 #if INCLUDE_JVMCI
2335   // Invalidate can't occur while holding the NMethodState_lock
2336   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2337   if (nmethod_data != nullptr) {
2338     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2339   }
2340 #endif
2341 
2342 #ifdef ASSERT
2343   if (is_osr_method() && method() != nullptr) {
2344     // Make sure osr nmethod is invalidated, i.e. not on the list
2345     bool found = method()->method_holder()->remove_osr_nmethod(this);
2346     assert(!found, "osr nmethod should have been invalidated");
2347   }
2348 #endif
2349 
2350   return true;
2351 }
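// A second make_not_entrant() call on the same nmethod is harmless: it observes
// the not_entrant end-state in the early AtomicAccess::load check and returns
// false without taking NMethodState_lock again.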

2376     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2377             nmethod::InvalidationReason::UNLOADING_COLD :
2378             nmethod::InvalidationReason::UNLOADING);
2379   }
2380 #endif
2381 
2382   // Post before flushing as jmethodID is being used
2383   post_compiled_method_unload();
2384 
2385   // Register for flushing when it is safe. For concurrent class unloading,
2386   // that would be after the unloading handshake, and for STW class unloading
2387   // that would be when getting back to the VM thread.
2388   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2389 }
2390 
2391 void nmethod::purge(bool unregister_nmethod) {
2392 
2393   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2394 
2395   // completely deallocate this method
2396   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2397 
2398   LogTarget(Debug, codecache) lt;
2399   if (lt.is_enabled()) {
2400     ResourceMark rm;
2401     LogStream ls(lt);
2402     const char* method_name = method()->name()->as_C_string();
2403     const size_t codecache_capacity = CodeCache::capacity()/1024;
2404     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2405     ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2406               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2407               _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2408               codecache_capacity, codecache_free_space, method_name, compiler_name());
2409   }
2410 
2411   // We need to deallocate any ExceptionCache data.
2412   // Note that we do not need to grab the nmethod lock for this, it
2413   // better be thread safe if we're disposing of it!
2414   ExceptionCache* ec = exception_cache();
2415   while(ec != nullptr) {
2416     ExceptionCache* next = ec->next();
2417     delete ec;
2418     ec = next;
2419   }
2420   if (_pc_desc_container != nullptr) {
2421     delete _pc_desc_container;
2422   }
2423   delete[] _compiled_ic_data;


2424 
2425   if (_immutable_data != blob_end()) {
2426     // Free memory if this was the last nmethod referencing immutable data
2427     if (dec_immutable_data_ref_count() == 0) {
2428       os::free(_immutable_data);
2429     }
2430 
2431     _immutable_data = blob_end(); // Valid not null address
2432   }
2433 
2434   if (unregister_nmethod) {
2435     Universe::heap()->unregister_nmethod(this);
2436   }
2437   CodeCache::unregister_old_nmethod(this);
2438 
2439   JVMCI_ONLY( _metadata_size = 0; )
2440   CodeBlob::purge();
2441 }
2442 
2443 oop nmethod::oop_at(int index) const {
2444   if (index == 0) {
2445     return nullptr;

2472         MethodHandles::clean_dependency_context(call_site);
2473       } else {
2474         InstanceKlass* ik = deps.context_type();
2475         if (ik == nullptr) {
2476           continue;  // ignore things like evol_method
2477         }
2478         // During GC liveness of dependee determines class that needs to be updated.
2479         // The GC may clean dependency contexts concurrently and in parallel.
2480         ik->clean_dependency_context();
2481       }
2482     }
2483   }
2484 }
2485 
2486 void nmethod::post_compiled_method(CompileTask* task) {
2487   task->mark_success();
2488   task->set_nm_content_size(content_size());
2489   task->set_nm_insts_size(insts_size());
2490   task->set_nm_total_size(total_size());
2491 






2492   // JVMTI -- compiled method notification (must be done outside lock)
2493   post_compiled_method_load_event();
2494 
2495   if (CompilationLog::log() != nullptr) {
2496     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2497   }
2498 
2499   const DirectiveSet* directive = task->directive();
2500   maybe_print_nmethod(directive);
2501 }
2502 
2503 #if INCLUDE_CDS
2504 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2505 
2506 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2507   precond(CDSConfig::is_using_aot_linked_classes());
2508   precond(!ServiceThread::has_started());
2509 
2510   // We are still in single threaded stage of VM bootstrap. No need to lock.
2511   if (_delayed_compiled_method_load_events == nullptr) {

3228 void nmethod::verify() {
3229   if (is_not_entrant())
3230     return;
3231 
3232   // assert(oopDesc::is_oop(method()), "must be valid");
3233 
3234   ResourceMark rm;
3235 
3236   if (!CodeCache::contains(this)) {
3237     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3238   }
3239 
3240   if(is_native_method() )
3241     return;
3242 
3243   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3244   if (nm != this) {
3245     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3246   }
3247 
3248   for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3249     if (! p->verify(this)) {
3250       tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));




3251     }
3252   }
3253 
3254 #ifdef ASSERT
3255 #if INCLUDE_JVMCI
3256   {
3257     // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3258     ImmutableOopMapSet* oms = oop_maps();
3259     ImplicitExceptionTable implicit_table(this);
3260     for (uint i = 0; i < implicit_table.len(); i++) {
3261       int exec_offset = (int) implicit_table.get_exec_offset(i);
3262       if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3263         assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3264         bool found = false;
3265         for (int i = 0, imax = oms->count(); i < imax; i++) {
3266           if (oms->pair_at(i)->pc_offset() == exec_offset) {
3267             found = true;
3268             break;

3269           }

3270         }
3271         assert(found, "missing oopmap");
3272       }
3273     }
3274   }
3275 #endif
3276 #endif

3277 
3278   VerifyOopsClosure voc(this);
3279   oops_do(&voc);
3280   assert(voc.ok(), "embedded oops must be OK");
3281   Universe::heap()->verify_nmethod(this);
3282 
3283   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3284          nm->method()->external_name(), p2i(_oops_do_mark_link));
3285   verify_scopes();


3286 
3287   CompiledICLocker nm_verify(this);
3288   VerifyMetadataClosure vmc;
3289   metadata_do(&vmc);
3290 }
3291 
3292 
3293 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3294 
3295   // Verify IC only when nmethod installation is finished.
3296   if (!is_not_installed()) {
3297     if (CompiledICLocker::is_safe(this)) {
3298       if (is_inline_cache) {
3299         CompiledIC_at(this, call_site);
3300       } else {
3301         CompiledDirectCall::at(call_site);
3302       }
3303     } else {
3304       CompiledICLocker ml_verify(this);
3305       if (is_inline_cache) {

3434                                              p2i(nul_chk_table_end()),
3435                                              nul_chk_table_size());
3436   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3437                                              p2i(handler_table_begin()),
3438                                              p2i(handler_table_end()),
3439                                              handler_table_size());
3440   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3441                                              p2i(scopes_pcs_begin()),
3442                                              p2i(scopes_pcs_end()),
3443                                              scopes_pcs_size());
3444   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3445                                              p2i(scopes_data_begin()),
3446                                              p2i(scopes_data_end()),
3447                                              scopes_data_size());
3448 #if INCLUDE_JVMCI
3449   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3450                                              p2i(speculations_begin()),
3451                                              p2i(speculations_end()),
3452                                              speculations_size());
3453 #endif



3454 }
3455 
3456 void nmethod::print_code() {
3457   ResourceMark m;
3458   ttyLocker ttyl;
3459   // Call the specialized decode method of this class.
3460   decode(tty);
3461 }
3462 
3463 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3464 
3465 void nmethod::print_dependencies_on(outputStream* out) {
3466   ResourceMark rm;
3467   stringStream st;
3468   st.print_cr("Dependencies:");
3469   for (Dependencies::DepStream deps(this); deps.next(); ) {
3470     deps.print_dependency(&st);
3471     InstanceKlass* ctxk = deps.context_type();
3472     if (ctxk != nullptr) {
3473       if (ctxk->is_dependent_nmethod(this)) {

3533   st->print("scopes:");
3534   if (scopes_pcs_begin() < scopes_pcs_end()) {
3535     st->cr();
3536     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3537       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3538         continue;
3539 
3540       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3541       while (sd != nullptr) {
3542         sd->print_on(st, p);  // print output ends with a newline
3543         sd = sd->sender();
3544       }
3545     }
3546   } else {
3547     st->print_cr(" <list empty>");
3548   }
3549 }
3550 #endif
3551 
3552 #ifndef PRODUCT  // RelocIterator does support printing only then.
3553 void nmethod::print_relocations() {
3554   ResourceMark m;       // in case methods get printed via the debugger
3555   tty->print_cr("relocations:");
3556   RelocIterator iter(this);
3557   iter.print_on(tty);
3558 }
3559 #endif
3560 
3561 void nmethod::print_pcs_on(outputStream* st) {
3562   ResourceMark m;       // in case methods get printed via debugger
3563   st->print("pc-bytecode offsets:");
3564   if (scopes_pcs_begin() < scopes_pcs_end()) {
3565     st->cr();
3566     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3567       p->print_on(st, this);  // print output ends with a newline
3568     }
3569   } else {
3570     st->print_cr(" <list empty>");
3571   }
3572 }
3573 
3574 void nmethod::print_handler_table() {
3575   ExceptionHandlerTable(this).print(code_begin());
3576 }
3577 

4392 void nmethod::update_speculation(JavaThread* thread) {
4393   jlong speculation = thread->pending_failed_speculation();
4394   if (speculation != 0) {
4395     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4396     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4397     thread->set_pending_failed_speculation(0);
4398   }
4399 }
4400 
4401 const char* nmethod::jvmci_name() {
4402   if (jvmci_nmethod_data() != nullptr) {
4403     return jvmci_nmethod_data()->name();
4404   }
4405   return nullptr;
4406 }
4407 
4408 bool nmethod::jvmci_skip_profile_deopt() const {
4409   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4410 }
4411 #endif





















   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "code/aotCodeCache.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/compiledIC.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "code/nativeInst.hpp"
  32 #include "code/nmethod.inline.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compilerDirectives.hpp"
  39 #include "compiler/compilerOracle.hpp"
  40 #include "compiler/compileTask.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"
  47 #include "gc/shared/collectedHeap.hpp"

 991              _method->method_holder()->external_name(),
 992              _method->name()->as_C_string(),
 993              _method->signature()->as_C_string(),
 994              compile_id());
 995   }
 996   return check_evol.has_evol_dependency();
 997 }
 998 
 999 int nmethod::total_size() const {
1000   return
1001     consts_size()        +
1002     insts_size()         +
1003     stub_size()          +
1004     scopes_data_size()   +
1005     scopes_pcs_size()    +
1006     handler_table_size() +
1007     nul_chk_table_size();
1008 }
1009 
1010 const char* nmethod::compile_kind() const {
1011   if (is_osr_method()) return "osr";
1012   if (preloaded())     return "AP";
1013   if (is_aot())        return "A";
1014 
1015   if (method() != nullptr && is_native_method()) {
1016     if (method()->is_continuation_native_intrinsic()) {
1017       return "cnt";
1018     }
1019     return "c2n";
1020   }
1021   return nullptr;
1022 }
1023 
1024 const char* nmethod::compiler_name() const {
1025   return compilertype2name(_compiler_type);
1026 }
1027 
1028 #ifdef ASSERT
1029 class CheckForOopsClosure : public OopClosure {
1030   bool _found_oop = false;
1031  public:
1032   virtual void do_oop(oop* o) { _found_oop = true; }
1033   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1034   bool found_oop() { return _found_oop; }

1100     nm = new (native_nmethod_size, allow_NonNMethod_space)
1101     nmethod(method(), compiler_none, native_nmethod_size,
1102             compile_id, &offsets,
1103             code_buffer, frame_size,
1104             basic_lock_owner_sp_offset,
1105             basic_lock_sp_offset,
1106             oop_maps, mutable_data_size);
1107     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1108     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1109   }
1110 
1111   if (nm != nullptr) {
1112     // verify nmethod
1113     DEBUG_ONLY(nm->verify();) // might block
1114 
1115     nm->log_new_nmethod();
1116   }
1117   return nm;
1118 }
1119 
1120 void nmethod::record_nmethod_dependency() {
1121   // To make dependency checking during class loading fast, record
1122   // the nmethod dependencies in the classes it is dependent on.
1123   // This allows the dependency checking code to simply walk the
1124   // class hierarchy above the loaded class, checking only nmethods
1125   // which are dependent on those classes.  The slow way is to
1126   // check every nmethod for dependencies which makes it linear in
1127   // the number of methods compiled.  For applications with a lot
1128   // of classes the slow way is too slow.
1129   for (Dependencies::DepStream deps(this); deps.next(); ) {
1130     if (deps.type() == Dependencies::call_site_target_value) {
1131       // CallSite dependencies are managed on per-CallSite instance basis.
1132       oop call_site = deps.argument_oop(0);
1133       MethodHandles::add_dependent_nmethod(call_site, this);
1134     } else {
1135       InstanceKlass* ik = deps.context_type();
1136       if (ik == nullptr) {
1137         continue;  // ignore things like evol_method
1138       }
1139       // record this nmethod as dependent on this klass
1140       ik->add_dependent_nmethod(this);
1141     }
1142   }
1143 }
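// For example, a call_site_target_value dependency is registered with the
// java.lang.invoke.CallSite oop it names, so invalidation is scoped to that one
// call site, whereas a klass-context dependency (say, on a unique concrete
// subtype) is registered with the InstanceKlass it constrains; evol_method
// entries have no context type and are skipped.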
1144 
1145 nmethod* nmethod::new_nmethod(const methodHandle& method,
1146   int compile_id,
1147   int entry_bci,
1148   CodeOffsets* offsets,
1149   int orig_pc_offset,
1150   DebugInformationRecorder* debug_info,
1151   Dependencies* dependencies,
1152   CodeBuffer* code_buffer, int frame_size,
1153   OopMapSet* oop_maps,
1154   ExceptionHandlerTable* handler_table,
1155   ImplicitExceptionTable* nul_chk_table,
1156   AbstractCompiler* compiler,
1157   CompLevel comp_level
1158 #if INCLUDE_JVMCI
1159   , char* speculations,
1160   int speculations_len,
1161   JVMCINMethodData* jvmci_data
1162 #endif
1163 )
1164 {

1191 
1192   int mutable_data_size = required_mutable_data_size(code_buffer
1193     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1194 
1195   {
1196     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1197 
1198     nm = new (nmethod_size, comp_level)
1199     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1200             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1201             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1202             handler_table, nul_chk_table, compiler, comp_level
1203 #if INCLUDE_JVMCI
1204             , speculations,
1205             speculations_len,
1206             jvmci_data
1207 #endif
1208             );
1209 
1210     if (nm != nullptr) {
1211       nm->record_nmethod_dependency();
1212       NOT_PRODUCT(note_java_nmethod(nm));
1213     }
1214   }
1215   // Do verification and logging outside CodeCache_lock.
1216   if (nm != nullptr) {
1217 
1218 #ifdef ASSERT
1219     LogTarget(Debug, aot, codecache, nmethod) log;
1220     if (log.is_enabled()) {
1221       LogStream out(log);
1222       out.print_cr("== new_nmethod 2");
1223       FlagSetting fs(PrintRelocations, true);
1224       nm->print_on_impl(&out);
1225       nm->decode(&out);
1226     }
1227 #endif
1228 
1229     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1230     DEBUG_ONLY(nm->verify();)
1231     nm->log_new_nmethod();
1232   }
1233   return nm;
1234 }
1235 
1236 nmethod* nmethod::restore(address code_cache_buffer,
1237                           const methodHandle& method,
1238                           int compile_id,
1239                           address reloc_data,
1240                           GrowableArray<Handle>& oop_list,
1241                           GrowableArray<Metadata*>& metadata_list,
1242                           ImmutableOopMapSet* oop_maps,
1243                           address immutable_data,
1244                           GrowableArray<Handle>& reloc_imm_oop_list,
1245                           GrowableArray<Metadata*>& reloc_imm_metadata_list,
1246                           AOTCodeReader* aot_code_reader)
1247 {
1248   CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
1249   nmethod* nm = (nmethod*)code_cache_buffer;
1250   nm->set_method(method());
1251   nm->_compile_id = compile_id;
1252   nm->set_immutable_data(immutable_data);
1253   nm->copy_values(&oop_list);
1254   nm->copy_values(&metadata_list);
1255 
1256   aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);
1257 
1258 #ifndef PRODUCT
1259   nm->asm_remarks().init();
1260   aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
1261   nm->dbg_strings().init();
1262   aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
1263 #endif
1264 
1265   // Flush the code block
1266   ICache::invalidate_range(nm->code_begin(), nm->code_size());
1267 
1268   // Create cache after PcDesc data is copied - it will be used to initialize cache
1269   nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());
1270 
1271   nm->set_aot_code_entry(aot_code_reader->aot_code_entry());
1272 
1273   nm->post_init();
1274   return nm;
1275 }
1276 
1277 nmethod* nmethod::new_nmethod(nmethod* archived_nm,
1278                               const methodHandle& method,
1279                               AbstractCompiler* compiler,
1280                               int compile_id,
1281                               address reloc_data,
1282                               GrowableArray<Handle>& oop_list,
1283                               GrowableArray<Metadata*>& metadata_list,
1284                               ImmutableOopMapSet* oop_maps,
1285                               address immutable_data,
1286                               GrowableArray<Handle>& reloc_imm_oop_list,
1287                               GrowableArray<Metadata*>& reloc_imm_metadata_list,
1288                               AOTCodeReader* aot_code_reader)
1289 {
1290   nmethod* nm = nullptr;
1291   int nmethod_size = archived_nm->size();
1292   // create nmethod
1293   {
1294     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1295     address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
1296     if (code_cache_buffer != nullptr) {
1297       nm = archived_nm->restore(code_cache_buffer,
1298                                 method,
1299                                 compile_id,
1300                                 reloc_data,
1301                                 oop_list,
1302                                 metadata_list,
1303                                 oop_maps,
1304                                 immutable_data,
1305                                 reloc_imm_oop_list,
1306                                 reloc_imm_metadata_list,
1307                                 aot_code_reader);
1308       nm->record_nmethod_dependency();
1309       NOT_PRODUCT(note_java_nmethod(nm));
1310     }
1311   }
1312   // Do verification and logging outside CodeCache_lock.
1313   if (nm != nullptr) {
1314 #ifdef ASSERT
1315     LogTarget(Debug, aot, codecache, nmethod) log;
1316     if (log.is_enabled()) {
1317       LogStream out(log);
1318       out.print_cr("== new_nmethod 2");
1319       FlagSetting fs(PrintRelocations, true);
1320       nm->print_on_impl(&out);
1321       nm->decode(&out);
1322     }
1323 #endif
1324     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1325     DEBUG_ONLY(nm->verify();)
1326     nm->log_new_nmethod();
1327   }
1328   return nm;
1329 }
1330 
1331 // Fill in default values for various fields
1332 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1333   // avoid uninitialized fields, even for short time periods
1334   _exception_cache            = nullptr;
1335   _gc_data                    = nullptr;
1336   _oops_do_mark_link          = nullptr;
1337   _compiled_ic_data           = nullptr;
1338 
1339   _is_unloading_state         = 0;
1340   _state                      = not_installed;
1341 
1342   _has_unsafe_access          = 0;
1343   _has_wide_vectors           = 0;
1344   _has_monitors               = 0;
1345   _has_scoped_access          = 0;
1346   _has_flushed_dependencies   = 0;
1347   _is_unlinked                = 0;
1348   _load_reported              = 0; // jvmti state
1349   _preloaded                  = 0;
1350   _has_clinit_barriers        = 0;
1351 
1352   _used                       = false;
1353   _deoptimization_status      = not_marked;
1354 
1355   // SECT_CONSTS is first in code buffer so the offset should be 0.
1356   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1357   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1358 
1359   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1360 
1361   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1362   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1363 
1364   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1365 }
1366 
1367 // Post initialization
1368 void nmethod::post_init() {
1369   clear_unloading_state();
1370 
1371   finalize_relocations();
1372 

1404     init_defaults(code_buffer, offsets);
1405 
1406     _osr_entry_point         = nullptr;
1407     _pc_desc_container       = nullptr;
1408     _entry_bci               = InvocationEntryBci;
1409     _compile_id              = compile_id;
1410     _comp_level              = CompLevel_none;
1411     _compiler_type           = type;
1412     _orig_pc_offset          = 0;
1413     _num_stack_arg_slots     = 0;
1414 
1415     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1416       // Continuation enter intrinsic
1417       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1418     } else {
1419       _exception_offset      = 0;
1420     }
1421     // Native wrappers do not have deopt handlers. Make the values
1422     // something that will never match a pc like the nmethod vtable entry
1423     _deopt_handler_offset    = 0;
1424     _aot_code_entry          = nullptr;
1425     _method_profiling_count  = 0;
1426     _unwind_handler_offset   = 0;
1427 
1428     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1429     uint16_t metadata_size;
1430     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1431     JVMCI_ONLY( _metadata_size = metadata_size; )
1432     assert(_mutable_data_size == _relocation_size + metadata_size,
1433            "wrong mutable data size: %d != %d + %d",
1434            _mutable_data_size, _relocation_size, metadata_size);
1435 
1436     // native wrapper does not have read-only data but we need unique not null address
1437     _immutable_data          = blob_end();
1438     _immutable_data_size     = 0;
1439     _nul_chk_table_offset    = 0;
1440     _handler_table_offset    = 0;
1441     _scopes_pcs_offset       = 0;
1442     _scopes_data_offset      = 0;
1443 #if INCLUDE_JVMCI
1444     _speculations_offset     = 0;
1445 #endif

1466     // This is both handled in decode2(), called via print_code() -> decode()
1467     if (PrintNativeNMethods) {
1468       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1469       print_code();
1470       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1471 #if defined(SUPPORT_DATA_STRUCTS)
1472       if (AbstractDisassembler::show_structs()) {
1473         if (oop_maps != nullptr) {
1474           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1475           oop_maps->print_on(tty);
1476           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1477         }
1478       }
1479 #endif
1480     } else {
1481       print(); // print the header part only.
1482     }
1483 #if defined(SUPPORT_DATA_STRUCTS)
1484     if (AbstractDisassembler::show_structs()) {
1485       if (PrintRelocations) {
1486         print_relocations_on(tty);
1487         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1488       }
1489     }
1490 #endif
1491     if (xtty != nullptr) {
1492       xtty->tail("print_native_nmethod");
1493     }
1494   }
1495 }
1496 
1497 
1498 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1499 {
1500 
1501   if (nm._oop_maps != nullptr) {
1502     _oop_maps                   = nm._oop_maps->clone();
1503   } else {
1504     _oop_maps                   = nullptr;
1505   }
1506 

1786   CompLevel comp_level
1787 #if INCLUDE_JVMCI
1788   , char* speculations,
1789   int speculations_len,
1790   JVMCINMethodData* jvmci_data
1791 #endif
1792   )
1793   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1794              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1795   _deoptimization_generation(0),
1796   _gc_epoch(CodeCache::gc_epoch()),
1797   _method(method),
1798   _osr_link(nullptr)
1799 {
1800   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1801   {
1802     DEBUG_ONLY(NoSafepointVerifier nsv;)
1803     assert_locked_or_safepoint(CodeCache_lock);
1804 
1805     init_defaults(code_buffer, offsets);
1806     _aot_code_entry          = nullptr; // runtime compiled nmethod does not have AOTCodeEntry
1807     _method_profiling_count  = 0;
1808 
1809     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1810     _entry_bci       = entry_bci;
1811     _compile_id      = compile_id;
1812     _comp_level      = comp_level;
1813     _compiler_type   = type;
1814     _orig_pc_offset  = orig_pc_offset;
1815 
1816     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1817 
1818     set_ctable_begin(header_begin() + content_offset());
1819 
1820 #if INCLUDE_JVMCI
1821     if (compiler->is_jvmci()) {
1822       // JVMCI might not produce any stub sections
1823       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1824         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1825       } else {
1826         _exception_offset        = -1;
1827       }

1910     // Copy speculations to nmethod
1911     if (speculations_size() != 0) {
1912       memcpy(speculations_begin(), speculations, speculations_len);
1913     }
1914 #endif
1915     init_immutable_data_ref_count();
1916 
1917     post_init();
1918 
1919     // we use the information of entry points to find out if a method is
1920     // static or non static
1921     assert(compiler->is_c2() || compiler->is_jvmci() ||
1922            _method->is_static() == (entry_point() == verified_entry_point()),
1923            " entry points must be same for static methods and vice versa");
1924   }
1925 }
1926 
1927 // Print a short set of xml attributes to identify this nmethod.  The
1928 // output should be embedded in some other element.
1929 void nmethod::log_identity(xmlStream* log) const {
1930   assert(log->inside_attrs_or_error(), "printing attributes");
1931   log->print(" compile_id='%d'", compile_id());
1932   const char* nm_kind = compile_kind();
1933   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1934   log->print(" compiler='%s'", compiler_name());
1935   if (TieredCompilation) {
1936     log->print(" compile_level='%d'", comp_level());
1937   }
1938 #if INCLUDE_JVMCI
1939   if (jvmci_nmethod_data() != nullptr) {
1940     const char* jvmci_name = jvmci_nmethod_data()->name();
1941     if (jvmci_name != nullptr) {
1942       log->print(" jvmci_mirror_name='");
1943       log->text("%s", jvmci_name);
1944       log->print("'");
1945     }
1946   }
1947 #endif
1948 }
1949 
1950 
1951 #define LOG_OFFSET(log, name)                    \
1952   if (p2i(name##_end()) - p2i(name##_begin())) \
1953     log->print(" " XSTR(name) "_offset='%zd'"    , \
1954                p2i(name##_begin()) - p2i(this))
1955 
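     // A usage note added for this excerpt (hedged): LOG_OFFSET(log, name) emits a
     // "<name>_offset" attribute only when the named section is non-empty, and the
     // offset it prints is relative to the start of this nmethod; for example,
     // LOG_OFFSET(log, scopes_data) relies on scopes_data_begin()/scopes_data_end().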
1956 

2071       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2072       if (oop_maps() != nullptr) {
2073         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
2074         oop_maps()->print_on(tty);
2075         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2076       }
2077     }
2078 #endif
2079   } else {
2080     print(); // print the header part only.
2081   }
2082 
2083 #if defined(SUPPORT_DATA_STRUCTS)
2084   if (AbstractDisassembler::show_structs()) {
2085     methodHandle mh(Thread::current(), _method);
2086     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
2087       print_scopes();
2088       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2089     }
2090     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
2091       print_relocations_on(tty);
2092       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2093     }
2094     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
2095       print_dependencies_on(tty);
2096       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2097     }
2098     if (printmethod || PrintExceptionHandlers) {
2099       print_handler_table();
2100       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2101       print_nul_chk_table();
2102       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2103     }
2104 
2105     if (printmethod) {
2106       print_recorded_oops();
2107       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2108       print_recorded_metadata();
2109       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2110     }
2111   }
2112 #endif
2113 
2114   if (xtty != nullptr) {
2115     xtty->tail("print_nmethod");
2116   }
2117 }
2118 
2119 
2120 // Promote one word from an assembly-time handle to a live embedded oop.
2121 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2122   if (handle == nullptr ||
2123       // As a special case, IC oops are initialized to 1 or -1.
2124       handle == (jobject) Universe::non_oop_word()) {
2125     *(void**)dest = handle;
2126   } else {
2127     *dest = JNIHandles::resolve_non_null(handle);
2128   }
2129 }
2130 
2131 void nmethod::copy_values(GrowableArray<Handle>* array) {
2132   int length = array->length();
2133   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2134   oop* dest = oops_begin();
2135   for (int index = 0 ; index < length; index++) {
2136     dest[index] = array->at(index)();
2137   }
2138 }
2139 
2140 // This overload must have the same name because it is called from a template.
2141 void nmethod::copy_values(GrowableArray<jobject>* array) {
2142   int length = array->length();
2143   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2144   oop* dest = oops_begin();
2145   for (int index = 0 ; index < length; index++) {
2146     initialize_immediate_oop(&dest[index], array->at(index));
2147   }
2148 
2149   // Now we can fix up all the oops in the code.  We need to do this
2150   // in the code because the assembler uses jobjects as placeholders.
2151   // The code and relocations have already been initialized by the
2152   // CodeBlob constructor, so it is valid even at this early point to
2153   // iterate over relocations and patch the code.
2154   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2155 }
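
     // An illustration of the placeholder scheme described above (hedged, not taken
     // from this file): while code is being assembled, an oop immediate is emitted
     // as a jobject handle, e.g. roughly
     //
     //   mov   reg, <jobject handle>    ; placeholder written by the assembler
     //
     // and only after copy_values() and fix_oop_relocations(..., true) run does the
     // same instruction carry the resolved oop value.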
2156 
2157 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2158   int length = array->length();

2166 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2167   // re-patch all oop-bearing instructions, just in case some oops moved
2168   RelocIterator iter(this, begin, end);
2169   while (iter.next()) {
2170     if (iter.type() == relocInfo::oop_type) {
2171       oop_Relocation* reloc = iter.oop_reloc();
2172       if (initialize_immediates && reloc->oop_is_immediate()) {
2173         oop* dest = reloc->oop_addr();
2174         jobject obj = *reinterpret_cast<jobject*>(dest);
2175         initialize_immediate_oop(dest, obj);
2176       }
2177       // Refresh the oop-related bits of this instruction.
2178       reloc->fix_oop_relocation();
2179     } else if (iter.type() == relocInfo::metadata_type) {
2180       metadata_Relocation* reloc = iter.metadata_reloc();
2181       reloc->fix_metadata_relocation();
2182     }
2183   }
2184 }
2185 
2186 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
2187   RelocIterator iter(this);
2188   while (iter.next()) {
2189     if (iter.type() == relocInfo::oop_type) {
2190       oop_Relocation* reloc = iter.oop_reloc();
2191       if (reloc->oop_is_immediate()) {
2192         oop dest = reloc->oop_value();
2193         Handle h(thread, dest);
2194         oop_list.append(h);
2195       }
2196     } else if (iter.type() == relocInfo::metadata_type) {
2197       metadata_Relocation* reloc = iter.metadata_reloc();
2198       if (reloc->metadata_is_immediate()) {
2199         Metadata* m = reloc->metadata_value();
2200         metadata_list.append(m);
2201       }
2202     }
2203   }
2204 }
2205 
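     // Patch the post-call nop that follows a call site so that a stack walker can
     // recover the owning nmethod and its oop map without a code cache lookup: the
     // nop encodes the oop map slot together with the pc's offset from the start of
     // the nmethod. (Descriptive comment added for this excerpt.)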
2206 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2207   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2208   intptr_t cbaddr = (intptr_t) nm;
2209   intptr_t offset = ((intptr_t) pc) - cbaddr;
2210 
2211   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2212   if (oopmap_slot < 0) { // this can happen during asynchronous (non-safepoint) stack walks
2213     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2214   } else if (!nop->patch(oopmap_slot, offset)) {
2215     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2216   }
2217 }
2218 
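     // Relocation fix-ups applied once the code sits at its final address: post-call
     // nops are patched eagerly (see install_post_call_nop_displacement above), and
     // the data cells of virtual call sites are collected into virtual_call_data;
     // the rest of the function is elided here. (Descriptive comment added for this
     // excerpt.)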
2219 void nmethod::finalize_relocations() {
2220   NoSafepointVerifier nsv;
2221 
2222   GrowableArray<NativeMovConstReg*> virtual_call_data;
2223 
2224   // Make sure that post call nops fill in nmethod offsets eagerly so
2225   // we don't have to race with deoptimization

2352   // be alive during the previous completed marking cycle.
2353   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2354 }
2355 
2356 void nmethod::inc_decompile_count() {
2357   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2358   // Could be gated by ProfileTraps, but do not bother...
2359 #if INCLUDE_JVMCI
2360   if (jvmci_skip_profile_deopt()) {
2361     return;
2362   }
2363 #endif
2364   Method* m = method();
2365   if (m == nullptr)  return;
2366   MethodData* mdo = m->method_data();
2367   if (mdo == nullptr)  return;
2368   // There is a benign race here.  See comments in methodData.hpp.
2369   mdo->inc_decompile_count();
2370 }
2371 
2372 void nmethod::inc_method_profiling_count() {
2373   AtomicAccess::inc(&_method_profiling_count);
2374 }
2375 
2376 uint64_t nmethod::method_profiling_count() {
2377   return _method_profiling_count;
2378 }
2379 
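     // Attempt a monotonic state transition under NMethodState_lock. Returns false
     // when the nmethod is already in the requested state or a later one; callers
     // decide whether that matters (make_not_entrant() below asserts success).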
2380 bool nmethod::try_transition(signed char new_state_int) {
2381   signed char new_state = new_state_int;
2382   assert_lock_strong(NMethodState_lock);
2383   signed char old_state = _state;
2384   if (old_state >= new_state) {
2385     // Ensure monotonicity of transitions.
2386     return false;
2387   }
2388   AtomicAccess::store(&_state, new_state);
2389   return true;
2390 }
2391 
2392 void nmethod::invalidate_osr_method() {
2393   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2394   // Remove from list of active nmethods
2395   if (method() != nullptr) {
2396     method()->method_holder()->remove_osr_nmethod(this);
2397   }
2398 }
2399 

2409     }
2410   }
2411 
2412   ResourceMark rm;
2413   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2414   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2415 
2416   CompileTask::print_ul(this, ss.freeze());
2417   if (PrintCompilation) {
2418     print_on_with_msg(tty, ss.freeze());
2419   }
2420 }
2421 
2422 void nmethod::unlink_from_method() {
2423   if (method() != nullptr) {
2424     method()->unlink_code(this);
2425   }
2426 }
2427 
2428 // Invalidate code
2429 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2430   // This can be called while the system is already at a safepoint, which is ok
2431   NoSafepointVerifier nsv;
2432 
2433   if (is_unloading()) {
2434     // If the nmethod is unloading, then it is already not entrant through
2435     // the nmethod entry barriers. No need to do anything; GC will unload it.
2436     return false;
2437   }
2438 
2439   if (AtomicAccess::load(&_state) == not_entrant) {
2440     // Avoid taking the lock if already in required state.
2441     // This is safe from races because the state is an end-state,
2442     // which the nmethod cannot back out of once entered.
2443     // No need for fencing either.
2444     return false;
2445   }
2446 
2447   {
2448     // Enter critical section.  Does not block for safepoint.
2449     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2471     }
2472 
2473     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2474     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2475       // If nmethod entry barriers are not supported, we won't mark
2476       // nmethods as on-stack when they become on-stack. So we
2477       // degrade to a less accurate flushing strategy, for now.
2478       mark_as_maybe_on_stack();
2479     }
2480 
2481     // Change state
2482     bool success = try_transition(not_entrant);
2483     assert(success, "Transition can't fail");
2484 
2485     // Log the transition once
2486     log_state_change(invalidation_reason);
2487 
2488     // Remove nmethod from method.
2489     unlink_from_method();
2490 
2491     if (!keep_aot_entry) {
2492       // Keep the AOT code if this nmethod was simply replaced;
2493       // otherwise make the AOT code not entrant as well.
2494       AOTCodeCache::invalidate(_aot_code_entry);
2495     }
2496 
2497     CompileBroker::log_not_entrant(this);
2498   } // leave critical region under NMethodState_lock
2499 
2500 #if INCLUDE_JVMCI
2501   // Invalidate can't occur while holding the NMethodState_lock
2502   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2503   if (nmethod_data != nullptr) {
2504     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2505   }
2506 #endif
2507 
2508 #ifdef ASSERT
2509   if (is_osr_method() && method() != nullptr) {
2510     // Make sure osr nmethod is invalidated, i.e. not on the list
2511     bool found = method()->method_holder()->remove_osr_nmethod(this);
2512     assert(!found, "osr nmethod should have been invalidated");
2513   }
2514 #endif
2515 
2516   return true;
2517 }

2542     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2543             nmethod::InvalidationReason::UNLOADING_COLD :
2544             nmethod::InvalidationReason::UNLOADING);
2545   }
2546 #endif
2547 
2548   // Post before flushing as jmethodID is being used
2549   post_compiled_method_unload();
2550 
2551   // Register for flushing when it is safe. For concurrent class unloading,
2552   // that would be after the unloading handshake, and for STW class unloading
2553   // that would be when getting back to the VM thread.
2554   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2555 }
2556 
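     // Release everything this nmethod owns beyond the CodeBlob itself: the exception
     // cache chain, the PcDesc container, the compiled IC data and, if this was the
     // last nmethod referencing it, the shared immutable data; the remaining teardown
     // is delegated to CodeBlob::purge(). (Descriptive comment added for this excerpt.)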
2557 void nmethod::purge(bool unregister_nmethod) {
2558 
2559   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2560 
2561   // completely deallocate this method
2562   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));
2563 
2564   LogTarget(Debug, codecache) lt;
2565   if (lt.is_enabled()) {
2566     ResourceMark rm;
2567     LogStream ls(lt);
2568     const char* method_name = method()->name()->as_C_string();
2569     const size_t codecache_capacity = CodeCache::capacity()/1024;
2570     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2571     ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2572               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2573               compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2574               codecache_capacity, codecache_free_space, method_name, compiler_name());
2575   }
2576 
2577   // We need to deallocate any ExceptionCache data.
2578   // Note that we do not need to grab the nmethod lock for this, it
2579   // better be thread safe if we're disposing of it!
2580   ExceptionCache* ec = exception_cache();
2581   while (ec != nullptr) {
2582     ExceptionCache* next = ec->next();
2583     delete ec;
2584     ec = next;
2585   }
2586   if (_pc_desc_container != nullptr) {
2587     delete _pc_desc_container;
2588   }
2589   if (_compiled_ic_data != nullptr) {
2590     delete[] _compiled_ic_data;
2591   }
2592 
2593   if (_immutable_data != blob_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
2594     // Free memory if this was the last nmethod referencing immutable data
2595     if (dec_immutable_data_ref_count() == 0) {
2596       os::free(_immutable_data);
2597     }
2598 
2599     _immutable_data = blob_end(); // Valid non-null address
2600   }
2601 
2602   if (unregister_nmethod) {
2603     Universe::heap()->unregister_nmethod(this);
2604   }
2605   CodeCache::unregister_old_nmethod(this);
2606 
2607   JVMCI_ONLY( _metadata_size = 0; )
2608   CodeBlob::purge();
2609 }
2610 
2611 oop nmethod::oop_at(int index) const {
2612   if (index == 0) {
2613     return nullptr;

2640         MethodHandles::clean_dependency_context(call_site);
2641       } else {
2642         InstanceKlass* ik = deps.context_type();
2643         if (ik == nullptr) {
2644           continue;  // ignore things like evol_method
2645         }
2646         // During GC, the liveness of the dependee determines which class needs to be updated.
2647         // The GC may clean dependency contexts concurrently and in parallel.
2648         ik->clean_dependency_context();
2649       }
2650     }
2651   }
2652 }
2653 
2654 void nmethod::post_compiled_method(CompileTask* task) {
2655   task->mark_success();
2656   task->set_nm_content_size(content_size());
2657   task->set_nm_insts_size(insts_size());
2658   task->set_nm_total_size(total_size());
2659 
2660   // task->is_aot_load() is true only for loaded AOT code.
2661   // nmethod::_aot_code_entry is set for both loaded and stored AOT code
2662   // so that the entry can be invalidated when the nmethod is deoptimized.
2663   // With the VerifyAOTCode option, AOT code is not stored in the archive.
2664   guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");
2665 
2666   // JVMTI -- compiled method notification (must be done outside lock)
2667   post_compiled_method_load_event();
2668 
2669   if (CompilationLog::log() != nullptr) {
2670     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2671   }
2672 
2673   const DirectiveSet* directive = task->directive();
2674   maybe_print_nmethod(directive);
2675 }
2676 
2677 #if INCLUDE_CDS
2678 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2679 
2680 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2681   precond(CDSConfig::is_using_aot_linked_classes());
2682   precond(!ServiceThread::has_started());
2683 
2684   // We are still in the single-threaded stage of VM bootstrap. No need to lock.
2685   if (_delayed_compiled_method_load_events == nullptr) {

3402 void nmethod::verify() {
3403   if (is_not_entrant())
3404     return;
3405 
3406   // assert(oopDesc::is_oop(method()), "must be valid");
3407 
3408   ResourceMark rm;
3409 
3410   if (!CodeCache::contains(this)) {
3411     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3412   }
3413 
3414   if (is_native_method())
3415     return;
3416 
3417   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3418   if (nm != this) {
3419     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3420   }
3421 
3422   // Verification can be triggered during shutdown after AOTCodeCache is closed.
3423   // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3424   if (!is_aot() || AOTCodeCache::is_on()) {
3425     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3426       if (! p->verify(this)) {
3427         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3428       }
3429     }

3430 
3431 #ifdef ASSERT
3432 #if INCLUDE_JVMCI
3433     {
3434       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3435       ImmutableOopMapSet* oms = oop_maps();
3436       ImplicitExceptionTable implicit_table(this);
3437       for (uint i = 0; i < implicit_table.len(); i++) {
3438         int exec_offset = (int) implicit_table.get_exec_offset(i);
3439         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3440           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3441           bool found = false;
3442           for (int i = 0, imax = oms->count(); i < imax; i++) {
3443             if (oms->pair_at(i)->pc_offset() == exec_offset) {
3444               found = true;
3445               break;
3446             }
3447           }
3448           assert(found, "missing oopmap");
3449         }

3450       }
3451     }

3452 #endif
3453 #endif
3454   }
3455 
3456   VerifyOopsClosure voc(this);
3457   oops_do(&voc);
3458   assert(voc.ok(), "embedded oops must be OK");
3459   Universe::heap()->verify_nmethod(this);
3460 
3461   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3462          nm->method()->external_name(), p2i(_oops_do_mark_link));
3463   if (!is_aot() || AOTCodeCache::is_on()) {
3464     verify_scopes();
3465   }
3466 
3467   CompiledICLocker nm_verify(this);
3468   VerifyMetadataClosure vmc;
3469   metadata_do(&vmc);
3470 }
3471 
3472 
3473 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3474 
3475   // Verify IC only when nmethod installation is finished.
3476   if (!is_not_installed()) {
3477     if (CompiledICLocker::is_safe(this)) {
3478       if (is_inline_cache) {
3479         CompiledIC_at(this, call_site);
3480       } else {
3481         CompiledDirectCall::at(call_site);
3482       }
3483     } else {
3484       CompiledICLocker ml_verify(this);
3485       if (is_inline_cache) {

3614                                              p2i(nul_chk_table_end()),
3615                                              nul_chk_table_size());
3616   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3617                                              p2i(handler_table_begin()),
3618                                              p2i(handler_table_end()),
3619                                              handler_table_size());
3620   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3621                                              p2i(scopes_pcs_begin()),
3622                                              p2i(scopes_pcs_end()),
3623                                              scopes_pcs_size());
3624   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3625                                              p2i(scopes_data_begin()),
3626                                              p2i(scopes_data_end()),
3627                                              scopes_data_size());
3628 #if INCLUDE_JVMCI
3629   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3630                                              p2i(speculations_begin()),
3631                                              p2i(speculations_end()),
3632                                              speculations_size());
3633 #endif
3634   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3635     _aot_code_entry->print(st);
3636   }
3637 }
3638 
3639 void nmethod::print_code() {
3640   ResourceMark m;
3641   ttyLocker ttyl;
3642   // Call the specialized decode method of this class.
3643   decode(tty);
3644 }
3645 
3646 #ifndef PRODUCT  // the InstanceKlass methods called here are available only in non-product builds. Declared as PRODUCT_RETURN
3647 
3648 void nmethod::print_dependencies_on(outputStream* out) {
3649   ResourceMark rm;
3650   stringStream st;
3651   st.print_cr("Dependencies:");
3652   for (Dependencies::DepStream deps(this); deps.next(); ) {
3653     deps.print_dependency(&st);
3654     InstanceKlass* ctxk = deps.context_type();
3655     if (ctxk != nullptr) {
3656       if (ctxk->is_dependent_nmethod(this)) {

3716   st->print("scopes:");
3717   if (scopes_pcs_begin() < scopes_pcs_end()) {
3718     st->cr();
3719     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3720       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3721         continue;
3722 
3723       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3724       while (sd != nullptr) {
3725         sd->print_on(st, p);  // print output ends with a newline
3726         sd = sd->sender();
3727       }
3728     }
3729   } else {
3730     st->print_cr(" <list empty>");
3731   }
3732 }
3733 #endif
3734 
3735 #ifndef PRODUCT  // RelocIterator supports printing only in non-product builds.
3736 void nmethod::print_relocations_on(outputStream* st) {
3737   ResourceMark m;       // in case methods get printed via the debugger
3738   st->print_cr("relocations:");
3739   RelocIterator iter(this);
3740   iter.print_on(st);
3741 }
3742 #endif
3743 
3744 void nmethod::print_pcs_on(outputStream* st) {
3745   ResourceMark m;       // in case methods get printed via debugger
3746   st->print("pc-bytecode offsets:");
3747   if (scopes_pcs_begin() < scopes_pcs_end()) {
3748     st->cr();
3749     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3750       p->print_on(st, this);  // print output ends with a newline
3751     }
3752   } else {
3753     st->print_cr(" <list empty>");
3754   }
3755 }
3756 
3757 void nmethod::print_handler_table() {
3758   ExceptionHandlerTable(this).print(code_begin());
3759 }
3760 

4575 void nmethod::update_speculation(JavaThread* thread) {
4576   jlong speculation = thread->pending_failed_speculation();
4577   if (speculation != 0) {
4578     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4579     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4580     thread->set_pending_failed_speculation(0);
4581   }
4582 }
4583 
4584 const char* nmethod::jvmci_name() {
4585   if (jvmci_nmethod_data() != nullptr) {
4586     return jvmci_nmethod_data()->name();
4587   }
4588   return nullptr;
4589 }
4590 
4591 bool nmethod::jvmci_skip_profile_deopt() const {
4592   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4593 }
4594 #endif
4595 
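     // Clear runtime-only state before this nmethod's image is written out for
     // archiving, so that a copy materialized later starts out not_installed and
     // carries no pointers into the current JVM instance. (A hedged reading of the
     // resets below; the archive writer itself is not part of this excerpt.)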
4596 void nmethod::prepare_for_archiving_impl() {
4597   CodeBlob::prepare_for_archiving_impl();
4598   _deoptimization_generation = 0;
4599   _gc_epoch = 0;
4600   _method_profiling_count = 0;
4601   _osr_link = nullptr;
4602   _method = nullptr;
4603   _immutable_data = nullptr;
4604   _pc_desc_container = nullptr;
4605   _exception_cache = nullptr;
4606   _gc_data = nullptr;
4607   _oops_do_mark_link = nullptr;
4608   _compiled_ic_data = nullptr;
4609   _osr_entry_point = nullptr;
4610   _compile_id = -1;
4611   _deoptimization_status = not_marked;
4612   _is_unloading_state = 0;
4613   _state = not_installed;
4614 }