src/hotspot/share/code/nmethod.cpp

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"

  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compilerDirectives.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"

 990              _method->method_holder()->external_name(),
 991              _method->name()->as_C_string(),
 992              _method->signature()->as_C_string(),
 993              compile_id());
 994   }
 995   return check_evol.has_evol_dependency();
 996 }
 997 
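     // Sum of the sizes of the nmethod's main sections (constants, instructions,
     // stubs, scope data/pcs, and the handler and null-check tables); reported to
     // the CompileTask via set_nm_total_size() in post_compiled_method().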
 998 int nmethod::total_size() const {
 999   return
1000     consts_size()        +
1001     insts_size()         +
1002     stub_size()          +
1003     scopes_data_size()   +
1004     scopes_pcs_size()    +
1005     handler_table_size() +
1006     nul_chk_table_size();
1007 }
1008 
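     // Short classification tag used in logging (see log_identity()): "osr" for
     // on-stack-replacement code, "cnt" for the continuation native intrinsic,
     // "c2n" for an ordinary native wrapper, nullptr otherwise.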
1009 const char* nmethod::compile_kind() const {
1010   if (is_osr_method())     return "osr";



1011   if (method() != nullptr && is_native_method()) {
1012     if (method()->is_continuation_native_intrinsic()) {
1013       return "cnt";
1014     }
1015     return "c2n";
1016   }
1017   return nullptr;
1018 }
1019 
1020 const char* nmethod::compiler_name() const {
1021   return compilertype2name(_compiler_type);
1022 }
1023 
1024 #ifdef ASSERT
1025 class CheckForOopsClosure : public OopClosure {
1026   bool _found_oop = false;
1027  public:
1028   virtual void do_oop(oop* o) { _found_oop = true; }
1029   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1030   bool found_oop() { return _found_oop; }

1096     nm = new (native_nmethod_size, allow_NonNMethod_space)
1097     nmethod(method(), compiler_none, native_nmethod_size,
1098             compile_id, &offsets,
1099             code_buffer, frame_size,
1100             basic_lock_owner_sp_offset,
1101             basic_lock_sp_offset,
1102             oop_maps, mutable_data_size);
1103     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1104     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1105   }
1106 
1107   if (nm != nullptr) {
1108     // verify nmethod
1109     DEBUG_ONLY(nm->verify();) // might block
1110 
1111     nm->log_new_nmethod();
1112   }
1113   return nm;
1114 }
1115
1116 nmethod* nmethod::new_nmethod(const methodHandle& method,
1117   int compile_id,
1118   int entry_bci,
1119   CodeOffsets* offsets,
1120   int orig_pc_offset,
1121   DebugInformationRecorder* debug_info,
1122   Dependencies* dependencies,
1123   CodeBuffer* code_buffer, int frame_size,
1124   OopMapSet* oop_maps,
1125   ExceptionHandlerTable* handler_table,
1126   ImplicitExceptionTable* nul_chk_table,
1127   AbstractCompiler* compiler,
1128   CompLevel comp_level
1129 #if INCLUDE_JVMCI
1130   , char* speculations,
1131   int speculations_len,
1132   JVMCINMethodData* jvmci_data
1133 #endif
1134 )
1135 {

1162 
1163   int mutable_data_size = required_mutable_data_size(code_buffer
1164     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1165 
1166   {
1167     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1168 
1169     nm = new (nmethod_size, comp_level)
1170     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1171             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1172             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1173             handler_table, nul_chk_table, compiler, comp_level
1174 #if INCLUDE_JVMCI
1175             , speculations,
1176             speculations_len,
1177             jvmci_data
1178 #endif
1179             );
1180 
1181     if (nm != nullptr) {
1182       // To make dependency checking during class loading fast, record
1183       // the nmethod dependencies in the classes it is dependent on.
1184       // This allows the dependency checking code to simply walk the
1185       // class hierarchy above the loaded class, checking only nmethods
1186       // which are dependent on those classes.  The slow way is to
1187       // check every nmethod for dependencies which makes it linear in
1188       // the number of methods compiled.  For applications with a lot
1189       // of classes, the slow way is too slow.
1190       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1191         if (deps.type() == Dependencies::call_site_target_value) {
1192           // CallSite dependencies are managed on a per-CallSite instance basis.
1193           oop call_site = deps.argument_oop(0);
1194           MethodHandles::add_dependent_nmethod(call_site, nm);
1195         } else {
1196           InstanceKlass* ik = deps.context_type();
1197           if (ik == nullptr) {
1198             continue;  // ignore things like evol_method
1199           }
1200           // record this nmethod as dependent on this klass
1201           ik->add_dependent_nmethod(nm);
1202         }
1203       }
1204       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1205     }
1206   }
1207   // Do verification and logging outside CodeCache_lock.
1208   if (nm != nullptr) {
1209     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1210     DEBUG_ONLY(nm->verify();)
1211     nm->log_new_nmethod();
1212   }
1213   return nm;
1214 }
1215 
1216 // Fill in default values for various fields
1217 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1218   // avoid uninitialized fields, even for short time periods
1219   _exception_cache            = nullptr;
1220   _gc_data                    = nullptr;
1221   _oops_do_mark_link          = nullptr;
1222   _compiled_ic_data           = nullptr;
1223 
1224   _is_unloading_state         = 0;
1225   _state                      = not_installed;
1226 
1227   _has_unsafe_access          = 0;
1228   _has_wide_vectors           = 0;
1229   _has_monitors               = 0;
1230   _has_scoped_access          = 0;
1231   _has_flushed_dependencies   = 0;
1232   _is_unlinked                = 0;
1233   _load_reported              = 0; // jvmti state


1234 

1235   _deoptimization_status      = not_marked;
1236 
1237   // SECT_CONSTS is first in code buffer so the offset should be 0.
1238   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1239   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1240 
1241   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1242 
1243   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1244   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1245 
1246   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1247 }
1248 
1249 // Post initialization
1250 void nmethod::post_init() {
1251   clear_unloading_state();
1252 
1253   finalize_relocations();
1254 

1286     init_defaults(code_buffer, offsets);
1287 
1288     _osr_entry_point         = nullptr;
1289     _pc_desc_container       = nullptr;
1290     _entry_bci               = InvocationEntryBci;
1291     _compile_id              = compile_id;
1292     _comp_level              = CompLevel_none;
1293     _compiler_type           = type;
1294     _orig_pc_offset          = 0;
1295     _num_stack_arg_slots     = 0;
1296 
1297     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1298       // Continuation enter intrinsic
1299       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1300     } else {
1301       _exception_offset      = 0;
1302     }
1303     // Native wrappers do not have deopt handlers. Make the values
1304     // something that will never match a pc like the nmethod vtable entry
1305     _deopt_handler_entry_offset    = 0;


1306     _unwind_handler_offset   = 0;
1307 
1308     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1309     uint16_t metadata_size;
1310     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1311     JVMCI_ONLY( _metadata_size = metadata_size; )
1312     assert(_mutable_data_size == _relocation_size + metadata_size,
1313            "wrong mutable data size: %d != %d + %d",
1314            _mutable_data_size, _relocation_size, metadata_size);
1315 
1316     // native wrapper does not have read-only data but we need a unique, non-null address
1317     _immutable_data          = blob_end();
1318     _immutable_data_size     = 0;
1319     _nul_chk_table_offset    = 0;
1320     _handler_table_offset    = 0;
1321     _scopes_pcs_offset       = 0;
1322     _scopes_data_offset      = 0;
1323 #if INCLUDE_JVMCI
1324     _speculations_offset     = 0;
1325 #endif

1346     // This is both handled in decode2(), called via print_code() -> decode()
1347     if (PrintNativeNMethods) {
1348       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1349       print_code();
1350       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1351 #if defined(SUPPORT_DATA_STRUCTS)
1352       if (AbstractDisassembler::show_structs()) {
1353         if (oop_maps != nullptr) {
1354           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1355           oop_maps->print_on(tty);
1356           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1357         }
1358       }
1359 #endif
1360     } else {
1361       print(); // print the header part only.
1362     }
1363 #if defined(SUPPORT_DATA_STRUCTS)
1364     if (AbstractDisassembler::show_structs()) {
1365       if (PrintRelocations) {
1366         print_relocations();
1367         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1368       }
1369     }
1370 #endif
1371     if (xtty != nullptr) {
1372       xtty->tail("print_native_nmethod");
1373     }
1374   }
1375 }
1376 
1377 
1378 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1379 {
1380 
1381   if (nm._oop_maps != nullptr) {
1382     _oop_maps                   = nm._oop_maps->clone();
1383   } else {
1384     _oop_maps                   = nullptr;
1385   }
1386 

1679   CompLevel comp_level
1680 #if INCLUDE_JVMCI
1681   , char* speculations,
1682   int speculations_len,
1683   JVMCINMethodData* jvmci_data
1684 #endif
1685   )
1686   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1687              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1688   _deoptimization_generation(0),
1689   _gc_epoch(CodeCache::gc_epoch()),
1690   _method(method),
1691   _osr_link(nullptr)
1692 {
1693   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1694   {
1695     DEBUG_ONLY(NoSafepointVerifier nsv;)
1696     assert_locked_or_safepoint(CodeCache_lock);
1697 
1698     init_defaults(code_buffer, offsets);


1699 
1700     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1701     _entry_bci       = entry_bci;
1702     _compile_id      = compile_id;
1703     _comp_level      = comp_level;
1704     _compiler_type   = type;
1705     _orig_pc_offset  = orig_pc_offset;
1706 
1707     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1708 
1709     set_ctable_begin(header_begin() + content_offset());
1710 
1711 #if INCLUDE_JVMCI
1712     if (compiler->is_jvmci()) {
1713       // JVMCI might not produce any stub sections
1714       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1715         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1716       } else {
1717         _exception_offset        = -1;
1718       }

1808     // Copy speculations to nmethod
1809     if (speculations_size() != 0) {
1810       memcpy(speculations_begin(), speculations, speculations_len);
1811     }
1812 #endif
1813     init_immutable_data_ref_count();
1814 
1815     post_init();
1816 
1817     // we use the information of entry points to find out if a method is
1818     // static or non static
1819     assert(compiler->is_c2() || compiler->is_jvmci() ||
1820            _method->is_static() == (entry_point() == verified_entry_point()),
1821            " entry points must be the same for static methods and vice versa");
1822   }
1823 }
1824 
1825 // Print a short set of xml attributes to identify this nmethod.  The
1826 // output should be embedded in some other element.
1827 void nmethod::log_identity(xmlStream* log) const {

1828   log->print(" compile_id='%d'", compile_id());
1829   const char* nm_kind = compile_kind();
1830   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1831   log->print(" compiler='%s'", compiler_name());
1832   if (TieredCompilation) {
1833     log->print(" level='%d'", comp_level());
1834   }
1835 #if INCLUDE_JVMCI
1836   if (jvmci_nmethod_data() != nullptr) {
1837     const char* jvmci_name = jvmci_nmethod_data()->name();
1838     if (jvmci_name != nullptr) {
1839       log->print(" jvmci_mirror_name='");
1840       log->text("%s", jvmci_name);
1841       log->print("'");
1842     }
1843   }
1844 #endif
1845 }
1846 
1847 
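     // Emit a <name>_offset log attribute for a section, but only when the
     // section is non-empty (i.e. its begin() and end() addresses differ).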
1848 #define LOG_OFFSET(log, name)                    \
1849   if (p2i(name##_end()) - p2i(name##_begin())) \
1850     log->print(" " XSTR(name) "_offset='%zd'"    , \
1851                p2i(name##_begin()) - p2i(this))
1852 
1853 

1968       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1969       if (oop_maps() != nullptr) {
1970         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1971         oop_maps()->print_on(tty);
1972         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1973       }
1974     }
1975 #endif
1976   } else {
1977     print(); // print the header part only.
1978   }
1979 
1980 #if defined(SUPPORT_DATA_STRUCTS)
1981   if (AbstractDisassembler::show_structs()) {
1982     methodHandle mh(Thread::current(), _method);
1983     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1984       print_scopes();
1985       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1986     }
1987     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1988       print_relocations();
1989       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1990     }
1991     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1992       print_dependencies_on(tty);
1993       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1994     }
1995     if (printmethod || PrintExceptionHandlers) {
1996       print_handler_table();
1997       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1998       print_nul_chk_table();
1999       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2000     }
2001 
2002     if (printmethod) {
2003       print_recorded_oops();
2004       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2005       print_recorded_metadata();
2006       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2007     }
2008   }
2009 #endif
2010 
2011   if (xtty != nullptr) {
2012     xtty->tail("print_nmethod");
2013   }
2014 }
2015 
2016 
2017 // Promote one word from an assembly-time handle to a live embedded oop.
2018 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2019   if (handle == nullptr ||
2020       // As a special case, IC oops are initialized to 1 or -1.
2021       handle == (jobject) Universe::non_oop_word()) {
2022     *(void**)dest = handle;
2023   } else {
2024     *dest = JNIHandles::resolve_non_null(handle);
2025   }
2026 }
2027 








2028 
2029 // Have to have the same name because it's called by a template
2030 void nmethod::copy_values(GrowableArray<jobject>* array) {
2031   int length = array->length();
2032   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2033   oop* dest = oops_begin();
2034   for (int index = 0 ; index < length; index++) {
2035     initialize_immediate_oop(&dest[index], array->at(index));
2036   }
2037 
2038   // Now we can fix up all the oops in the code.  We need to do this
2039   // in the code because the assembler uses jobjects as placeholders.
2040   // The code and relocations have already been initialized by the
2041   // CodeBlob constructor, so it is valid even at this early point to
2042   // iterate over relocations and patch the code.
2043   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2044 }
2045 
2046 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2047   int length = array->length();

2055 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2056   // re-patch all oop-bearing instructions, just in case some oops moved
2057   RelocIterator iter(this, begin, end);
2058   while (iter.next()) {
2059     if (iter.type() == relocInfo::oop_type) {
2060       oop_Relocation* reloc = iter.oop_reloc();
2061       if (initialize_immediates && reloc->oop_is_immediate()) {
2062         oop* dest = reloc->oop_addr();
2063         jobject obj = *reinterpret_cast<jobject*>(dest);
2064         initialize_immediate_oop(dest, obj);
2065       }
2066       // Refresh the oop-related bits of this instruction.
2067       reloc->fix_oop_relocation();
2068     } else if (iter.type() == relocInfo::metadata_type) {
2069       metadata_Relocation* reloc = iter.metadata_reloc();
2070       reloc->fix_metadata_relocation();
2071     }
2072   }
2073 }
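
     // Patch the post-call nop that follows a call site so that it encodes the
     // oop map slot index and the displacement from the start of the nmethod.
     // A stack walk can then resolve the owning nmethod and its oop map quickly,
     // without a code cache lookup.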
2075 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2076   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2077   intptr_t cbaddr = (intptr_t) nm;
2078   intptr_t offset = ((intptr_t) pc) - cbaddr;
2079 
2080   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2081   if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
2082     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2083   } else if (!nop->patch(oopmap_slot, offset)) {
2084     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2085   }
2086 }
2087 
2088 void nmethod::finalize_relocations() {
2089   NoSafepointVerifier nsv;
2090 
2091   GrowableArray<NativeMovConstReg*> virtual_call_data;
2092 
2093   // Make sure that post call nops fill in nmethod offsets eagerly so
2094   // we don't have to race with deoptimization

2221   // be alive the previous completed marking cycle.
2222   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2223 }
2224 
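     // Bump the decompile counter in the method's MethodData. Only done for C2
     // and JVMCI code; JVMCI compilations may opt out via jvmci_skip_profile_deopt().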
2225 void nmethod::inc_decompile_count() {
2226   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2227   // Could be gated by ProfileTraps, but do not bother...
2228 #if INCLUDE_JVMCI
2229   if (jvmci_skip_profile_deopt()) {
2230     return;
2231   }
2232 #endif
2233   Method* m = method();
2234   if (m == nullptr)  return;
2235   MethodData* mdo = m->method_data();
2236   if (mdo == nullptr)  return;
2237   // There is a benign race here.  See comments in methodData.hpp.
2238   mdo->inc_decompile_count();
2239 }
2240 








2241 bool nmethod::try_transition(signed char new_state_int) {
2242   signed char new_state = new_state_int;
2243   assert_lock_strong(NMethodState_lock);
2244   signed char old_state = _state;
2245   if (old_state >= new_state) {
2246     // Ensure monotonicity of transitions.
2247     return false;
2248   }
2249   AtomicAccess::store(&_state, new_state);
2250   return true;
2251 }
2252 
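     // Detach an OSR nmethod from its holder's list of OSR nmethods so it can
     // no longer be found and entered.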
2253 void nmethod::invalidate_osr_method() {
2254   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2255   // Remove from list of active nmethods
2256   if (method() != nullptr) {
2257     method()->method_holder()->remove_osr_nmethod(this);
2258   }
2259 }
2260 

2270     }
2271   }
2272 
2273   ResourceMark rm;
2274   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2275   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2276 
2277   CompileTask::print_ul(this, ss.freeze());
2278   if (PrintCompilation) {
2279     print_on_with_msg(tty, ss.freeze());
2280   }
2281 }
2282 
2283 void nmethod::unlink_from_method() {
2284   if (method() != nullptr) {
2285     method()->unlink_code(this);
2286   }
2287 }
2288 
2289 // Invalidate code
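     // Returns true if this call performed the transition to not_entrant, false
     // if the nmethod is already not entrant or is being unloaded.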
2290 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2291   // This can be called while the system is already at a safepoint which is ok
2292   NoSafepointVerifier nsv;
2293 
2294   if (is_unloading()) {
2295     // If the nmethod is unloading, then it is already not entrant through
2296     // the nmethod entry barriers. No need to do anything; GC will unload it.
2297     return false;
2298   }
2299 
2300   if (AtomicAccess::load(&_state) == not_entrant) {
2301     // Avoid taking the lock if already in required state.
2302     // This is safe from races because the state is an end-state,
2303     // which the nmethod cannot back out of once entered.
2304     // No need for fencing either.
2305     return false;
2306   }
2307 
2308   {
2309     // Enter critical section.  Does not block for safepoint.
2310     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2332     }
2333 
2334     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2335     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2336       // If nmethod entry barriers are not supported, we won't mark
2337       // nmethods as on-stack when they become on-stack. So we
2338       // degrade to a less accurate flushing strategy, for now.
2339       mark_as_maybe_on_stack();
2340     }
2341 
2342     // Change state
2343     bool success = try_transition(not_entrant);
2344     assert(success, "Transition can't fail");
2345 
2346     // Log the transition once
2347     log_state_change(invalidation_reason);
2348 
2349     // Remove nmethod from method.
2350     unlink_from_method();
2351 







2352   } // leave critical region under NMethodState_lock
2353 
2354 #if INCLUDE_JVMCI
2355   // Invalidate can't occur while holding the NMethodState_lock
2356   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2357   if (nmethod_data != nullptr) {
2358     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2359   }
2360 #endif
2361 
2362 #ifdef ASSERT
2363   if (is_osr_method() && method() != nullptr) {
2364     // Make sure osr nmethod is invalidated, i.e. not on the list
2365     bool found = method()->method_holder()->remove_osr_nmethod(this);
2366     assert(!found, "osr nmethod should have been invalidated");
2367   }
2368 #endif
2369 
2370   return true;
2371 }

2396     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2397             nmethod::InvalidationReason::UNLOADING_COLD :
2398             nmethod::InvalidationReason::UNLOADING);
2399   }
2400 #endif
2401 
2402   // Post before flushing as jmethodID is being used
2403   post_compiled_method_unload();
2404 
2405   // Register for flushing when it is safe. For concurrent class unloading,
2406   // that would be after the unloading handshake, and for STW class unloading
2407   // that would be when getting back to the VM thread.
2408   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2409 }
2410 
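     // Release everything this nmethod still owns: the exception cache list, the
     // PcDesc container, the compiled IC data, and the immutable data when this
     // was its last user; then unregister the nmethod and purge the underlying
     // CodeBlob.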
2411 void nmethod::purge(bool unregister_nmethod) {
2412 
2413   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2414 
2415   // completely deallocate this method
2416   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2417 
2418   LogTarget(Debug, codecache) lt;
2419   if (lt.is_enabled()) {
2420     ResourceMark rm;
2421     LogStream ls(lt);
2422     const char* method_name = method()->name()->as_C_string();
2423     const size_t codecache_capacity = CodeCache::capacity()/1024;
2424     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2425     ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2426               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2427               _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2428               codecache_capacity, codecache_free_space, method_name, compiler_name());
2429   }
2430 
2431   // We need to deallocate any ExceptionCache data.
2432   // Note that we do not need to grab the nmethod lock for this, it
2433   // better be thread safe if we're disposing of it!
2434   ExceptionCache* ec = exception_cache();
2435   while (ec != nullptr) {
2436     ExceptionCache* next = ec->next();
2437     delete ec;
2438     ec = next;
2439   }
2440   if (_pc_desc_container != nullptr) {
2441     delete _pc_desc_container;
2442   }
2443   delete[] _compiled_ic_data;


2444 
2445   if (_immutable_data != blob_end()) {
2446     // Free memory if this was the last nmethod referencing immutable data
2447     if (dec_immutable_data_ref_count() == 0) {
2448       os::free(_immutable_data);
2449     }
2450 
2451     _immutable_data = blob_end(); // Valid not null address
2452   }
2453 
2454   if (unregister_nmethod) {
2455     Universe::heap()->unregister_nmethod(this);
2456   }
2457   CodeCache::unregister_old_nmethod(this);
2458 
2459   JVMCI_ONLY( _metadata_size = 0; )
2460   CodeBlob::purge();
2461 }
2462 
2463 oop nmethod::oop_at(int index) const {
2464   if (index == 0) {
2465     return nullptr;

2492         MethodHandles::clean_dependency_context(call_site);
2493       } else {
2494         InstanceKlass* ik = deps.context_type();
2495         if (ik == nullptr) {
2496           continue;  // ignore things like evol_method
2497         }
2498         // During GC, the liveness of the dependee determines which class needs to be updated.
2499         // The GC may clean dependency contexts concurrently and in parallel.
2500         ik->clean_dependency_context();
2501       }
2502     }
2503   }
2504 }
2505 
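     // Record the finished compilation on its CompileTask and issue the
     // post-compile notifications: JVMTI load event, compilation log entry, and
     // optional printing controlled by the compiler directive.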
2506 void nmethod::post_compiled_method(CompileTask* task) {
2507   task->mark_success();
2508   task->set_nm_content_size(content_size());
2509   task->set_nm_insts_size(insts_size());
2510   task->set_nm_total_size(total_size());
2511 






2512   // JVMTI -- compiled method notification (must be done outside lock)
2513   post_compiled_method_load_event();
2514 
2515   if (CompilationLog::log() != nullptr) {
2516     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2517   }
2518 
2519   const DirectiveSet* directive = task->directive();
2520   maybe_print_nmethod(directive);
2521 }
2522 
2523 #if INCLUDE_CDS
2524 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2525 
2526 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2527   precond(CDSConfig::is_using_aot_linked_classes());
2528   precond(!ServiceThread::has_started());
2529 
2530   // We are still in single threaded stage of VM bootstrap. No need to lock.
2531   if (_delayed_compiled_method_load_events == nullptr) {

3248 void nmethod::verify() {
3249   if (is_not_entrant())
3250     return;
3251 
3252   // assert(oopDesc::is_oop(method()), "must be valid");
3253 
3254   ResourceMark rm;
3255 
3256   if (!CodeCache::contains(this)) {
3257     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3258   }
3259 
3260   if (is_native_method())
3261     return;
3262 
3263   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3264   if (nm != this) {
3265     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3266   }
3267 
3268   for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3269     if (! p->verify(this)) {
3270       tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));




3271     }
3272   }
3273 
3274 #ifdef ASSERT
3275 #if INCLUDE_JVMCI
3276   {
3277     // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3278     ImmutableOopMapSet* oms = oop_maps();
3279     ImplicitExceptionTable implicit_table(this);
3280     for (uint i = 0; i < implicit_table.len(); i++) {
3281       int exec_offset = (int) implicit_table.get_exec_offset(i);
3282       if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3283         assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3284         bool found = false;
3285         for (int i = 0, imax = oms->count(); i < imax; i++) {
3286           if (oms->pair_at(i)->pc_offset() == exec_offset) {
3287             found = true;
3288             break;

3289           }

3290         }
3291         assert(found, "missing oopmap");
3292       }
3293     }
3294   }
3295 #endif
3296 #endif

3297 
3298   VerifyOopsClosure voc(this);
3299   oops_do(&voc);
3300   assert(voc.ok(), "embedded oops must be OK");
3301   Universe::heap()->verify_nmethod(this);
3302 
3303   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3304          nm->method()->external_name(), p2i(_oops_do_mark_link));
3305   verify_scopes();


3306 
3307   CompiledICLocker nm_verify(this);
3308   VerifyMetadataClosure vmc;
3309   metadata_do(&vmc);
3310 }
3311 
3312 
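     // Verify the call site at an interrupt point: check that the inline cache
     // or direct call there is well formed, taking the CompiledICLocker if it is
     // not already safely held.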
3313 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3314 
3315   // Verify IC only when nmethod installation is finished.
3316   if (!is_not_installed()) {
3317     if (CompiledICLocker::is_safe(this)) {
3318       if (is_inline_cache) {
3319         CompiledIC_at(this, call_site);
3320       } else {
3321         CompiledDirectCall::at(call_site);
3322       }
3323     } else {
3324       CompiledICLocker ml_verify(this);
3325       if (is_inline_cache) {

3454                                              p2i(nul_chk_table_end()),
3455                                              nul_chk_table_size());
3456   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3457                                              p2i(handler_table_begin()),
3458                                              p2i(handler_table_end()),
3459                                              handler_table_size());
3460   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3461                                              p2i(scopes_pcs_begin()),
3462                                              p2i(scopes_pcs_end()),
3463                                              scopes_pcs_size());
3464   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3465                                              p2i(scopes_data_begin()),
3466                                              p2i(scopes_data_end()),
3467                                              scopes_data_size());
3468 #if INCLUDE_JVMCI
3469   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3470                                              p2i(speculations_begin()),
3471                                              p2i(speculations_end()),
3472                                              speculations_size());
3473 #endif



3474 }
3475 
3476 void nmethod::print_code() {
3477   ResourceMark m;
3478   ttyLocker ttyl;
3479   // Call the specialized decode method of this class.
3480   decode(tty);
3481 }
3482 
3483 #ifndef PRODUCT  // the InstanceKlass methods called here are available only then. Declared as PRODUCT_RETURN
3484 
3485 void nmethod::print_dependencies_on(outputStream* out) {
3486   ResourceMark rm;
3487   stringStream st;
3488   st.print_cr("Dependencies:");
3489   for (Dependencies::DepStream deps(this); deps.next(); ) {
3490     deps.print_dependency(&st);
3491     InstanceKlass* ctxk = deps.context_type();
3492     if (ctxk != nullptr) {
3493       if (ctxk->is_dependent_nmethod(this)) {

3553   st->print("scopes:");
3554   if (scopes_pcs_begin() < scopes_pcs_end()) {
3555     st->cr();
3556     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3557       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3558         continue;
3559 
3560       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3561       while (sd != nullptr) {
3562         sd->print_on(st, p);  // print output ends with a newline
3563         sd = sd->sender();
3564       }
3565     }
3566   } else {
3567     st->print_cr(" <list empty>");
3568   }
3569 }
3570 #endif
3571 
3572 #ifndef PRODUCT  // RelocIterator supports printing only then.
3573 void nmethod::print_relocations() {
3574   ResourceMark m;       // in case methods get printed via the debugger
3575   tty->print_cr("relocations:");
3576   RelocIterator iter(this);
3577   iter.print_on(tty);
3578 }
3579 #endif
3580 
3581 void nmethod::print_pcs_on(outputStream* st) {
3582   ResourceMark m;       // in case methods get printed via debugger
3583   st->print("pc-bytecode offsets:");
3584   if (scopes_pcs_begin() < scopes_pcs_end()) {
3585     st->cr();
3586     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3587       p->print_on(st, this);  // print output ends with a newline
3588     }
3589   } else {
3590     st->print_cr(" <list empty>");
3591   }
3592 }
3593 
3594 void nmethod::print_handler_table() {
3595   ExceptionHandlerTable(this).print(code_begin());
3596 }
3597 

4412 void nmethod::update_speculation(JavaThread* thread) {
4413   jlong speculation = thread->pending_failed_speculation();
4414   if (speculation != 0) {
4415     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4416     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4417     thread->set_pending_failed_speculation(0);
4418   }
4419 }
4420 
4421 const char* nmethod::jvmci_name() {
4422   if (jvmci_nmethod_data() != nullptr) {
4423     return jvmci_nmethod_data()->name();
4424   }
4425   return nullptr;
4426 }
4427 
4428 bool nmethod::jvmci_skip_profile_deopt() const {
4429   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4430 }
4431 #endif

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "code/aotCodeCache.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/compiledIC.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "code/nativeInst.hpp"
  32 #include "code/nmethod.inline.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compilerDirectives.hpp"
  39 #include "compiler/compilerOracle.hpp"
  40 #include "compiler/compileTask.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"
  47 #include "gc/shared/collectedHeap.hpp"

 991              _method->method_holder()->external_name(),
 992              _method->name()->as_C_string(),
 993              _method->signature()->as_C_string(),
 994              compile_id());
 995   }
 996   return check_evol.has_evol_dependency();
 997 }
 998 
 999 int nmethod::total_size() const {
1000   return
1001     consts_size()        +
1002     insts_size()         +
1003     stub_size()          +
1004     scopes_data_size()   +
1005     scopes_pcs_size()    +
1006     handler_table_size() +
1007     nul_chk_table_size();
1008 }
1009 
1010 const char* nmethod::compile_kind() const {
1011   if (is_osr_method()) return "osr";
1012   if (preloaded())     return "AP";
1013   if (is_aot())        return "A";
1014 
1015   if (method() != nullptr && is_native_method()) {
1016     if (method()->is_continuation_native_intrinsic()) {
1017       return "cnt";
1018     }
1019     return "c2n";
1020   }
1021   return nullptr;
1022 }
1023 
1024 const char* nmethod::compiler_name() const {
1025   return compilertype2name(_compiler_type);
1026 }
1027 
1028 #ifdef ASSERT
1029 class CheckForOopsClosure : public OopClosure {
1030   bool _found_oop = false;
1031  public:
1032   virtual void do_oop(oop* o) { _found_oop = true; }
1033   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1034   bool found_oop() { return _found_oop; }

1100     nm = new (native_nmethod_size, allow_NonNMethod_space)
1101     nmethod(method(), compiler_none, native_nmethod_size,
1102             compile_id, &offsets,
1103             code_buffer, frame_size,
1104             basic_lock_owner_sp_offset,
1105             basic_lock_sp_offset,
1106             oop_maps, mutable_data_size);
1107     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1108     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1109   }
1110 
1111   if (nm != nullptr) {
1112     // verify nmethod
1113     DEBUG_ONLY(nm->verify();) // might block
1114 
1115     nm->log_new_nmethod();
1116   }
1117   return nm;
1118 }
1119 
1120 void nmethod::record_nmethod_dependency() {
1121   // To make dependency checking during class loading fast, record
1122   // the nmethod dependencies in the classes it is dependent on.
1123   // This allows the dependency checking code to simply walk the
1124   // class hierarchy above the loaded class, checking only nmethods
1125   // which are dependent on those classes.  The slow way is to
1126   // check every nmethod for dependencies which makes it linear in
1127   // the number of methods compiled.  For applications with a lot
1128   // of classes, the slow way is too slow.
1129   for (Dependencies::DepStream deps(this); deps.next(); ) {
1130     if (deps.type() == Dependencies::call_site_target_value) {
1131       // CallSite dependencies are managed on a per-CallSite instance basis.
1132       oop call_site = deps.argument_oop(0);
1133       MethodHandles::add_dependent_nmethod(call_site, this);
1134     } else {
1135       InstanceKlass* ik = deps.context_type();
1136       if (ik == nullptr) {
1137         continue;  // ignore things like evol_method
1138       }
1139       // record this nmethod as dependent on this klass
1140       ik->add_dependent_nmethod(this);
1141     }
1142   }
1143 }
1144 
1145 nmethod* nmethod::new_nmethod(const methodHandle& method,
1146   int compile_id,
1147   int entry_bci,
1148   CodeOffsets* offsets,
1149   int orig_pc_offset,
1150   DebugInformationRecorder* debug_info,
1151   Dependencies* dependencies,
1152   CodeBuffer* code_buffer, int frame_size,
1153   OopMapSet* oop_maps,
1154   ExceptionHandlerTable* handler_table,
1155   ImplicitExceptionTable* nul_chk_table,
1156   AbstractCompiler* compiler,
1157   CompLevel comp_level
1158 #if INCLUDE_JVMCI
1159   , char* speculations,
1160   int speculations_len,
1161   JVMCINMethodData* jvmci_data
1162 #endif
1163 )
1164 {

1191 
1192   int mutable_data_size = required_mutable_data_size(code_buffer
1193     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1194 
1195   {
1196     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1197 
1198     nm = new (nmethod_size, comp_level)
1199     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1200             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1201             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1202             handler_table, nul_chk_table, compiler, comp_level
1203 #if INCLUDE_JVMCI
1204             , speculations,
1205             speculations_len,
1206             jvmci_data
1207 #endif
1208             );
1209 
1210     if (nm != nullptr) {
1211       nm->record_nmethod_dependency();
1212       NOT_PRODUCT(note_java_nmethod(nm));
1213     }
1214   }
1215   // Do verification and logging outside CodeCache_lock.
1216   if (nm != nullptr) {
1217 
1218 #ifdef ASSERT
1219     LogTarget(Debug, aot, codecache, nmethod) log;
1220     if (log.is_enabled()) {
1221       LogStream out(log);
1222       out.print_cr("== new_nmethod 2");
1223       FlagSetting fs(PrintRelocations, true);
1224       nm->print_on_impl(&out);
1225       nm->decode(&out);
1226     }
1227 #endif
1228 
1229     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1230     DEBUG_ONLY(nm->verify();)
1231     nm->log_new_nmethod();
1232   }
1233   return nm;
1234 }
1235 
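     // Rebuild an nmethod in place from an AOT code cache image that has already
     // been copied into code_cache_buffer: restore the CodeBlob state, reattach
     // the method and immutable data, install oops/metadata, fix relocations,
     // flush the instruction cache and recreate the PcDesc container.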
1236 nmethod* nmethod::restore(address code_cache_buffer,
1237                           const methodHandle& method,
1238                           int compile_id,
1239                           address reloc_data,
1240                           GrowableArray<Handle>& oop_list,
1241                           GrowableArray<Metadata*>& metadata_list,
1242                           ImmutableOopMapSet* oop_maps,
1243                           address immutable_data,
1244                           GrowableArray<Handle>& reloc_imm_oop_list,
1245                           GrowableArray<Metadata*>& reloc_imm_metadata_list,
1246                           AOTCodeReader* aot_code_reader)
1247 {
1248   CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
1249   nmethod* nm = (nmethod*)code_cache_buffer;
1250   nm->set_method(method());
1251   nm->_compile_id = compile_id;
1252   nm->set_immutable_data(immutable_data);
1253   nm->copy_values(&oop_list);
1254   nm->copy_values(&metadata_list);
1255 
1256   aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);
1257 
1258 #ifndef PRODUCT
1259   nm->asm_remarks().init();
1260   aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
1261   nm->dbg_strings().init();
1262   aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
1263 #endif
1264 
1265   // Flush the code block
1266   ICache::invalidate_range(nm->code_begin(), nm->code_size());
1267 
1268   // Create cache after PcDesc data is copied - it will be used to initialize cache
1269   nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());
1270 
1271   nm->set_aot_code_entry(aot_code_reader->aot_code_entry());
1272 
1273   nm->post_init();
1274   return nm;
1275 }
1276 
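     // Create a new nmethod in the code cache from an archived (AOT) nmethod:
     // allocate space of the archived size and let restore() populate it, then
     // record dependencies and do verification/logging outside the CodeCache_lock.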
1277 nmethod* nmethod::new_nmethod(nmethod* archived_nm,
1278                               const methodHandle& method,
1279                               AbstractCompiler* compiler,
1280                               int compile_id,
1281                               address reloc_data,
1282                               GrowableArray<Handle>& oop_list,
1283                               GrowableArray<Metadata*>& metadata_list,
1284                               ImmutableOopMapSet* oop_maps,
1285                               address immutable_data,
1286                               GrowableArray<Handle>& reloc_imm_oop_list,
1287                               GrowableArray<Metadata*>& reloc_imm_metadata_list,
1288                               AOTCodeReader* aot_code_reader)
1289 {
1290   nmethod* nm = nullptr;
1291   int nmethod_size = archived_nm->size();
1292   // create nmethod
1293   {
1294     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1295     address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
1296     if (code_cache_buffer != nullptr) {
1297       nm = archived_nm->restore(code_cache_buffer,
1298                                 method,
1299                                 compile_id,
1300                                 reloc_data,
1301                                 oop_list,
1302                                 metadata_list,
1303                                 oop_maps,
1304                                 immutable_data,
1305                                 reloc_imm_oop_list,
1306                                 reloc_imm_metadata_list,
1307                                 aot_code_reader);
1308       nm->record_nmethod_dependency();
1309       NOT_PRODUCT(note_java_nmethod(nm));
1310     }
1311   }
1312   // Do verification and logging outside CodeCache_lock.
1313   if (nm != nullptr) {
1314 #ifdef ASSERT
1315     LogTarget(Debug, aot, codecache, nmethod) log;
1316     if (log.is_enabled()) {
1317       LogStream out(log);
1318       out.print_cr("== new_nmethod 2");
1319       FlagSetting fs(PrintRelocations, true);
1320       nm->print_on_impl(&out);
1321       nm->decode(&out);
1322     }
1323 #endif
1324     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1325     DEBUG_ONLY(nm->verify();)
1326     nm->log_new_nmethod();
1327   }
1328   return nm;
1329 }
1330 
1331 // Fill in default values for various fields
1332 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1333   // avoid uninitialized fields, even for short time periods
1334   _exception_cache            = nullptr;
1335   _gc_data                    = nullptr;
1336   _oops_do_mark_link          = nullptr;
1337   _compiled_ic_data           = nullptr;
1338 
1339   _is_unloading_state         = 0;
1340   _state                      = not_installed;
1341 
1342   _has_unsafe_access          = 0;
1343   _has_wide_vectors           = 0;
1344   _has_monitors               = 0;
1345   _has_scoped_access          = 0;
1346   _has_flushed_dependencies   = 0;
1347   _is_unlinked                = 0;
1348   _load_reported              = 0; // jvmti state
1349   _preloaded                  = 0;
1350   _has_clinit_barriers        = 0;
1351 
1352   _used                       = false;
1353   _deoptimization_status      = not_marked;
1354 
1355   // SECT_CONSTS is first in code buffer so the offset should be 0.
1356   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1357   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1358 
1359   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1360 
1361   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1362   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1363 
1364   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1365 }
1366 
1367 // Post initialization
1368 void nmethod::post_init() {
1369   clear_unloading_state();
1370 
1371   finalize_relocations();
1372 

1404     init_defaults(code_buffer, offsets);
1405 
1406     _osr_entry_point         = nullptr;
1407     _pc_desc_container       = nullptr;
1408     _entry_bci               = InvocationEntryBci;
1409     _compile_id              = compile_id;
1410     _comp_level              = CompLevel_none;
1411     _compiler_type           = type;
1412     _orig_pc_offset          = 0;
1413     _num_stack_arg_slots     = 0;
1414 
1415     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1416       // Continuation enter intrinsic
1417       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1418     } else {
1419       _exception_offset      = 0;
1420     }
1421     // Native wrappers do not have deopt handlers. Make the values
1422     // something that will never match a pc like the nmethod vtable entry
1423     _deopt_handler_entry_offset    = 0;
1424     _aot_code_entry          = nullptr;
1425     _method_profiling_count  = 0;
1426     _unwind_handler_offset   = 0;
1427 
1428     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1429     uint16_t metadata_size;
1430     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1431     JVMCI_ONLY( _metadata_size = metadata_size; )
1432     assert(_mutable_data_size == _relocation_size + metadata_size,
1433            "wrong mutable data size: %d != %d + %d",
1434            _mutable_data_size, _relocation_size, metadata_size);
1435 
1436     // native wrapper does not have read-only data but we need a unique, non-null address
1437     _immutable_data          = blob_end();
1438     _immutable_data_size     = 0;
1439     _nul_chk_table_offset    = 0;
1440     _handler_table_offset    = 0;
1441     _scopes_pcs_offset       = 0;
1442     _scopes_data_offset      = 0;
1443 #if INCLUDE_JVMCI
1444     _speculations_offset     = 0;
1445 #endif

1466     // This is both handled in decode2(), called via print_code() -> decode()
1467     if (PrintNativeNMethods) {
1468       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1469       print_code();
1470       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1471 #if defined(SUPPORT_DATA_STRUCTS)
1472       if (AbstractDisassembler::show_structs()) {
1473         if (oop_maps != nullptr) {
1474           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1475           oop_maps->print_on(tty);
1476           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1477         }
1478       }
1479 #endif
1480     } else {
1481       print(); // print the header part only.
1482     }
1483 #if defined(SUPPORT_DATA_STRUCTS)
1484     if (AbstractDisassembler::show_structs()) {
1485       if (PrintRelocations) {
1486         print_relocations_on(tty);
1487         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1488       }
1489     }
1490 #endif
1491     if (xtty != nullptr) {
1492       xtty->tail("print_native_nmethod");
1493     }
1494   }
1495 }
1496 
1497 
1498 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1499 {
1500 
1501   if (nm._oop_maps != nullptr) {
1502     _oop_maps                   = nm._oop_maps->clone();
1503   } else {
1504     _oop_maps                   = nullptr;
1505   }
1506 

1799   CompLevel comp_level
1800 #if INCLUDE_JVMCI
1801   , char* speculations,
1802   int speculations_len,
1803   JVMCINMethodData* jvmci_data
1804 #endif
1805   )
1806   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1807              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1808   _deoptimization_generation(0),
1809   _gc_epoch(CodeCache::gc_epoch()),
1810   _method(method),
1811   _osr_link(nullptr)
1812 {
1813   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1814   {
1815     DEBUG_ONLY(NoSafepointVerifier nsv;)
1816     assert_locked_or_safepoint(CodeCache_lock);
1817 
1818     init_defaults(code_buffer, offsets);
1819     _aot_code_entry          = nullptr; // runtime compiled nmethod does not have AOTCodeEntry
1820     _method_profiling_count  = 0;
1821 
1822     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1823     _entry_bci       = entry_bci;
1824     _compile_id      = compile_id;
1825     _comp_level      = comp_level;
1826     _compiler_type   = type;
1827     _orig_pc_offset  = orig_pc_offset;
1828 
1829     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1830 
1831     set_ctable_begin(header_begin() + content_offset());
1832 
1833 #if INCLUDE_JVMCI
1834     if (compiler->is_jvmci()) {
1835       // JVMCI might not produce any stub sections
1836       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1837         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1838       } else {
1839         _exception_offset        = -1;
1840       }

1930     // Copy speculations to nmethod
1931     if (speculations_size() != 0) {
1932       memcpy(speculations_begin(), speculations, speculations_len);
1933     }
1934 #endif
1935     init_immutable_data_ref_count();
1936 
1937     post_init();
1938 
1939     // we use the information of entry points to find out if a method is
1940     // static or non static
1941     assert(compiler->is_c2() || compiler->is_jvmci() ||
1942            _method->is_static() == (entry_point() == verified_entry_point()),
1943            " entry points must be the same for static methods and vice versa");
1944   }
1945 }
1946 
1947 // Print a short set of xml attributes to identify this nmethod.  The
1948 // output should be embedded in some other element.
1949 void nmethod::log_identity(xmlStream* log) const {
1950   assert(log->inside_attrs_or_error(), "printing attributes");
1951   log->print(" compile_id='%d'", compile_id());
1952   const char* nm_kind = compile_kind();
1953   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1954   log->print(" compiler='%s'", compiler_name());
1955   if (TieredCompilation) {
1956     log->print(" compile_level='%d'", comp_level());
1957   }
1958 #if INCLUDE_JVMCI
1959   if (jvmci_nmethod_data() != nullptr) {
1960     const char* jvmci_name = jvmci_nmethod_data()->name();
1961     if (jvmci_name != nullptr) {
1962       log->print(" jvmci_mirror_name='");
1963       log->text("%s", jvmci_name);
1964       log->print("'");
1965     }
1966   }
1967 #endif
1968 }
1969 
1970 
1971 #define LOG_OFFSET(log, name)                         \
1972   if (p2i(name##_end()) - p2i(name##_begin()) != 0)   \
1973     log->print(" " XSTR(name) "_offset='%zd'",        \
1974                p2i(name##_begin()) - p2i(this))
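     // Worked example (informal, for readers of the log output): LOG_OFFSET(log, scopes_data)
     // prints " scopes_data_offset='<byte distance of scopes_data_begin() from this nmethod>'",
     // and prints nothing when the scopes_data section is empty.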
1975 
1976 

2091       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2092       if (oop_maps() != nullptr) {
2093         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
2094         oop_maps()->print_on(tty);
2095         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2096       }
2097     }
2098 #endif
2099   } else {
2100     print(); // print the header part only.
2101   }
2102 
2103 #if defined(SUPPORT_DATA_STRUCTS)
2104   if (AbstractDisassembler::show_structs()) {
2105     methodHandle mh(Thread::current(), _method);
2106     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
2107       print_scopes();
2108       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2109     }
2110     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
2111       print_relocations_on(tty);
2112       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2113     }
2114     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
2115       print_dependencies_on(tty);
2116       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2117     }
2118     if (printmethod || PrintExceptionHandlers) {
2119       print_handler_table();
2120       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2121       print_nul_chk_table();
2122       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2123     }
2124 
2125     if (printmethod) {
2126       print_recorded_oops();
2127       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2128       print_recorded_metadata();
2129       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2130     }
2131   }
2132 #endif
2133 
2134   if (xtty != nullptr) {
2135     xtty->tail("print_nmethod");
2136   }
2137 }
2138 
2139 
2140 // Promote one word from an assembly-time handle to a live embedded oop.
2141 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2142   if (handle == nullptr ||
2143       // As a special case, IC oops are initialized to 1 or -1.
2144       handle == (jobject) Universe::non_oop_word()) {
2145     *(void**)dest = handle;
2146   } else {
2147     *dest = JNIHandles::resolve_non_null(handle);
2148   }
2149 }
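     // Illustrative sketch (not product code; 'slot' and 'handle' are placeholder names):
     // the three inputs the function distinguishes are
     //   initialize_immediate_oop(&slot, nullptr);                            // slot keeps the null bits
     //   initialize_immediate_oop(&slot, (jobject) Universe::non_oop_word()); // IC sentinel, copied verbatim
     //   initialize_immediate_oop(&slot, handle);                             // handle resolved to a live oop
     // Only the last case actually dereferences a JNI handle.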
2150 
2151 void nmethod::copy_values(GrowableArray<Handle>* array) {
2152   int length = array->length();
2153   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2154   oop* dest = oops_begin();
2155   for (int index = 0 ; index < length; index++) {
2156     dest[index] = array->at(index)();
2157   }
2158 }
2159 
2160 // Must have the same name as the version above because it is called from a template.
2161 void nmethod::copy_values(GrowableArray<jobject>* array) {
2162   int length = array->length();
2163   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2164   oop* dest = oops_begin();
2165   for (int index = 0 ; index < length; index++) {
2166     initialize_immediate_oop(&dest[index], array->at(index));
2167   }
2168 
2169   // Now we can fix up all the oops in the code.  We need to do this
2170   // in the code because the assembler uses jobjects as placeholders.
2171   // The code and relocations have already been initialized by the
2172   // CodeBlob constructor, so it is valid even at this early point to
2173   // iterate over relocations and patch the code.
2174   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2175 }
2176 
2177 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2178   int length = array->length();

2186 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2187   // re-patch all oop-bearing instructions, just in case some oops moved
2188   RelocIterator iter(this, begin, end);
2189   while (iter.next()) {
2190     if (iter.type() == relocInfo::oop_type) {
2191       oop_Relocation* reloc = iter.oop_reloc();
2192       if (initialize_immediates && reloc->oop_is_immediate()) {
2193         oop* dest = reloc->oop_addr();
2194         jobject obj = *reinterpret_cast<jobject*>(dest);
2195         initialize_immediate_oop(dest, obj);
2196       }
2197       // Refresh the oop-related bits of this instruction.
2198       reloc->fix_oop_relocation();
2199     } else if (iter.type() == relocInfo::metadata_type) {
2200       metadata_Relocation* reloc = iter.metadata_reloc();
2201       reloc->fix_metadata_relocation();
2202     }
2203   }
2204 }
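     // Usage note (informal): copy_values() above calls this with
     // initialize_immediates == true to turn the jobject placeholders left by the
     // assembler into live oops; re-patching after oops have moved only needs the
     // relocation refresh, i.e. initialize_immediates == false.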
2205 
2206 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
2207   RelocIterator iter(this);
2208   while (iter.next()) {
2209     if (iter.type() == relocInfo::oop_type) {
2210       oop_Relocation* reloc = iter.oop_reloc();
2211       if (reloc->oop_is_immediate()) {
2212         oop dest = reloc->oop_value();
2213         Handle h(thread, dest);
2214         oop_list.append(h);
2215       }
2216     } else if (iter.type() == relocInfo::metadata_type) {
2217       metadata_Relocation* reloc = iter.metadata_reloc();
2218       if (reloc->metadata_is_immediate()) {
2219         Metadata* m = reloc->metadata_value();
2220         metadata_list.append(m);
2221       }
2222     }
2223   }
2224 }
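     // Informal note: this collects only the oops and metadata embedded directly in the
     // instruction stream (immediates), as opposed to values reachable through the
     // oops/metadata sections; the caller owns the resulting lists and presumably uses
     // them to record these values independently of the code itself.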
2225 
2226 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2227   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2228   intptr_t cbaddr = (intptr_t) nm;
2229   intptr_t offset = ((intptr_t) pc) - cbaddr;
2230 
2231   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2232   if (oopmap_slot < 0) { // this can happen during asynchronous (non-safepoint) stack walks
2233     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2234   } else if (!nop->patch(oopmap_slot, offset)) {
2235     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2236   }
2237 }
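     // Informal note: a successfully patched post-call nop carries the nmethod-relative
     // offset of the return pc plus the oop map slot, so a stack walker can recover the
     // nmethod and its oop map without a code cache lookup; both failure branches above
     // only log and leave the slower lookup path as the fallback.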
2238 
2239 void nmethod::finalize_relocations() {
2240   NoSafepointVerifier nsv;
2241 
2242   GrowableArray<NativeMovConstReg*> virtual_call_data;
2243 
2244   // Make sure that post-call nops fill in nmethod offsets eagerly so
2245   // that we don't have to race with deoptimization.

2372   // be alive the previous completed marking cycle.
2373   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2374 }
2375 
2376 void nmethod::inc_decompile_count() {
2377   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2378   // Could be gated by ProfileTraps, but do not bother...
2379 #if INCLUDE_JVMCI
2380   if (jvmci_skip_profile_deopt()) {
2381     return;
2382   }
2383 #endif
2384   Method* m = method();
2385   if (m == nullptr)  return;
2386   MethodData* mdo = m->method_data();
2387   if (mdo == nullptr)  return;
2388   // There is a benign race here.  See comments in methodData.hpp.
2389   mdo->inc_decompile_count();
2390 }
2391 
2392 void nmethod::inc_method_profiling_count() {
2393   AtomicAccess::inc(&_method_profiling_count);
2394 }
2395 
2396 uint64_t nmethod::method_profiling_count() {
2397   return _method_profiling_count;
2398 }
2399 
2400 bool nmethod::try_transition(signed char new_state_int) {
2401   signed char new_state = new_state_int;
2402   assert_lock_strong(NMethodState_lock);
2403   signed char old_state = _state;
2404   if (old_state >= new_state) {
2405     // Ensure monotonicity of transitions.
2406     return false;
2407   }
2408   AtomicAccess::store(&_state, new_state);
2409   return true;
2410 }
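     // Informal sketch: because states only increase (e.g. not_installed precedes
     // not_entrant), a call such as
     //   try_transition(not_entrant);   // as done in make_not_entrant() below
     // succeeds at most once per nmethod, and a false return means another thread has
     // already moved the nmethod to this state or past it.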
2411 
2412 void nmethod::invalidate_osr_method() {
2413   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2414   // Remove from list of active nmethods
2415   if (method() != nullptr) {
2416     method()->method_holder()->remove_osr_nmethod(this);
2417   }
2418 }
2419 

2429     }
2430   }
2431 
2432   ResourceMark rm;
2433   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2434   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2435 
2436   CompileTask::print_ul(this, ss.freeze());
2437   if (PrintCompilation) {
2438     print_on_with_msg(tty, ss.freeze());
2439   }
2440 }
2441 
2442 void nmethod::unlink_from_method() {
2443   if (method() != nullptr) {
2444     method()->unlink_code(this);
2445   }
2446 }
2447 
2448 // Invalidate code
2449 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2450   // This can be called while the system is already at a safepoint, which is ok.
2451   NoSafepointVerifier nsv;
2452 
2453   if (is_unloading()) {
2454     // If the nmethod is unloading, then it is already not entrant through
2455     // the nmethod entry barriers. No need to do anything; GC will unload it.
2456     return false;
2457   }
2458 
2459   if (AtomicAccess::load(&_state) == not_entrant) {
2460     // Avoid taking the lock if already in required state.
2461     // This is safe from races because the state is an end-state,
2462     // which the nmethod cannot back out of once entered.
2463     // No need for fencing either.
2464     return false;
2465   }
2466 
2467   {
2468     // Enter critical section.  Does not block for safepoint.
2469     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2491     }
2492 
2493     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2494     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2495       // If nmethod entry barriers are not supported, we cannot mark nmethods
2496       // as on-stack at the point where they actually go on-stack. So we
2497       // degrade to a less accurate flushing strategy, for now.
2498       mark_as_maybe_on_stack();
2499     }
2500 
2501     // Change state
2502     bool success = try_transition(not_entrant);
2503     assert(success, "Transition can't fail");
2504 
2505     // Log the transition once
2506     log_state_change(invalidation_reason);
2507 
2508     // Remove nmethod from method.
2509     unlink_from_method();
2510 
2511     if (!keep_aot_entry) {
2512       // Keep the AOT code if this nmethod was simply replaced;
2513       // otherwise make it not entrant too.
2514       AOTCodeCache::invalidate(_aot_code_entry);
2515     }
2516 
2517     CompileBroker::log_not_entrant(this);
2518   } // leave critical region under NMethodState_lock
2519 
2520 #if INCLUDE_JVMCI
2521   // Invalidate can't occur while holding the NMethodState_lock
2522   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2523   if (nmethod_data != nullptr) {
2524     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2525   }
2526 #endif
2527 
2528 #ifdef ASSERT
2529   if (is_osr_method() && method() != nullptr) {
2530     // Make sure osr nmethod is invalidated, i.e. not on the list
2531     bool found = method()->method_holder()->remove_osr_nmethod(this);
2532     assert(!found, "osr nmethod should have been invalidated");
2533   }
2534 #endif
2535 
2536   return true;
2537 }
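     // Informal note: the unlocked _state check above is only a fast path; the
     // authoritative transition happens under NMethodState_lock, so callers that only
     // care about the end result can ignore the boolean return and treat the call as
     // idempotent once not_entrant has been reached.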

2562     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2563             nmethod::InvalidationReason::UNLOADING_COLD :
2564             nmethod::InvalidationReason::UNLOADING);
2565   }
2566 #endif
2567 
2568   // Post the event before flushing, as the jmethodID is still being used.
2569   post_compiled_method_unload();
2570 
2571   // Register for flushing when it is safe. For concurrent class unloading,
2572   // that would be after the unloading handshake, and for STW class unloading
2573   // that would be when getting back to the VM thread.
2574   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2575 }
2576 
2577 void nmethod::purge(bool unregister_nmethod) {
2578 
2579   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2580 
2581   // completely deallocate this method
2582   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));
2583 
2584   LogTarget(Debug, codecache) lt;
2585   if (lt.is_enabled()) {
2586     ResourceMark rm;
2587     LogStream ls(lt);
2588     const char* method_name = method()->name()->as_C_string();
2589     const size_t codecache_capacity = CodeCache::capacity()/1024;
2590     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2591     ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2592               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2593               compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2594               codecache_capacity, codecache_free_space, method_name, compiler_name());
2595   }
2596 
2597   // We need to deallocate any ExceptionCache data.
2598   // Note that we do not need to grab the nmethod lock for this; it
2599   // had better be thread-safe if we're disposing of it!
2600   ExceptionCache* ec = exception_cache();
2601   while (ec != nullptr) {
2602     ExceptionCache* next = ec->next();
2603     delete ec;
2604     ec = next;
2605   }
2606   if (_pc_desc_container != nullptr) {
2607     delete _pc_desc_container;
2608   }
2609   if (_compiled_ic_data != nullptr) {
2610     delete[] _compiled_ic_data;
2611   }
2612 
2613   if (_immutable_data != blob_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
2614     // Free memory if this was the last nmethod referencing immutable data
2615     if (dec_immutable_data_ref_count() == 0) {
2616       os::free(_immutable_data);
2617     }
2618 
2619     _immutable_data = blob_end(); // Valid non-null address
2620   }
2621 
2622   if (unregister_nmethod) {
2623     Universe::heap()->unregister_nmethod(this);
2624   }
2625   CodeCache::unregister_old_nmethod(this);
2626 
2627   JVMCI_ONLY( _metadata_size = 0; )
2628   CodeBlob::purge();
2629 }
2630 
2631 oop nmethod::oop_at(int index) const {
2632   if (index == 0) {
2633     return nullptr;

2660         MethodHandles::clean_dependency_context(call_site);
2661       } else {
2662         InstanceKlass* ik = deps.context_type();
2663         if (ik == nullptr) {
2664           continue;  // ignore things like evol_method
2665         }
2666         // During GC, the liveness of the dependee determines which class needs to be updated.
2667         // The GC may clean dependency contexts concurrently and in parallel.
2668         ik->clean_dependency_context();
2669       }
2670     }
2671   }
2672 }
2673 
2674 void nmethod::post_compiled_method(CompileTask* task) {
2675   task->mark_success();
2676   task->set_nm_content_size(content_size());
2677   task->set_nm_insts_size(insts_size());
2678   task->set_nm_total_size(total_size());
2679 
2680   // task->is_aot_load() is true only for loaded AOT code.
2681   // nmethod::_aot_code_entry is set for both loaded and stored AOT code
2682   // so that the entry can be invalidated when the nmethod is deoptimized.
2683   // VerifyAOTCode is an option that prevents AOT code from being stored in the archive.
2684   guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");
2685 
2686   // JVMTI -- compiled method notification (must be done outside lock)
2687   post_compiled_method_load_event();
2688 
2689   if (CompilationLog::log() != nullptr) {
2690     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2691   }
2692 
2693   const DirectiveSet* directive = task->directive();
2694   maybe_print_nmethod(directive);
2695 }
2696 
2697 #if INCLUDE_CDS
2698 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2699 
2700 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2701   precond(CDSConfig::is_using_aot_linked_classes());
2702   precond(!ServiceThread::has_started());
2703 
2704   // We are still in the single-threaded stage of VM bootstrap. No need to lock.
2705   if (_delayed_compiled_method_load_events == nullptr) {

3422 void nmethod::verify() {
3423   if (is_not_entrant())
3424     return;
3425 
3426   // assert(oopDesc::is_oop(method()), "must be valid");
3427 
3428   ResourceMark rm;
3429 
3430   if (!CodeCache::contains(this)) {
3431     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3432   }
3433 
3434   if (is_native_method())
3435     return;
3436 
3437   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3438   if (nm != this) {
3439     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3440   }
3441 
3442   // Verification can be triggered during shutdown after the AOTCodeCache is closed.
3443   // If the scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3444   if (!is_aot() || AOTCodeCache::is_on()) {
3445     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3446       if (!p->verify(this)) {
3447         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3448       }
3449     }

3450 
3451 #ifdef ASSERT
3452 #if INCLUDE_JVMCI
3453     {
3454       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3455       ImmutableOopMapSet* oms = oop_maps();
3456       ImplicitExceptionTable implicit_table(this);
3457       for (uint i = 0; i < implicit_table.len(); i++) {
3458         int exec_offset = (int) implicit_table.get_exec_offset(i);
3459         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3460           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3461           bool found = false;
3462           for (int j = 0, jmax = oms->count(); j < jmax; j++) {
3463             if (oms->pair_at(j)->pc_offset() == exec_offset) {
3464               found = true;
3465               break;
3466             }
3467           }
3468           assert(found, "missing oopmap");
3469         }

3470       }
3471     }

3472 #endif
3473 #endif
3474   }
3475 
3476   VerifyOopsClosure voc(this);
3477   oops_do(&voc);
3478   assert(voc.ok(), "embedded oops must be OK");
3479   Universe::heap()->verify_nmethod(this);
3480 
3481   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3482          nm->method()->external_name(), p2i(_oops_do_mark_link));
3483   if (!is_aot() || AOTCodeCache::is_on()) {
3484     verify_scopes();
3485   }
3486 
3487   CompiledICLocker nm_verify(this);
3488   VerifyMetadataClosure vmc;
3489   metadata_do(&vmc);
3490 }
3491 
3492 
3493 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3494 
3495   // Verify IC only when nmethod installation is finished.
3496   if (!is_not_installed()) {
3497     if (CompiledICLocker::is_safe(this)) {
3498       if (is_inline_cache) {
3499         CompiledIC_at(this, call_site);
3500       } else {
3501         CompiledDirectCall::at(call_site);
3502       }
3503     } else {
3504       CompiledICLocker ml_verify(this);
3505       if (is_inline_cache) {

3634                                              p2i(nul_chk_table_end()),
3635                                              nul_chk_table_size());
3636   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3637                                              p2i(handler_table_begin()),
3638                                              p2i(handler_table_end()),
3639                                              handler_table_size());
3640   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3641                                              p2i(scopes_pcs_begin()),
3642                                              p2i(scopes_pcs_end()),
3643                                              scopes_pcs_size());
3644   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3645                                              p2i(scopes_data_begin()),
3646                                              p2i(scopes_data_end()),
3647                                              scopes_data_size());
3648 #if INCLUDE_JVMCI
3649   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3650                                              p2i(speculations_begin()),
3651                                              p2i(speculations_end()),
3652                                              speculations_size());
3653 #endif
3654   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3655     _aot_code_entry->print(st);
3656   }
3657 }
3658 
3659 void nmethod::print_code() {
3660   ResourceMark m;
3661   ttyLocker ttyl;
3662   // Call the specialized decode method of this class.
3663   decode(tty);
3664 }
3665 
3666 #ifndef PRODUCT  // The InstanceKlass methods called here are available only then. Declared as PRODUCT_RETURN.
3667 
3668 void nmethod::print_dependencies_on(outputStream* out) {
3669   ResourceMark rm;
3670   stringStream st;
3671   st.print_cr("Dependencies:");
3672   for (Dependencies::DepStream deps(this); deps.next(); ) {
3673     deps.print_dependency(&st);
3674     InstanceKlass* ctxk = deps.context_type();
3675     if (ctxk != nullptr) {
3676       if (ctxk->is_dependent_nmethod(this)) {

3736   st->print("scopes:");
3737   if (scopes_pcs_begin() < scopes_pcs_end()) {
3738     st->cr();
3739     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3740       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3741         continue;
3742 
3743       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3744       while (sd != nullptr) {
3745         sd->print_on(st, p);  // print output ends with a newline
3746         sd = sd->sender();
3747       }
3748     }
3749   } else {
3750     st->print_cr(" <list empty>");
3751   }
3752 }
3753 #endif
3754 
3755 #ifndef PRODUCT  // RelocIterator supports printing only then.
3756 void nmethod::print_relocations_on(outputStream* st) {
3757   ResourceMark m;       // in case methods get printed via the debugger
3758   st->print_cr("relocations:");
3759   RelocIterator iter(this);
3760   iter.print_on(st);
3761 }
3762 #endif
3763 
3764 void nmethod::print_pcs_on(outputStream* st) {
3765   ResourceMark m;       // in case methods get printed via the debugger
3766   st->print("pc-bytecode offsets:");
3767   if (scopes_pcs_begin() < scopes_pcs_end()) {
3768     st->cr();
3769     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3770       p->print_on(st, this);  // print output ends with a newline
3771     }
3772   } else {
3773     st->print_cr(" <list empty>");
3774   }
3775 }
3776 
3777 void nmethod::print_handler_table() {
3778   ExceptionHandlerTable(this).print(code_begin());
3779 }
3780 

4595 void nmethod::update_speculation(JavaThread* thread) {
4596   jlong speculation = thread->pending_failed_speculation();
4597   if (speculation != 0) {
4598     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4599     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4600     thread->set_pending_failed_speculation(0);
4601   }
4602 }
4603 
4604 const char* nmethod::jvmci_name() {
4605   if (jvmci_nmethod_data() != nullptr) {
4606     return jvmci_nmethod_data()->name();
4607   }
4608   return nullptr;
4609 }
4610 
4611 bool nmethod::jvmci_skip_profile_deopt() const {
4612   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4613 }
4614 #endif
4615 
4616 void nmethod::prepare_for_archiving_impl() {
4617   CodeBlob::prepare_for_archiving_impl();
4618   _deoptimization_generation = 0;
4619   _gc_epoch = 0;
4620   _method_profiling_count = 0;
4621   _osr_link = nullptr;
4622   _method = nullptr;
4623   _immutable_data = nullptr;
4624   _pc_desc_container = nullptr;
4625   _exception_cache = nullptr;
4626   _gc_data = nullptr;
4627   _oops_do_mark_link = nullptr;
4628   _compiled_ic_data = nullptr;
4629   _osr_entry_point = nullptr;
4630   _compile_id = -1;
4631   _deoptimization_status = not_marked;
4632   _is_unloading_state = 0;
4633   _state = not_installed;
4634 }
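     // Informal note: the fields cleared above are runtime-only state; they are expected
     // to be re-established when the archived nmethod is later loaded and reinitialized,
     // mirroring the assignments done in the nmethod constructors earlier in this file.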