
src/hotspot/share/code/nmethod.cpp


   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"

  26 #include "code/codeCache.hpp"
  27 #include "code/compiledIC.hpp"
  28 #include "code/dependencies.hpp"
  29 #include "code/nativeInst.hpp"
  30 #include "code/nmethod.inline.hpp"
  31 #include "code/scopeDesc.hpp"
  32 #include "compiler/abstractCompiler.hpp"
  33 #include "compiler/compilationLog.hpp"
  34 #include "compiler/compileBroker.hpp"
  35 #include "compiler/compileLog.hpp"
  36 #include "compiler/compilerDirectives.hpp"
  37 #include "compiler/compilerOracle.hpp"
  38 #include "compiler/compileTask.hpp"
  39 #include "compiler/directivesParser.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.inline.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/barrierSetNMethod.hpp"
  44 #include "gc/shared/classUnloadingContext.hpp"
  45 #include "gc/shared/collectedHeap.hpp"

 989              _method->method_holder()->external_name(),
 990              _method->name()->as_C_string(),
 991              _method->signature()->as_C_string(),
 992              compile_id());
 993   }
 994   return check_evol.has_evol_dependency();
 995 }
 996 
 997 int nmethod::total_size() const {
 998   return
 999     consts_size()        +
1000     insts_size()         +
1001     stub_size()          +
1002     scopes_data_size()   +
1003     scopes_pcs_size()    +
1004     handler_table_size() +
1005     nul_chk_table_size();
1006 }
1007 
1008 const char* nmethod::compile_kind() const {
1009   if (is_osr_method())     return "osr";
1010   if (method() != nullptr && is_native_method()) {
1011     if (method()->is_continuation_native_intrinsic()) {
1012       return "cnt";
1013     }
1014     return "c2n";
1015   }
1016   return nullptr;
1017 }
1018 
1019 const char* nmethod::compiler_name() const {
1020   return compilertype2name(_compiler_type);
1021 }
1022 
1023 #ifdef ASSERT
1024 class CheckForOopsClosure : public OopClosure {
1025   bool _found_oop = false;
1026  public:
1027   virtual void do_oop(oop* o) { _found_oop = true; }
1028   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1029   bool found_oop() { return _found_oop; }

1095     nm = new (native_nmethod_size, allow_NonNMethod_space)
1096     nmethod(method(), compiler_none, native_nmethod_size,
1097             compile_id, &offsets,
1098             code_buffer, frame_size,
1099             basic_lock_owner_sp_offset,
1100             basic_lock_sp_offset,
1101             oop_maps, mutable_data_size);
1102     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1103     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1104   }
1105 
1106   if (nm != nullptr) {
1107     // verify nmethod
1108     DEBUG_ONLY(nm->verify();) // might block
1109 
1110     nm->log_new_nmethod();
1111   }
1112   return nm;
1113 }
1114 
1115 nmethod* nmethod::new_nmethod(const methodHandle& method,
1116   int compile_id,
1117   int entry_bci,
1118   CodeOffsets* offsets,
1119   int orig_pc_offset,
1120   DebugInformationRecorder* debug_info,
1121   Dependencies* dependencies,
1122   CodeBuffer* code_buffer, int frame_size,
1123   OopMapSet* oop_maps,
1124   ExceptionHandlerTable* handler_table,
1125   ImplicitExceptionTable* nul_chk_table,
1126   AbstractCompiler* compiler,
1127   CompLevel comp_level
1128 #if INCLUDE_JVMCI
1129   , char* speculations,
1130   int speculations_len,
1131   JVMCINMethodData* jvmci_data
1132 #endif
1133 )
1134 {

1161 
1162   int mutable_data_size = required_mutable_data_size(code_buffer
1163     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1164 
1165   {
1166     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1167 
1168     nm = new (nmethod_size, comp_level)
1169     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1170             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1171             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1172             handler_table, nul_chk_table, compiler, comp_level
1173 #if INCLUDE_JVMCI
1174             , speculations,
1175             speculations_len,
1176             jvmci_data
1177 #endif
1178             );
1179 
1180     if (nm != nullptr) {
1181       // To make dependency checking during class loading fast, record
1182       // the nmethod dependencies in the classes it is dependent on.
1183       // This allows the dependency checking code to simply walk the
1184       // class hierarchy above the loaded class, checking only nmethods
1185       // which are dependent on those classes.  The slow way is to
1186       // check every nmethod for dependencies, which makes it linear in
1187       // the number of methods compiled.  For applications with a lot of
1188       // classes the slow way is too slow.
1189       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1190         if (deps.type() == Dependencies::call_site_target_value) {
1191           // CallSite dependencies are managed on a per-CallSite instance basis.
1192           oop call_site = deps.argument_oop(0);
1193           MethodHandles::add_dependent_nmethod(call_site, nm);
1194         } else {
1195           InstanceKlass* ik = deps.context_type();
1196           if (ik == nullptr) {
1197             continue;  // ignore things like evol_method
1198           }
1199           // record this nmethod as dependent on this klass
1200           ik->add_dependent_nmethod(nm);
1201         }
1202       }
1203       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1204     }
1205   }
1206   // Do verification and logging outside CodeCache_lock.
1207   if (nm != nullptr) {
1208     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1209     DEBUG_ONLY(nm->verify();)
1210     nm->log_new_nmethod();
1211   }
1212   return nm;
1213 }
1214 
1215 // Fill in default values for various fields
1216 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1217   // avoid uninitialized fields, even for short time periods
1218   _exception_cache            = nullptr;
1219   _gc_data                    = nullptr;
1220   _oops_do_mark_link          = nullptr;
1221   _compiled_ic_data           = nullptr;
1222 
1223   _is_unloading_state         = 0;
1224   _state                      = not_installed;
1225 
1226   _has_unsafe_access          = 0;
1227   _has_wide_vectors           = 0;
1228   _has_monitors               = 0;
1229   _has_scoped_access          = 0;
1230   _has_flushed_dependencies   = 0;
1231   _is_unlinked                = 0;
1232   _load_reported              = 0; // jvmti state


1233 

1234   _deoptimization_status      = not_marked;
1235 
1236   // SECT_CONSTS is first in code buffer so the offset should be 0.
1237   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1238   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1239 
1240   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1241 
1242   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1243   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1244 
1245   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1246 }
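
An aside on the CHECKED_CAST uses above: the entry offsets are stored in uint16_t fields, so the cast has to prove that no bits are lost when narrowing. A minimal standalone sketch of that idea (checked_narrow is a hypothetical helper, not the HotSpot macro):

    #include <cassert>
    #include <cstdint>

    // Illustrative only: narrow a value into a smaller field and assert
    // that converting back reproduces the original, i.e. nothing was lost.
    template <typename To, typename From>
    To checked_narrow(From value) {
      To narrowed = static_cast<To>(value);
      assert(static_cast<From>(narrowed) == value && "value does not fit in the target type");
      return narrowed;
    }

    int main() {
      int entry = 1024;                                        // e.g. an entry offset
      uint16_t entry_offset = checked_narrow<uint16_t>(entry); // fits, OK
      // checked_narrow<uint16_t>(70000);                      // would trip the assert
      return entry_offset == 1024 ? 0 : 1;
    }
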
1247 
1248 // Post initialization
1249 void nmethod::post_init() {
1250   clear_unloading_state();
1251 
1252   finalize_relocations();
1253 

1285     init_defaults(code_buffer, offsets);
1286 
1287     _osr_entry_point         = nullptr;
1288     _pc_desc_container       = nullptr;
1289     _entry_bci               = InvocationEntryBci;
1290     _compile_id              = compile_id;
1291     _comp_level              = CompLevel_none;
1292     _compiler_type           = type;
1293     _orig_pc_offset          = 0;
1294     _num_stack_arg_slots     = 0;
1295 
1296     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1297       // Continuation enter intrinsic
1298       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1299     } else {
1300       _exception_offset      = 0;
1301     }
1302     // Native wrappers do not have deopt handlers. Make the values
1303     // something that will never match a pc like the nmethod vtable entry
1304     _deopt_handler_offset    = 0;


1305     _unwind_handler_offset   = 0;
1306 
1307     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1308     uint16_t metadata_size;
1309     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1310     JVMCI_ONLY( _metadata_size = metadata_size; )
1311     assert(_mutable_data_size == _relocation_size + metadata_size,
1312            "wrong mutable data size: %d != %d + %d",
1313            _mutable_data_size, _relocation_size, metadata_size);
1314 
1315     // native wrapper does not have read-only data, but we need a unique non-null address
1316     _immutable_data          = blob_end();
1317     _immutable_data_size     = 0;
1318     _nul_chk_table_offset    = 0;
1319     _handler_table_offset    = 0;
1320     _scopes_pcs_offset       = 0;
1321     _scopes_data_offset      = 0;
1322 #if INCLUDE_JVMCI
1323     _speculations_offset     = 0;
1324 #endif

1344     // This is both handled in decode2(), called via print_code() -> decode()
1345     if (PrintNativeNMethods) {
1346       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1347       print_code();
1348       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1349 #if defined(SUPPORT_DATA_STRUCTS)
1350       if (AbstractDisassembler::show_structs()) {
1351         if (oop_maps != nullptr) {
1352           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1353           oop_maps->print_on(tty);
1354           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1355         }
1356       }
1357 #endif
1358     } else {
1359       print(); // print the header part only.
1360     }
1361 #if defined(SUPPORT_DATA_STRUCTS)
1362     if (AbstractDisassembler::show_structs()) {
1363       if (PrintRelocations) {
1364         print_relocations();
1365         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1366       }
1367     }
1368 #endif
1369     if (xtty != nullptr) {
1370       xtty->tail("print_native_nmethod");
1371     }
1372   }
1373 }
1374 
1375 
1376 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1377 {
1378 
1379   if (nm._oop_maps != nullptr) {
1380     _oop_maps                   = nm._oop_maps->clone();
1381   } else {
1382     _oop_maps                   = nullptr;
1383   }
1384 

1663   CompLevel comp_level
1664 #if INCLUDE_JVMCI
1665   , char* speculations,
1666   int speculations_len,
1667   JVMCINMethodData* jvmci_data
1668 #endif
1669   )
1670   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1671              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1672   _deoptimization_generation(0),
1673   _gc_epoch(CodeCache::gc_epoch()),
1674   _method(method),
1675   _osr_link(nullptr)
1676 {
1677   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1678   {
1679     DEBUG_ONLY(NoSafepointVerifier nsv;)
1680     assert_locked_or_safepoint(CodeCache_lock);
1681 
1682     init_defaults(code_buffer, offsets);


1683 
1684     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1685     _entry_bci       = entry_bci;
1686     _compile_id      = compile_id;
1687     _comp_level      = comp_level;
1688     _compiler_type   = type;
1689     _orig_pc_offset  = orig_pc_offset;
1690 
1691     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1692 
1693     set_ctable_begin(header_begin() + content_offset());
1694 
1695 #if INCLUDE_JVMCI
1696     if (compiler->is_jvmci()) {
1697       // JVMCI might not produce any stub sections
1698       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1699         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1700       } else {
1701         _exception_offset        = -1;
1702       }

1784     // Copy speculations to nmethod
1785     if (speculations_size() != 0) {
1786       memcpy(speculations_begin(), speculations, speculations_len);
1787     }
1788 #endif
1789     set_immutable_data_references_counter(1);
1790 
1791     post_init();
1792 
1793     // we use the entry point information to find out whether a method is
1794     // static or non-static
1795     assert(compiler->is_c2() || compiler->is_jvmci() ||
1796            _method->is_static() == (entry_point() == verified_entry_point()),
1797            "entry points must be the same for static methods and vice versa");
1798   }
1799 }
1800 
1801 // Print a short set of xml attributes to identify this nmethod.  The
1802 // output should be embedded in some other element.
1803 void nmethod::log_identity(xmlStream* log) const {

1804   log->print(" compile_id='%d'", compile_id());
1805   const char* nm_kind = compile_kind();
1806   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1807   log->print(" compiler='%s'", compiler_name());
1808   if (TieredCompilation) {
1809     log->print(" level='%d'", comp_level());
1810   }
1811 #if INCLUDE_JVMCI
1812   if (jvmci_nmethod_data() != nullptr) {
1813     const char* jvmci_name = jvmci_nmethod_data()->name();
1814     if (jvmci_name != nullptr) {
1815       log->print(" jvmci_mirror_name='");
1816       log->text("%s", jvmci_name);
1817       log->print("'");
1818     }
1819   }
1820 #endif
1821 }
1822 
1823 
1824 #define LOG_OFFSET(log, name)                    \
1825   if (p2i(name##_end()) - p2i(name##_begin())) \
1826     log->print(" " XSTR(name) "_offset='%zd'"    , \
1827                p2i(name##_begin()) - p2i(this))
1828 
1829 

1944       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1945       if (oop_maps() != nullptr) {
1946         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1947         oop_maps()->print_on(tty);
1948         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1949       }
1950     }
1951 #endif
1952   } else {
1953     print(); // print the header part only.
1954   }
1955 
1956 #if defined(SUPPORT_DATA_STRUCTS)
1957   if (AbstractDisassembler::show_structs()) {
1958     methodHandle mh(Thread::current(), _method);
1959     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1960       print_scopes();
1961       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1962     }
1963     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1964       print_relocations();
1965       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1966     }
1967     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1968       print_dependencies_on(tty);
1969       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1970     }
1971     if (printmethod || PrintExceptionHandlers) {
1972       print_handler_table();
1973       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1974       print_nul_chk_table();
1975       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1976     }
1977 
1978     if (printmethod) {
1979       print_recorded_oops();
1980       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1981       print_recorded_metadata();
1982       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1983     }
1984   }
1985 #endif
1986 
1987   if (xtty != nullptr) {
1988     xtty->tail("print_nmethod");
1989   }
1990 }
1991 
1992 
1993 // Promote one word from an assembly-time handle to a live embedded oop.
1994 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1995   if (handle == nullptr ||
1996       // As a special case, IC oops are initialized to 1 or -1.
1997       handle == (jobject) Universe::non_oop_word()) {
1998     *(void**)dest = handle;
1999   } else {
2000     *dest = JNIHandles::resolve_non_null(handle);
2001   }
2002 }
2003 
2004 
2005 // Must have the same name because it is called from a template
2006 void nmethod::copy_values(GrowableArray<jobject>* array) {
2007   int length = array->length();
2008   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2009   oop* dest = oops_begin();
2010   for (int index = 0 ; index < length; index++) {
2011     initialize_immediate_oop(&dest[index], array->at(index));
2012   }
2013 
2014   // Now we can fix up all the oops in the code.  We need to do this
2015   // in the code because the assembler uses jobjects as placeholders.
2016   // The code and relocations have already been initialized by the
2017   // CodeBlob constructor, so it is valid even at this early point to
2018   // iterate over relocations and patch the code.
2019   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2020 }
2021 
2022 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2023   int length = array->length();

2031 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2032   // re-patch all oop-bearing instructions, just in case some oops moved
2033   RelocIterator iter(this, begin, end);
2034   while (iter.next()) {
2035     if (iter.type() == relocInfo::oop_type) {
2036       oop_Relocation* reloc = iter.oop_reloc();
2037       if (initialize_immediates && reloc->oop_is_immediate()) {
2038         oop* dest = reloc->oop_addr();
2039         jobject obj = *reinterpret_cast<jobject*>(dest);
2040         initialize_immediate_oop(dest, obj);
2041       }
2042       // Refresh the oop-related bits of this instruction.
2043       reloc->fix_oop_relocation();
2044     } else if (iter.type() == relocInfo::metadata_type) {
2045       metadata_Relocation* reloc = iter.metadata_reloc();
2046       reloc->fix_metadata_relocation();
2047     }
2048   }
2049 }
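
The comments in copy_values() and fix_oop_relocations() above describe a two-phase scheme: the assembler emits jobject handles as placeholders, and a later pass resolves them and re-patches the affected slots. A toy, self-contained version of that pattern (illustrative only, not HotSpot code):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Code is "emitted" with small handle values as placeholders, and a
    // later fix-up pass rewrites each slot with the real object's address,
    // much like fix_oop_relocations() re-patches oop-bearing slots once
    // the real oops are known.
    struct Obj { int payload; };

    int main() {
      std::vector<Obj> object_table = {{10}, {20}, {30}};

      // Phase 1: emission records placeholder handles (indices into the table).
      std::vector<std::uintptr_t> embedded_slots = {0, 1, 2};

      // Phase 2: the fix-up pass resolves every placeholder in place.
      for (auto& slot : embedded_slots) {
        slot = reinterpret_cast<std::uintptr_t>(&object_table[slot]);
      }

      for (std::uintptr_t slot : embedded_slots) {
        std::printf("resolved payload: %d\n", reinterpret_cast<Obj*>(slot)->payload);
      }
      return 0;
    }
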
2050 
2051 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2052   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2053   intptr_t cbaddr = (intptr_t) nm;
2054   intptr_t offset = ((intptr_t) pc) - cbaddr;
2055 
2056   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2057   if (oopmap_slot < 0) { // this can happen during asynchronous (non-safepoint) stack walks
2058     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2059   } else if (!nop->patch(oopmap_slot, offset)) {
2060     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2061   }
2062 }
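
install_post_call_nop_displacement() asks NativePostCallNop::patch() to encode an oopmap slot and a pc offset into the post-call nop, and logs a failure when the values do not fit. The real encoding is architecture-specific; the sketch below only shows the generic encode-or-bail idea with invented field widths:

    #include <cstdint>
    #include <cstdio>

    // Illustrative only: pack a small "slot" and "offset" into one 32-bit
    // word, failing if either value does not fit its field.
    constexpr int kSlotBits   = 8;
    constexpr int kOffsetBits = 24;

    bool pack(uint32_t slot, uint32_t offset, uint32_t* out) {
      if (slot >= (1u << kSlotBits) || offset >= (1u << kOffsetBits)) {
        return false;  // does not fit; the caller falls back to the slow path
      }
      *out = (slot << kOffsetBits) | offset;
      return true;
    }

    int main() {
      uint32_t word;
      if (pack(3, 0x1234, &word)) {
        uint32_t slot   = word >> kOffsetBits;
        uint32_t offset = word & ((1u << kOffsetBits) - 1);
        std::printf("slot=%u offset=0x%x\n", slot, offset);
      }
      return 0;
    }
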
2063 
2064 void nmethod::finalize_relocations() {
2065   NoSafepointVerifier nsv;
2066 
2067   GrowableArray<NativeMovConstReg*> virtual_call_data;
2068 
2069   // Make sure that post call nops fill in nmethod offsets eagerly so
2070   // we don't have to race with deoptimization

2197   // be alive in the previous completed marking cycle.
2198   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2199 }
2200 
2201 void nmethod::inc_decompile_count() {
2202   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2203   // Could be gated by ProfileTraps, but do not bother...
2204 #if INCLUDE_JVMCI
2205   if (jvmci_skip_profile_deopt()) {
2206     return;
2207   }
2208 #endif
2209   Method* m = method();
2210   if (m == nullptr)  return;
2211   MethodData* mdo = m->method_data();
2212   if (mdo == nullptr)  return;
2213   // There is a benign race here.  See comments in methodData.hpp.
2214   mdo->inc_decompile_count();
2215 }
2216 
2217 bool nmethod::try_transition(signed char new_state_int) {
2218   signed char new_state = new_state_int;
2219   assert_lock_strong(NMethodState_lock);
2220   signed char old_state = _state;
2221   if (old_state >= new_state) {
2222     // Ensure monotonicity of transitions.
2223     return false;
2224   }
2225   AtomicAccess::store(&_state, new_state);
2226   return true;
2227 }
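
try_transition() only ever moves the state forward. A standalone sketch of that monotonic state machine (assumed enum values; the real code additionally requires NMethodState_lock to be held, which is what makes the load/store pair safe):

    #include <atomic>
    #include <cstdio>

    // States only move to strictly larger values; a request to an equal or
    // smaller state is rejected.
    enum State : signed char { not_installed = 0, in_use = 1, not_entrant = 2 };

    std::atomic<signed char> g_state{not_installed};

    bool try_transition(signed char new_state) {
      signed char old_state = g_state.load();
      if (old_state >= new_state) {
        return false;  // keep transitions monotonic
      }
      g_state.store(new_state);
      return true;
    }

    int main() {
      std::printf("%d\n", try_transition(in_use));      // 1
      std::printf("%d\n", try_transition(not_entrant)); // 1
      std::printf("%d\n", try_transition(in_use));      // 0: cannot go backwards
      return 0;
    }
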
2228 
2229 void nmethod::invalidate_osr_method() {
2230   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2231   // Remove from list of active nmethods
2232   if (method() != nullptr) {
2233     method()->method_holder()->remove_osr_nmethod(this);
2234   }
2235 }
2236 

2246     }
2247   }
2248 
2249   ResourceMark rm;
2250   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2251   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2252 
2253   CompileTask::print_ul(this, ss.freeze());
2254   if (PrintCompilation) {
2255     print_on_with_msg(tty, ss.freeze());
2256   }
2257 }
2258 
2259 void nmethod::unlink_from_method() {
2260   if (method() != nullptr) {
2261     method()->unlink_code(this);
2262   }
2263 }
2264 
2265 // Invalidate code
2266 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2267   // This can be called while the system is already at a safepoint, which is OK
2268   NoSafepointVerifier nsv;
2269 
2270   if (is_unloading()) {
2271     // If the nmethod is unloading, then it is already not entrant through
2272     // the nmethod entry barriers. No need to do anything; GC will unload it.
2273     return false;
2274   }
2275 
2276   if (AtomicAccess::load(&_state) == not_entrant) {
2277     // Avoid taking the lock if already in required state.
2278     // This is safe from races because the state is an end-state,
2279     // which the nmethod cannot back out of once entered.
2280     // No need for fencing either.
2281     return false;
2282   }
2283 
2284   {
2285     // Enter critical section.  Does not block for safepoint.
2286     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2308     }
2309 
2310     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2311     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2312       // If nmethod entry barriers are not supported, we won't mark
2313       // nmethods as on-stack when they become on-stack. So we
2314       // degrade to a less accurate flushing strategy, for now.
2315       mark_as_maybe_on_stack();
2316     }
2317 
2318     // Change state
2319     bool success = try_transition(not_entrant);
2320     assert(success, "Transition can't fail");
2321 
2322     // Log the transition once
2323     log_state_change(invalidation_reason);
2324 
2325     // Remove nmethod from method.
2326     unlink_from_method();
2327 
2328   } // leave critical region under NMethodState_lock
2329 
2330 #if INCLUDE_JVMCI
2331   // Invalidate can't occur while holding the NMethodState_lock
2332   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2333   if (nmethod_data != nullptr) {
2334     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2335   }
2336 #endif
2337 
2338 #ifdef ASSERT
2339   if (is_osr_method() && method() != nullptr) {
2340     // Make sure osr nmethod is invalidated, i.e. not on the list
2341     bool found = method()->method_holder()->remove_osr_nmethod(this);
2342     assert(!found, "osr nmethod should have been invalidated");
2343   }
2344 #endif
2345 
2346   return true;
2347 }
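
make_not_entrant() combines a lock-free fast path (bail out if the terminal state was already reached) with the real transition and its side effects under NMethodState_lock. A simplified standalone sketch of that shape, with invented names and std:: primitives standing in for the HotSpot ones:

    #include <atomic>
    #include <mutex>

    enum State : int { in_use = 0, not_entrant = 1 };

    std::atomic<int> g_state{in_use};
    std::mutex g_state_lock;

    bool make_not_entrant() {
      if (g_state.load() == not_entrant) {
        return false;  // already in the terminal state; no lock needed
      }
      std::lock_guard<std::mutex> guard(g_state_lock);
      if (g_state.load() == not_entrant) {
        return false;  // another thread won the race while we waited
      }
      g_state.store(not_entrant);
      // ... side effects such as logging and unlinking would go here ...
      return true;
    }

    int main() { return make_not_entrant() ? 0 : 1; }
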

2372     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2373             nmethod::InvalidationReason::UNLOADING_COLD :
2374             nmethod::InvalidationReason::UNLOADING);
2375   }
2376 #endif
2377 
2378   // Post before flushing as jmethodID is being used
2379   post_compiled_method_unload();
2380 
2381   // Register for flushing when it is safe. For concurrent class unloading,
2382   // that would be after the unloading handshake, and for STW class unloading
2383   // that would be when getting back to the VM thread.
2384   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2385 }
2386 
2387 void nmethod::purge(bool unregister_nmethod) {
2388 
2389   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2390 
2391   // completely deallocate this method
2392   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2393 
2394   LogTarget(Debug, codecache) lt;
2395   if (lt.is_enabled()) {
2396     ResourceMark rm;
2397     LogStream ls(lt);
2398     const char* method_name = method()->name()->as_C_string();
2399     const size_t codecache_capacity = CodeCache::capacity()/1024;
2400     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2401     ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2402               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2403               _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2404               codecache_capacity, codecache_free_space, method_name, compiler_name());
2405   }
2406 
2407   // We need to deallocate any ExceptionCache data.
2408   // Note that we do not need to grab the nmethod lock for this, it
2409   // had better be thread-safe if we're disposing of it!
2410   ExceptionCache* ec = exception_cache();
2411   while (ec != nullptr) {
2412     ExceptionCache* next = ec->next();
2413     delete ec;
2414     ec = next;
2415   }
2416   if (_pc_desc_container != nullptr) {
2417     delete _pc_desc_container;
2418   }
2419   delete[] _compiled_ic_data;


2420 
2421   if (_immutable_data != blob_end()) {
2422     int reference_count = get_immutable_data_references_counter();
2423     assert(reference_count > 0, "immutable data has no references");
2424 
2425     set_immutable_data_references_counter(reference_count - 1);
2426     // Free memory if this is the last nmethod referencing immutable data
2427     if (reference_count == 0) {
2428       os::free(_immutable_data);
2429     }
2430 
2431     _immutable_data = blob_end(); // Valid not null address
2432   }
2433 
2434   if (unregister_nmethod) {
2435     Universe::heap()->unregister_nmethod(this);
2436   }
2437   CodeCache::unregister_old_nmethod(this);
2438 
2439   JVMCI_ONLY( _metadata_size = 0; )
2440   CodeBlob::purge();
2441 }
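
purge() releases the shared immutable data through a reference counter so that only the last nmethod referencing it frees the allocation, all under CodeCache_lock. A minimal sketch of that release pattern (illustrative names, plain malloc/free instead of the HotSpot allocators):

    #include <cassert>
    #include <cstdlib>

    // Assumes the counter is only touched while a lock is held, as in purge().
    struct SharedBlock {
      void* data;
      int   refcount;
    };

    void release(SharedBlock* block) {
      assert(block->refcount > 0 && "releasing data that has no owners");
      block->refcount--;
      if (block->refcount == 0) {
        std::free(block->data);   // last owner frees the shared allocation
        block->data = nullptr;
      }
    }

    int main() {
      SharedBlock block{std::malloc(64), 2};
      release(&block);  // still one owner left
      release(&block);  // last owner: memory is freed here
      return block.data == nullptr ? 0 : 1;
    }
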

2472         MethodHandles::clean_dependency_context(call_site);
2473       } else {
2474         InstanceKlass* ik = deps.context_type();
2475         if (ik == nullptr) {
2476           continue;  // ignore things like evol_method
2477         }
2478         // During GC, the liveness of the dependee determines which class needs to be updated.
2479         // The GC may clean dependency contexts concurrently and in parallel.
2480         ik->clean_dependency_context();
2481       }
2482     }
2483   }
2484 }
2485 
2486 void nmethod::post_compiled_method(CompileTask* task) {
2487   task->mark_success();
2488   task->set_nm_content_size(content_size());
2489   task->set_nm_insts_size(insts_size());
2490   task->set_nm_total_size(total_size());
2491 
2492   // JVMTI -- compiled method notification (must be done outside lock)
2493   post_compiled_method_load_event();
2494 
2495   if (CompilationLog::log() != nullptr) {
2496     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2497   }
2498 
2499   const DirectiveSet* directive = task->directive();
2500   maybe_print_nmethod(directive);
2501 }
2502 
2503 // ------------------------------------------------------------------
2504 // post_compiled_method_load_event
2505 // new method for install_code() path
2506 // Transfer information from compilation to jvmti
2507 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2508   // This is a bad time for a safepoint.  We don't want
2509   // this nmethod to get unloaded while we're queueing the event.
2510   NoSafepointVerifier nsv;
2511 

3191 void nmethod::verify() {
3192   if (is_not_entrant())
3193     return;
3194 
3195   // assert(oopDesc::is_oop(method()), "must be valid");
3196 
3197   ResourceMark rm;
3198 
3199   if (!CodeCache::contains(this)) {
3200     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3201   }
3202 
3203   if (is_native_method())
3204     return;
3205 
3206   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3207   if (nm != this) {
3208     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3209   }
3210 
3211   for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3212     if (! p->verify(this)) {
3213       tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3214     }
3215   }
3216 
3217 #ifdef ASSERT
3218 #if INCLUDE_JVMCI
3219   {
3220     // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3221     ImmutableOopMapSet* oms = oop_maps();
3222     ImplicitExceptionTable implicit_table(this);
3223     for (uint i = 0; i < implicit_table.len(); i++) {
3224       int exec_offset = (int) implicit_table.get_exec_offset(i);
3225       if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3226         assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3227         bool found = false;
3228         for (int i = 0, imax = oms->count(); i < imax; i++) {
3229           if (oms->pair_at(i)->pc_offset() == exec_offset) {
3230             found = true;
3231             break;

3232           }

3233         }
3234         assert(found, "missing oopmap");
3235       }
3236     }
3237   }
3238 #endif
3239 #endif

3240 
3241   VerifyOopsClosure voc(this);
3242   oops_do(&voc);
3243   assert(voc.ok(), "embedded oops must be OK");
3244   Universe::heap()->verify_nmethod(this);
3245 
3246   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3247          nm->method()->external_name(), p2i(_oops_do_mark_link));
3248   verify_scopes();


3249 
3250   CompiledICLocker nm_verify(this);
3251   VerifyMetadataClosure vmc;
3252   metadata_do(&vmc);
3253 }
3254 
3255 
3256 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3257 
3258   // Verify IC only when nmethod installation is finished.
3259   if (!is_not_installed()) {
3260     if (CompiledICLocker::is_safe(this)) {
3261       if (is_inline_cache) {
3262         CompiledIC_at(this, call_site);
3263       } else {
3264         CompiledDirectCall::at(call_site);
3265       }
3266     } else {
3267       CompiledICLocker ml_verify(this);
3268       if (is_inline_cache) {

3397                                              p2i(nul_chk_table_end()),
3398                                              nul_chk_table_size());
3399   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3400                                              p2i(handler_table_begin()),
3401                                              p2i(handler_table_end()),
3402                                              handler_table_size());
3403   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3404                                              p2i(scopes_pcs_begin()),
3405                                              p2i(scopes_pcs_end()),
3406                                              scopes_pcs_size());
3407   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3408                                              p2i(scopes_data_begin()),
3409                                              p2i(scopes_data_end()),
3410                                              scopes_data_size());
3411 #if INCLUDE_JVMCI
3412   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3413                                              p2i(speculations_begin()),
3414                                              p2i(speculations_end()),
3415                                              speculations_size());
3416 #endif



3417 }
3418 
3419 void nmethod::print_code() {
3420   ResourceMark m;
3421   ttyLocker ttyl;
3422   // Call the specialized decode method of this class.
3423   decode(tty);
3424 }
3425 
3426 #ifndef PRODUCT  // the called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3427 
3428 void nmethod::print_dependencies_on(outputStream* out) {
3429   ResourceMark rm;
3430   stringStream st;
3431   st.print_cr("Dependencies:");
3432   for (Dependencies::DepStream deps(this); deps.next(); ) {
3433     deps.print_dependency(&st);
3434     InstanceKlass* ctxk = deps.context_type();
3435     if (ctxk != nullptr) {
3436       if (ctxk->is_dependent_nmethod(this)) {

3496   st->print("scopes:");
3497   if (scopes_pcs_begin() < scopes_pcs_end()) {
3498     st->cr();
3499     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3500       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3501         continue;
3502 
3503       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3504       while (sd != nullptr) {
3505         sd->print_on(st, p);  // print output ends with a newline
3506         sd = sd->sender();
3507       }
3508     }
3509   } else {
3510     st->print_cr(" <list empty>");
3511   }
3512 }
3513 #endif
3514 
3515 #ifndef PRODUCT  // RelocIterator supports printing only then.
3516 void nmethod::print_relocations() {
3517   ResourceMark m;       // in case methods get printed via the debugger
3518   tty->print_cr("relocations:");
3519   RelocIterator iter(this);
3520   iter.print_on(tty);
3521 }
3522 #endif
3523 
3524 void nmethod::print_pcs_on(outputStream* st) {
3525   ResourceMark m;       // in case methods get printed via debugger
3526   st->print("pc-bytecode offsets:");
3527   if (scopes_pcs_begin() < scopes_pcs_end()) {
3528     st->cr();
3529     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3530       p->print_on(st, this);  // print output ends with a newline
3531     }
3532   } else {
3533     st->print_cr(" <list empty>");
3534   }
3535 }
3536 
3537 void nmethod::print_handler_table() {
3538   ExceptionHandlerTable(this).print(code_begin());
3539 }
3540 

4315 void nmethod::update_speculation(JavaThread* thread) {
4316   jlong speculation = thread->pending_failed_speculation();
4317   if (speculation != 0) {
4318     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4319     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4320     thread->set_pending_failed_speculation(0);
4321   }
4322 }
4323 
4324 const char* nmethod::jvmci_name() {
4325   if (jvmci_nmethod_data() != nullptr) {
4326     return jvmci_nmethod_data()->name();
4327   }
4328   return nullptr;
4329 }
4330 
4331 bool nmethod::jvmci_skip_profile_deopt() const {
4332   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4333 }
4334 #endif

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "code/aotCodeCache.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compilerDirectives.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"

 990              _method->method_holder()->external_name(),
 991              _method->name()->as_C_string(),
 992              _method->signature()->as_C_string(),
 993              compile_id());
 994   }
 995   return check_evol.has_evol_dependency();
 996 }
 997 
 998 int nmethod::total_size() const {
 999   return
1000     consts_size()        +
1001     insts_size()         +
1002     stub_size()          +
1003     scopes_data_size()   +
1004     scopes_pcs_size()    +
1005     handler_table_size() +
1006     nul_chk_table_size();
1007 }
1008 
1009 const char* nmethod::compile_kind() const {
1010   if (is_osr_method()) return "osr";
1011   if (preloaded())     return "AP";
1012   if (is_aot())        return "A";
1013 
1014   if (method() != nullptr && is_native_method()) {
1015     if (method()->is_continuation_native_intrinsic()) {
1016       return "cnt";
1017     }
1018     return "c2n";
1019   }
1020   return nullptr;
1021 }
1022 
1023 const char* nmethod::compiler_name() const {
1024   return compilertype2name(_compiler_type);
1025 }
1026 
1027 #ifdef ASSERT
1028 class CheckForOopsClosure : public OopClosure {
1029   bool _found_oop = false;
1030  public:
1031   virtual void do_oop(oop* o) { _found_oop = true; }
1032   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1033   bool found_oop() { return _found_oop; }

1099     nm = new (native_nmethod_size, allow_NonNMethod_space)
1100     nmethod(method(), compiler_none, native_nmethod_size,
1101             compile_id, &offsets,
1102             code_buffer, frame_size,
1103             basic_lock_owner_sp_offset,
1104             basic_lock_sp_offset,
1105             oop_maps, mutable_data_size);
1106     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1107     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1108   }
1109 
1110   if (nm != nullptr) {
1111     // verify nmethod
1112     DEBUG_ONLY(nm->verify();) // might block
1113 
1114     nm->log_new_nmethod();
1115   }
1116   return nm;
1117 }
1118 
1119 void nmethod::record_nmethod_dependency() {
1120   // To make dependency checking during class loading fast, record
1121   // the nmethod dependencies in the classes it is dependent on.
1122   // This allows the dependency checking code to simply walk the
1123   // class hierarchy above the loaded class, checking only nmethods
1124   // which are dependent on those classes.  The slow way is to
1125   // check every nmethod for dependencies, which makes it linear in
1126   // the number of methods compiled.  For applications with a lot of
1127   // classes the slow way is too slow.
1128   for (Dependencies::DepStream deps(this); deps.next(); ) {
1129     if (deps.type() == Dependencies::call_site_target_value) {
1130       // CallSite dependencies are managed on a per-CallSite instance basis.
1131       oop call_site = deps.argument_oop(0);
1132       MethodHandles::add_dependent_nmethod(call_site, this);
1133     } else {
1134       InstanceKlass* ik = deps.context_type();
1135       if (ik == nullptr) {
1136         continue;  // ignore things like evol_method
1137       }
1138       // record this nmethod as dependent on this klass
1139       ik->add_dependent_nmethod(this);
1140     }
1141   }
1142 }
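
record_nmethod_dependency() implements the scheme described in the comment above: each class (or CallSite) keeps its own list of dependent nmethods, so invalidation only has to walk that list instead of scanning every compiled method. The standalone sketch below shows the bookkeeping idea with invented types; it is not the HotSpot data structure:

    #include <cstdio>
    #include <string>
    #include <unordered_map>
    #include <vector>

    struct CompiledMethod { std::string name; };

    // Per-class list of dependent compiled methods.
    std::unordered_map<std::string, std::vector<CompiledMethod*>> g_dependents;

    void add_dependent(const std::string& klass, CompiledMethod* m) {
      g_dependents[klass].push_back(m);
    }

    void invalidate_dependents(const std::string& klass) {
      for (CompiledMethod* m : g_dependents[klass]) {
        std::printf("invalidating %s (depends on %s)\n", m->name.c_str(), klass.c_str());
      }
      g_dependents.erase(klass);
    }

    int main() {
      CompiledMethod foo{"Foo::bar"};
      add_dependent("java/util/List", &foo);
      invalidate_dependents("java/util/List");  // touches only the recorded list
      return 0;
    }
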
1143 
1144 nmethod* nmethod::new_nmethod(const methodHandle& method,
1145   int compile_id,
1146   int entry_bci,
1147   CodeOffsets* offsets,
1148   int orig_pc_offset,
1149   DebugInformationRecorder* debug_info,
1150   Dependencies* dependencies,
1151   CodeBuffer* code_buffer, int frame_size,
1152   OopMapSet* oop_maps,
1153   ExceptionHandlerTable* handler_table,
1154   ImplicitExceptionTable* nul_chk_table,
1155   AbstractCompiler* compiler,
1156   CompLevel comp_level
1157 #if INCLUDE_JVMCI
1158   , char* speculations,
1159   int speculations_len,
1160   JVMCINMethodData* jvmci_data
1161 #endif
1162 )
1163 {

1190 
1191   int mutable_data_size = required_mutable_data_size(code_buffer
1192     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1193 
1194   {
1195     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1196 
1197     nm = new (nmethod_size, comp_level)
1198     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1199             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1200             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1201             handler_table, nul_chk_table, compiler, comp_level
1202 #if INCLUDE_JVMCI
1203             , speculations,
1204             speculations_len,
1205             jvmci_data
1206 #endif
1207             );
1208 
1209     if (nm != nullptr) {
1210       nm->record_nmethod_dependency();
1211       NOT_PRODUCT(note_java_nmethod(nm));
1212     }
1213   }
1214   // Do verification and logging outside CodeCache_lock.
1215   if (nm != nullptr) {
1216 
1217 #ifdef ASSERT
1218     LogTarget(Debug, aot, codecache, nmethod) log;
1219     if (log.is_enabled()) {
1220       LogStream out(log);
1221       out.print_cr("== new_nmethod 2");
1222       FlagSetting fs(PrintRelocations, true);
1223       nm->print_on_impl(&out);
1224       nm->decode(&out);
1225     }
1226 #endif
1227 
1228     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1229     DEBUG_ONLY(nm->verify();)
1230     nm->log_new_nmethod();
1231   }
1232   return nm;
1233 }
1234 
1235 nmethod* nmethod::restore(address code_cache_buffer,
1236                           const methodHandle& method,
1237                           int compile_id,
1238                           address reloc_data,
1239                           GrowableArray<Handle>& oop_list,
1240                           GrowableArray<Metadata*>& metadata_list,
1241                           ImmutableOopMapSet* oop_maps,
1242                           address immutable_data,
1243                           GrowableArray<Handle>& reloc_imm_oop_list,
1244                           GrowableArray<Metadata*>& reloc_imm_metadata_list,
1245                           AOTCodeReader* aot_code_reader)
1246 {
1247   CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
1248   nmethod* nm = (nmethod*)code_cache_buffer;
1249   nm->set_method(method());
1250   nm->_compile_id = compile_id;
1251   nm->set_immutable_data(immutable_data);
1252   nm->copy_values(&oop_list);
1253   nm->copy_values(&metadata_list);
1254 
1255   aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);
1256 
1257 #ifndef PRODUCT
1258   nm->asm_remarks().init();
1259   aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
1260   nm->dbg_strings().init();
1261   aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
1262 #endif
1263 
1264   // Flush the code block
1265   ICache::invalidate_range(nm->code_begin(), nm->code_size());
1266 
1267   // Create the PcDesc cache after the PcDesc data is copied; the data is used to initialize the cache
1268   nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());
1269 
1270   nm->set_aot_code_entry(aot_code_reader->aot_code_entry());
1271 
1272   nm->post_init();
1273   return nm;
1274 }
1275 
1276 nmethod* nmethod::new_nmethod(nmethod* archived_nm,
1277                               const methodHandle& method,
1278                               AbstractCompiler* compiler,
1279                               int compile_id,
1280                               address reloc_data,
1281                               GrowableArray<Handle>& oop_list,
1282                               GrowableArray<Metadata*>& metadata_list,
1283                               ImmutableOopMapSet* oop_maps,
1284                               address immutable_data,
1285                               GrowableArray<Handle>& reloc_imm_oop_list,
1286                               GrowableArray<Metadata*>& reloc_imm_metadata_list,
1287                               AOTCodeReader* aot_code_reader)
1288 {
1289   nmethod* nm = nullptr;
1290   int nmethod_size = archived_nm->size();
1291   // create nmethod
1292   {
1293     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1294     address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
1295     if (code_cache_buffer != nullptr) {
1296       nm = archived_nm->restore(code_cache_buffer,
1297                                 method,
1298                                 compile_id,
1299                                 reloc_data,
1300                                 oop_list,
1301                                 metadata_list,
1302                                 oop_maps,
1303                                 immutable_data,
1304                                 reloc_imm_oop_list,
1305                                 reloc_imm_metadata_list,
1306                                 aot_code_reader);
1307       nm->record_nmethod_dependency();
1308       NOT_PRODUCT(note_java_nmethod(nm));
1309     }
1310   }
1311   // Do verification and logging outside CodeCache_lock.
1312   if (nm != nullptr) {
1313 #ifdef ASSERT
1314     LogTarget(Debug, aot, codecache, nmethod) log;
1315     if (log.is_enabled()) {
1316       LogStream out(log);
1317       out.print_cr("== new_nmethod 2");
1318       FlagSetting fs(PrintRelocations, true);
1319       nm->print_on_impl(&out);
1320       nm->decode(&out);
1321     }
1322 #endif
1323     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1324     DEBUG_ONLY(nm->verify();)
1325     nm->log_new_nmethod();
1326   }
1327   return nm;
1328 }
1329 
1330 // Fill in default values for various fields
1331 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1332   // avoid uninitialized fields, even for short time periods
1333   _exception_cache            = nullptr;
1334   _gc_data                    = nullptr;
1335   _oops_do_mark_link          = nullptr;
1336   _compiled_ic_data           = nullptr;
1337 
1338   _is_unloading_state         = 0;
1339   _state                      = not_installed;
1340 
1341   _has_unsafe_access          = 0;
1342   _has_wide_vectors           = 0;
1343   _has_monitors               = 0;
1344   _has_scoped_access          = 0;
1345   _has_flushed_dependencies   = 0;
1346   _is_unlinked                = 0;
1347   _load_reported              = 0; // jvmti state
1348   _preloaded                  = 0;
1349   _has_clinit_barriers        = 0;
1350 
1351   _used                       = false;
1352   _deoptimization_status      = not_marked;
1353 
1354   // SECT_CONSTS is first in code buffer so the offset should be 0.
1355   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1356   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1357 
1358   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1359 
1360   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1361   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1362 
1363   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1364 }
1365 
1366 // Post initialization
1367 void nmethod::post_init() {
1368   clear_unloading_state();
1369 
1370   finalize_relocations();
1371 

1403     init_defaults(code_buffer, offsets);
1404 
1405     _osr_entry_point         = nullptr;
1406     _pc_desc_container       = nullptr;
1407     _entry_bci               = InvocationEntryBci;
1408     _compile_id              = compile_id;
1409     _comp_level              = CompLevel_none;
1410     _compiler_type           = type;
1411     _orig_pc_offset          = 0;
1412     _num_stack_arg_slots     = 0;
1413 
1414     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1415       // Continuation enter intrinsic
1416       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1417     } else {
1418       _exception_offset      = 0;
1419     }
1420     // Native wrappers do not have deopt handlers. Make the values
1421     // something that will never match a pc like the nmethod vtable entry
1422     _deopt_handler_offset    = 0;
1423     _aot_code_entry          = nullptr;
1424     _method_profiling_count  = 0;
1425     _unwind_handler_offset   = 0;
1426 
1427     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1428     uint16_t metadata_size;
1429     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1430     JVMCI_ONLY( _metadata_size = metadata_size; )
1431     assert(_mutable_data_size == _relocation_size + metadata_size,
1432            "wrong mutable data size: %d != %d + %d",
1433            _mutable_data_size, _relocation_size, metadata_size);
1434 
1435     // native wrapper does not have read-only data, but we need a unique non-null address
1436     _immutable_data          = blob_end();
1437     _immutable_data_size     = 0;
1438     _nul_chk_table_offset    = 0;
1439     _handler_table_offset    = 0;
1440     _scopes_pcs_offset       = 0;
1441     _scopes_data_offset      = 0;
1442 #if INCLUDE_JVMCI
1443     _speculations_offset     = 0;
1444 #endif

1464     // This is both handled in decode2(), called via print_code() -> decode()
1465     if (PrintNativeNMethods) {
1466       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1467       print_code();
1468       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1469 #if defined(SUPPORT_DATA_STRUCTS)
1470       if (AbstractDisassembler::show_structs()) {
1471         if (oop_maps != nullptr) {
1472           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1473           oop_maps->print_on(tty);
1474           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1475         }
1476       }
1477 #endif
1478     } else {
1479       print(); // print the header part only.
1480     }
1481 #if defined(SUPPORT_DATA_STRUCTS)
1482     if (AbstractDisassembler::show_structs()) {
1483       if (PrintRelocations) {
1484         print_relocations_on(tty);
1485         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1486       }
1487     }
1488 #endif
1489     if (xtty != nullptr) {
1490       xtty->tail("print_native_nmethod");
1491     }
1492   }
1493 }
1494 
1495 
1496 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1497 {
1498 
1499   if (nm._oop_maps != nullptr) {
1500     _oop_maps                   = nm._oop_maps->clone();
1501   } else {
1502     _oop_maps                   = nullptr;
1503   }
1504 

1783   CompLevel comp_level
1784 #if INCLUDE_JVMCI
1785   , char* speculations,
1786   int speculations_len,
1787   JVMCINMethodData* jvmci_data
1788 #endif
1789   )
1790   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1791              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1792   _deoptimization_generation(0),
1793   _gc_epoch(CodeCache::gc_epoch()),
1794   _method(method),
1795   _osr_link(nullptr)
1796 {
1797   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1798   {
1799     DEBUG_ONLY(NoSafepointVerifier nsv;)
1800     assert_locked_or_safepoint(CodeCache_lock);
1801 
1802     init_defaults(code_buffer, offsets);
1803     _aot_code_entry          = nullptr; // a runtime-compiled nmethod does not have an AOTCodeEntry
1804     _method_profiling_count  = 0;
1805 
1806     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1807     _entry_bci       = entry_bci;
1808     _compile_id      = compile_id;
1809     _comp_level      = comp_level;
1810     _compiler_type   = type;
1811     _orig_pc_offset  = orig_pc_offset;
1812 
1813     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1814 
1815     set_ctable_begin(header_begin() + content_offset());
1816 
1817 #if INCLUDE_JVMCI
1818     if (compiler->is_jvmci()) {
1819       // JVMCI might not produce any stub sections
1820       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1821         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1822       } else {
1823         _exception_offset        = -1;
1824       }

1906     // Copy speculations to nmethod
1907     if (speculations_size() != 0) {
1908       memcpy(speculations_begin(), speculations, speculations_len);
1909     }
1910 #endif
1911     set_immutable_data_references_counter(1);
1912 
1913     post_init();
1914 
1915     // we use the entry point information to find out whether a method is
1916     // static or non-static
1917     assert(compiler->is_c2() || compiler->is_jvmci() ||
1918            _method->is_static() == (entry_point() == verified_entry_point()),
1919            "entry points must be the same for static methods and vice versa");
1920   }
1921 }
1922 
1923 // Print a short set of xml attributes to identify this nmethod.  The
1924 // output should be embedded in some other element.
1925 void nmethod::log_identity(xmlStream* log) const {
1926   assert(log->inside_attrs_or_error(), "printing attributes");
1927   log->print(" compile_id='%d'", compile_id());
1928   const char* nm_kind = compile_kind();
1929   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1930   log->print(" compiler='%s'", compiler_name());
1931   if (TieredCompilation) {
1932     log->print(" compile_level='%d'", comp_level());
1933   }
1934 #if INCLUDE_JVMCI
1935   if (jvmci_nmethod_data() != nullptr) {
1936     const char* jvmci_name = jvmci_nmethod_data()->name();
1937     if (jvmci_name != nullptr) {
1938       log->print(" jvmci_mirror_name='");
1939       log->text("%s", jvmci_name);
1940       log->print("'");
1941     }
1942   }
1943 #endif
1944 }
1945 
1946 
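     // Log the offset of a section only if the section is non-empty.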
1947 #define LOG_OFFSET(log, name)                    \
1948   if (p2i(name##_end()) - p2i(name##_begin())) \
1949     log->print(" " XSTR(name) "_offset='%zd'"    , \
1950                p2i(name##_begin()) - p2i(this))
1951 
1952 

2067       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2068       if (oop_maps() != nullptr) {
2069         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
2070         oop_maps()->print_on(tty);
2071         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2072       }
2073     }
2074 #endif
2075   } else {
2076     print(); // print the header part only.
2077   }
2078 
2079 #if defined(SUPPORT_DATA_STRUCTS)
2080   if (AbstractDisassembler::show_structs()) {
2081     methodHandle mh(Thread::current(), _method);
2082     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
2083       print_scopes();
2084       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2085     }
2086     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
2087       print_relocations_on(tty);
2088       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2089     }
2090     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
2091       print_dependencies_on(tty);
2092       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2093     }
2094     if (printmethod || PrintExceptionHandlers) {
2095       print_handler_table();
2096       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2097       print_nul_chk_table();
2098       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2099     }
2100 
2101     if (printmethod) {
2102       print_recorded_oops();
2103       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2104       print_recorded_metadata();
2105       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2106     }
2107   }
2108 #endif
2109 
2110   if (xtty != nullptr) {
2111     xtty->tail("print_nmethod");
2112   }
2113 }
2114 
2115 
2116 // Promote one word from an assembly-time handle to a live embedded oop.
2117 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2118   if (handle == nullptr ||
2119       // As a special case, IC oops are initialized to 1 or -1.
2120       handle == (jobject) Universe::non_oop_word()) {
2121     *(void**)dest = handle;
2122   } else {
2123     *dest = JNIHandles::resolve_non_null(handle);
2124   }
2125 }
2126 
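     // Copy already-resolved oops from the Handle array into the nmethod's oops section.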
2127 void nmethod::copy_values(GrowableArray<Handle>* array) {
2128   int length = array->length();
2129   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2130   oop* dest = oops_begin();
2131   for (int index = 0 ; index < length; index++) {
2132     dest[index] = array->at(index)();
2133   }
2134 }
2135 
2136 // Have to have the same name because it's called by a template
2137 void nmethod::copy_values(GrowableArray<jobject>* array) {
2138   int length = array->length();
2139   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2140   oop* dest = oops_begin();
2141   for (int index = 0 ; index < length; index++) {
2142     initialize_immediate_oop(&dest[index], array->at(index));
2143   }
2144 
2145   // Now we can fix up all the oops in the code.  We need to do this
2146   // in the code because the assembler uses jobjects as placeholders.
2147   // The code and relocations have already been initialized by the
2148   // CodeBlob constructor, so it is valid even at this early point to
2149   // iterate over relocations and patch the code.
2150   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2151 }
2152 
2153 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2154   int length = array->length();

2162 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2163   // re-patch all oop-bearing instructions, just in case some oops moved
2164   RelocIterator iter(this, begin, end);
2165   while (iter.next()) {
2166     if (iter.type() == relocInfo::oop_type) {
2167       oop_Relocation* reloc = iter.oop_reloc();
2168       if (initialize_immediates && reloc->oop_is_immediate()) {
2169         oop* dest = reloc->oop_addr();
2170         jobject obj = *reinterpret_cast<jobject*>(dest);
2171         initialize_immediate_oop(dest, obj);
2172       }
2173       // Refresh the oop-related bits of this instruction.
2174       reloc->fix_oop_relocation();
2175     } else if (iter.type() == relocInfo::metadata_type) {
2176       metadata_Relocation* reloc = iter.metadata_reloc();
2177       reloc->fix_metadata_relocation();
2178     }
2179   }
2180 }
2181 
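     // Collect the oops and metadata that are embedded as immediates in this nmethod's
     // relocations; the oops are wrapped in Handles so they stay live for the caller.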
2182 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
2183   RelocIterator iter(this);
2184   while (iter.next()) {
2185     if (iter.type() == relocInfo::oop_type) {
2186       oop_Relocation* reloc = iter.oop_reloc();
2187       if (reloc->oop_is_immediate()) {
2188         oop dest = reloc->oop_value();
2189         Handle h(thread, dest);
2190         oop_list.append(h);
2191       }
2192     } else if (iter.type() == relocInfo::metadata_type) {
2193       metadata_Relocation* reloc = iter.metadata_reloc();
2194       if (reloc->metadata_is_immediate()) {
2195         Metadata* m = reloc->metadata_value();
2196         metadata_list.append(m);
2197       }
2198     }
2199   }
2200 }
2201 
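     // Patch the post-call nop at 'pc' with the oopmap slot and the offset of 'pc'
     // within the nmethod, so a stack walk can recover both without a code cache lookup.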
2202 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2203   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2204   intptr_t cbaddr = (intptr_t) nm;
2205   intptr_t offset = ((intptr_t) pc) - cbaddr;
2206 
2207   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2208   if (oopmap_slot < 0) { // this can happen during asynchronous (non-safepoint) stack walks
2209     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2210   } else if (!nop->patch(oopmap_slot, offset)) {
2211     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2212   }
2213 }
2214 
2215 void nmethod::finalize_relocations() {
2216   NoSafepointVerifier nsv;
2217 
2218   GrowableArray<NativeMovConstReg*> virtual_call_data;
2219 
2220   // Make sure that post-call nops have their nmethod offsets filled in eagerly so
2221   // we don't have to race with deoptimization.

2348   // be alive the previous completed marking cycle.
2349   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2350 }
2351 
2352 void nmethod::inc_decompile_count() {
2353   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2354   // Could be gated by ProfileTraps, but do not bother...
2355 #if INCLUDE_JVMCI
2356   if (jvmci_skip_profile_deopt()) {
2357     return;
2358   }
2359 #endif
2360   Method* m = method();
2361   if (m == nullptr)  return;
2362   MethodData* mdo = m->method_data();
2363   if (mdo == nullptr)  return;
2364   // There is a benign race here.  See comments in methodData.hpp.
2365   mdo->inc_decompile_count();
2366 }
2367 
2368 void nmethod::inc_method_profiling_count() {
2369   AtomicAccess::inc(&_method_profiling_count);
2370 }
2371 
2372 uint64_t nmethod::method_profiling_count() {
2373   return _method_profiling_count;
2374 }
2375 
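     // Try to move _state forward to new_state. State values only increase, so a
     // request to go back to an earlier state is rejected and false is returned.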
2376 bool nmethod::try_transition(signed char new_state_int) {
2377   signed char new_state = new_state_int;
2378   assert_lock_strong(NMethodState_lock);
2379   signed char old_state = _state;
2380   if (old_state >= new_state) {
2381     // Ensure monotonicity of transitions.
2382     return false;
2383   }
2384   AtomicAccess::store(&_state, new_state);
2385   return true;
2386 }
2387 
2388 void nmethod::invalidate_osr_method() {
2389   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2390   // Remove from list of active nmethods
2391   if (method() != nullptr) {
2392     method()->method_holder()->remove_osr_nmethod(this);
2393   }
2394 }
2395 

2405     }
2406   }
2407 
2408   ResourceMark rm;
2409   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2410   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2411 
2412   CompileTask::print_ul(this, ss.freeze());
2413   if (PrintCompilation) {
2414     print_on_with_msg(tty, ss.freeze());
2415   }
2416 }
2417 
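     // Detach this nmethod from its Method so that future invocations no longer
     // dispatch to this code.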
2418 void nmethod::unlink_from_method() {
2419   if (method() != nullptr) {
2420     method()->unlink_code(this);
2421   }
2422 }
2423 
2424 // Invalidate code
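     // Mark this nmethod not entrant so that no new activations can enter it.
     // Returns false if it is already not entrant or is currently being unloaded.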
2425 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2426   // This can be called while the system is already at a safepoint, which is OK.
2427   NoSafepointVerifier nsv;
2428 
2429   if (is_unloading()) {
2430     // If the nmethod is unloading, then it is already not entrant through
2431     // the nmethod entry barriers. No need to do anything; GC will unload it.
2432     return false;
2433   }
2434 
2435   if (AtomicAccess::load(&_state) == not_entrant) {
2436     // Avoid taking the lock if already in required state.
2437     // This is safe from races because the state is an end-state,
2438     // which the nmethod cannot back out of once entered.
2439     // No need for fencing either.
2440     return false;
2441   }
2442 
2443   {
2444     // Enter critical section.  Does not block for safepoint.
2445     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2467     }
2468 
2469     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2470     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2471       // If nmethod entry barriers are not supported, we won't mark
2472       // nmethods as on-stack when they become on-stack. So we
2473       // degrade to a less accurate flushing strategy, for now.
2474       mark_as_maybe_on_stack();
2475     }
2476 
2477     // Change state
2478     bool success = try_transition(not_entrant);
2479     assert(success, "Transition can't fail");
2480 
2481     // Log the transition once
2482     log_state_change(invalidation_reason);
2483 
2484     // Remove nmethod from method.
2485     unlink_from_method();
2486 
2487     if (!keep_aot_entry) {
2488       // Keep the AOT code if this nmethod was simply replaced;
2489       // otherwise make the AOT code not entrant too.
2490       AOTCodeCache::invalidate(_aot_code_entry);
2491     }
2492 
2493     CompileBroker::log_not_entrant(this);
2494   } // leave critical region under NMethodState_lock
2495 
2496 #if INCLUDE_JVMCI
2497   // Invalidating the nmethod mirror can't be done while holding the NMethodState_lock
2498   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2499   if (nmethod_data != nullptr) {
2500     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2501   }
2502 #endif
2503 
2504 #ifdef ASSERT
2505   if (is_osr_method() && method() != nullptr) {
2506     // Make sure osr nmethod is invalidated, i.e. not on the list
2507     bool found = method()->method_holder()->remove_osr_nmethod(this);
2508     assert(!found, "osr nmethod should have been invalidated");
2509   }
2510 #endif
2511 
2512   return true;
2513 }

2538     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2539             nmethod::InvalidationReason::UNLOADING_COLD :
2540             nmethod::InvalidationReason::UNLOADING);
2541   }
2542 #endif
2543 
2544   // Post the event before flushing, as the jmethodID is still being used
2545   post_compiled_method_unload();
2546 
2547   // Register for flushing when it is safe. For concurrent class unloading,
2548   // that would be after the unloading handshake, and for STW class unloading
2549   // that would be when getting back to the VM thread.
2550   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2551 }
2552 
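     // Deallocate the side data owned by this nmethod (exception cache, PcDesc
     // container, compiled IC data, immutable data) as part of flushing it from
     // the code cache.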
2553 void nmethod::purge(bool unregister_nmethod) {
2554 
2555   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2556 
2557   // completely deallocate this method
2558   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));
2559 
2560   LogTarget(Debug, codecache) lt;
2561   if (lt.is_enabled()) {
2562     ResourceMark rm;
2563     LogStream ls(lt);
2564     const char* method_name = method()->name()->as_C_string();
2565     const size_t codecache_capacity = CodeCache::capacity()/1024;
2566     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2567     ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2568               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2569               compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2570               codecache_capacity, codecache_free_space, method_name, compiler_name());
2571   }
2572 
2573   // We need to deallocate any ExceptionCache data.
2574   // Note that we do not need to grab the nmethod lock for this; it
2575   // had better be thread-safe if we're disposing of it!
2576   ExceptionCache* ec = exception_cache();
2577   while (ec != nullptr) {
2578     ExceptionCache* next = ec->next();
2579     delete ec;
2580     ec = next;
2581   }
2582   if (_pc_desc_container != nullptr) {
2583     delete _pc_desc_container;
2584   }
2585   if (_compiled_ic_data != nullptr) {
2586     delete[] _compiled_ic_data;
2587   }
2588 
2589   if (_immutable_data != blob_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
2590     int reference_count = get_immutable_data_references_counter();
2591     assert(reference_count > 0, "immutable data has no references");
2592 
2593     set_immutable_data_references_counter(reference_count - 1);
2594     // Free the memory if this was the last nmethod referencing the immutable data
2595     if (reference_count == 1) {
2596       os::free(_immutable_data);
2597     }
2598 
2599     _immutable_data = blob_end(); // Valid non-null address
2600   }
2601 
2602   if (unregister_nmethod) {
2603     Universe::heap()->unregister_nmethod(this);
2604   }
2605   CodeCache::unregister_old_nmethod(this);
2606 
2607   JVMCI_ONLY( _metadata_size = 0; )
2608   CodeBlob::purge();
2609 }

2640         MethodHandles::clean_dependency_context(call_site);
2641       } else {
2642         InstanceKlass* ik = deps.context_type();
2643         if (ik == nullptr) {
2644           continue;  // ignore things like evol_method
2645         }
2646         // During GC, the liveness of the dependee determines which class needs to be updated.
2647         // The GC may clean dependency contexts concurrently and in parallel.
2648         ik->clean_dependency_context();
2649       }
2650     }
2651   }
2652 }
2653 
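     // Record the installed code sizes on the CompileTask, post the JVMTI
     // compiled-method-load event, and print/log the nmethod as directed.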
2654 void nmethod::post_compiled_method(CompileTask* task) {
2655   task->mark_success();
2656   task->set_nm_content_size(content_size());
2657   task->set_nm_insts_size(insts_size());
2658   task->set_nm_total_size(total_size());
2659 
2660   // task->is_aot_load() is true only for loaded AOT code.
2661   // nmethod::_aot_code_entry is set for both loaded and stored AOT code,
2662   // so that the entry can be invalidated when the nmethod is deoptimized.
2663   // VerifyAOTCode is an option to not store AOT code in the archive.
2664   guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");
2665 
2666   // JVMTI -- compiled method notification (must be done outside lock)
2667   post_compiled_method_load_event();
2668 
2669   if (CompilationLog::log() != nullptr) {
2670     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2671   }
2672 
2673   const DirectiveSet* directive = task->directive();
2674   maybe_print_nmethod(directive);
2675 }
2676 
2677 // ------------------------------------------------------------------
2678 // post_compiled_method_load_event
2679 // New method for the install_code() path.
2680 // Transfer information from the compilation to JVMTI.
2681 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2682   // This is a bad time for a safepoint.  We don't want
2683   // this nmethod to get unloaded while we're queueing the event.
2684   NoSafepointVerifier nsv;
2685 

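     // Check internal consistency: code cache membership, PcDescs, embedded oops,
     // scope descriptors and metadata. Not-entrant and native nmethods are skipped.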
3365 void nmethod::verify() {
3366   if (is_not_entrant())
3367     return;
3368 
3369   // assert(oopDesc::is_oop(method()), "must be valid");
3370 
3371   ResourceMark rm;
3372 
3373   if (!CodeCache::contains(this)) {
3374     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3375   }
3376 
3377   if (is_native_method())
3378     return;
3379 
3380   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3381   if (nm != this) {
3382     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3383   }
3384 
3385   // Verification can be triggered during shutdown after the AOTCodeCache is closed.
3386   // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3387   if (!is_aot() || AOTCodeCache::is_on()) {
3388     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3389       if (!p->verify(this)) {
3390         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3391       }
3392     }

3393 
3394 #ifdef ASSERT
3395 #if INCLUDE_JVMCI
3396     {
3397       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3398       ImmutableOopMapSet* oms = oop_maps();
3399       ImplicitExceptionTable implicit_table(this);
3400       for (uint i = 0; i < implicit_table.len(); i++) {
3401         int exec_offset = (int) implicit_table.get_exec_offset(i);
3402         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3403           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3404           bool found = false;
3405           for (int j = 0, jmax = oms->count(); j < jmax; j++) {
3406             if (oms->pair_at(j)->pc_offset() == exec_offset) {
3407               found = true;
3408               break;
3409             }
3410           }
3411           assert(found, "missing oopmap");
3412         }

3413       }
3414     }

3415 #endif
3416 #endif
3417   }
3418 
3419   VerifyOopsClosure voc(this);
3420   oops_do(&voc);
3421   assert(voc.ok(), "embedded oops must be OK");
3422   Universe::heap()->verify_nmethod(this);
3423 
3424   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3425          nm->method()->external_name(), p2i(_oops_do_mark_link));
3426   if (!is_aot() || AOTCodeCache::is_on()) {
3427     verify_scopes();
3428   }
3429 
3430   CompiledICLocker nm_verify(this);
3431   VerifyMetadataClosure vmc;
3432   metadata_do(&vmc);
3433 }
3434 
3435 
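     // Verify the call site at an interrupt point: an inline cache site is checked
     // via CompiledIC_at(), a direct call site via CompiledDirectCall::at().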
3436 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3437 
3438   // Verify IC only when nmethod installation is finished.
3439   if (!is_not_installed()) {
3440     if (CompiledICLocker::is_safe(this)) {
3441       if (is_inline_cache) {
3442         CompiledIC_at(this, call_site);
3443       } else {
3444         CompiledDirectCall::at(call_site);
3445       }
3446     } else {
3447       CompiledICLocker ml_verify(this);
3448       if (is_inline_cache) {

3577                                              p2i(nul_chk_table_end()),
3578                                              nul_chk_table_size());
3579   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3580                                              p2i(handler_table_begin()),
3581                                              p2i(handler_table_end()),
3582                                              handler_table_size());
3583   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3584                                              p2i(scopes_pcs_begin()),
3585                                              p2i(scopes_pcs_end()),
3586                                              scopes_pcs_size());
3587   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3588                                              p2i(scopes_data_begin()),
3589                                              p2i(scopes_data_end()),
3590                                              scopes_data_size());
3591 #if INCLUDE_JVMCI
3592   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3593                                              p2i(speculations_begin()),
3594                                              p2i(speculations_end()),
3595                                              speculations_size());
3596 #endif
3597   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3598     _aot_code_entry->print(st);
3599   }
3600 }
3601 
3602 void nmethod::print_code() {
3603   ResourceMark m;
3604   ttyLocker ttyl;
3605   // Call the specialized decode method of this class.
3606   decode(tty);
3607 }
3608 
3609 #ifndef PRODUCT  // The called InstanceKlass methods are available only then; declared as PRODUCT_RETURN.
3610 
3611 void nmethod::print_dependencies_on(outputStream* out) {
3612   ResourceMark rm;
3613   stringStream st;
3614   st.print_cr("Dependencies:");
3615   for (Dependencies::DepStream deps(this); deps.next(); ) {
3616     deps.print_dependency(&st);
3617     InstanceKlass* ctxk = deps.context_type();
3618     if (ctxk != nullptr) {
3619       if (ctxk->is_dependent_nmethod(this)) {

3679   st->print("scopes:");
3680   if (scopes_pcs_begin() < scopes_pcs_end()) {
3681     st->cr();
3682     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3683       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3684         continue;
3685 
3686       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3687       while (sd != nullptr) {
3688         sd->print_on(st, p);  // print output ends with a newline
3689         sd = sd->sender();
3690       }
3691     }
3692   } else {
3693     st->print_cr(" <list empty>");
3694   }
3695 }
3696 #endif
3697 
3698 #ifndef PRODUCT  // RelocIterator supports printing only then.
3699 void nmethod::print_relocations_on(outputStream* st) {
3700   ResourceMark m;       // in case methods get printed via the debugger
3701   st->print_cr("relocations:");
3702   RelocIterator iter(this);
3703   iter.print_on(st);
3704 }
3705 #endif
3706 
3707 void nmethod::print_pcs_on(outputStream* st) {
3708   ResourceMark m;       // in case methods get printed via debugger
3709   st->print("pc-bytecode offsets:");
3710   if (scopes_pcs_begin() < scopes_pcs_end()) {
3711     st->cr();
3712     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3713       p->print_on(st, this);  // print output ends with a newline
3714     }
3715   } else {
3716     st->print_cr(" <list empty>");
3717   }
3718 }
3719 
3720 void nmethod::print_handler_table() {
3721   ExceptionHandlerTable(this).print(code_begin());
3722 }
3723 

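     // If the current thread has recorded a failed speculation, append it to this
     // nmethod's failed speculation list and clear the thread's pending value.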
4498 void nmethod::update_speculation(JavaThread* thread) {
4499   jlong speculation = thread->pending_failed_speculation();
4500   if (speculation != 0) {
4501     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4502     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4503     thread->set_pending_failed_speculation(0);
4504   }
4505 }
4506 
4507 const char* nmethod::jvmci_name() {
4508   if (jvmci_nmethod_data() != nullptr) {
4509     return jvmci_nmethod_data()->name();
4510   }
4511   return nullptr;
4512 }
4513 
4514 bool nmethod::jvmci_skip_profile_deopt() const {
4515   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4516 }
4517 #endif
4518 
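     // Reset runtime-only state (GC epoch, profiling counts, links and caches)
     // before this nmethod is archived in the AOT code cache.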
4519 void nmethod::prepare_for_archiving_impl() {
4520   CodeBlob::prepare_for_archiving_impl();
4521   _deoptimization_generation = 0;
4522   _gc_epoch = 0;
4523   _method_profiling_count = 0;
4524   _osr_link = nullptr;
4525   _method = nullptr;
4526   _immutable_data = nullptr;
4527   _pc_desc_container = nullptr;
4528   _exception_cache = nullptr;
4529   _gc_data = nullptr;
4530   _oops_do_mark_link = nullptr;
4531   _compiled_ic_data = nullptr;
4532   _osr_entry_point = nullptr;
4533   _compile_id = -1;
4534   _deoptimization_status = not_marked;
4535   _is_unloading_state = 0;
4536   _state = not_installed;
4537 }
< prev index next >