< prev index next >

src/hotspot/share/code/nmethod.cpp

Print this page

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"

  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compilerDirectives.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"
  47 #include "interpreter/bytecode.inline.hpp"
  48 #include "jvm.h"
  49 #include "logging/log.hpp"
  50 #include "logging/logStream.hpp"
  51 #include "memory/allocation.inline.hpp"
  52 #include "memory/resourceArea.hpp"
  53 #include "memory/universe.hpp"
  54 #include "oops/access.inline.hpp"
  55 #include "oops/klass.inline.hpp"
  56 #include "oops/method.inline.hpp"
  57 #include "oops/methodData.hpp"
  58 #include "oops/oop.inline.hpp"

  59 #include "oops/weakHandle.inline.hpp"
  60 #include "prims/jvmtiImpl.hpp"
  61 #include "prims/jvmtiThreadState.hpp"
  62 #include "prims/methodHandles.hpp"
  63 #include "runtime/atomicAccess.hpp"
  64 #include "runtime/continuation.hpp"
  65 #include "runtime/deoptimization.hpp"
  66 #include "runtime/flags/flagSetting.hpp"
  67 #include "runtime/frame.inline.hpp"
  68 #include "runtime/handles.inline.hpp"
  69 #include "runtime/jniHandles.inline.hpp"
  70 #include "runtime/orderAccess.hpp"
  71 #include "runtime/os.hpp"
  72 #include "runtime/safepointVerifiers.hpp"
  73 #include "runtime/serviceThread.hpp"
  74 #include "runtime/sharedRuntime.hpp"
  75 #include "runtime/signature.hpp"
  76 #include "runtime/threadWXSetters.inline.hpp"
  77 #include "runtime/vmThread.hpp"
  78 #include "utilities/align.hpp"

 991              _method->method_holder()->external_name(),
 992              _method->name()->as_C_string(),
 993              _method->signature()->as_C_string(),
 994              compile_id());
 995   }
 996   return check_evol.has_evol_dependency();
 997 }
 998 
 999 int nmethod::total_size() const {
1000   return
1001     consts_size()        +
1002     insts_size()         +
1003     stub_size()          +
1004     scopes_data_size()   +
1005     scopes_pcs_size()    +
1006     handler_table_size() +
1007     nul_chk_table_size();
1008 }
1009 
1010 const char* nmethod::compile_kind() const {
1011   if (is_osr_method())     return "osr";



1012   if (method() != nullptr && is_native_method()) {
1013     if (method()->is_continuation_native_intrinsic()) {
1014       return "cnt";
1015     }
1016     return "c2n";
1017   }
1018   return nullptr;
1019 }
1020 
// Human-readable name of the compiler that produced this nmethod,
// derived from the recorded compiler type.
const char* nmethod::compiler_name() const {
  return compilertype2name(_compiler_type);
}
1024 
1025 #ifdef ASSERT
1026 class CheckForOopsClosure : public OopClosure {
1027   bool _found_oop = false;
1028  public:
1029   virtual void do_oop(oop* o) { _found_oop = true; }
1030   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1031   bool found_oop() { return _found_oop; }
1032 };
1033 class CheckForMetadataClosure : public MetadataClosure {
1034   bool _found_metadata = false;
1035   Metadata* _ignore = nullptr;
1036  public:
1037   CheckForMetadataClosure(Metadata* ignore) : _ignore(ignore) {}
1038   virtual void do_metadata(Metadata* md) { if (md != _ignore) _found_metadata = true; }

1097     nm = new (native_nmethod_size, allow_NonNMethod_space)
1098     nmethod(method(), compiler_none, native_nmethod_size,
1099             compile_id, &offsets,
1100             code_buffer, frame_size,
1101             basic_lock_owner_sp_offset,
1102             basic_lock_sp_offset,
1103             oop_maps, mutable_data_size);
1104     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1105     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1106   }
1107 
1108   if (nm != nullptr) {
1109     // verify nmethod
1110     DEBUG_ONLY(nm->verify();) // might block
1111 
1112     nm->log_new_nmethod();
1113   }
1114   return nm;
1115 }
1116 

























1117 nmethod* nmethod::new_nmethod(const methodHandle& method,
1118   int compile_id,
1119   int entry_bci,
1120   CodeOffsets* offsets,
1121   int orig_pc_offset,
1122   DebugInformationRecorder* debug_info,
1123   Dependencies* dependencies,
1124   CodeBuffer* code_buffer, int frame_size,
1125   OopMapSet* oop_maps,
1126   ExceptionHandlerTable* handler_table,
1127   ImplicitExceptionTable* nul_chk_table,
1128   AbstractCompiler* compiler,
1129   CompLevel comp_level
1130 #if INCLUDE_JVMCI
1131   , char* speculations,
1132   int speculations_len,
1133   JVMCINMethodData* jvmci_data
1134 #endif
1135 )
1136 {

1163 
1164   int mutable_data_size = required_mutable_data_size(code_buffer
1165     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1166 
1167   {
1168     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1169 
1170     nm = new (nmethod_size, comp_level)
1171     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1172             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1173             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1174             handler_table, nul_chk_table, compiler, comp_level
1175 #if INCLUDE_JVMCI
1176             , speculations,
1177             speculations_len,
1178             jvmci_data
1179 #endif
1180             );
1181 
1182     if (nm != nullptr) {
1183       // To make dependency checking during class loading fast, record
1184       // the nmethod dependencies in the classes it is dependent on.
1185       // This allows the dependency checking code to simply walk the
1186       // class hierarchy above the loaded class, checking only nmethods
1187       // which are dependent on those classes.  The slow way is to
1188       // check every nmethod for dependencies which makes it linear in
1189       // the number of methods compiled.  For applications with a lot
1190       // classes the slow way is too slow.
1191       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1192         if (deps.type() == Dependencies::call_site_target_value) {
1193           // CallSite dependencies are managed on per-CallSite instance basis.
1194           oop call_site = deps.argument_oop(0);
1195           MethodHandles::add_dependent_nmethod(call_site, nm);
1196         } else {
1197           InstanceKlass* ik = deps.context_type();
1198           if (ik == nullptr) {
1199             continue;  // ignore things like evol_method
1200           }
1201           // record this nmethod as dependent on this klass
1202           ik->add_dependent_nmethod(nm);
1203         }
1204       }
1205       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1206     }
1207   }
1208   // Do verification and logging outside CodeCache_lock.
1209   if (nm != nullptr) {











































































































1210     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1211     DEBUG_ONLY(nm->verify();)
1212     nm->log_new_nmethod();
1213   }
1214   return nm;
1215 }
1216 
1217 // Fill in default values for various fields
1218 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1219   // avoid uninitialized fields, even for short time periods
1220   _exception_cache            = nullptr;
1221   _gc_data                    = nullptr;
1222   _oops_do_mark_link          = nullptr;
1223   _compiled_ic_data           = nullptr;

1224 
1225   _is_unloading_state         = 0;
1226   _state                      = not_installed;
1227 
1228   _has_unsafe_access          = 0;
1229   _has_wide_vectors           = 0;
1230   _has_monitors               = 0;
1231   _has_scoped_access          = 0;
1232   _has_flushed_dependencies   = 0;
1233   _is_unlinked                = 0;
1234   _load_reported              = 0; // jvmti state


1235 

1236   _deoptimization_status      = not_marked;
1237 
1238   // SECT_CONSTS is first in code buffer so the offset should be 0.
1239   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1240   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1241 
1242   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1243 
1244   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1245   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1246 
1247   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1248 }
1249 
1250 // Post initialization
1251 void nmethod::post_init() {
1252   clear_unloading_state();
1253 
1254   finalize_relocations();
1255 

1287     init_defaults(code_buffer, offsets);
1288 
1289     _osr_entry_point         = nullptr;
1290     _pc_desc_container       = nullptr;
1291     _entry_bci               = InvocationEntryBci;
1292     _compile_id              = compile_id;
1293     _comp_level              = CompLevel_none;
1294     _compiler_type           = type;
1295     _orig_pc_offset          = 0;
1296     _num_stack_arg_slots     = 0;
1297 
1298     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1299       // Continuation enter intrinsic
1300       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1301     } else {
1302       _exception_offset      = 0;
1303     }
1304     // Native wrappers do not have deopt handlers. Make the values
1305     // something that will never match a pc like the nmethod vtable entry
1306     _deopt_handler_entry_offset    = 0;

1307     _unwind_handler_offset   = 0;
1308 
1309     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1310     uint16_t metadata_size;
1311     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1312     JVMCI_ONLY( _metadata_size = metadata_size; )
1313     assert(_mutable_data_size == _relocation_size + metadata_size,
1314            "wrong mutable data size: %d != %d + %d",
1315            _mutable_data_size, _relocation_size, metadata_size);
1316 
1317     // native wrapper does not have read-only data but we need unique not null address
1318     _immutable_data          = blob_end();
1319     _immutable_data_size     = 0;
1320     _nul_chk_table_offset    = 0;
1321     _handler_table_offset    = 0;
1322     _scopes_pcs_offset       = 0;
1323     _scopes_data_offset      = 0;
1324 #if INCLUDE_JVMCI
1325     _speculations_offset     = 0;
1326 #endif

1347     // This is both handled in decode2(), called via print_code() -> decode()
1348     if (PrintNativeNMethods) {
1349       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1350       print_code();
1351       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1352 #if defined(SUPPORT_DATA_STRUCTS)
1353       if (AbstractDisassembler::show_structs()) {
1354         if (oop_maps != nullptr) {
1355           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1356           oop_maps->print_on(tty);
1357           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1358         }
1359       }
1360 #endif
1361     } else {
1362       print(); // print the header part only.
1363     }
1364 #if defined(SUPPORT_DATA_STRUCTS)
1365     if (AbstractDisassembler::show_structs()) {
1366       if (PrintRelocations) {
1367         print_relocations();
1368         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1369       }
1370     }
1371 #endif
1372     if (xtty != nullptr) {
1373       xtty->tail("print_native_nmethod");
1374     }
1375   }
1376 }
1377 
1378 
1379 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1380 {
1381 
1382   if (nm._oop_maps != nullptr) {
1383     _oop_maps                   = nm._oop_maps->clone();
1384   } else {
1385     _oop_maps                   = nullptr;
1386   }
1387 

1411   if (_mutable_data_size > 0) {
1412     _mutable_data = (address)os::malloc(_mutable_data_size, mtCode);
1413     if (_mutable_data == nullptr) {
1414       vm_exit_out_of_memory(_mutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for mutable data");
1415     }
1416     memcpy(mutable_data_begin(), nm.mutable_data_begin(), nm.mutable_data_size());
1417   } else {
1418     _mutable_data               = nullptr;
1419   }
1420 
1421   _deoptimization_generation    = 0;
1422   _gc_epoch                     = CodeCache::gc_epoch();
1423   _method                       = nm._method;
1424   _osr_link                     = nullptr;
1425 
1426   _exception_cache              = nullptr;
1427   _gc_data                      = nullptr;
1428   _oops_do_mark_nmethods        = nullptr;
1429   _oops_do_mark_link            = nullptr;
1430   _compiled_ic_data             = nullptr;

1431 
1432   if (nm._osr_entry_point != nullptr) {
1433     _osr_entry_point            = (nm._osr_entry_point - (address) &nm) + (address) this;
1434   } else {
1435     _osr_entry_point            = nullptr;
1436   }
1437 
1438   _entry_offset                 = nm._entry_offset;
1439   _verified_entry_offset        = nm._verified_entry_offset;
1440   _entry_bci                    = nm._entry_bci;
1441   _immutable_data_size          = nm._immutable_data_size;
1442 
1443   _skipped_instructions_size    = nm._skipped_instructions_size;
1444   _stub_offset                  = nm._stub_offset;
1445   _exception_offset             = nm._exception_offset;
1446   _deopt_handler_entry_offset   = nm._deopt_handler_entry_offset;
1447   _unwind_handler_offset        = nm._unwind_handler_offset;
1448   _num_stack_arg_slots          = nm._num_stack_arg_slots;
1449   _oops_size                    = nm._oops_size;
1450 #if INCLUDE_JVMCI

1464     _immutable_data             = nm._immutable_data;
1465     inc_immutable_data_ref_count();
1466   } else {
1467     _immutable_data             = blob_end();
1468   }
1469 
1470   _orig_pc_offset               = nm._orig_pc_offset;
1471   _compile_id                   = nm._compile_id;
1472   _comp_level                   = nm._comp_level;
1473   _compiler_type                = nm._compiler_type;
1474   _is_unloading_state           = nm._is_unloading_state;
1475   _state                        = not_installed;
1476 
1477   _has_unsafe_access            = nm._has_unsafe_access;
1478   _has_wide_vectors             = nm._has_wide_vectors;
1479   _has_monitors                 = nm._has_monitors;
1480   _has_scoped_access            = nm._has_scoped_access;
1481   _has_flushed_dependencies     = nm._has_flushed_dependencies;
1482   _is_unlinked                  = nm._is_unlinked;
1483   _load_reported                = nm._load_reported;


1484 
1485   _deoptimization_status        = nm._deoptimization_status;
1486 
1487   if (nm._pc_desc_container != nullptr) {
1488     _pc_desc_container          = new PcDescContainer(scopes_pcs_begin());
1489   } else {
1490     _pc_desc_container          = nullptr;
1491   }
1492 
1493   // Copy nmethod contents excluding header
1494   // - Constant part          (doubles, longs and floats used in nmethod)
1495   // - Code part:
1496   //   - Code body
1497   //   - Exception handler
1498   //   - Stub code
1499   //   - OOP table
1500   memcpy(consts_begin(), nm.consts_begin(), nm.data_end() - nm.consts_begin());
1501 
1502   // Fix relocation
1503   RelocIterator iter(this);

1680   CompLevel comp_level
1681 #if INCLUDE_JVMCI
1682   , char* speculations,
1683   int speculations_len,
1684   JVMCINMethodData* jvmci_data
1685 #endif
1686   )
1687   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1688              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1689   _deoptimization_generation(0),
1690   _gc_epoch(CodeCache::gc_epoch()),
1691   _method(method),
1692   _osr_link(nullptr)
1693 {
1694   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1695   {
1696     DEBUG_ONLY(NoSafepointVerifier nsv;)
1697     assert_locked_or_safepoint(CodeCache_lock);
1698 
1699     init_defaults(code_buffer, offsets);

1700 
1701     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1702     _entry_bci       = entry_bci;
1703     _compile_id      = compile_id;
1704     _comp_level      = comp_level;
1705     _compiler_type   = type;
1706     _orig_pc_offset  = orig_pc_offset;
1707 
1708     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1709 
1710     set_ctable_begin(header_begin() + content_offset());
1711 
1712 #if INCLUDE_JVMCI
1713     if (compiler->is_jvmci()) {
1714       // JVMCI might not produce any stub sections
1715       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1716         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1717       } else {
1718         _exception_offset        = -1;
1719       }

1809     // Copy speculations to nmethod
1810     if (speculations_size() != 0) {
1811       memcpy(speculations_begin(), speculations, speculations_len);
1812     }
1813 #endif
1814     init_immutable_data_ref_count();
1815 
1816     post_init();
1817 
1818     // we use the information of entry points to find out if a method is
1819     // static or non static
1820     assert(compiler->is_c2() || compiler->is_jvmci() ||
1821            _method->is_static() == (entry_point() == verified_entry_point()),
1822            " entry points must be same for static methods and vice versa");
1823   }
1824 }
1825 
1826 // Print a short set of xml attributes to identify this nmethod.  The
1827 // output should be embedded in some other element.
1828 void nmethod::log_identity(xmlStream* log) const {

1829   log->print(" compile_id='%d'", compile_id());
1830   const char* nm_kind = compile_kind();
1831   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1832   log->print(" compiler='%s'", compiler_name());
1833   if (TieredCompilation) {
1834     log->print(" level='%d'", comp_level());
1835   }
1836 #if INCLUDE_JVMCI
1837   if (jvmci_nmethod_data() != nullptr) {
1838     const char* jvmci_name = jvmci_nmethod_data()->name();
1839     if (jvmci_name != nullptr) {
1840       log->print(" jvmci_mirror_name='");
1841       log->text("%s", jvmci_name);
1842       log->print("'");
1843     }
1844   }
1845 #endif
1846 }
1847 
1848 
// Emit " <name>_offset='D'" on 'log', where D is the byte distance of the
// named section's begin from the start of this nmethod; prints nothing
// when the section is empty (begin == end).
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'"    , \
               p2i(name##_begin()) - p2i(this))
1853 
1854 

1969       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1970       if (oop_maps() != nullptr) {
1971         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1972         oop_maps()->print_on(tty);
1973         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1974       }
1975     }
1976 #endif
1977   } else {
1978     print(); // print the header part only.
1979   }
1980 
1981 #if defined(SUPPORT_DATA_STRUCTS)
1982   if (AbstractDisassembler::show_structs()) {
1983     methodHandle mh(Thread::current(), _method);
1984     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1985       print_scopes();
1986       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1987     }
1988     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1989       print_relocations();
1990       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1991     }
1992     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1993       print_dependencies_on(tty);
1994       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1995     }
1996     if (printmethod || PrintExceptionHandlers) {
1997       print_handler_table();
1998       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1999       print_nul_chk_table();
2000       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2001     }
2002 
2003     if (printmethod) {
2004       print_recorded_oops();
2005       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2006       print_recorded_metadata();
2007       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2008     }
2009   }
2010 #endif
2011 
2012   if (xtty != nullptr) {
2013     xtty->tail("print_nmethod");
2014   }
2015 }
2016 
2017 
2018 // Promote one word from an assembly-time handle to a live embedded oop.
2019 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2020   if (handle == nullptr ||
2021       // As a special case, IC oops are initialized to 1 or -1.
2022       handle == (jobject) Universe::non_oop_word()) {
2023     *(void**)dest = handle;
2024   } else {
2025     *dest = JNIHandles::resolve_non_null(handle);
2026   }
2027 }
2028 








2029 
2030 // Have to have the same name because it's called by a template
2031 void nmethod::copy_values(GrowableArray<jobject>* array) {
2032   int length = array->length();
2033   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2034   oop* dest = oops_begin();
2035   for (int index = 0 ; index < length; index++) {
2036     initialize_immediate_oop(&dest[index], array->at(index));
2037   }
2038 
2039   // Now we can fix up all the oops in the code.  We need to do this
2040   // in the code because the assembler uses jobjects as placeholders.
2041   // The code and relocations have already been initialized by the
2042   // CodeBlob constructor, so it is valid even at this early point to
2043   // iterate over relocations and patch the code.
2044   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2045 }
2046 
2047 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2048   int length = array->length();

2056 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2057   // re-patch all oop-bearing instructions, just in case some oops moved
2058   RelocIterator iter(this, begin, end);
2059   while (iter.next()) {
2060     if (iter.type() == relocInfo::oop_type) {
2061       oop_Relocation* reloc = iter.oop_reloc();
2062       if (initialize_immediates && reloc->oop_is_immediate()) {
2063         oop* dest = reloc->oop_addr();
2064         jobject obj = *reinterpret_cast<jobject*>(dest);
2065         initialize_immediate_oop(dest, obj);
2066       }
2067       // Refresh the oop-related bits of this instruction.
2068       reloc->fix_oop_relocation();
2069     } else if (iter.type() == relocInfo::metadata_type) {
2070       metadata_Relocation* reloc = iter.metadata_reloc();
2071       reloc->fix_metadata_relocation();
2072     }
2073   }
2074 }
2075 




















2076 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2077   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2078   intptr_t cbaddr = (intptr_t) nm;
2079   intptr_t offset = ((intptr_t) pc) - cbaddr;
2080 
2081   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2082   if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
2083     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2084   } else if (!nop->patch(oopmap_slot, offset)) {
2085     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2086   }
2087 }
2088 
2089 void nmethod::finalize_relocations() {
2090   NoSafepointVerifier nsv;
2091 
2092   GrowableArray<NativeMovConstReg*> virtual_call_data;
2093 
2094   // Make sure that post call nops fill in nmethod offsets eagerly so
2095   // we don't have to race with deoptimization

2226   // be alive the previous completed marking cycle.
2227   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2228 }
2229 
2230 void nmethod::inc_decompile_count() {
2231   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2232   // Could be gated by ProfileTraps, but do not bother...
2233 #if INCLUDE_JVMCI
2234   if (jvmci_skip_profile_deopt()) {
2235     return;
2236   }
2237 #endif
2238   Method* m = method();
2239   if (m == nullptr)  return;
2240   MethodData* mdo = m->method_data();
2241   if (mdo == nullptr)  return;
2242   // There is a benign race here.  See comments in methodData.hpp.
2243   mdo->inc_decompile_count();
2244 }
2245 








2246 bool nmethod::try_transition(signed char new_state_int) {
2247   signed char new_state = new_state_int;
2248   assert_lock_strong(NMethodState_lock);
2249   signed char old_state = _state;
2250   if (old_state >= new_state) {
2251     // Ensure monotonicity of transitions.
2252     return false;
2253   }
2254   AtomicAccess::store(&_state, new_state);
2255   return true;
2256 }
2257 
2258 void nmethod::invalidate_osr_method() {
2259   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2260   // Remove from list of active nmethods
2261   if (method() != nullptr) {
2262     method()->method_holder()->remove_osr_nmethod(this);
2263   }
2264 }
2265 

2275     }
2276   }
2277 
2278   ResourceMark rm;
2279   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2280   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2281 
2282   CompileTask::print_ul(this, ss.freeze());
2283   if (PrintCompilation) {
2284     print_on_with_msg(tty, ss.freeze());
2285   }
2286 }
2287 
2288 void nmethod::unlink_from_method() {
2289   if (method() != nullptr) {
2290     method()->unlink_code(this);
2291   }
2292 }
2293 
2294 // Invalidate code
2295 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2296   // This can be called while the system is already at a safepoint which is ok
2297   NoSafepointVerifier nsv;
2298 
2299   if (is_unloading()) {
2300     // If the nmethod is unloading, then it is already not entrant through
2301     // the nmethod entry barriers. No need to do anything; GC will unload it.
2302     return false;
2303   }
2304 
2305   if (AtomicAccess::load(&_state) == not_entrant) {
2306     // Avoid taking the lock if already in required state.
2307     // This is safe from races because the state is an end-state,
2308     // which the nmethod cannot back out of once entered.
2309     // No need for fencing either.
2310     return false;
2311   }
2312 
2313   MACOS_AARCH64_ONLY(os::thread_wx_enable_write());
2314 
2315   {

2339     }
2340 
2341     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2342     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2343       // If nmethod entry barriers are not supported, we won't mark
2344       // nmethods as on-stack when they become on-stack. So we
2345       // degrade to a less accurate flushing strategy, for now.
2346       mark_as_maybe_on_stack();
2347     }
2348 
2349     // Change state
2350     bool success = try_transition(not_entrant);
2351     assert(success, "Transition can't fail");
2352 
2353     // Log the transition once
2354     log_state_change(invalidation_reason);
2355 
2356     // Remove nmethod from method.
2357     unlink_from_method();
2358 







2359   } // leave critical region under NMethodState_lock
2360 
2361 #if INCLUDE_JVMCI
2362   // Invalidate can't occur while holding the NMethodState_lock
2363   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2364   if (nmethod_data != nullptr) {
2365     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2366   }
2367 #endif
2368 
2369 #ifdef ASSERT
2370   if (is_osr_method() && method() != nullptr) {
2371     // Make sure osr nmethod is invalidated, i.e. not on the list
2372     bool found = method()->method_holder()->remove_osr_nmethod(this);
2373     assert(!found, "osr nmethod should have been invalidated");
2374   }
2375 #endif
2376 
2377   return true;
2378 }

2403     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2404             nmethod::InvalidationReason::UNLOADING_COLD :
2405             nmethod::InvalidationReason::UNLOADING);
2406   }
2407 #endif
2408 
2409   // Post before flushing as jmethodID is being used
2410   post_compiled_method_unload();
2411 
2412   // Register for flushing when it is safe. For concurrent class unloading,
2413   // that would be after the unloading handshake, and for STW class unloading
2414   // that would be when getting back to the VM thread.
2415   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2416 }
2417 
// Deallocate all side data owned by this nmethod and release its code
// cache storage. The statement order below matters: side structures are
// freed first, then the nmethod is unregistered from the heap and code
// cache before the blob itself is purged. Callers must guarantee no
// thread can still execute or observe this nmethod.
void nmethod::purge(bool unregister_nmethod) {

  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);

  // completely deallocate this method
  Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));

  LogTarget(Debug, codecache) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    const char* method_name = method()->name()->as_C_string();
    const size_t codecache_capacity = CodeCache::capacity()/1024;
    const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
    ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
              "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
              _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
              codecache_capacity, codecache_free_space, method_name, compiler_name());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  while(ec != nullptr) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }
  // PC descriptor cache and inline-cache data are owned exclusively by
  // this nmethod; free them unconditionally.
  if (_pc_desc_container != nullptr) {
    delete _pc_desc_container;
  }
  delete[] _compiled_ic_data;

  if (_immutable_data != blob_end()) {
    // Free memory if this was the last nmethod referencing immutable data
    if (dec_immutable_data_ref_count() == 0) {
      os::free(_immutable_data);
    }

    _immutable_data = blob_end(); // Valid not null address
  }

  // Unregister from the GC (skipped when the caller already did so) and
  // from the code cache before releasing the blob storage itself.
  if (unregister_nmethod) {
    Universe::heap()->unregister_nmethod(this);
  }
  CodeCache::unregister_old_nmethod(this);

  JVMCI_ONLY( _metadata_size = 0; )
  CodeBlob::purge();
}
2469 
2470 oop nmethod::oop_at(int index) const {
2471   if (index == 0) {
2472     return nullptr;

2499         MethodHandles::clean_dependency_context(call_site);
2500       } else {
2501         InstanceKlass* ik = deps.context_type();
2502         if (ik == nullptr) {
2503           continue;  // ignore things like evol_method
2504         }
2505         // During GC liveness of dependee determines class that needs to be updated.
2506         // The GC may clean dependency contexts concurrently and in parallel.
2507         ik->clean_dependency_context();
2508       }
2509     }
2510   }
2511 }
2512 
2513 void nmethod::post_compiled_method(CompileTask* task) {
2514   task->mark_success();
2515   task->set_nm_content_size(content_size());
2516   task->set_nm_insts_size(insts_size());
2517   task->set_nm_total_size(total_size());
2518 















2519   // JVMTI -- compiled method notification (must be done outside lock)
2520   post_compiled_method_load_event();
2521 
2522   if (CompilationLog::log() != nullptr) {
2523     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2524   }
2525 
2526   const DirectiveSet* directive = task->directive();
2527   maybe_print_nmethod(directive);
2528 }
2529 
2530 #if INCLUDE_CDS
2531 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2532 
2533 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2534   precond(CDSConfig::is_using_aot_linked_classes());
2535   precond(!ServiceThread::has_started());
2536 
2537   // We are still in single threaded stage of VM bootstrap. No need to lock.
2538   if (_delayed_compiled_method_load_events == nullptr) {

3257 void nmethod::verify() {
3258   if (is_not_entrant())
3259     return;
3260 
3261   // assert(oopDesc::is_oop(method()), "must be valid");
3262 
3263   ResourceMark rm;
3264 
3265   if (!CodeCache::contains(this)) {
3266     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3267   }
3268 
3269   if(is_native_method() )
3270     return;
3271 
3272   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3273   if (nm != this) {
3274     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3275   }
3276 
3277   for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3278     if (! p->verify(this)) {
3279       tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));




3280     }
3281   }
3282 
3283 #ifdef ASSERT
3284 #if INCLUDE_JVMCI
3285   {
3286     // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3287     ImmutableOopMapSet* oms = oop_maps();
3288     ImplicitExceptionTable implicit_table(this);
3289     for (uint i = 0; i < implicit_table.len(); i++) {
3290       int exec_offset = (int) implicit_table.get_exec_offset(i);
3291       if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3292         assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3293         bool found = false;
3294         for (int i = 0, imax = oms->count(); i < imax; i++) {
3295           if (oms->pair_at(i)->pc_offset() == exec_offset) {
3296             found = true;
3297             break;

3298           }

3299         }
3300         assert(found, "missing oopmap");
3301       }
3302     }
3303   }
3304 #endif
3305 #endif

3306 
3307   VerifyOopsClosure voc(this);
3308   oops_do(&voc);
3309   assert(voc.ok(), "embedded oops must be OK");
3310   Universe::heap()->verify_nmethod(this);
3311 
3312   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3313          nm->method()->external_name(), p2i(_oops_do_mark_link));
3314   verify_scopes();


3315 
3316   CompiledICLocker nm_verify(this);
3317   VerifyMetadataClosure vmc;
3318   metadata_do(&vmc);
3319 }
3320 
3321 
3322 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3323 
3324   // Verify IC only when nmethod installation is finished.
3325   if (!is_not_installed()) {
3326     if (CompiledICLocker::is_safe(this)) {
3327       if (is_inline_cache) {
3328         CompiledIC_at(this, call_site);
3329       } else {
3330         CompiledDirectCall::at(call_site);
3331       }
3332     } else {
3333       CompiledICLocker ml_verify(this);
3334       if (is_inline_cache) {

3463                                              p2i(nul_chk_table_end()),
3464                                              nul_chk_table_size());
3465   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3466                                              p2i(handler_table_begin()),
3467                                              p2i(handler_table_end()),
3468                                              handler_table_size());
3469   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3470                                              p2i(scopes_pcs_begin()),
3471                                              p2i(scopes_pcs_end()),
3472                                              scopes_pcs_size());
3473   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3474                                              p2i(scopes_data_begin()),
3475                                              p2i(scopes_data_end()),
3476                                              scopes_data_size());
3477 #if INCLUDE_JVMCI
3478   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3479                                              p2i(speculations_begin()),
3480                                              p2i(speculations_end()),
3481                                              speculations_size());
3482 #endif



3483 }
3484 
3485 void nmethod::print_code() {
3486   ResourceMark m;
3487   ttyLocker ttyl;
3488   // Call the specialized decode method of this class.
3489   decode(tty);
3490 }
3491 
3492 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3493 
3494 void nmethod::print_dependencies_on(outputStream* out) {
3495   ResourceMark rm;
3496   stringStream st;
3497   st.print_cr("Dependencies:");
3498   for (Dependencies::DepStream deps(this); deps.next(); ) {
3499     deps.print_dependency(&st);
3500     InstanceKlass* ctxk = deps.context_type();
3501     if (ctxk != nullptr) {
3502       if (ctxk->is_dependent_nmethod(this)) {

3562   st->print("scopes:");
3563   if (scopes_pcs_begin() < scopes_pcs_end()) {
3564     st->cr();
3565     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3566       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3567         continue;
3568 
3569       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3570       while (sd != nullptr) {
3571         sd->print_on(st, p);  // print output ends with a newline
3572         sd = sd->sender();
3573       }
3574     }
3575   } else {
3576     st->print_cr(" <list empty>");
3577   }
3578 }
3579 #endif
3580 
3581 #ifndef PRODUCT  // RelocIterator does support printing only then.
3582 void nmethod::print_relocations() {
3583   ResourceMark m;       // in case methods get printed via the debugger
3584   tty->print_cr("relocations:");
3585   RelocIterator iter(this);
3586   iter.print_on(tty);
3587 }
3588 #endif
3589 
3590 void nmethod::print_pcs_on(outputStream* st) {
3591   ResourceMark m;       // in case methods get printed via debugger
3592   st->print("pc-bytecode offsets:");
3593   if (scopes_pcs_begin() < scopes_pcs_end()) {
3594     st->cr();
3595     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3596       p->print_on(st, this);  // print output ends with a newline
3597     }
3598   } else {
3599     st->print_cr(" <list empty>");
3600   }
3601 }
3602 
3603 void nmethod::print_handler_table() {
3604   ExceptionHandlerTable(this).print(code_begin());
3605 }
3606 

4421 void nmethod::update_speculation(JavaThread* thread) {
4422   jlong speculation = thread->pending_failed_speculation();
4423   if (speculation != 0) {
4424     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4425     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4426     thread->set_pending_failed_speculation(0);
4427   }
4428 }
4429 
4430 const char* nmethod::jvmci_name() {
4431   if (jvmci_nmethod_data() != nullptr) {
4432     return jvmci_nmethod_data()->name();
4433   }
4434   return nullptr;
4435 }
4436 
4437 bool nmethod::jvmci_skip_profile_deopt() const {
4438   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4439 }
4440 #endif





















   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "cds/cdsConfig.hpp"
  27 #include "code/aotCodeCache.hpp"
  28 #include "code/codeCache.hpp"
  29 #include "code/compiledIC.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "code/nativeInst.hpp"
  32 #include "code/nmethod.inline.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compilerDirectives.hpp"
  39 #include "compiler/compilerOracle.hpp"
  40 #include "compiler/compileTask.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"
  47 #include "gc/shared/collectedHeap.hpp"
  48 #include "interpreter/bytecode.inline.hpp"
  49 #include "jvm.h"
  50 #include "logging/log.hpp"
  51 #include "logging/logStream.hpp"
  52 #include "memory/allocation.inline.hpp"
  53 #include "memory/resourceArea.hpp"
  54 #include "memory/universe.hpp"
  55 #include "oops/access.inline.hpp"
  56 #include "oops/klass.inline.hpp"
  57 #include "oops/method.inline.hpp"
  58 #include "oops/methodData.hpp"
  59 #include "oops/oop.inline.hpp"
  60 #include "oops/trainingData.hpp"
  61 #include "oops/weakHandle.inline.hpp"
  62 #include "prims/jvmtiImpl.hpp"
  63 #include "prims/jvmtiThreadState.hpp"
  64 #include "prims/methodHandles.hpp"
  65 #include "runtime/atomicAccess.hpp"
  66 #include "runtime/continuation.hpp"
  67 #include "runtime/deoptimization.hpp"
  68 #include "runtime/flags/flagSetting.hpp"
  69 #include "runtime/frame.inline.hpp"
  70 #include "runtime/handles.inline.hpp"
  71 #include "runtime/jniHandles.inline.hpp"
  72 #include "runtime/orderAccess.hpp"
  73 #include "runtime/os.hpp"
  74 #include "runtime/safepointVerifiers.hpp"
  75 #include "runtime/serviceThread.hpp"
  76 #include "runtime/sharedRuntime.hpp"
  77 #include "runtime/signature.hpp"
  78 #include "runtime/threadWXSetters.inline.hpp"
  79 #include "runtime/vmThread.hpp"
  80 #include "utilities/align.hpp"

 993              _method->method_holder()->external_name(),
 994              _method->name()->as_C_string(),
 995              _method->signature()->as_C_string(),
 996              compile_id());
 997   }
 998   return check_evol.has_evol_dependency();
 999 }
1000 
1001 int nmethod::total_size() const {
1002   return
1003     consts_size()        +
1004     insts_size()         +
1005     stub_size()          +
1006     scopes_data_size()   +
1007     scopes_pcs_size()    +
1008     handler_table_size() +
1009     nul_chk_table_size();
1010 }
1011 
1012 const char* nmethod::compile_kind() const {
1013   if (is_osr_method()) return "osr";
1014   if (preloaded())     return "AP";
1015   if (is_aot())        return "A";
1016 
1017   if (method() != nullptr && is_native_method()) {
1018     if (method()->is_continuation_native_intrinsic()) {
1019       return "cnt";
1020     }
1021     return "c2n";
1022   }
1023   return "";
1024 }
1025 
1026 const char* nmethod::compiler_name() const {
1027   return compilertype2name(_compiler_type);
1028 }
1029 
1030 #ifdef ASSERT
1031 class CheckForOopsClosure : public OopClosure {
1032   bool _found_oop = false;
1033  public:
1034   virtual void do_oop(oop* o) { _found_oop = true; }
1035   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1036   bool found_oop() { return _found_oop; }
1037 };
1038 class CheckForMetadataClosure : public MetadataClosure {
1039   bool _found_metadata = false;
1040   Metadata* _ignore = nullptr;
1041  public:
1042   CheckForMetadataClosure(Metadata* ignore) : _ignore(ignore) {}
1043   virtual void do_metadata(Metadata* md) { if (md != _ignore) _found_metadata = true; }

1102     nm = new (native_nmethod_size, allow_NonNMethod_space)
1103     nmethod(method(), compiler_none, native_nmethod_size,
1104             compile_id, &offsets,
1105             code_buffer, frame_size,
1106             basic_lock_owner_sp_offset,
1107             basic_lock_sp_offset,
1108             oop_maps, mutable_data_size);
1109     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1110     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1111   }
1112 
1113   if (nm != nullptr) {
1114     // verify nmethod
1115     DEBUG_ONLY(nm->verify();) // might block
1116 
1117     nm->log_new_nmethod();
1118   }
1119   return nm;
1120 }
1121 
1122 void nmethod::record_nmethod_dependency() {
1123   // To make dependency checking during class loading fast, record
1124   // the nmethod dependencies in the classes it is dependent on.
1125   // This allows the dependency checking code to simply walk the
1126   // class hierarchy above the loaded class, checking only nmethods
1127   // which are dependent on those classes.  The slow way is to
1128   // check every nmethod for dependencies which makes it linear in
1129   // the number of methods compiled.  For applications with a lot
1130   // classes the slow way is too slow.
1131   for (Dependencies::DepStream deps(this); deps.next(); ) {
1132     if (deps.type() == Dependencies::call_site_target_value) {
1133       // CallSite dependencies are managed on per-CallSite instance basis.
1134       oop call_site = deps.argument_oop(0);
1135       MethodHandles::add_dependent_nmethod(call_site, this);
1136     } else {
1137       InstanceKlass* ik = deps.context_type();
1138       if (ik == nullptr) {
1139         continue;  // ignore things like evol_method
1140       }
1141       // record this nmethod as dependent on this klass
1142       ik->add_dependent_nmethod(this);
1143     }
1144   }
1145 }
1146 
1147 nmethod* nmethod::new_nmethod(const methodHandle& method,
1148   int compile_id,
1149   int entry_bci,
1150   CodeOffsets* offsets,
1151   int orig_pc_offset,
1152   DebugInformationRecorder* debug_info,
1153   Dependencies* dependencies,
1154   CodeBuffer* code_buffer, int frame_size,
1155   OopMapSet* oop_maps,
1156   ExceptionHandlerTable* handler_table,
1157   ImplicitExceptionTable* nul_chk_table,
1158   AbstractCompiler* compiler,
1159   CompLevel comp_level
1160 #if INCLUDE_JVMCI
1161   , char* speculations,
1162   int speculations_len,
1163   JVMCINMethodData* jvmci_data
1164 #endif
1165 )
1166 {

1193 
1194   int mutable_data_size = required_mutable_data_size(code_buffer
1195     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1196 
1197   {
1198     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1199 
1200     nm = new (nmethod_size, comp_level)
1201     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1202             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1203             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1204             handler_table, nul_chk_table, compiler, comp_level
1205 #if INCLUDE_JVMCI
1206             , speculations,
1207             speculations_len,
1208             jvmci_data
1209 #endif
1210             );
1211 
1212     if (nm != nullptr) {
1213       nm->record_nmethod_dependency();
1214       NOT_PRODUCT(note_java_nmethod(nm));





















1215     }
1216   }
1217   // Do verification and logging outside CodeCache_lock.
1218   if (nm != nullptr) {
1219 
1220 #ifdef ASSERT
1221     LogTarget(Debug, aot, codecache, nmethod) log;
1222     if (log.is_enabled()) {
1223       LogStream out(log);
1224       out.print_cr("== new_nmethod 2");
1225       FlagSetting fs(PrintRelocations, true);
1226       nm->print_on_impl(&out);
1227       nm->decode(&out);
1228     }
1229 #endif
1230 
1231     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1232     DEBUG_ONLY(nm->verify();)
1233     nm->log_new_nmethod();
1234   }
1235   return nm;
1236 }
1237 
// Re-materialize an nmethod from its archived (AOT) form into
// 'code_cache_buffer', which the caller has already allocated in the code
// cache. Restores the CodeBlob part, installs oops/metadata, patches
// relocations, flushes the instruction cache and rebuilds the PcDesc
// cache. Returns the nmethod placed at 'code_cache_buffer'.
nmethod* nmethod::restore(address code_cache_buffer,
                          const methodHandle& method,
                          int compile_id,
                          address reloc_data,
                          GrowableArray<Handle>& oop_list,
                          GrowableArray<Metadata*>& metadata_list,
                          ImmutableOopMapSet* oop_maps,
                          address immutable_data,
                          GrowableArray<Handle>& reloc_imm_oop_list,
                          GrowableArray<Metadata*>& reloc_imm_metadata_list,
                          AOTCodeReader* aot_code_reader)
{
  // Copy the archived blob bytes and base metadata into the buffer first.
  CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
  nmethod* nm = (nmethod*)code_cache_buffer;
  nm->set_method(method());
  nm->_compile_id = compile_id;
  nm->set_immutable_data(immutable_data);
  // Install the runtime oops and metadata recorded for this compilation.
  nm->copy_values(&oop_list);
  nm->copy_values(&metadata_list);

  // Patch relocations that embed immediate oops/metadata values.
  aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);

#ifndef PRODUCT
  // Debug-build-only assembly remarks and debug strings.
  nm->asm_remarks().init();
  aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
  nm->dbg_strings().init();
  aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
#endif

  // Flush the code block
  ICache::invalidate_range(nm->code_begin(), nm->code_size());

  // Create cache after PcDesc data is copied - it will be used to initialize cache
  nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());

  nm->set_aot_code_entry(aot_code_reader->aot_code_entry());

  nm->post_init();
  return nm;
}
1278 
// Create a live nmethod from an archived (AOT) nmethod: allocate space in
// the code cache under CodeCache_lock, restore the archived contents into
// it, and record dependencies. Returns nullptr if code cache allocation
// fails. Verification and logging are done after releasing the lock.
nmethod* nmethod::new_nmethod(nmethod* archived_nm,
                              const methodHandle& method,
                              AbstractCompiler* compiler,
                              int compile_id,
                              address reloc_data,
                              GrowableArray<Handle>& oop_list,
                              GrowableArray<Metadata*>& metadata_list,
                              ImmutableOopMapSet* oop_maps,
                              address immutable_data,
                              GrowableArray<Handle>& reloc_imm_oop_list,
                              GrowableArray<Metadata*>& reloc_imm_metadata_list,
                              AOTCodeReader* aot_code_reader)
{
  nmethod* nm = nullptr;
  // The live copy occupies exactly as much space as the archived one.
  int nmethod_size = archived_nm->size();
  // create nmethod
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
    if (code_cache_buffer != nullptr) {
      nm = archived_nm->restore(code_cache_buffer,
                                method,
                                compile_id,
                                reloc_data,
                                oop_list,
                                metadata_list,
                                oop_maps,
                                immutable_data,
                                reloc_imm_oop_list,
                                reloc_imm_metadata_list,
                                aot_code_reader);
      // Register with the context classes while still holding the lock.
      nm->record_nmethod_dependency();
      NOT_PRODUCT(note_java_nmethod(nm));
    }
  }
  // Do verification and logging outside CodeCache_lock.
  if (nm != nullptr) {
#ifdef ASSERT
    LogTarget(Debug, aot, codecache, nmethod) log;
    if (log.is_enabled()) {
      LogStream out(log);
      out.print_cr("== new_nmethod 2");
      FlagSetting fs(PrintRelocations, true);
      nm->print_on_impl(&out);
      nm->decode(&out);
    }
#endif
    // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
    DEBUG_ONLY(nm->verify();)
    nm->log_new_nmethod();
  }
  return nm;
}
1332 
1333 // Fill in default values for various fields
1334 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1335   // avoid uninitialized fields, even for short time periods
1336   _exception_cache            = nullptr;
1337   _gc_data                    = nullptr;
1338   _oops_do_mark_link          = nullptr;
1339   _compiled_ic_data           = nullptr;
1340   _aot_code_entry             = nullptr;
1341 
1342   _is_unloading_state         = 0;
1343   _state                      = not_installed;
1344 
1345   _has_unsafe_access          = 0;
1346   _has_wide_vectors           = 0;
1347   _has_monitors               = 0;
1348   _has_scoped_access          = 0;
1349   _has_flushed_dependencies   = 0;
1350   _is_unlinked                = 0;
1351   _load_reported              = 0; // jvmti state
1352   _preloaded                  = 0;
1353   _has_clinit_barriers        = 0;
1354 
1355   _used                       = false;
1356   _deoptimization_status      = not_marked;
1357 
1358   // SECT_CONSTS is first in code buffer so the offset should be 0.
1359   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1360   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1361 
1362   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1363 
1364   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1365   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1366 
1367   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1368 }
1369 
1370 // Post initialization
1371 void nmethod::post_init() {
1372   clear_unloading_state();
1373 
1374   finalize_relocations();
1375 

1407     init_defaults(code_buffer, offsets);
1408 
1409     _osr_entry_point         = nullptr;
1410     _pc_desc_container       = nullptr;
1411     _entry_bci               = InvocationEntryBci;
1412     _compile_id              = compile_id;
1413     _comp_level              = CompLevel_none;
1414     _compiler_type           = type;
1415     _orig_pc_offset          = 0;
1416     _num_stack_arg_slots     = 0;
1417 
1418     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1419       // Continuation enter intrinsic
1420       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1421     } else {
1422       _exception_offset      = 0;
1423     }
1424     // Native wrappers do not have deopt handlers. Make the values
1425     // something that will never match a pc like the nmethod vtable entry
1426     _deopt_handler_entry_offset    = 0;
1427     _method_profiling_count  = 0;
1428     _unwind_handler_offset   = 0;
1429 
1430     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1431     uint16_t metadata_size;
1432     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1433     JVMCI_ONLY( _metadata_size = metadata_size; )
1434     assert(_mutable_data_size == _relocation_size + metadata_size,
1435            "wrong mutable data size: %d != %d + %d",
1436            _mutable_data_size, _relocation_size, metadata_size);
1437 
1438     // native wrapper does not have read-only data but we need unique not null address
1439     _immutable_data          = blob_end();
1440     _immutable_data_size     = 0;
1441     _nul_chk_table_offset    = 0;
1442     _handler_table_offset    = 0;
1443     _scopes_pcs_offset       = 0;
1444     _scopes_data_offset      = 0;
1445 #if INCLUDE_JVMCI
1446     _speculations_offset     = 0;
1447 #endif

1468     // This is both handled in decode2(), called via print_code() -> decode()
1469     if (PrintNativeNMethods) {
1470       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1471       print_code();
1472       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1473 #if defined(SUPPORT_DATA_STRUCTS)
1474       if (AbstractDisassembler::show_structs()) {
1475         if (oop_maps != nullptr) {
1476           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1477           oop_maps->print_on(tty);
1478           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1479         }
1480       }
1481 #endif
1482     } else {
1483       print(); // print the header part only.
1484     }
1485 #if defined(SUPPORT_DATA_STRUCTS)
1486     if (AbstractDisassembler::show_structs()) {
1487       if (PrintRelocations) {
1488         print_relocations_on(tty);
1489         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1490       }
1491     }
1492 #endif
1493     if (xtty != nullptr) {
1494       xtty->tail("print_native_nmethod");
1495     }
1496   }
1497 }
1498 
1499 
1500 nmethod::nmethod(const nmethod &nm) : CodeBlob(nm._name, nm._kind, nm._size, nm._header_size)
1501 {
1502 
1503   if (nm._oop_maps != nullptr) {
1504     _oop_maps                   = nm._oop_maps->clone();
1505   } else {
1506     _oop_maps                   = nullptr;
1507   }
1508 

1532   if (_mutable_data_size > 0) {
1533     _mutable_data = (address)os::malloc(_mutable_data_size, mtCode);
1534     if (_mutable_data == nullptr) {
1535       vm_exit_out_of_memory(_mutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for mutable data");
1536     }
1537     memcpy(mutable_data_begin(), nm.mutable_data_begin(), nm.mutable_data_size());
1538   } else {
1539     _mutable_data               = nullptr;
1540   }
1541 
1542   _deoptimization_generation    = 0;
1543   _gc_epoch                     = CodeCache::gc_epoch();
1544   _method                       = nm._method;
1545   _osr_link                     = nullptr;
1546 
1547   _exception_cache              = nullptr;
1548   _gc_data                      = nullptr;
1549   _oops_do_mark_nmethods        = nullptr;
1550   _oops_do_mark_link            = nullptr;
1551   _compiled_ic_data             = nullptr;
1552   _aot_code_entry               = nm._aot_code_entry;
1553 
1554   if (nm._osr_entry_point != nullptr) {
1555     _osr_entry_point            = (nm._osr_entry_point - (address) &nm) + (address) this;
1556   } else {
1557     _osr_entry_point            = nullptr;
1558   }
1559 
1560   _entry_offset                 = nm._entry_offset;
1561   _verified_entry_offset        = nm._verified_entry_offset;
1562   _entry_bci                    = nm._entry_bci;
1563   _immutable_data_size          = nm._immutable_data_size;
1564 
1565   _skipped_instructions_size    = nm._skipped_instructions_size;
1566   _stub_offset                  = nm._stub_offset;
1567   _exception_offset             = nm._exception_offset;
1568   _deopt_handler_entry_offset   = nm._deopt_handler_entry_offset;
1569   _unwind_handler_offset        = nm._unwind_handler_offset;
1570   _num_stack_arg_slots          = nm._num_stack_arg_slots;
1571   _oops_size                    = nm._oops_size;
1572 #if INCLUDE_JVMCI

1586     _immutable_data             = nm._immutable_data;
1587     inc_immutable_data_ref_count();
1588   } else {
1589     _immutable_data             = blob_end();
1590   }
1591 
1592   _orig_pc_offset               = nm._orig_pc_offset;
1593   _compile_id                   = nm._compile_id;
1594   _comp_level                   = nm._comp_level;
1595   _compiler_type                = nm._compiler_type;
1596   _is_unloading_state           = nm._is_unloading_state;
1597   _state                        = not_installed;
1598 
1599   _has_unsafe_access            = nm._has_unsafe_access;
1600   _has_wide_vectors             = nm._has_wide_vectors;
1601   _has_monitors                 = nm._has_monitors;
1602   _has_scoped_access            = nm._has_scoped_access;
1603   _has_flushed_dependencies     = nm._has_flushed_dependencies;
1604   _is_unlinked                  = nm._is_unlinked;
1605   _load_reported                = nm._load_reported;
1606   _preloaded                    = nm._preloaded;
1607   _has_clinit_barriers          = nm._has_clinit_barriers;
1608 
1609   _deoptimization_status        = nm._deoptimization_status;
1610 
1611   if (nm._pc_desc_container != nullptr) {
1612     _pc_desc_container          = new PcDescContainer(scopes_pcs_begin());
1613   } else {
1614     _pc_desc_container          = nullptr;
1615   }
1616 
1617   // Copy nmethod contents excluding header
1618   // - Constant part          (doubles, longs and floats used in nmethod)
1619   // - Code part:
1620   //   - Code body
1621   //   - Exception handler
1622   //   - Stub code
1623   //   - OOP table
1624   memcpy(consts_begin(), nm.consts_begin(), nm.data_end() - nm.consts_begin());
1625 
1626   // Fix relocation
1627   RelocIterator iter(this);

1804   CompLevel comp_level
1805 #if INCLUDE_JVMCI
1806   , char* speculations,
1807   int speculations_len,
1808   JVMCINMethodData* jvmci_data
1809 #endif
1810   )
1811   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1812              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1813   _deoptimization_generation(0),
1814   _gc_epoch(CodeCache::gc_epoch()),
1815   _method(method),
1816   _osr_link(nullptr)
1817 {
1818   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1819   {
1820     DEBUG_ONLY(NoSafepointVerifier nsv;)
1821     assert_locked_or_safepoint(CodeCache_lock);
1822 
1823     init_defaults(code_buffer, offsets);
1824     _method_profiling_count  = 0;
1825 
1826     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1827     _entry_bci       = entry_bci;
1828     _compile_id      = compile_id;
1829     _comp_level      = comp_level;
1830     _compiler_type   = type;
1831     _orig_pc_offset  = orig_pc_offset;
1832 
1833     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1834 
1835     set_ctable_begin(header_begin() + content_offset());
1836 
1837 #if INCLUDE_JVMCI
1838     if (compiler->is_jvmci()) {
1839       // JVMCI might not produce any stub sections
1840       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1841         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1842       } else {
1843         _exception_offset        = -1;
1844       }

1934     // Copy speculations to nmethod
1935     if (speculations_size() != 0) {
1936       memcpy(speculations_begin(), speculations, speculations_len);
1937     }
1938 #endif
1939     init_immutable_data_ref_count();
1940 
1941     post_init();
1942 
1943     // we use the information of entry points to find out if a method is
1944     // static or non static
1945     assert(compiler->is_c2() || compiler->is_jvmci() ||
1946            _method->is_static() == (entry_point() == verified_entry_point()),
1947            " entry points must be same for static methods and vice versa");
1948   }
1949 }
1950 
1951 // Print a short set of xml attributes to identify this nmethod.  The
1952 // output should be embedded in some other element.
1953 void nmethod::log_identity(xmlStream* log) const {
1954   assert(log->inside_attrs_or_error(), "printing attributes");
1955   log->print(" compile_id='%d'", compile_id());
1956   const char* nm_kind = compile_kind();
1957   log->print(" compile_kind='%s'", nm_kind);
1958   log->print(" compiler='%s'", compiler_name());
1959   if (TieredCompilation) {
1960     log->print(" compile_level='%d'", comp_level());
1961   }
1962 #if INCLUDE_JVMCI
1963   if (jvmci_nmethod_data() != nullptr) {
1964     const char* jvmci_name = jvmci_nmethod_data()->name();
1965     if (jvmci_name != nullptr) {
1966       log->print(" jvmci_mirror_name='");
1967       log->text("%s", jvmci_name);
1968       log->print("'");
1969     }
1970   }
1971 #endif
1972 }
1973 
1974 
// Print " <name>_offset='<delta>'" for the section <name>, but only when
// the section is non-empty; the offset is relative to this nmethod's start.
#define LOG_OFFSET(log, name)                    \
  if (p2i(name##_end()) - p2i(name##_begin())) \
    log->print(" " XSTR(name) "_offset='%zd'"    , \
               p2i(name##_begin()) - p2i(this))
1979 
1980 

2095       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2096       if (oop_maps() != nullptr) {
2097         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
2098         oop_maps()->print_on(tty);
2099         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2100       }
2101     }
2102 #endif
2103   } else {
2104     print(); // print the header part only.
2105   }
2106 
2107 #if defined(SUPPORT_DATA_STRUCTS)
2108   if (AbstractDisassembler::show_structs()) {
2109     methodHandle mh(Thread::current(), _method);
2110     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
2111       print_scopes();
2112       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2113     }
2114     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
2115       print_relocations_on(tty);
2116       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2117     }
2118     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
2119       print_dependencies_on(tty);
2120       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2121     }
2122     if (printmethod || PrintExceptionHandlers) {
2123       print_handler_table();
2124       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2125       print_nul_chk_table();
2126       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2127     }
2128 
2129     if (printmethod) {
2130       print_recorded_oops();
2131       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2132       print_recorded_metadata();
2133       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
2134     }
2135   }
2136 #endif
2137 
2138   if (xtty != nullptr) {
2139     xtty->tail("print_nmethod");
2140   }
2141 }
2142 
2143 
2144 // Promote one word from an assembly-time handle to a live embedded oop.
2145 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
2146   if (handle == nullptr ||
2147       // As a special case, IC oops are initialized to 1 or -1.
2148       handle == (jobject) Universe::non_oop_word()) {
2149     *(void**)dest = handle;
2150   } else {
2151     *dest = JNIHandles::resolve_non_null(handle);
2152   }
2153 }
2154 
2155 void nmethod::copy_values(GrowableArray<Handle>* array) {
2156   int length = array->length();
2157   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2158   oop* dest = oops_begin();
2159   for (int index = 0 ; index < length; index++) {
2160     dest[index] = array->at(index)();
2161   }
2162 }
2163 
2164 // Have to have the same name because it's called by a template
2165 void nmethod::copy_values(GrowableArray<jobject>* array) {
2166   int length = array->length();
2167   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
2168   oop* dest = oops_begin();
2169   for (int index = 0 ; index < length; index++) {
2170     initialize_immediate_oop(&dest[index], array->at(index));
2171   }
2172 
2173   // Now we can fix up all the oops in the code.  We need to do this
2174   // in the code because the assembler uses jobjects as placeholders.
2175   // The code and relocations have already been initialized by the
2176   // CodeBlob constructor, so it is valid even at this early point to
2177   // iterate over relocations and patch the code.
2178   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
2179 }
2180 
2181 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
2182   int length = array->length();

2190 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
2191   // re-patch all oop-bearing instructions, just in case some oops moved
2192   RelocIterator iter(this, begin, end);
2193   while (iter.next()) {
2194     if (iter.type() == relocInfo::oop_type) {
2195       oop_Relocation* reloc = iter.oop_reloc();
2196       if (initialize_immediates && reloc->oop_is_immediate()) {
2197         oop* dest = reloc->oop_addr();
2198         jobject obj = *reinterpret_cast<jobject*>(dest);
2199         initialize_immediate_oop(dest, obj);
2200       }
2201       // Refresh the oop-related bits of this instruction.
2202       reloc->fix_oop_relocation();
2203     } else if (iter.type() == relocInfo::metadata_type) {
2204       metadata_Relocation* reloc = iter.metadata_reloc();
2205       reloc->fix_metadata_relocation();
2206     }
2207   }
2208 }
2209 
2210 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
2211   RelocIterator iter(this);
2212   while (iter.next()) {
2213     if (iter.type() == relocInfo::oop_type) {
2214       oop_Relocation* reloc = iter.oop_reloc();
2215       if (reloc->oop_is_immediate()) {
2216         oop dest = reloc->oop_value();
2217         Handle h(thread, dest);
2218         oop_list.append(h);
2219       }
2220     } else if (iter.type() == relocInfo::metadata_type) {
2221       metadata_Relocation* reloc = iter.metadata_reloc();
2222       if (reloc->metadata_is_immediate()) {
2223         Metadata* m = reloc->metadata_value();
2224         metadata_list.append(m);
2225       }
2226     }
2227   }
2228 }
2229 
2230 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
2231   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
2232   intptr_t cbaddr = (intptr_t) nm;
2233   intptr_t offset = ((intptr_t) pc) - cbaddr;
2234 
2235   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
2236   if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
2237     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
2238   } else if (!nop->patch(oopmap_slot, offset)) {
2239     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
2240   }
2241 }
2242 
2243 void nmethod::finalize_relocations() {
2244   NoSafepointVerifier nsv;
2245 
2246   GrowableArray<NativeMovConstReg*> virtual_call_data;
2247 
2248   // Make sure that post call nops fill in nmethod offsets eagerly so
2249   // we don't have to race with deoptimization

2380   // be alive the previous completed marking cycle.
2381   return AtomicAccess::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2382 }
2383 
2384 void nmethod::inc_decompile_count() {
2385   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2386   // Could be gated by ProfileTraps, but do not bother...
2387 #if INCLUDE_JVMCI
2388   if (jvmci_skip_profile_deopt()) {
2389     return;
2390   }
2391 #endif
2392   Method* m = method();
2393   if (m == nullptr)  return;
2394   MethodData* mdo = m->method_data();
2395   if (mdo == nullptr)  return;
2396   // There is a benign race here.  See comments in methodData.hpp.
2397   mdo->inc_decompile_count();
2398 }
2399 
// Atomically increment this nmethod's profiling counter.
void nmethod::inc_method_profiling_count() {
  AtomicAccess::inc(&_method_profiling_count);
}
2403 
// Current value of the profiling counter. This is a plain (non-atomic)
// read while increments use AtomicAccess::inc, so a concurrent increment
// may not be immediately visible.
uint64_t nmethod::method_profiling_count() {
  return _method_profiling_count;
}
2407 
2408 bool nmethod::try_transition(signed char new_state_int) {
2409   signed char new_state = new_state_int;
2410   assert_lock_strong(NMethodState_lock);
2411   signed char old_state = _state;
2412   if (old_state >= new_state) {
2413     // Ensure monotonicity of transitions.
2414     return false;
2415   }
2416   AtomicAccess::store(&_state, new_state);
2417   return true;
2418 }
2419 
2420 void nmethod::invalidate_osr_method() {
2421   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2422   // Remove from list of active nmethods
2423   if (method() != nullptr) {
2424     method()->method_holder()->remove_osr_nmethod(this);
2425   }
2426 }
2427 

2437     }
2438   }
2439 
2440   ResourceMark rm;
2441   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2442   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2443 
2444   CompileTask::print_ul(this, ss.freeze());
2445   if (PrintCompilation) {
2446     print_on_with_msg(tty, ss.freeze());
2447   }
2448 }
2449 
2450 void nmethod::unlink_from_method() {
2451   if (method() != nullptr) {
2452     method()->unlink_code(this);
2453   }
2454 }
2455 
2456 // Invalidate code
2457 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2458   // This can be called while the system is already at a safepoint which is ok
2459   NoSafepointVerifier nsv;
2460 
2461   if (is_unloading()) {
2462     // If the nmethod is unloading, then it is already not entrant through
2463     // the nmethod entry barriers. No need to do anything; GC will unload it.
2464     return false;
2465   }
2466 
2467   if (AtomicAccess::load(&_state) == not_entrant) {
2468     // Avoid taking the lock if already in required state.
2469     // This is safe from races because the state is an end-state,
2470     // which the nmethod cannot back out of once entered.
2471     // No need for fencing either.
2472     return false;
2473   }
2474 
2475   MACOS_AARCH64_ONLY(os::thread_wx_enable_write());
2476 
2477   {

2501     }
2502 
2503     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2504     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2505       // If nmethod entry barriers are not supported, we won't mark
2506       // nmethods as on-stack when they become on-stack. So we
2507       // degrade to a less accurate flushing strategy, for now.
2508       mark_as_maybe_on_stack();
2509     }
2510 
2511     // Change state
2512     bool success = try_transition(not_entrant);
2513     assert(success, "Transition can't fail");
2514 
2515     // Log the transition once
2516     log_state_change(invalidation_reason);
2517 
2518     // Remove nmethod from method.
2519     unlink_from_method();
2520 
2521     if (!keep_aot_entry) {
2522       // Keep AOT code if it was simply replaced
2523       // otherwise make it not entrant too.
2524       AOTCodeCache::invalidate(_aot_code_entry);
2525     }
2526 
2527     CompileBroker::log_not_entrant(this);
2528   } // leave critical region under NMethodState_lock
2529 
2530 #if INCLUDE_JVMCI
2531   // Invalidate can't occur while holding the NMethodState_lock
2532   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2533   if (nmethod_data != nullptr) {
2534     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2535   }
2536 #endif
2537 
2538 #ifdef ASSERT
2539   if (is_osr_method() && method() != nullptr) {
2540     // Make sure osr nmethod is invalidated, i.e. not on the list
2541     bool found = method()->method_holder()->remove_osr_nmethod(this);
2542     assert(!found, "osr nmethod should have been invalidated");
2543   }
2544 #endif
2545 
2546   return true;
2547 }

2572     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2573             nmethod::InvalidationReason::UNLOADING_COLD :
2574             nmethod::InvalidationReason::UNLOADING);
2575   }
2576 #endif
2577 
2578   // Post before flushing as jmethodID is being used
2579   post_compiled_method_unload();
2580 
2581   // Register for flushing when it is safe. For concurrent class unloading,
2582   // that would be after the unloading handshake, and for STW class unloading
2583   // that would be when getting back to the VM thread.
2584   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2585 }
2586 
// Final deallocation of this nmethod: release all side data structures
// (exception cache, PcDesc container, IC data, possibly-shared immutable
// data), unregister from heap/code cache, then free the blob itself.
// Caller-visible order matters: side structures go first, CodeBlob::purge()
// last.
void nmethod::purge(bool unregister_nmethod) {

  MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);

  // completely deallocate this method
  Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));

  LogTarget(Debug, codecache) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    const char* method_name = method()->name()->as_C_string();
    const size_t codecache_capacity = CodeCache::capacity()/1024;
    const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
    ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
              "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
              compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
              codecache_capacity, codecache_free_space, method_name, compiler_name());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this, it
  // better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  while(ec != nullptr) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }
  if (_pc_desc_container != nullptr) {
    delete _pc_desc_container;
  }
  if (_compiled_ic_data != nullptr) {
    delete[] _compiled_ic_data;
  }

  // Immutable data may be reference-counted and shared with other nmethods.
  // NOTE(review): the AOT-cache membership test uses _oop_maps' address as
  // a proxy for the immutable data's location — presumably they are
  // co-located in the AOT archive; confirm against the allocation site.
  if (_immutable_data != blob_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
    // Free memory if this was the last nmethod referencing immutable data
    if (dec_immutable_data_ref_count() == 0) {
      os::free(_immutable_data);
    }

    _immutable_data = blob_end(); // Valid not null address
  }

  if (unregister_nmethod) {
    Universe::heap()->unregister_nmethod(this);
  }
  CodeCache::unregister_old_nmethod(this);

  JVMCI_ONLY( _metadata_size = 0; )
  CodeBlob::purge();
}
2640 
2641 oop nmethod::oop_at(int index) const {
2642   if (index == 0) {
2643     return nullptr;

2670         MethodHandles::clean_dependency_context(call_site);
2671       } else {
2672         InstanceKlass* ik = deps.context_type();
2673         if (ik == nullptr) {
2674           continue;  // ignore things like evol_method
2675         }
2676         // During GC liveness of dependee determines class that needs to be updated.
2677         // The GC may clean dependency contexts concurrently and in parallel.
2678         ik->clean_dependency_context();
2679       }
2680     }
2681   }
2682 }
2683 
// Bookkeeping after a successful compilation: record result sizes on the
// CompileTask, feed training data, post the JVMTI compiled-method-load
// event and compilation-log entry, and honor any print directives.
void nmethod::post_compiled_method(CompileTask* task) {
  task->mark_success();
  task->set_nm_content_size(content_size());
  task->set_nm_insts_size(insts_size());
  task->set_nm_total_size(total_size());

  CompileTrainingData* ctd = task->training_data();
  if (ctd != nullptr) {
    // Record inline code size during training to help inlining during production run
    precond(TrainingData::need_data()); // training run
    int inline_size = inline_instructions_size();
    if (inline_size < 0) inline_size = 0;  // clamp sentinel/negative sizes to zero
    ctd->set_inline_instructions_size(inline_size);
  }

  // task->is_aot_load() is true only for loaded AOT code.
  // nmethod::_aot_code_entry is set for loaded and stored AOT code
  // to invalidate the entry when nmethod is deoptimized.
  // VerifyAOTCode is option to not store in archive AOT code.
  guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");

  // JVMTI -- compiled method notification (must be done outside lock)
  post_compiled_method_load_event();

  if (CompilationLog::log() != nullptr) {
    CompilationLog::log()->log_nmethod(JavaThread::current(), this);
  }

  const DirectiveSet* directive = task->directive();
  maybe_print_nmethod(directive);
}
2715 
2716 #if INCLUDE_CDS
2717 static GrowableArrayCHeap<nmethod*, mtClassShared>* _delayed_compiled_method_load_events = nullptr;
2718 
2719 void nmethod::add_delayed_compiled_method_load_event(nmethod* nm) {
2720   precond(CDSConfig::is_using_aot_linked_classes());
2721   precond(!ServiceThread::has_started());
2722 
2723   // We are still in single threaded stage of VM bootstrap. No need to lock.
2724   if (_delayed_compiled_method_load_events == nullptr) {

3443 void nmethod::verify() {
3444   if (is_not_entrant())
3445     return;
3446 
3447   // assert(oopDesc::is_oop(method()), "must be valid");
3448 
3449   ResourceMark rm;
3450 
3451   if (!CodeCache::contains(this)) {
3452     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3453   }
3454 
3455   if(is_native_method() )
3456     return;
3457 
3458   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3459   if (nm != this) {
3460     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3461   }
3462 
3463   // Verification can triggered during shutdown after AOTCodeCache is closed.
3464   // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3465   if (!is_aot() || AOTCodeCache::is_on()) {
3466     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3467       if (! p->verify(this)) {
3468         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3469       }
3470     }

3471 
3472 #ifdef ASSERT
3473 #if INCLUDE_JVMCI
3474     {
3475       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3476       ImmutableOopMapSet* oms = oop_maps();
3477       ImplicitExceptionTable implicit_table(this);
3478       for (uint i = 0; i < implicit_table.len(); i++) {
3479         int exec_offset = (int) implicit_table.get_exec_offset(i);
3480         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3481           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3482           bool found = false;
3483           for (int i = 0, imax = oms->count(); i < imax; i++) {
3484             if (oms->pair_at(i)->pc_offset() == exec_offset) {
3485               found = true;
3486               break;
3487             }
3488           }
3489           assert(found, "missing oopmap");
3490         }

3491       }
3492     }

3493 #endif
3494 #endif
3495   }
3496 
3497   VerifyOopsClosure voc(this);
3498   oops_do(&voc);
3499   assert(voc.ok(), "embedded oops must be OK");
3500   Universe::heap()->verify_nmethod(this);
3501 
3502   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3503          nm->method()->external_name(), p2i(_oops_do_mark_link));
3504   if (!is_aot() || AOTCodeCache::is_on()) {
3505     verify_scopes();
3506   }
3507 
3508   CompiledICLocker nm_verify(this);
3509   VerifyMetadataClosure vmc;
3510   metadata_do(&vmc);
3511 }
3512 
3513 
3514 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3515 
3516   // Verify IC only when nmethod installation is finished.
3517   if (!is_not_installed()) {
3518     if (CompiledICLocker::is_safe(this)) {
3519       if (is_inline_cache) {
3520         CompiledIC_at(this, call_site);
3521       } else {
3522         CompiledDirectCall::at(call_site);
3523       }
3524     } else {
3525       CompiledICLocker ml_verify(this);
3526       if (is_inline_cache) {

3655                                              p2i(nul_chk_table_end()),
3656                                              nul_chk_table_size());
3657   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3658                                              p2i(handler_table_begin()),
3659                                              p2i(handler_table_end()),
3660                                              handler_table_size());
3661   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3662                                              p2i(scopes_pcs_begin()),
3663                                              p2i(scopes_pcs_end()),
3664                                              scopes_pcs_size());
3665   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3666                                              p2i(scopes_data_begin()),
3667                                              p2i(scopes_data_end()),
3668                                              scopes_data_size());
3669 #if INCLUDE_JVMCI
3670   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3671                                              p2i(speculations_begin()),
3672                                              p2i(speculations_end()),
3673                                              speculations_size());
3674 #endif
3675   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3676     _aot_code_entry->print(st);
3677   }
3678 }
3679 
// Print this nmethod's disassembled machine code to tty.
void nmethod::print_code() {
  ResourceMark m;
  ttyLocker ttyl;
  // Call the specialized decode method of this class.
  decode(tty);
}
3686 
3687 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3688 
3689 void nmethod::print_dependencies_on(outputStream* out) {
3690   ResourceMark rm;
3691   stringStream st;
3692   st.print_cr("Dependencies:");
3693   for (Dependencies::DepStream deps(this); deps.next(); ) {
3694     deps.print_dependency(&st);
3695     InstanceKlass* ctxk = deps.context_type();
3696     if (ctxk != nullptr) {
3697       if (ctxk->is_dependent_nmethod(this)) {

3757   st->print("scopes:");
3758   if (scopes_pcs_begin() < scopes_pcs_end()) {
3759     st->cr();
3760     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3761       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3762         continue;
3763 
3764       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3765       while (sd != nullptr) {
3766         sd->print_on(st, p);  // print output ends with a newline
3767         sd = sd->sender();
3768       }
3769     }
3770   } else {
3771     st->print_cr(" <list empty>");
3772   }
3773 }
3774 #endif
3775 
3776 #ifndef PRODUCT  // RelocIterator does support printing only then.
3777 void nmethod::print_relocations_on(outputStream* st) {
3778   ResourceMark m;       // in case methods get printed via the debugger
3779   st->print_cr("relocations:");
3780   RelocIterator iter(this);
3781   iter.print_on(st);
3782 }
3783 #endif
3784 
3785 void nmethod::print_pcs_on(outputStream* st) {
3786   ResourceMark m;       // in case methods get printed via debugger
3787   st->print("pc-bytecode offsets:");
3788   if (scopes_pcs_begin() < scopes_pcs_end()) {
3789     st->cr();
3790     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3791       p->print_on(st, this);  // print output ends with a newline
3792     }
3793   } else {
3794     st->print_cr(" <list empty>");
3795   }
3796 }
3797 
// Print this nmethod's exception handler table.
void nmethod::print_handler_table() {
  ExceptionHandlerTable(this).print(code_begin());
}
3801 

4616 void nmethod::update_speculation(JavaThread* thread) {
4617   jlong speculation = thread->pending_failed_speculation();
4618   if (speculation != 0) {
4619     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4620     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4621     thread->set_pending_failed_speculation(0);
4622   }
4623 }
4624 
4625 const char* nmethod::jvmci_name() {
4626   if (jvmci_nmethod_data() != nullptr) {
4627     return jvmci_nmethod_data()->name();
4628   }
4629   return nullptr;
4630 }
4631 
4632 bool nmethod::jvmci_skip_profile_deopt() const {
4633   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4634 }
4635 #endif
4636 
// Scrub runtime-only state before this nmethod is written to the AOT code
// archive: counters are zeroed and process-local pointers are nulled.
// NOTE(review): presumably these fields are re-established when the
// archived nmethod is loaded — confirm against the AOT load path.
void nmethod::prepare_for_archiving_impl() {
  CodeBlob::prepare_for_archiving_impl();
  _deoptimization_generation = 0;
  _gc_epoch = 0;
  _method_profiling_count = 0;
  // Pointers into the heap, metaspace, or C heap must not be archived.
  _osr_link = nullptr;
  _method = nullptr;
  _immutable_data = nullptr;
  _pc_desc_container = nullptr;
  _exception_cache = nullptr;
  _gc_data = nullptr;
  _oops_do_mark_link = nullptr;
  _compiled_ic_data = nullptr;
  _osr_entry_point = nullptr;
  _compile_id = -1;
  _deoptimization_status = not_marked;
  _is_unloading_state = 0;
  _state = not_installed;
}
< prev index next >