src/hotspot/share/code/nmethod.cpp

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"

  26 #include "code/codeCache.hpp"
  27 #include "code/compiledIC.hpp"
  28 #include "code/dependencies.hpp"
  29 #include "code/nativeInst.hpp"
  30 #include "code/nmethod.inline.hpp"
  31 #include "code/relocInfo.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compileTask.hpp"
  38 #include "compiler/compilerDirectives.hpp"
  39 #include "compiler/compilerOracle.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"

 771 
 772 void nmethod::clear_inline_caches() {
 773   assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
 774   RelocIterator iter(this);
 775   while (iter.next()) {
 776     iter.reloc()->clear_inline_cache();
 777   }
 778 }
 779 
 780 #ifdef ASSERT
 781 // Check class_loader is alive for this bit of metadata.
 782 class CheckClass : public MetadataClosure {
 783   void do_metadata(Metadata* md) {
 784     Klass* klass = nullptr;
 785     if (md->is_klass()) {
 786       klass = ((Klass*)md);
 787     } else if (md->is_method()) {
 788       klass = ((Method*)md)->method_holder();
 789     } else if (md->is_methodData()) {
 790       klass = ((MethodData*)md)->method()->method_holder();


 791     } else {
 792       md->print();
 793       ShouldNotReachHere();
 794     }
 795     assert(klass->is_loader_alive(), "must be alive");
 796   }
 797 };
 798 #endif // ASSERT
 799 
 800 
 801 static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
 802   ic->clean_metadata();
 803 }
 804 
 805 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
 806 template <typename CallsiteT>
 807 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
 808                                          bool clean_all) {
 809   CodeBlob* cb = CodeCache::find_blob(callsite->destination());
 810   if (!cb->is_nmethod()) {

1115     nm = new (native_nmethod_size, allow_NonNMethod_space)
1116     nmethod(method(), compiler_none, native_nmethod_size,
1117             compile_id, &offsets,
1118             code_buffer, frame_size,
1119             basic_lock_owner_sp_offset,
1120             basic_lock_sp_offset,
1121             oop_maps, mutable_data_size);
1122     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1123     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1124   }
1125 
1126   if (nm != nullptr) {
1127     // verify nmethod
1128     DEBUG_ONLY(nm->verify();) // might block
1129 
1130     nm->log_new_nmethod();
1131   }
1132   return nm;
1133 }
1134 
1135 nmethod* nmethod::new_nmethod(const methodHandle& method,
1136   int compile_id,
1137   int entry_bci,
1138   CodeOffsets* offsets,
1139   int orig_pc_offset,
1140   DebugInformationRecorder* debug_info,
1141   Dependencies* dependencies,
1142   CodeBuffer* code_buffer, int frame_size,
1143   OopMapSet* oop_maps,
1144   ExceptionHandlerTable* handler_table,
1145   ImplicitExceptionTable* nul_chk_table,
1146   AbstractCompiler* compiler,
1147   CompLevel comp_level

1148 #if INCLUDE_JVMCI
1149   , char* speculations,
1150   int speculations_len,
1151   JVMCINMethodData* jvmci_data
1152 #endif
1153 )
1154 {
1155   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1156   code_buffer->finalize_oop_references(method);
1157   // create nmethod
1158   nmethod* nm = nullptr;
1159   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1160 
1161   int immutable_data_size =
1162       adjust_pcs_size(debug_info->pcs_size())
1163     + align_up((int)dependencies->size_in_bytes(), oopSize)
1164     + align_up(handler_table->size_in_bytes()    , oopSize)
1165     + align_up(nul_chk_table->size_in_bytes()    , oopSize)
1166 #if INCLUDE_JVMCI
1167     + align_up(speculations_len                  , oopSize)

1171   // First, allocate space for immutable data in C heap.
1172   address immutable_data = nullptr;
1173   if (immutable_data_size > 0) {
1174     immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1175     if (immutable_data == nullptr) {
1176       vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1177       return nullptr;
1178     }
1179   }
1180 
1181   int mutable_data_size = required_mutable_data_size(code_buffer
1182     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1183 
1184   {
1185     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1186 
1187     nm = new (nmethod_size, comp_level)
1188     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1189             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1190             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1191             handler_table, nul_chk_table, compiler, comp_level
1192 #if INCLUDE_JVMCI
1193             , speculations,
1194             speculations_len,
1195             jvmci_data
1196 #endif
1197             );
1198 
1199     if (nm != nullptr) {
1200       // To make dependency checking during class loading fast, record
1201       // the nmethod dependencies in the classes it is dependent on.
1202       // This allows the dependency checking code to simply walk the
1203       // class hierarchy above the loaded class, checking only nmethods
1204       // which are dependent on those classes.  The slow way is to
1205       // check every nmethod for dependencies which makes it linear in
 1206       // the number of methods compiled.  For applications with a lot
 1207       // of classes the slow way is too slow.
1208       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1209         if (deps.type() == Dependencies::call_site_target_value) {
1210           // CallSite dependencies are managed on per-CallSite instance basis.
1211           oop call_site = deps.argument_oop(0);
1212           MethodHandles::add_dependent_nmethod(call_site, nm);
1213         } else {
1214           InstanceKlass* ik = deps.context_type();
1215           if (ik == nullptr) {
1216             continue;  // ignore things like evol_method
1217           }
1218           // record this nmethod as dependent on this klass
1219           ik->add_dependent_nmethod(nm);
1220         }
1221       }
 1222       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1223     }
1224   }
1225   // Do verification and logging outside CodeCache_lock.
 1226   if (nm != nullptr) {
1227     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1228     DEBUG_ONLY(nm->verify();)
1229     nm->log_new_nmethod();
1230   }
1231   return nm;
1232 }
1233 
1234 // Fill in default values for various fields
1235 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1236   // avoid uninitialized fields, even for short time periods
1237   _exception_cache            = nullptr;
1238   _gc_data                    = nullptr;
1239   _oops_do_mark_link          = nullptr;
1240   _compiled_ic_data           = nullptr;
1241 
1242   _is_unloading_state         = 0;
1243   _state                      = not_installed;
1244 
1245   _has_unsafe_access          = 0;
1246   _has_method_handle_invokes  = 0;
1247   _has_wide_vectors           = 0;
1248   _has_monitors               = 0;
1249   _has_scoped_access          = 0;
1250   _has_flushed_dependencies   = 0;
1251   _is_unlinked                = 0;
1252   _load_reported              = 0; // jvmti state


1253 

1254   _deoptimization_status      = not_marked;
1255 
1256   // SECT_CONSTS is first in code buffer so the offset should be 0.
1257   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1258   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1259 
1260   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1261 
1262   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1263   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1264 
1265   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1266 }
1267 
1268 // Post initialization
1269 void nmethod::post_init() {
1270   clear_unloading_state();
1271 
1272   finalize_relocations();
1273 

1306 
1307     _osr_entry_point         = nullptr;
1308     _pc_desc_container       = nullptr;
1309     _entry_bci               = InvocationEntryBci;
1310     _compile_id              = compile_id;
1311     _comp_level              = CompLevel_none;
1312     _compiler_type           = type;
1313     _orig_pc_offset          = 0;
1314     _num_stack_arg_slots     = 0;
1315 
1316     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1317       // Continuation enter intrinsic
1318       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1319     } else {
1320       _exception_offset      = 0;
1321     }
1322     // Native wrappers do not have deopt handlers. Make the values
1323     // something that will never match a pc like the nmethod vtable entry
1324     _deopt_handler_offset    = 0;
1325     _deopt_mh_handler_offset = 0;


1326     _unwind_handler_offset   = 0;
1327 
1328     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1329     uint16_t metadata_size;
1330     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1331     JVMCI_ONLY( _metadata_size = metadata_size; )
1332     assert(_mutable_data_size == _relocation_size + metadata_size,
1333            "wrong mutable data size: %d != %d + %d",
1334            _mutable_data_size, _relocation_size, metadata_size);
1335 
1336     // native wrapper does not have read-only data but we need unique not null address
1337     _immutable_data          = blob_end();
1338     _immutable_data_size     = 0;
1339     _nul_chk_table_offset    = 0;
1340     _handler_table_offset    = 0;
1341     _scopes_pcs_offset       = 0;
1342     _scopes_data_offset      = 0;
1343 #if INCLUDE_JVMCI
1344     _speculations_offset     = 0;
1345 #endif

1365     // This is both handled in decode2(), called via print_code() -> decode()
1366     if (PrintNativeNMethods) {
1367       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1368       print_code();
1369       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1370 #if defined(SUPPORT_DATA_STRUCTS)
1371       if (AbstractDisassembler::show_structs()) {
1372         if (oop_maps != nullptr) {
1373           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1374           oop_maps->print_on(tty);
1375           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1376         }
1377       }
1378 #endif
1379     } else {
1380       print(); // print the header part only.
1381     }
1382 #if defined(SUPPORT_DATA_STRUCTS)
1383     if (AbstractDisassembler::show_structs()) {
1384       if (PrintRelocations) {
1385         print_relocations();
1386         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1387       }
1388     }
1389 #endif
1390     if (xtty != nullptr) {
1391       xtty->tail("print_native_nmethod");
1392     }
1393   }
1394 }
1395 
1396 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
1397   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
1398 }
1399 
1400 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1401   // Try MethodNonProfiled and MethodProfiled.
1402   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1403   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1404   // Try NonNMethod or give up.
1405   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);

1409 nmethod::nmethod(
1410   Method* method,
1411   CompilerType type,
1412   int nmethod_size,
1413   int immutable_data_size,
1414   int mutable_data_size,
1415   int compile_id,
1416   int entry_bci,
1417   address immutable_data,
1418   CodeOffsets* offsets,
1419   int orig_pc_offset,
1420   DebugInformationRecorder* debug_info,
1421   Dependencies* dependencies,
1422   CodeBuffer *code_buffer,
1423   int frame_size,
1424   OopMapSet* oop_maps,
1425   ExceptionHandlerTable* handler_table,
1426   ImplicitExceptionTable* nul_chk_table,
1427   AbstractCompiler* compiler,
1428   CompLevel comp_level

1429 #if INCLUDE_JVMCI
1430   , char* speculations,
1431   int speculations_len,
1432   JVMCINMethodData* jvmci_data
1433 #endif
1434   )
1435   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1436              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1437   _deoptimization_generation(0),
1438   _gc_epoch(CodeCache::gc_epoch()),
1439   _method(method),
1440   _osr_link(nullptr)
1441 {
1442   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1443   {
1444     DEBUG_ONLY(NoSafepointVerifier nsv;)
1445     assert_locked_or_safepoint(CodeCache_lock);
1446 
1447     init_defaults(code_buffer, offsets);


1448 
1449     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1450     _entry_bci       = entry_bci;
1451     _compile_id      = compile_id;
1452     _comp_level      = comp_level;
1453     _compiler_type   = type;
1454     _orig_pc_offset  = orig_pc_offset;
1455 
1456     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1457 
1458     set_ctable_begin(header_begin() + content_offset());
1459 
1460 #if INCLUDE_JVMCI
1461     if (compiler->is_jvmci()) {
1462       // JVMCI might not produce any stub sections
1463       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1464         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1465       } else {
1466         _exception_offset        = -1;
1467       }

1558 #if INCLUDE_JVMCI
1559     // Copy speculations to nmethod
1560     if (speculations_size() != 0) {
1561       memcpy(speculations_begin(), speculations, speculations_len);
1562     }
1563 #endif
1564 
1565     post_init();
1566 
1567     // we use the information of entry points to find out if a method is
1568     // static or non static
1569     assert(compiler->is_c2() || compiler->is_jvmci() ||
1570            _method->is_static() == (entry_point() == verified_entry_point()),
1571            " entry points must be same for static methods and vice versa");
1572   }
1573 }
1574 
1575 // Print a short set of xml attributes to identify this nmethod.  The
1576 // output should be embedded in some other element.
1577 void nmethod::log_identity(xmlStream* log) const {
1578   log->print(" compile_id='%d'", compile_id());

1579   const char* nm_kind = compile_kind();
1580   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1581   log->print(" compiler='%s'", compiler_name());
1582   if (TieredCompilation) {
1583     log->print(" level='%d'", comp_level());
1584   }
1585 #if INCLUDE_JVMCI
1586   if (jvmci_nmethod_data() != nullptr) {
1587     const char* jvmci_name = jvmci_nmethod_data()->name();
1588     if (jvmci_name != nullptr) {
1589       log->print(" jvmci_mirror_name='");
1590       log->text("%s", jvmci_name);
1591       log->print("'");
1592     }
1593   }
1594 #endif
1595 }
1596 
1597 
1598 #define LOG_OFFSET(log, name)                    \
1599   if (p2i(name##_end()) - p2i(name##_begin())) \
1600     log->print(" " XSTR(name) "_offset='%zd'"    , \
1601                p2i(name##_begin()) - p2i(this))
1602 
1603 

1688       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1689       if (oop_maps() != nullptr) {
1690         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1691         oop_maps()->print_on(tty);
1692         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1693       }
1694     }
1695 #endif
1696   } else {
1697     print(); // print the header part only.
1698   }
1699 
1700 #if defined(SUPPORT_DATA_STRUCTS)
1701   if (AbstractDisassembler::show_structs()) {
1702     methodHandle mh(Thread::current(), _method);
1703     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1704       print_scopes();
1705       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1706     }
1707     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1708       print_relocations();
1709       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1710     }
1711     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1712       print_dependencies_on(tty);
1713       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1714     }
1715     if (printmethod || PrintExceptionHandlers) {
1716       print_handler_table();
1717       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1718       print_nul_chk_table();
1719       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1720     }
1721 
1722     if (printmethod) {
1723       print_recorded_oops();
1724       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1725       print_recorded_metadata();
1726       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1727     }
1728   }
1729 #endif
1730 
1731   if (xtty != nullptr) {
1732     xtty->tail("print_nmethod");
1733   }
1734 }
1735 
1736 
1737 // Promote one word from an assembly-time handle to a live embedded oop.
1738 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1739   if (handle == nullptr ||
1740       // As a special case, IC oops are initialized to 1 or -1.
1741       handle == (jobject) Universe::non_oop_word()) {
1742     *(void**)dest = handle;
1743   } else {
1744     *dest = JNIHandles::resolve_non_null(handle);
1745   }
1746 }
 1747 
1748 
1749 // Have to have the same name because it's called by a template
1750 void nmethod::copy_values(GrowableArray<jobject>* array) {
1751   int length = array->length();
1752   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1753   oop* dest = oops_begin();
1754   for (int index = 0 ; index < length; index++) {
1755     initialize_immediate_oop(&dest[index], array->at(index));
1756   }
1757 
1758   // Now we can fix up all the oops in the code.  We need to do this
1759   // in the code because the assembler uses jobjects as placeholders.
1760   // The code and relocations have already been initialized by the
1761   // CodeBlob constructor, so it is valid even at this early point to
1762   // iterate over relocations and patch the code.
1763   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
1764 }
1765 
1766 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1767   int length = array->length();

1775 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
1776   // re-patch all oop-bearing instructions, just in case some oops moved
1777   RelocIterator iter(this, begin, end);
1778   while (iter.next()) {
1779     if (iter.type() == relocInfo::oop_type) {
1780       oop_Relocation* reloc = iter.oop_reloc();
1781       if (initialize_immediates && reloc->oop_is_immediate()) {
1782         oop* dest = reloc->oop_addr();
1783         jobject obj = *reinterpret_cast<jobject*>(dest);
1784         initialize_immediate_oop(dest, obj);
1785       }
1786       // Refresh the oop-related bits of this instruction.
1787       reloc->fix_oop_relocation();
1788     } else if (iter.type() == relocInfo::metadata_type) {
1789       metadata_Relocation* reloc = iter.metadata_reloc();
1790       reloc->fix_metadata_relocation();
1791     }
1792   }
1793 }
 1794 
1795 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
1796   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
1797   intptr_t cbaddr = (intptr_t) nm;
1798   intptr_t offset = ((intptr_t) pc) - cbaddr;
1799 
1800   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
1801   if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
1802     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
1803   } else if (!nop->patch(oopmap_slot, offset)) {
1804     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
1805   }
1806 }
1807 
1808 void nmethod::finalize_relocations() {
1809   NoSafepointVerifier nsv;
1810 
1811   GrowableArray<NativeMovConstReg*> virtual_call_data;
1812 
1813   // Make sure that post call nops fill in nmethod offsets eagerly so
1814   // we don't have to race with deoptimization

1936   Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1937 }
1938 
1939 bool nmethod::is_maybe_on_stack() {
1940   // If the condition below is true, it means that the nmethod was found to
1941   // be alive the previous completed marking cycle.
1942   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1943 }
1944 
1945 void nmethod::inc_decompile_count() {
1946   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1947   // Could be gated by ProfileTraps, but do not bother...
1948   Method* m = method();
1949   if (m == nullptr)  return;
1950   MethodData* mdo = m->method_data();
1951   if (mdo == nullptr)  return;
1952   // There is a benign race here.  See comments in methodData.hpp.
1953   mdo->inc_decompile_count();
1954 }
 1955 
1956 bool nmethod::try_transition(signed char new_state_int) {
1957   signed char new_state = new_state_int;
1958   assert_lock_strong(NMethodState_lock);
1959   signed char old_state = _state;
1960   if (old_state >= new_state) {
1961     // Ensure monotonicity of transitions.
1962     return false;
1963   }
1964   Atomic::store(&_state, new_state);
1965   return true;
1966 }
1967 
1968 void nmethod::invalidate_osr_method() {
1969   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1970   // Remove from list of active nmethods
1971   if (method() != nullptr) {
1972     method()->method_holder()->remove_osr_nmethod(this);
1973   }
1974 }
1975 

1987     }
1988   }
1989 
1990   ResourceMark rm;
1991   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
1992   ss.print("made not entrant: %s", reason);
1993 
1994   CompileTask::print_ul(this, ss.freeze());
1995   if (PrintCompilation) {
1996     print_on_with_msg(tty, ss.freeze());
1997   }
1998 }
1999 
2000 void nmethod::unlink_from_method() {
2001   if (method() != nullptr) {
2002     method()->unlink_code(this);
2003   }
2004 }
2005 
2006 // Invalidate code
2007 bool nmethod::make_not_entrant(const char* reason) {
2008   assert(reason != nullptr, "Must provide a reason");
2009 
2010   // This can be called while the system is already at a safepoint which is ok
2011   NoSafepointVerifier nsv;
2012 
2013   if (is_unloading()) {
2014     // If the nmethod is unloading, then it is already not entrant through
2015     // the nmethod entry barriers. No need to do anything; GC will unload it.
2016     return false;
2017   }
2018 
2019   if (Atomic::load(&_state) == not_entrant) {
2020     // Avoid taking the lock if already in required state.
2021     // This is safe from races because the state is an end-state,
2022     // which the nmethod cannot back out of once entered.
2023     // No need for fencing either.
2024     return false;
2025   }
2026 
2027   {

2063     }
2064 
2065     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2066     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2067       // If nmethod entry barriers are not supported, we won't mark
2068       // nmethods as on-stack when they become on-stack. So we
2069       // degrade to a less accurate flushing strategy, for now.
2070       mark_as_maybe_on_stack();
2071     }
2072 
2073     // Change state
2074     bool success = try_transition(not_entrant);
2075     assert(success, "Transition can't fail");
2076 
2077     // Log the transition once
2078     log_state_change(reason);
2079 
2080     // Remove nmethod from method.
2081     unlink_from_method();
 2082 
2083   } // leave critical region under NMethodState_lock
2084 
2085 #if INCLUDE_JVMCI
2086   // Invalidate can't occur while holding the NMethodState_lock
2087   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2088   if (nmethod_data != nullptr) {
2089     nmethod_data->invalidate_nmethod_mirror(this);
2090   }
2091 #endif
2092 
2093 #ifdef ASSERT
2094   if (is_osr_method() && method() != nullptr) {
2095     // Make sure osr nmethod is invalidated, i.e. not on the list
2096     bool found = method()->method_holder()->remove_osr_nmethod(this);
2097     assert(!found, "osr nmethod should have been invalidated");
2098   }
2099 #endif
2100 
2101   return true;
2102 }

2143 
2144   // completely deallocate this method
2145   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2146   log_debug(codecache)("*flushing %s nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT
2147                        "/Free CodeCache:%zuKb",
2148                        is_osr_method() ? "osr" : "",_compile_id, p2i(this), CodeCache::blob_count(),
2149                        CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024);
2150 
2151   // We need to deallocate any ExceptionCache data.
2152   // Note that we do not need to grab the nmethod lock for this, it
2153   // better be thread safe if we're disposing of it!
2154   ExceptionCache* ec = exception_cache();
2155   while(ec != nullptr) {
2156     ExceptionCache* next = ec->next();
2157     delete ec;
2158     ec = next;
2159   }
2160   if (_pc_desc_container != nullptr) {
2161     delete _pc_desc_container;
2162   }
2163   delete[] _compiled_ic_data;


2164 
2165   if (_immutable_data != blob_end()) {
2166     os::free(_immutable_data);
2167     _immutable_data = blob_end(); // Valid not null address
2168   }
2169   if (unregister_nmethod) {
2170     Universe::heap()->unregister_nmethod(this);
2171   }
2172   CodeCache::unregister_old_nmethod(this);
2173 
2174   CodeBlob::purge();
2175 }
2176 
2177 oop nmethod::oop_at(int index) const {
2178   if (index == 0) {
2179     return nullptr;
2180   }
2181 
2182   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2183   return bs_nm->oop_load_no_keepalive(this, index);
2184 }
2185 

2206         MethodHandles::clean_dependency_context(call_site);
2207       } else {
2208         InstanceKlass* ik = deps.context_type();
2209         if (ik == nullptr) {
2210           continue;  // ignore things like evol_method
2211         }
2212         // During GC liveness of dependee determines class that needs to be updated.
2213         // The GC may clean dependency contexts concurrently and in parallel.
2214         ik->clean_dependency_context();
2215       }
2216     }
2217   }
2218 }
2219 
2220 void nmethod::post_compiled_method(CompileTask* task) {
2221   task->mark_success();
2222   task->set_nm_content_size(content_size());
2223   task->set_nm_insts_size(insts_size());
2224   task->set_nm_total_size(total_size());
 2225 
2226   // JVMTI -- compiled method notification (must be done outside lock)
2227   post_compiled_method_load_event();
2228 
2229   if (CompilationLog::log() != nullptr) {
2230     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2231   }
2232 
2233   const DirectiveSet* directive = task->directive();
2234   maybe_print_nmethod(directive);
2235 }
2236 
2237 // ------------------------------------------------------------------
2238 // post_compiled_method_load_event
2239 // new method for install_code() path
2240 // Transfer information from compilation to jvmti
2241 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2242   // This is a bad time for a safepoint.  We don't want
2243   // this nmethod to get unloaded while we're queueing the event.
2244   NoSafepointVerifier nsv;
2245 

2937 
2938   // Make sure all the entry points are correctly aligned for patching.
2939   NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
2940 
2941   // assert(oopDesc::is_oop(method()), "must be valid");
2942 
2943   ResourceMark rm;
2944 
2945   if (!CodeCache::contains(this)) {
2946     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
2947   }
2948 
2949   if(is_native_method() )
2950     return;
2951 
2952   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
2953   if (nm != this) {
2954     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
2955   }
2956 
2957   for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2958     if (! p->verify(this)) {
2959       tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));




2960     }
2961   }
2962 
2963 #ifdef ASSERT
2964 #if INCLUDE_JVMCI
2965   {
2966     // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
2967     ImmutableOopMapSet* oms = oop_maps();
2968     ImplicitExceptionTable implicit_table(this);
2969     for (uint i = 0; i < implicit_table.len(); i++) {
2970       int exec_offset = (int) implicit_table.get_exec_offset(i);
2971       if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
2972         assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
2973         bool found = false;
2974         for (int i = 0, imax = oms->count(); i < imax; i++) {
2975           if (oms->pair_at(i)->pc_offset() == exec_offset) {
2976             found = true;
2977             break;

2978           }

2979         }
2980         assert(found, "missing oopmap");
2981       }
2982     }
2983   }
2984 #endif
2985 #endif

2986 
2987   VerifyOopsClosure voc(this);
2988   oops_do(&voc);
2989   assert(voc.ok(), "embedded oops must be OK");
2990   Universe::heap()->verify_nmethod(this);
2991 
2992   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
2993          nm->method()->external_name(), p2i(_oops_do_mark_link));
2994   verify_scopes();


2995 
2996   CompiledICLocker nm_verify(this);
2997   VerifyMetadataClosure vmc;
2998   metadata_do(&vmc);
2999 }
3000 
3001 
3002 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3003 
3004   // Verify IC only when nmethod installation is finished.
3005   if (!is_not_installed()) {
3006     if (CompiledICLocker::is_safe(this)) {
3007       if (is_inline_cache) {
3008         CompiledIC_at(this, call_site);
3009       } else {
3010         CompiledDirectCall::at(call_site);
3011       }
3012     } else {
3013       CompiledICLocker ml_verify(this);
3014       if (is_inline_cache) {

3143                                              p2i(nul_chk_table_end()),
3144                                              nul_chk_table_size());
3145   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3146                                              p2i(handler_table_begin()),
3147                                              p2i(handler_table_end()),
3148                                              handler_table_size());
3149   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3150                                              p2i(scopes_pcs_begin()),
3151                                              p2i(scopes_pcs_end()),
3152                                              scopes_pcs_size());
3153   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3154                                              p2i(scopes_data_begin()),
3155                                              p2i(scopes_data_end()),
3156                                              scopes_data_size());
3157 #if INCLUDE_JVMCI
3158   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3159                                              p2i(speculations_begin()),
3160                                              p2i(speculations_end()),
3161                                              speculations_size());
3162 #endif



3163 }
3164 
3165 void nmethod::print_code() {
3166   ResourceMark m;
3167   ttyLocker ttyl;
3168   // Call the specialized decode method of this class.
3169   decode(tty);
3170 }
3171 
3172 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3173 
3174 void nmethod::print_dependencies_on(outputStream* out) {
3175   ResourceMark rm;
3176   stringStream st;
3177   st.print_cr("Dependencies:");
3178   for (Dependencies::DepStream deps(this); deps.next(); ) {
3179     deps.print_dependency(&st);
3180     InstanceKlass* ctxk = deps.context_type();
3181     if (ctxk != nullptr) {
3182       if (ctxk->is_dependent_nmethod(this)) {

3242   st->print("scopes:");
3243   if (scopes_pcs_begin() < scopes_pcs_end()) {
3244     st->cr();
3245     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3246       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3247         continue;
3248 
3249       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3250       while (sd != nullptr) {
3251         sd->print_on(st, p);  // print output ends with a newline
3252         sd = sd->sender();
3253       }
3254     }
3255   } else {
3256     st->print_cr(" <list empty>");
3257   }
3258 }
3259 #endif
3260 
3261 #ifndef PRODUCT  // RelocIterator does support printing only then.
3262 void nmethod::print_relocations() {
3263   ResourceMark m;       // in case methods get printed via the debugger
3264   tty->print_cr("relocations:");
3265   RelocIterator iter(this);
3266   iter.print_on(tty);
3267 }
3268 #endif
3269 
3270 void nmethod::print_pcs_on(outputStream* st) {
3271   ResourceMark m;       // in case methods get printed via debugger
3272   st->print("pc-bytecode offsets:");
3273   if (scopes_pcs_begin() < scopes_pcs_end()) {
3274     st->cr();
3275     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3276       p->print_on(st, this);  // print output ends with a newline
3277     }
3278   } else {
3279     st->print_cr(" <list empty>");
3280   }
3281 }
3282 
3283 void nmethod::print_handler_table() {
3284   ExceptionHandlerTable(this).print(code_begin());
3285 }
3286 

4052 
4053 #endif // !PRODUCT
4054 
4055 #if INCLUDE_JVMCI
4056 void nmethod::update_speculation(JavaThread* thread) {
4057   jlong speculation = thread->pending_failed_speculation();
4058   if (speculation != 0) {
4059     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4060     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4061     thread->set_pending_failed_speculation(0);
4062   }
4063 }
4064 
4065 const char* nmethod::jvmci_name() {
4066   if (jvmci_nmethod_data() != nullptr) {
4067     return jvmci_nmethod_data()->name();
4068   }
4069   return nullptr;
4070 }
 4071 #endif

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "code/aotCodeCache.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/relocInfo.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compileTask.hpp"
  39 #include "compiler/compilerDirectives.hpp"
  40 #include "compiler/compilerOracle.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"

 772 
 773 void nmethod::clear_inline_caches() {
 774   assert(SafepointSynchronize::is_at_safepoint(), "clearing of IC's only allowed at safepoint");
 775   RelocIterator iter(this);
 776   while (iter.next()) {
 777     iter.reloc()->clear_inline_cache();
 778   }
 779 }
 780 
 781 #ifdef ASSERT
 782 // Check class_loader is alive for this bit of metadata.
 783 class CheckClass : public MetadataClosure {
 784   void do_metadata(Metadata* md) {
 785     Klass* klass = nullptr;
 786     if (md->is_klass()) {
 787       klass = ((Klass*)md);
 788     } else if (md->is_method()) {
 789       klass = ((Method*)md)->method_holder();
 790     } else if (md->is_methodData()) {
 791       klass = ((MethodData*)md)->method()->method_holder();
 792     } else if (md->is_methodCounters()) {
 793       klass = ((MethodCounters*)md)->method()->method_holder();
 794     } else {
 795       md->print();
 796       ShouldNotReachHere();
 797     }
 798     assert(klass->is_loader_alive(), "must be alive");
 799   }
 800 };
 801 #endif // ASSERT
 802 
 803 
 804 static void clean_ic_if_metadata_is_dead(CompiledIC *ic) {
 805   ic->clean_metadata();
 806 }
 807 
 808 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
 809 template <typename CallsiteT>
 810 static void clean_if_nmethod_is_unloaded(CallsiteT* callsite, nmethod* from,
 811                                          bool clean_all) {
 812   CodeBlob* cb = CodeCache::find_blob(callsite->destination());
 813   if (!cb->is_nmethod()) {

1118     nm = new (native_nmethod_size, allow_NonNMethod_space)
1119     nmethod(method(), compiler_none, native_nmethod_size,
1120             compile_id, &offsets,
1121             code_buffer, frame_size,
1122             basic_lock_owner_sp_offset,
1123             basic_lock_sp_offset,
1124             oop_maps, mutable_data_size);
1125     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1126     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1127   }
1128 
1129   if (nm != nullptr) {
1130     // verify nmethod
1131     DEBUG_ONLY(nm->verify();) // might block
1132 
1133     nm->log_new_nmethod();
1134   }
1135   return nm;
1136 }
1137 
1138 void nmethod::record_nmethod_dependency() {
1139   // To make dependency checking during class loading fast, record
1140   // the nmethod dependencies in the classes it is dependent on.
1141   // This allows the dependency checking code to simply walk the
1142   // class hierarchy above the loaded class, checking only nmethods
1143   // which are dependent on those classes.  The slow way is to
1144   // check every nmethod for dependencies which makes it linear in
 1145   // the number of methods compiled.  For applications with a lot
 1146   // of classes the slow way is too slow.
1147   for (Dependencies::DepStream deps(this); deps.next(); ) {
1148     if (deps.type() == Dependencies::call_site_target_value) {
1149       // CallSite dependencies are managed on per-CallSite instance basis.
1150       oop call_site = deps.argument_oop(0);
1151       MethodHandles::add_dependent_nmethod(call_site, this);
1152     } else {
1153       InstanceKlass* ik = deps.context_type();
1154       if (ik == nullptr) {
1155         continue;  // ignore things like evol_method
1156       }
1157       // record this nmethod as dependent on this klass
1158       ik->add_dependent_nmethod(this);
1159     }
1160   }
1161 }
1162 
1163 nmethod* nmethod::new_nmethod(const methodHandle& method,
1164   int compile_id,
1165   int entry_bci,
1166   CodeOffsets* offsets,
1167   int orig_pc_offset,
1168   DebugInformationRecorder* debug_info,
1169   Dependencies* dependencies,
1170   CodeBuffer* code_buffer, int frame_size,
1171   OopMapSet* oop_maps,
1172   ExceptionHandlerTable* handler_table,
1173   ImplicitExceptionTable* nul_chk_table,
1174   AbstractCompiler* compiler,
1175   CompLevel comp_level
1176   , AOTCodeEntry* aot_code_entry
1177 #if INCLUDE_JVMCI
1178   , char* speculations,
1179   int speculations_len,
1180   JVMCINMethodData* jvmci_data
1181 #endif
1182 )
1183 {
1184   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1185   code_buffer->finalize_oop_references(method);
1186   // create nmethod
1187   nmethod* nm = nullptr;
1188   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1189 
1190   int immutable_data_size =
1191       adjust_pcs_size(debug_info->pcs_size())
1192     + align_up((int)dependencies->size_in_bytes(), oopSize)
1193     + align_up(handler_table->size_in_bytes()    , oopSize)
1194     + align_up(nul_chk_table->size_in_bytes()    , oopSize)
1195 #if INCLUDE_JVMCI
1196     + align_up(speculations_len                  , oopSize)

1200   // First, allocate space for immutable data in C heap.
1201   address immutable_data = nullptr;
1202   if (immutable_data_size > 0) {
1203     immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1204     if (immutable_data == nullptr) {
1205       vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1206       return nullptr;
1207     }
1208   }
1209 
1210   int mutable_data_size = required_mutable_data_size(code_buffer
1211     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1212 
1213   {
1214     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1215 
1216     nm = new (nmethod_size, comp_level)
1217     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1218             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1219             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1220             handler_table, nul_chk_table, compiler, comp_level, aot_code_entry
1221 #if INCLUDE_JVMCI
1222             , speculations,
1223             speculations_len,
1224             jvmci_data
1225 #endif
1226             );
1227 
1228     if (nm != nullptr) {
1229       nm->record_nmethod_dependency();
1230       NOT_PRODUCT(note_java_nmethod(nm));
1231     }
1232   }
1233   // Do verification and logging outside CodeCache_lock.
1234   if (nm != nullptr) {
1235 
1236 #ifdef ASSERT
1237     LogTarget(Debug, aot, codecache, nmethod) log;
1238     if (log.is_enabled()) {
1239       LogStream out(log);
1240       out.print_cr("== new_nmethod 2");
1241       FlagSetting fs(PrintRelocations, true);
1242       nm->print_on_impl(&out);
1243       nm->decode(&out);
1244     }
1245 #endif
1246 
1247     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1248     DEBUG_ONLY(nm->verify();)
1249     nm->log_new_nmethod();
1250   }
1251   return nm;
1252 }
1253 
1254 nmethod* nmethod::restore(address code_cache_buffer,
1255                           const methodHandle& method,
1256                           int compile_id,
1257                           address reloc_data,
1258                           GrowableArray<Handle>& oop_list,
1259                           GrowableArray<Metadata*>& metadata_list,
1260                           ImmutableOopMapSet* oop_maps,
1261                           address immutable_data,
1262                           GrowableArray<Handle>& reloc_imm_oop_list,
1263                           GrowableArray<Metadata*>& reloc_imm_metadata_list,
1264                           AOTCodeReader* aot_code_reader)
1265 {
1266   CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
1267   nmethod* nm = (nmethod*)code_cache_buffer;
1268   nm->set_method(method());
1269   nm->_compile_id = compile_id;
1270   nm->set_immutable_data(immutable_data);
1271   nm->copy_values(&oop_list);
1272   nm->copy_values(&metadata_list);
1273 
1274   aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);
1275 
1276 #ifndef PRODUCT
1277   nm->asm_remarks().init();
1278   aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
1279   nm->dbg_strings().init();
1280   aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
1281 #endif
1282 
1283   // Flush the code block
1284   ICache::invalidate_range(nm->code_begin(), nm->code_size());
1285 
1286   // Create cache after PcDesc data is copied - it will be used to initialize cache
1287   nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());
1288 
1289   nm->set_aot_code_entry(aot_code_reader->aot_code_entry());
1290 
1291   nm->post_init();
1292   return nm;
1293 }
1294 
1295 nmethod* nmethod::new_nmethod(nmethod* archived_nm,
1296                               const methodHandle& method,
1297                               AbstractCompiler* compiler,
1298                               int compile_id,
1299                               address reloc_data,
1300                               GrowableArray<Handle>& oop_list,
1301                               GrowableArray<Metadata*>& metadata_list,
1302                               ImmutableOopMapSet* oop_maps,
1303                               address immutable_data,
1304                               GrowableArray<Handle>& reloc_imm_oop_list,
1305                               GrowableArray<Metadata*>& reloc_imm_metadata_list,
1306                               AOTCodeReader* aot_code_reader)
1307 {
1308   nmethod* nm = nullptr;
1309   int nmethod_size = archived_nm->size();
1310   // create nmethod
1311   {
1312     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1313     address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
1314     if (code_cache_buffer != nullptr) {
1315       nm = archived_nm->restore(code_cache_buffer,
1316                                 method,
1317                                 compile_id,
1318                                 reloc_data,
1319                                 oop_list,
1320                                 metadata_list,
1321                                 oop_maps,
1322                                 immutable_data,
1323                                 reloc_imm_oop_list,
1324                                 reloc_imm_metadata_list,
1325                                 aot_code_reader);
1326       nm->record_nmethod_dependency();
1327       NOT_PRODUCT(note_java_nmethod(nm));
1328     }
1329   }
1330   // Do verification and logging outside CodeCache_lock.
1331   if (nm != nullptr) {
1332 #ifdef ASSERT
1333     LogTarget(Debug, aot, codecache, nmethod) log;
1334     if (log.is_enabled()) {
1335       LogStream out(log);
1336       out.print_cr("== new_nmethod 2");
1337       FlagSetting fs(PrintRelocations, true);
1338       nm->print_on_impl(&out);
1339       nm->decode(&out);
1340     }
1341 #endif
1342     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1343     DEBUG_ONLY(nm->verify();)
1344     nm->log_new_nmethod();
1345   }
1346   return nm;
1347 }
1348 
1349 // Fill in default values for various fields
1350 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1351   // avoid uninitialized fields, even for short time periods
1352   _exception_cache            = nullptr;
1353   _gc_data                    = nullptr;
1354   _oops_do_mark_link          = nullptr;
1355   _compiled_ic_data           = nullptr;
1356 
1357   _is_unloading_state         = 0;
1358   _state                      = not_installed;
1359 
1360   _has_unsafe_access          = 0;
1361   _has_method_handle_invokes  = 0;
1362   _has_wide_vectors           = 0;
1363   _has_monitors               = 0;
1364   _has_scoped_access          = 0;
1365   _has_flushed_dependencies   = 0;
1366   _is_unlinked                = 0;
1367   _load_reported              = 0; // jvmti state
1368   _preloaded                  = 0;
1369   _has_clinit_barriers        = 0;
1370 
1371   _used                       = false;
1372   _deoptimization_status      = not_marked;
1373 
1374   // SECT_CONSTS is first in code buffer so the offset should be 0.
1375   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1376   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1377 
1378   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1379 
1380   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1381   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1382 
1383   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1384 }
1385 
1386 // Post initialization
1387 void nmethod::post_init() {
1388   clear_unloading_state();
1389 
1390   finalize_relocations();
1391 

1424 
1425     _osr_entry_point         = nullptr;
1426     _pc_desc_container       = nullptr;
1427     _entry_bci               = InvocationEntryBci;
1428     _compile_id              = compile_id;
1429     _comp_level              = CompLevel_none;
1430     _compiler_type           = type;
1431     _orig_pc_offset          = 0;
1432     _num_stack_arg_slots     = 0;
1433 
1434     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1435       // Continuation enter intrinsic
1436       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1437     } else {
1438       _exception_offset      = 0;
1439     }
1440     // Native wrappers do not have deopt handlers. Make the values
1441     // something that will never match a pc like the nmethod vtable entry
1442     _deopt_handler_offset    = 0;
1443     _deopt_mh_handler_offset = 0;
1444     _aot_code_entry          = nullptr;
1445     _method_profiling_count  = 0;
1446     _unwind_handler_offset   = 0;
1447 
1448     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1449     uint16_t metadata_size;
1450     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1451     JVMCI_ONLY( _metadata_size = metadata_size; )
1452     assert(_mutable_data_size == _relocation_size + metadata_size,
1453            "wrong mutable data size: %d != %d + %d",
1454            _mutable_data_size, _relocation_size, metadata_size);
1455 
1456     // native wrapper does not have read-only data but we need unique not null address
1457     _immutable_data          = blob_end();
1458     _immutable_data_size     = 0;
1459     _nul_chk_table_offset    = 0;
1460     _handler_table_offset    = 0;
1461     _scopes_pcs_offset       = 0;
1462     _scopes_data_offset      = 0;
1463 #if INCLUDE_JVMCI
1464     _speculations_offset     = 0;
1465 #endif

1485     // This is both handled in decode2(), called via print_code() -> decode()
1486     if (PrintNativeNMethods) {
1487       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1488       print_code();
1489       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1490 #if defined(SUPPORT_DATA_STRUCTS)
1491       if (AbstractDisassembler::show_structs()) {
1492         if (oop_maps != nullptr) {
1493           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1494           oop_maps->print_on(tty);
1495           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1496         }
1497       }
1498 #endif
1499     } else {
1500       print(); // print the header part only.
1501     }
1502 #if defined(SUPPORT_DATA_STRUCTS)
1503     if (AbstractDisassembler::show_structs()) {
1504       if (PrintRelocations) {
1505         print_relocations_on(tty);
1506         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1507       }
1508     }
1509 #endif
1510     if (xtty != nullptr) {
1511       xtty->tail("print_native_nmethod");
1512     }
1513   }
1514 }
1515 
1516 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
1517   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
1518 }
1519 
1520 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1521   // Try MethodNonProfiled and MethodProfiled.
1522   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1523   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1524   // Try NonNMethod or give up.
1525   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);

1529 nmethod::nmethod(
1530   Method* method,
1531   CompilerType type,
1532   int nmethod_size,
1533   int immutable_data_size,
1534   int mutable_data_size,
1535   int compile_id,
1536   int entry_bci,
1537   address immutable_data,
1538   CodeOffsets* offsets,
1539   int orig_pc_offset,
1540   DebugInformationRecorder* debug_info,
1541   Dependencies* dependencies,
1542   CodeBuffer *code_buffer,
1543   int frame_size,
1544   OopMapSet* oop_maps,
1545   ExceptionHandlerTable* handler_table,
1546   ImplicitExceptionTable* nul_chk_table,
1547   AbstractCompiler* compiler,
1548   CompLevel comp_level
1549   , AOTCodeEntry* aot_code_entry
1550 #if INCLUDE_JVMCI
1551   , char* speculations,
1552   int speculations_len,
1553   JVMCINMethodData* jvmci_data
1554 #endif
1555   )
1556   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1557              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1558   _deoptimization_generation(0),
1559   _gc_epoch(CodeCache::gc_epoch()),
1560   _method(method),
1561   _osr_link(nullptr)
1562 {
1563   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1564   {
1565     DEBUG_ONLY(NoSafepointVerifier nsv;)
1566     assert_locked_or_safepoint(CodeCache_lock);
1567 
1568     init_defaults(code_buffer, offsets);
1569     _aot_code_entry          = aot_code_entry;
1570     _method_profiling_count  = 0;
1571 
1572     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1573     _entry_bci       = entry_bci;
1574     _compile_id      = compile_id;
1575     _comp_level      = comp_level;
1576     _compiler_type   = type;
1577     _orig_pc_offset  = orig_pc_offset;
1578 
1579     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1580 
1581     set_ctable_begin(header_begin() + content_offset());
1582 
1583 #if INCLUDE_JVMCI
1584     if (compiler->is_jvmci()) {
1585       // JVMCI might not produce any stub sections
1586       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1587         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1588       } else {
1589         _exception_offset        = -1;
1590       }

1681 #if INCLUDE_JVMCI
1682     // Copy speculations to nmethod
1683     if (speculations_size() != 0) {
1684       memcpy(speculations_begin(), speculations, speculations_len);
1685     }
1686 #endif
1687 
1688     post_init();
1689 
1690     // We use the entry point information to find out whether a method is
1691     // static or non-static.
1692     assert(compiler->is_c2() || compiler->is_jvmci() ||
1693            _method->is_static() == (entry_point() == verified_entry_point()),
1694            "entry points must be the same for static methods and vice versa");
1695   }
1696 }
1697 
1698 // Print a short set of xml attributes to identify this nmethod.  The
1699 // output should be embedded in some other element.
1700 void nmethod::log_identity(xmlStream* log) const {
1701   assert(log->inside_attrs_or_error(), "printing attributes");
1702   log->print(" code_compile_id='%d'", compile_id());
1703   const char* nm_kind = compile_kind();
1704   if (nm_kind != nullptr)  log->print(" code_compile_kind='%s'", nm_kind);
1705   log->print(" code_compiler='%s'", compiler_name());
1706   if (TieredCompilation) {
1707     log->print(" code_compile_level='%d'", comp_level());
1708   }
1709 #if INCLUDE_JVMCI
1710   if (jvmci_nmethod_data() != nullptr) {
1711     const char* jvmci_name = jvmci_nmethod_data()->name();
1712     if (jvmci_name != nullptr) {
1713       log->print(" jvmci_mirror_name='");
1714       log->text("%s", jvmci_name);
1715       log->print("'");
1716     }
1717   }
1718 #endif
1719 }
1720 
1721 
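     // Emits an xml offset attribute for the named section, but only if the section is
     // non-empty (its begin and end addresses differ).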
1722 #define LOG_OFFSET(log, name)                    \
1723   if (p2i(name##_end()) - p2i(name##_begin())) \
1724     log->print(" " XSTR(name) "_offset='%zd'"    , \
1725                p2i(name##_begin()) - p2i(this))
1726 
1727 

1812       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1813       if (oop_maps() != nullptr) {
1814         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1815         oop_maps()->print_on(tty);
1816         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1817       }
1818     }
1819 #endif
1820   } else {
1821     print(); // print the header part only.
1822   }
1823 
1824 #if defined(SUPPORT_DATA_STRUCTS)
1825   if (AbstractDisassembler::show_structs()) {
1826     methodHandle mh(Thread::current(), _method);
1827     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1828       print_scopes();
1829       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1830     }
1831     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1832       print_relocations_on(tty);
1833       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1834     }
1835     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1836       print_dependencies_on(tty);
1837       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1838     }
1839     if (printmethod || PrintExceptionHandlers) {
1840       print_handler_table();
1841       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1842       print_nul_chk_table();
1843       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1844     }
1845 
1846     if (printmethod) {
1847       print_recorded_oops();
1848       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1849       print_recorded_metadata();
1850       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1851     }
1852   }
1853 #endif
1854 
1855   if (xtty != nullptr) {
1856     xtty->tail("print_nmethod");
1857   }
1858 }
1859 
1860 
1861 // Promote one word from an assembly-time handle to a live embedded oop.
1862 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1863   if (handle == nullptr ||
1864       // As a special case, IC oops are initialized to 1 or -1.
1865       handle == (jobject) Universe::non_oop_word()) {
1866     *(void**)dest = handle;
1867   } else {
1868     *dest = JNIHandles::resolve_non_null(handle);
1869   }
1870 }
1871 
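     // Copy already-resolved oops (Handles) into the nmethod's embedded oop section.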
1872 void nmethod::copy_values(GrowableArray<Handle>* array) {
1873   int length = array->length();
1874   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1875   oop* dest = oops_begin();
1876   for (int index = 0 ; index < length; index++) {
1877     dest[index] = array->at(index)();
1878   }
1879 }
1880 
1881 // This overload must have the same name because it is called from a template.
1882 void nmethod::copy_values(GrowableArray<jobject>* array) {
1883   int length = array->length();
1884   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1885   oop* dest = oops_begin();
1886   for (int index = 0 ; index < length; index++) {
1887     initialize_immediate_oop(&dest[index], array->at(index));
1888   }
1889 
1890   // Now we can fix up all the oops in the code.  We need to do this
1891   // in the code because the assembler uses jobjects as placeholders.
1892   // The code and relocations have already been initialized by the
1893   // CodeBlob constructor, so it is valid even at this early point to
1894   // iterate over relocations and patch the code.
1895   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
1896 }
1897 
1898 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1899   int length = array->length();

1907 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
1908   // re-patch all oop-bearing instructions, just in case some oops moved
1909   RelocIterator iter(this, begin, end);
1910   while (iter.next()) {
1911     if (iter.type() == relocInfo::oop_type) {
1912       oop_Relocation* reloc = iter.oop_reloc();
1913       if (initialize_immediates && reloc->oop_is_immediate()) {
1914         oop* dest = reloc->oop_addr();
1915         jobject obj = *reinterpret_cast<jobject*>(dest);
1916         initialize_immediate_oop(dest, obj);
1917       }
1918       // Refresh the oop-related bits of this instruction.
1919       reloc->fix_oop_relocation();
1920     } else if (iter.type() == relocInfo::metadata_type) {
1921       metadata_Relocation* reloc = iter.metadata_reloc();
1922       reloc->fix_metadata_relocation();
1923     }
1924   }
1925 }
1926 
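     // Walk the relocations and collect every immediate oop (wrapped in a Handle) and every
     // immediate Metadata* into the given lists.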
1927 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
1928   RelocIterator iter(this);
1929   while (iter.next()) {
1930     if (iter.type() == relocInfo::oop_type) {
1931       oop_Relocation* reloc = iter.oop_reloc();
1932       if (reloc->oop_is_immediate()) {
1933         oop dest = reloc->oop_value();
1934         Handle h(thread, dest);
1935         oop_list.append(h);
1936       }
1937     } else if (iter.type() == relocInfo::metadata_type) {
1938       metadata_Relocation* reloc = iter.metadata_reloc();
1939       if (reloc->metadata_is_immediate()) {
1940         Metadata* m = reloc->metadata_value();
1941         metadata_list.append(m);
1942       }
1943     }
1944   }
1945 }
1946 
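     // Encode the oopmap slot and the offset of 'pc' within this nmethod into the post-call
     // nop at 'pc'. If no oopmap is found for the pc, or the values do not fit into the nop
     // encoding, the nop is left unpatched and only a debug message is logged.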
1947 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
1948   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
1949   intptr_t cbaddr = (intptr_t) nm;
1950   intptr_t offset = ((intptr_t) pc) - cbaddr;
1951 
1952   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
1953   if (oopmap_slot < 0) { // this can happen during asynchronous (non-safepoint) stack walks
1954     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
1955   } else if (!nop->patch(oopmap_slot, offset)) {
1956     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
1957   }
1958 }
1959 
1960 void nmethod::finalize_relocations() {
1961   NoSafepointVerifier nsv;
1962 
1963   GrowableArray<NativeMovConstReg*> virtual_call_data;
1964 
1965   // Make sure that post-call nops get their nmethod offsets filled in eagerly, so
1966   // we don't have to race with deoptimization.

2088   Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
2089 }
2090 
2091 bool nmethod::is_maybe_on_stack() {
2092   // If the condition below is true, it means that the nmethod was found to
2093   // be alive during the previous completed marking cycle.
2094   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2095 }
2096 
2097 void nmethod::inc_decompile_count() {
2098   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2099   // Could be gated by ProfileTraps, but do not bother...
2100   Method* m = method();
2101   if (m == nullptr)  return;
2102   MethodData* mdo = m->method_data();
2103   if (mdo == nullptr)  return;
2104   // There is a benign race here.  See comments in methodData.hpp.
2105   mdo->inc_decompile_count();
2106 }
2107 
2108 void nmethod::inc_method_profiling_count() {
2109   Atomic::inc(&_method_profiling_count);
2110 }
2111 
2112 uint64_t nmethod::method_profiling_count() {
2113   return _method_profiling_count;
2114 }
2115 
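     // Attempt to advance _state to new_state while holding NMethodState_lock. Transitions are
     // monotone: a request that does not strictly increase the state is refused, so an end
     // state such as not_entrant can never be left again (see make_not_entrant() below).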
2116 bool nmethod::try_transition(signed char new_state_int) {
2117   signed char new_state = new_state_int;
2118   assert_lock_strong(NMethodState_lock);
2119   signed char old_state = _state;
2120   if (old_state >= new_state) {
2121     // Ensure monotonicity of transitions.
2122     return false;
2123   }
2124   Atomic::store(&_state, new_state);
2125   return true;
2126 }
2127 
2128 void nmethod::invalidate_osr_method() {
2129   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2130   // Remove from list of active nmethods
2131   if (method() != nullptr) {
2132     method()->method_holder()->remove_osr_nmethod(this);
2133   }
2134 }
2135 

2147     }
2148   }
2149 
2150   ResourceMark rm;
2151   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2152   ss.print("made not entrant: %s", reason);
2153 
2154   CompileTask::print_ul(this, ss.freeze());
2155   if (PrintCompilation) {
2156     print_on_with_msg(tty, ss.freeze());
2157   }
2158 }
2159 
2160 void nmethod::unlink_from_method() {
2161   if (method() != nullptr) {
2162     method()->unlink_code(this);
2163   }
2164 }
2165 
2166 // Invalidate the code: mark it not entrant so that no new activations can enter it.
2167 bool nmethod::make_not_entrant(const char* reason, bool make_not_entrant) {
2168   assert(reason != nullptr, "Must provide a reason");
2169 
2170   // This can be called while the system is already at a safepoint which is ok
2171   NoSafepointVerifier nsv;
2172 
2173   if (is_unloading()) {
2174     // If the nmethod is unloading, then it is already not entrant through
2175     // the nmethod entry barriers. No need to do anything; GC will unload it.
2176     return false;
2177   }
2178 
2179   if (Atomic::load(&_state) == not_entrant) {
2180     // Avoid taking the lock if already in required state.
2181     // This is safe from races because the state is an end-state,
2182     // which the nmethod cannot back out of once entered.
2183     // No need for fencing either.
2184     return false;
2185   }
2186 
2187   {

2223     }
2224 
2225     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2226     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2227       // If nmethod entry barriers are not supported, we won't mark
2228       // nmethods as on-stack when they become on-stack. So we
2229       // degrade to a less accurate flushing strategy, for now.
2230       mark_as_maybe_on_stack();
2231     }
2232 
2233     // Change state
2234     bool success = try_transition(not_entrant);
2235     assert(success, "Transition can't fail");
2236 
2237     // Log the transition once
2238     log_state_change(reason);
2239 
2240     // Remove nmethod from method.
2241     unlink_from_method();
2242 
2243     if (make_not_entrant) {
2244       // Keep the AOT cached code if the nmethod was simply replaced;
2245       // otherwise invalidate the cached code entry as well.
2246       AOTCodeCache::invalidate(_aot_code_entry);
2247     }
2248 
2249     CompileBroker::log_not_entrant(this);
2250   } // leave critical region under NMethodState_lock
2251 
2252 #if INCLUDE_JVMCI
2253   // Invalidate can't occur while holding the NMethodState_lock
2254   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2255   if (nmethod_data != nullptr) {
2256     nmethod_data->invalidate_nmethod_mirror(this);
2257   }
2258 #endif
2259 
2260 #ifdef ASSERT
2261   if (is_osr_method() && method() != nullptr) {
2262     // Make sure osr nmethod is invalidated, i.e. not on the list
2263     bool found = method()->method_holder()->remove_osr_nmethod(this);
2264     assert(!found, "osr nmethod should have been invalidated");
2265   }
2266 #endif
2267 
2268   return true;
2269 }

2310 
2311   // completely deallocate this method
2312   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2313   log_debug(codecache)("*flushing %s nmethod %3d/" INTPTR_FORMAT ". Live blobs:" UINT32_FORMAT
2314                        "/Free CodeCache:%zuKb",
2315                        is_osr_method() ? "osr" : "", _compile_id, p2i(this), CodeCache::blob_count(),
2316                        CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024);
2317 
2318   // We need to deallocate any ExceptionCache data.
2319   // Note that we do not need to grab the nmethod lock for this; it
2320   // had better be thread safe if we're disposing of it!
2321   ExceptionCache* ec = exception_cache();
2322   while (ec != nullptr) {
2323     ExceptionCache* next = ec->next();
2324     delete ec;
2325     ec = next;
2326   }
2327   if (_pc_desc_container != nullptr) {
2328     delete _pc_desc_container;
2329   }
2330   if (_compiled_ic_data != nullptr) {
2331     delete[] _compiled_ic_data;
2332   }
2333 
2334   if (_immutable_data != data_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
2335     os::free(_immutable_data);
2336     _immutable_data = blob_end(); // valid non-null address
2337   }
2338   if (unregister_nmethod) {
2339     Universe::heap()->unregister_nmethod(this);
2340   }
2341   CodeCache::unregister_old_nmethod(this);
2342 
2343   CodeBlob::purge();
2344 }
2345 
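     // Load the oop at the given index of the embedded oop section. Index 0 is reserved and
     // yields nullptr. The load goes through the GC's nmethod barrier but deliberately does
     // not keep the object alive (oop_load_no_keepalive).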
2346 oop nmethod::oop_at(int index) const {
2347   if (index == 0) {
2348     return nullptr;
2349   }
2350 
2351   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2352   return bs_nm->oop_load_no_keepalive(this, index);
2353 }
2354 

2375         MethodHandles::clean_dependency_context(call_site);
2376       } else {
2377         InstanceKlass* ik = deps.context_type();
2378         if (ik == nullptr) {
2379           continue;  // ignore things like evol_method
2380         }
2381         // During GC, the liveness of the dependee determines which class needs to be updated.
2382         // The GC may clean dependency contexts concurrently and in parallel.
2383         ik->clean_dependency_context();
2384       }
2385     }
2386   }
2387 }
2388 
2389 void nmethod::post_compiled_method(CompileTask* task) {
2390   task->mark_success();
2391   task->set_nm_content_size(content_size());
2392   task->set_nm_insts_size(insts_size());
2393   task->set_nm_total_size(total_size());
2394 
2395   // task->is_aot() is true only for loaded cached code.
2396   // nmethod::_aot_code_entry is set for both loaded and stored cached code
2397   // so that the entry can be invalidated when the nmethod is deoptimized.
2398   // There is an option to not store cached code in the archive.
2399   guarantee((_aot_code_entry != nullptr) || !task->is_aot() || VerifyCachedCode, "sanity");
2400 
2401   // JVMTI -- compiled method notification (must be done outside lock)
2402   post_compiled_method_load_event();
2403 
2404   if (CompilationLog::log() != nullptr) {
2405     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2406   }
2407 
2408   const DirectiveSet* directive = task->directive();
2409   maybe_print_nmethod(directive);
2410 }
2411 
2412 // ------------------------------------------------------------------
2413 // post_compiled_method_load_event
2414 // new method for install_code() path
2415 // Transfer information from compilation to jvmti
2416 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2417   // This is a bad time for a safepoint.  We don't want
2418   // this nmethod to get unloaded while we're queueing the event.
2419   NoSafepointVerifier nsv;
2420 

3112 
3113   // Make sure all the entry points are correctly aligned for patching.
3114   NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
3115 
3116   // assert(oopDesc::is_oop(method()), "must be valid");
3117 
3118   ResourceMark rm;
3119 
3120   if (!CodeCache::contains(this)) {
3121     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3122   }
3123 
3124   if (is_native_method())
3125     return;
3126 
3127   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3128   if (nm != this) {
3129     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3130   }
3131 
3132   // Verification can be triggered during shutdown, after the AOTCodeCache is closed.
3133   // If the Scopes data is in the AOT code cache, then we should avoid verification during shutdown.
3134   if (!is_aot() || AOTCodeCache::is_on()) {
3135     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3136       if (!p->verify(this)) {
3137         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3138       }
3139     }

3140 
3141 #ifdef ASSERT
3142 #if INCLUDE_JVMCI
3143     {
3144       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3145       ImmutableOopMapSet* oms = oop_maps();
3146       ImplicitExceptionTable implicit_table(this);
3147       for (uint i = 0; i < implicit_table.len(); i++) {
3148         int exec_offset = (int) implicit_table.get_exec_offset(i);
3149         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3150           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3151           bool found = false;
3152           for (int i = 0, imax = oms->count(); i < imax; i++) {
3153             if (oms->pair_at(i)->pc_offset() == exec_offset) {
3154               found = true;
3155               break;
3156             }
3157           }
3158           assert(found, "missing oopmap");
3159         }

3160       }
3161     }

3162 #endif
3163 #endif
3164   }
3165 
3166   VerifyOopsClosure voc(this);
3167   oops_do(&voc);
3168   assert(voc.ok(), "embedded oops must be OK");
3169   Universe::heap()->verify_nmethod(this);
3170 
3171   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3172          nm->method()->external_name(), p2i(_oops_do_mark_link));
3173   if (!is_aot() || AOTCodeCache::is_on()) {
3174     verify_scopes();
3175   }
3176 
3177   CompiledICLocker nm_verify(this);
3178   VerifyMetadataClosure vmc;
3179   metadata_do(&vmc);
3180 }
3181 
3182 
3183 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3184 
3185   // Verify IC only when nmethod installation is finished.
3186   if (!is_not_installed()) {
3187     if (CompiledICLocker::is_safe(this)) {
3188       if (is_inline_cache) {
3189         CompiledIC_at(this, call_site);
3190       } else {
3191         CompiledDirectCall::at(call_site);
3192       }
3193     } else {
3194       CompiledICLocker ml_verify(this);
3195       if (is_inline_cache) {

3324                                              p2i(nul_chk_table_end()),
3325                                              nul_chk_table_size());
3326   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3327                                              p2i(handler_table_begin()),
3328                                              p2i(handler_table_end()),
3329                                              handler_table_size());
3330   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3331                                              p2i(scopes_pcs_begin()),
3332                                              p2i(scopes_pcs_end()),
3333                                              scopes_pcs_size());
3334   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3335                                              p2i(scopes_data_begin()),
3336                                              p2i(scopes_data_end()),
3337                                              scopes_data_size());
3338 #if INCLUDE_JVMCI
3339   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3340                                              p2i(speculations_begin()),
3341                                              p2i(speculations_end()),
3342                                              speculations_size());
3343 #endif
3344   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3345     _aot_code_entry->print(st);
3346   }
3347 }
3348 
3349 void nmethod::print_code() {
3350   ResourceMark m;
3351   ttyLocker ttyl;
3352   // Call the specialized decode method of this class.
3353   decode(tty);
3354 }
3355 
3356 #ifndef PRODUCT  // The called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN.
3357 
3358 void nmethod::print_dependencies_on(outputStream* out) {
3359   ResourceMark rm;
3360   stringStream st;
3361   st.print_cr("Dependencies:");
3362   for (Dependencies::DepStream deps(this); deps.next(); ) {
3363     deps.print_dependency(&st);
3364     InstanceKlass* ctxk = deps.context_type();
3365     if (ctxk != nullptr) {
3366       if (ctxk->is_dependent_nmethod(this)) {

3426   st->print("scopes:");
3427   if (scopes_pcs_begin() < scopes_pcs_end()) {
3428     st->cr();
3429     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3430       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3431         continue;
3432 
3433       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3434       while (sd != nullptr) {
3435         sd->print_on(st, p);  // print output ends with a newline
3436         sd = sd->sender();
3437       }
3438     }
3439   } else {
3440     st->print_cr(" <list empty>");
3441   }
3442 }
3443 #endif
3444 
3445 #ifndef PRODUCT  // RelocIterator supports printing only then.
3446 void nmethod::print_relocations_on(outputStream* st) {
3447   ResourceMark m;       // in case methods get printed via the debugger
3448   st->print_cr("relocations:");
3449   RelocIterator iter(this);
3450   iter.print_on(st);
3451 }
3452 #endif
3453 
3454 void nmethod::print_pcs_on(outputStream* st) {
3455   ResourceMark m;       // in case methods get printed via debugger
3456   st->print("pc-bytecode offsets:");
3457   if (scopes_pcs_begin() < scopes_pcs_end()) {
3458     st->cr();
3459     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3460       p->print_on(st, this);  // print output ends with a newline
3461     }
3462   } else {
3463     st->print_cr(" <list empty>");
3464   }
3465 }
3466 
3467 void nmethod::print_handler_table() {
3468   ExceptionHandlerTable(this).print(code_begin());
3469 }
3470 

4236 
4237 #endif // !PRODUCT
4238 
4239 #if INCLUDE_JVMCI
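     // Record a failed speculation reported by the deoptimizing thread in this nmethod's
     // failed speculation list, then clear the thread's pending value.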
4240 void nmethod::update_speculation(JavaThread* thread) {
4241   jlong speculation = thread->pending_failed_speculation();
4242   if (speculation != 0) {
4243     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4244     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4245     thread->set_pending_failed_speculation(0);
4246   }
4247 }
4248 
4249 const char* nmethod::jvmci_name() {
4250   if (jvmci_nmethod_data() != nullptr) {
4251     return jvmci_nmethod_data()->name();
4252   }
4253   return nullptr;
4254 }
4255 #endif
4256 
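     // Reset runtime-mutable and environment-specific fields (links, counters, pointers into
     // the heap and metaspace) before the nmethod is archived.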
4257 void nmethod::prepare_for_archiving_impl() {
4258   CodeBlob::prepare_for_archiving_impl();
4259   _deoptimization_generation = 0;
4260   _gc_epoch = 0;
4261   _method_profiling_count = 0;
4262   _osr_link = nullptr;
4263   _method = nullptr;
4264   _immutable_data = nullptr;
4265   _pc_desc_container = nullptr;
4266   _exception_cache = nullptr;
4267   _gc_data = nullptr;
4268   _oops_do_mark_link = nullptr;
4269   _compiled_ic_data = nullptr;
4270   _osr_entry_point = nullptr;
4271   _compile_id = -1;
4272   _deoptimization_status = not_marked;
4273   _is_unloading_state = 0;
4274   _state = not_installed;
4275 }