src/hotspot/share/code/nmethod.cpp

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"

  26 #include "code/codeCache.hpp"
  27 #include "code/compiledIC.hpp"
  28 #include "code/dependencies.hpp"
  29 #include "code/nativeInst.hpp"
  30 #include "code/nmethod.inline.hpp"
  31 #include "code/scopeDesc.hpp"
  32 #include "compiler/abstractCompiler.hpp"
  33 #include "compiler/compilationLog.hpp"
  34 #include "compiler/compileBroker.hpp"
  35 #include "compiler/compileLog.hpp"
  36 #include "compiler/compilerDirectives.hpp"
  37 #include "compiler/compilerOracle.hpp"
  38 #include "compiler/compileTask.hpp"
  39 #include "compiler/directivesParser.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.inline.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/barrierSetNMethod.hpp"
  44 #include "gc/shared/classUnloadingContext.hpp"
  45 #include "gc/shared/collectedHeap.hpp"

1003              _method->method_holder()->external_name(),
1004              _method->name()->as_C_string(),
1005              _method->signature()->as_C_string(),
1006              compile_id());
1007   }
1008   return check_evol.has_evol_dependency();
1009 }
1010 
1011 int nmethod::total_size() const {
1012   return
1013     consts_size()        +
1014     insts_size()         +
1015     stub_size()          +
1016     scopes_data_size()   +
1017     scopes_pcs_size()    +
1018     handler_table_size() +
1019     nul_chk_table_size();
1020 }
1021 
1022 const char* nmethod::compile_kind() const {
1023   if (is_osr_method())     return "osr";



1024   if (method() != nullptr && is_native_method()) {
1025     if (method()->is_continuation_native_intrinsic()) {
1026       return "cnt";
1027     }
1028     return "c2n";
1029   }
1030   return nullptr;
1031 }
1032 
1033 const char* nmethod::compiler_name() const {
1034   return compilertype2name(_compiler_type);
1035 }
1036 
1037 #ifdef ASSERT
1038 class CheckForOopsClosure : public OopClosure {
1039   bool _found_oop = false;
1040  public:
1041   virtual void do_oop(oop* o) { _found_oop = true; }
1042   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1043   bool found_oop() { return _found_oop; }

1109     nm = new (native_nmethod_size, allow_NonNMethod_space)
1110     nmethod(method(), compiler_none, native_nmethod_size,
1111             compile_id, &offsets,
1112             code_buffer, frame_size,
1113             basic_lock_owner_sp_offset,
1114             basic_lock_sp_offset,
1115             oop_maps, mutable_data_size);
1116     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1117     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1118   }
1119 
1120   if (nm != nullptr) {
1121     // verify nmethod
1122     DEBUG_ONLY(nm->verify();) // might block
1123 
1124     nm->log_new_nmethod();
1125   }
1126   return nm;
1127 }
1128 
1129 nmethod* nmethod::new_nmethod(const methodHandle& method,
1130   int compile_id,
1131   int entry_bci,
1132   CodeOffsets* offsets,
1133   int orig_pc_offset,
1134   DebugInformationRecorder* debug_info,
1135   Dependencies* dependencies,
1136   CodeBuffer* code_buffer, int frame_size,
1137   OopMapSet* oop_maps,
1138   ExceptionHandlerTable* handler_table,
1139   ImplicitExceptionTable* nul_chk_table,
1140   AbstractCompiler* compiler,
1141   CompLevel comp_level

1142 #if INCLUDE_JVMCI
1143   , char* speculations,
1144   int speculations_len,
1145   JVMCINMethodData* jvmci_data
1146 #endif
1147 )
1148 {
1149   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1150   code_buffer->finalize_oop_references(method);
1151   // create nmethod
1152   nmethod* nm = nullptr;
1153   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1154 
1155   int immutable_data_size =
1156       adjust_pcs_size(debug_info->pcs_size())
1157     + align_up((int)dependencies->size_in_bytes(), oopSize)
1158     + align_up(handler_table->size_in_bytes()    , oopSize)
1159     + align_up(nul_chk_table->size_in_bytes()    , oopSize)
1160 #if INCLUDE_JVMCI
1161     + align_up(speculations_len                  , oopSize)

1165   // First, allocate space for immutable data in C heap.
1166   address immutable_data = nullptr;
1167   if (immutable_data_size > 0) {
1168     immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1169     if (immutable_data == nullptr) {
1170       vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1171       return nullptr;
1172     }
1173   }
1174 
1175   int mutable_data_size = required_mutable_data_size(code_buffer
1176     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1177 
1178   {
1179     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1180 
1181     nm = new (nmethod_size, comp_level)
1182     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1183             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1184             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1185             handler_table, nul_chk_table, compiler, comp_level
1186 #if INCLUDE_JVMCI
1187             , speculations,
1188             speculations_len,
1189             jvmci_data
1190 #endif
1191             );
1192 
1193     if (nm != nullptr) {
1194       // To make dependency checking during class loading fast, record
1195       // the nmethod dependencies in the classes it is dependent on.
1196       // This allows the dependency checking code to simply walk the
1197       // class hierarchy above the loaded class, checking only nmethods
1198       // which are dependent on those classes.  The slow way is to
1199       // check every nmethod for dependencies which makes it linear in
1200       // the number of methods compiled.  For applications with a lot of
1201       // classes the slow way is too slow.
1202       for (Dependencies::DepStream deps(nm); deps.next(); ) {
1203         if (deps.type() == Dependencies::call_site_target_value) {
1204           // CallSite dependencies are managed on per-CallSite instance basis.
1205           oop call_site = deps.argument_oop(0);
1206           MethodHandles::add_dependent_nmethod(call_site, nm);
1207         } else {
1208           InstanceKlass* ik = deps.context_type();
1209           if (ik == nullptr) {
1210             continue;  // ignore things like evol_method
1211           }
1212           // record this nmethod as dependent on this klass
1213           ik->add_dependent_nmethod(nm);
1214         }
1215       }
1216       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
1217     }
1218   }
1219   // Do verification and logging outside CodeCache_lock.
1220   if (nm != nullptr) {
1221     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1222     DEBUG_ONLY(nm->verify();)
1223     nm->log_new_nmethod();
1224   }
1225   return nm;
1226 }
1227 
1228 // Fill in default values for various fields
1229 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1230   // avoid uninitialized fields, even for short time periods
1231   _exception_cache            = nullptr;
1232   _gc_data                    = nullptr;
1233   _oops_do_mark_link          = nullptr;
1234   _compiled_ic_data           = nullptr;
1235 
1236   _is_unloading_state         = 0;
1237   _state                      = not_installed;
1238 
1239   _has_unsafe_access          = 0;
1240   _has_method_handle_invokes  = 0;
1241   _has_wide_vectors           = 0;
1242   _has_monitors               = 0;
1243   _has_scoped_access          = 0;
1244   _has_flushed_dependencies   = 0;
1245   _is_unlinked                = 0;
1246   _load_reported              = 0; // jvmti state


1247 

1248   _deoptimization_status      = not_marked;
1249 
1250   // SECT_CONSTS is first in code buffer so the offset should be 0.
1251   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1252   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1253 
1254   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1255 
1256   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1257   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1258 
1259   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1260 }
1261 
1262 // Post initialization
1263 void nmethod::post_init() {
1264   clear_unloading_state();
1265 
1266   finalize_relocations();
1267 

1300 
1301     _osr_entry_point         = nullptr;
1302     _pc_desc_container       = nullptr;
1303     _entry_bci               = InvocationEntryBci;
1304     _compile_id              = compile_id;
1305     _comp_level              = CompLevel_none;
1306     _compiler_type           = type;
1307     _orig_pc_offset          = 0;
1308     _num_stack_arg_slots     = 0;
1309 
1310     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1311       // Continuation enter intrinsic
1312       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1313     } else {
1314       _exception_offset      = 0;
1315     }
1316     // Native wrappers do not have deopt handlers. Make the values
1317     // something that will never match a pc like the nmethod vtable entry
1318     _deopt_handler_offset    = 0;
1319     _deopt_mh_handler_offset = 0;


1320     _unwind_handler_offset   = 0;
1321 
1322     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1323     uint16_t metadata_size;
1324     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1325     JVMCI_ONLY( _metadata_size = metadata_size; )
1326     assert(_mutable_data_size == _relocation_size + metadata_size,
1327            "wrong mutable data size: %d != %d + %d",
1328            _mutable_data_size, _relocation_size, metadata_size);
1329 
1330     // native wrapper does not have read-only data but we need a unique non-null address
1331     _immutable_data          = blob_end();
1332     _immutable_data_size     = 0;
1333     _nul_chk_table_offset    = 0;
1334     _handler_table_offset    = 0;
1335     _scopes_pcs_offset       = 0;
1336     _scopes_data_offset      = 0;
1337 #if INCLUDE_JVMCI
1338     _speculations_offset     = 0;
1339 #endif

1359     // This is both handled in decode2(), called via print_code() -> decode()
1360     if (PrintNativeNMethods) {
1361       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1362       print_code();
1363       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1364 #if defined(SUPPORT_DATA_STRUCTS)
1365       if (AbstractDisassembler::show_structs()) {
1366         if (oop_maps != nullptr) {
1367           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1368           oop_maps->print_on(tty);
1369           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1370         }
1371       }
1372 #endif
1373     } else {
1374       print(); // print the header part only.
1375     }
1376 #if defined(SUPPORT_DATA_STRUCTS)
1377     if (AbstractDisassembler::show_structs()) {
1378       if (PrintRelocations) {
1379         print_relocations();
1380         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1381       }
1382     }
1383 #endif
1384     if (xtty != nullptr) {
1385       xtty->tail("print_native_nmethod");
1386     }
1387   }
1388 }
1389 
1390 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
1391   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
1392 }
1393 
1394 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1395   // Try MethodNonProfiled and MethodProfiled.
1396   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1397   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1398   // Try NonNMethod or give up.
1399   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);

1403 nmethod::nmethod(
1404   Method* method,
1405   CompilerType type,
1406   int nmethod_size,
1407   int immutable_data_size,
1408   int mutable_data_size,
1409   int compile_id,
1410   int entry_bci,
1411   address immutable_data,
1412   CodeOffsets* offsets,
1413   int orig_pc_offset,
1414   DebugInformationRecorder* debug_info,
1415   Dependencies* dependencies,
1416   CodeBuffer *code_buffer,
1417   int frame_size,
1418   OopMapSet* oop_maps,
1419   ExceptionHandlerTable* handler_table,
1420   ImplicitExceptionTable* nul_chk_table,
1421   AbstractCompiler* compiler,
1422   CompLevel comp_level

1423 #if INCLUDE_JVMCI
1424   , char* speculations,
1425   int speculations_len,
1426   JVMCINMethodData* jvmci_data
1427 #endif
1428   )
1429   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1430              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1431   _deoptimization_generation(0),
1432   _gc_epoch(CodeCache::gc_epoch()),
1433   _method(method),
1434   _osr_link(nullptr)
1435 {
1436   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1437   {
1438     DEBUG_ONLY(NoSafepointVerifier nsv;)
1439     assert_locked_or_safepoint(CodeCache_lock);
1440 
1441     init_defaults(code_buffer, offsets);


1442 
1443     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1444     _entry_bci       = entry_bci;
1445     _compile_id      = compile_id;
1446     _comp_level      = comp_level;
1447     _compiler_type   = type;
1448     _orig_pc_offset  = orig_pc_offset;
1449 
1450     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1451 
1452     set_ctable_begin(header_begin() + content_offset());
1453 
1454 #if INCLUDE_JVMCI
1455     if (compiler->is_jvmci()) {
1456       // JVMCI might not produce any stub sections
1457       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1458         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1459       } else {
1460         _exception_offset        = -1;
1461       }

1552 #if INCLUDE_JVMCI
1553     // Copy speculations to nmethod
1554     if (speculations_size() != 0) {
1555       memcpy(speculations_begin(), speculations, speculations_len);
1556     }
1557 #endif
1558 
1559     post_init();
1560 
1561     // we use the entry point information to find out if a method is
1562     // static or non-static
1563     assert(compiler->is_c2() || compiler->is_jvmci() ||
1564            _method->is_static() == (entry_point() == verified_entry_point()),
1565            "entry points must be the same for static methods and vice versa");
1566   }
1567 }
1568 
1569 // Print a short set of xml attributes to identify this nmethod.  The
1570 // output should be embedded in some other element.
1571 void nmethod::log_identity(xmlStream* log) const {

1572   log->print(" compile_id='%d'", compile_id());
1573   const char* nm_kind = compile_kind();
1574   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1575   log->print(" compiler='%s'", compiler_name());
1576   if (TieredCompilation) {
1577     log->print(" level='%d'", comp_level());
1578   }
1579 #if INCLUDE_JVMCI
1580   if (jvmci_nmethod_data() != nullptr) {
1581     const char* jvmci_name = jvmci_nmethod_data()->name();
1582     if (jvmci_name != nullptr) {
1583       log->print(" jvmci_mirror_name='");
1584       log->text("%s", jvmci_name);
1585       log->print("'");
1586     }
1587   }
1588 #endif
1589 }
1590 
1591 
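// Print an xml attribute with the offset of the named section from the start of
// this nmethod, but only when the section is non-empty.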
1592 #define LOG_OFFSET(log, name)                    \
1593   if (p2i(name##_end()) - p2i(name##_begin())) \
1594     log->print(" " XSTR(name) "_offset='%zd'"    , \
1595                p2i(name##_begin()) - p2i(this))
1596 
1597 

1678       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1679       if (oop_maps() != nullptr) {
1680         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1681         oop_maps()->print_on(tty);
1682         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1683       }
1684     }
1685 #endif
1686   } else {
1687     print(); // print the header part only.
1688   }
1689 
1690 #if defined(SUPPORT_DATA_STRUCTS)
1691   if (AbstractDisassembler::show_structs()) {
1692     methodHandle mh(Thread::current(), _method);
1693     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1694       print_scopes();
1695       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1696     }
1697     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1698       print_relocations();
1699       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1700     }
1701     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1702       print_dependencies_on(tty);
1703       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1704     }
1705     if (printmethod || PrintExceptionHandlers) {
1706       print_handler_table();
1707       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1708       print_nul_chk_table();
1709       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1710     }
1711 
1712     if (printmethod) {
1713       print_recorded_oops();
1714       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1715       print_recorded_metadata();
1716       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1717     }
1718   }
1719 #endif
1720 
1721   if (xtty != nullptr) {
1722     xtty->tail("print_nmethod");
1723   }
1724 }
1725 
1726 
1727 // Promote one word from an assembly-time handle to a live embedded oop.
1728 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1729   if (handle == nullptr ||
1730       // As a special case, IC oops are initialized to 1 or -1.
1731       handle == (jobject) Universe::non_oop_word()) {
1732     *(void**)dest = handle;
1733   } else {
1734     *dest = JNIHandles::resolve_non_null(handle);
1735   }
1736 }
1737 
1738 
1739 // Have to have the same name because it's called by a template
1740 void nmethod::copy_values(GrowableArray<jobject>* array) {
1741   int length = array->length();
1742   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1743   oop* dest = oops_begin();
1744   for (int index = 0 ; index < length; index++) {
1745     initialize_immediate_oop(&dest[index], array->at(index));
1746   }
1747 
1748   // Now we can fix up all the oops in the code.  We need to do this
1749   // in the code because the assembler uses jobjects as placeholders.
1750   // The code and relocations have already been initialized by the
1751   // CodeBlob constructor, so it is valid even at this early point to
1752   // iterate over relocations and patch the code.
1753   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
1754 }
1755 
1756 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1757   int length = array->length();

1765 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
1766   // re-patch all oop-bearing instructions, just in case some oops moved
1767   RelocIterator iter(this, begin, end);
1768   while (iter.next()) {
1769     if (iter.type() == relocInfo::oop_type) {
1770       oop_Relocation* reloc = iter.oop_reloc();
1771       if (initialize_immediates && reloc->oop_is_immediate()) {
1772         oop* dest = reloc->oop_addr();
1773         jobject obj = *reinterpret_cast<jobject*>(dest);
1774         initialize_immediate_oop(dest, obj);
1775       }
1776       // Refresh the oop-related bits of this instruction.
1777       reloc->fix_oop_relocation();
1778     } else if (iter.type() == relocInfo::metadata_type) {
1779       metadata_Relocation* reloc = iter.metadata_reloc();
1780       reloc->fix_metadata_relocation();
1781     }
1782   }
1783 }
1784 
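// Patch the post-call nop following a call site with the offset of the pc from
// the start of this nmethod and the index of the matching oop map, so that a
// stack walk can recover both without searching the code cache.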
1785 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
1786   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
1787   intptr_t cbaddr = (intptr_t) nm;
1788   intptr_t offset = ((intptr_t) pc) - cbaddr;
1789 
1790   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
1791   if (oopmap_slot < 0) { // this can happen at asynchronous (non-safepoint) stackwalks
1792     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
1793   } else if (!nop->patch(oopmap_slot, offset)) {
1794     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
1795   }
1796 }
1797 
1798 void nmethod::finalize_relocations() {
1799   NoSafepointVerifier nsv;
1800 
1801   GrowableArray<NativeMovConstReg*> virtual_call_data;
1802 
1803   // Make sure that post call nops fill in nmethod offsets eagerly so
1804   // we don't have to race with deoptimization

1931   // be alive the previous completed marking cycle.
1932   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1933 }
1934 
1935 void nmethod::inc_decompile_count() {
1936   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1937   // Could be gated by ProfileTraps, but do not bother...
1938 #if INCLUDE_JVMCI
1939   if (jvmci_skip_profile_deopt()) {
1940     return;
1941   }
1942 #endif
1943   Method* m = method();
1944   if (m == nullptr)  return;
1945   MethodData* mdo = m->method_data();
1946   if (mdo == nullptr)  return;
1947   // There is a benign race here.  See comments in methodData.hpp.
1948   mdo->inc_decompile_count();
1949 }
1950 
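// Try to advance _state to new_state. Transitions are one-way, so a request
// that would not move the state forward is rejected.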
1951 bool nmethod::try_transition(signed char new_state_int) {
1952   signed char new_state = new_state_int;
1953   assert_lock_strong(NMethodState_lock);
1954   signed char old_state = _state;
1955   if (old_state >= new_state) {
1956     // Ensure monotonicity of transitions.
1957     return false;
1958   }
1959   Atomic::store(&_state, new_state);
1960   return true;
1961 }
1962 
1963 void nmethod::invalidate_osr_method() {
1964   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1965   // Remove from list of active nmethods
1966   if (method() != nullptr) {
1967     method()->method_holder()->remove_osr_nmethod(this);
1968   }
1969 }
1970 

1980     }
1981   }
1982 
1983   ResourceMark rm;
1984   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
1985   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
1986 
1987   CompileTask::print_ul(this, ss.freeze());
1988   if (PrintCompilation) {
1989     print_on_with_msg(tty, ss.freeze());
1990   }
1991 }
1992 
1993 void nmethod::unlink_from_method() {
1994   if (method() != nullptr) {
1995     method()->unlink_code(this);
1996   }
1997 }
1998 
1999 // Invalidate code
2000 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason) {
2001   // This can be called while the system is already at a safepoint which is ok
2002   NoSafepointVerifier nsv;
2003 
2004   if (is_unloading()) {
2005     // If the nmethod is unloading, then it is already not entrant through
2006     // the nmethod entry barriers. No need to do anything; GC will unload it.
2007     return false;
2008   }
2009 
2010   if (Atomic::load(&_state) == not_entrant) {
2011     // Avoid taking the lock if already in required state.
2012     // This is safe from races because the state is an end-state,
2013     // which the nmethod cannot back out of once entered.
2014     // No need for fencing either.
2015     return false;
2016   }
2017 
2018   {
2019     // Enter critical section.  Does not block for safepoint.
2020     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2042     }
2043 
2044     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2045     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2046       // If nmethod entry barriers are not supported, we won't mark
2047       // nmethods as on-stack when they become on-stack. So we
2048       // degrade to a less accurate flushing strategy, for now.
2049       mark_as_maybe_on_stack();
2050     }
2051 
2052     // Change state
2053     bool success = try_transition(not_entrant);
2054     assert(success, "Transition can't fail");
2055 
2056     // Log the transition once
2057     log_state_change(invalidation_reason);
2058 
2059     // Remove nmethod from method.
2060     unlink_from_method();
2061 
2062   } // leave critical region under NMethodState_lock
2063 
2064 #if INCLUDE_JVMCI
2065   // Invalidate can't occur while holding the NMethodState_lock
2066   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2067   if (nmethod_data != nullptr) {
2068     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2069   }
2070 #endif
2071 
2072 #ifdef ASSERT
2073   if (is_osr_method() && method() != nullptr) {
2074     // Make sure osr nmethod is invalidated, i.e. not on the list
2075     bool found = method()->method_holder()->remove_osr_nmethod(this);
2076     assert(!found, "osr nmethod should have been invalidated");
2077   }
2078 #endif
2079 
2080   return true;
2081 }

2106     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2107             nmethod::InvalidationReason::UNLOADING_COLD :
2108             nmethod::InvalidationReason::UNLOADING);
2109   }
2110 #endif
2111 
2112   // Post before flushing as jmethodID is being used
2113   post_compiled_method_unload();
2114 
2115   // Register for flushing when it is safe. For concurrent class unloading,
2116   // that would be after the unloading handshake, and for STW class unloading
2117   // that would be when getting back to the VM thread.
2118   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2119 }
2120 
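// Release the side data owned by this nmethod (exception cache, PcDesc container,
// inline cache data, immutable data), unregister it from the code cache (and, if
// requested, from the heap), then purge the underlying CodeBlob.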
2121 void nmethod::purge(bool unregister_nmethod) {
2122 
2123   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2124 
2125   // completely deallocate this method
2126   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, is_osr_method() ? "osr" : "", p2i(this));
2127 
2128   LogTarget(Debug, codecache) lt;
2129   if (lt.is_enabled()) {
2130     ResourceMark rm;
2131     LogStream ls(lt);
2132     const char* method_name = method()->name()->as_C_string();
2133     const size_t codecache_capacity = CodeCache::capacity()/1024;
2134     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2135     ls.print("Flushing nmethod %6d/" INTPTR_FORMAT ", level=%d, osr=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2136               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2137               _compile_id, p2i(this), _comp_level, is_osr_method(), is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2138               codecache_capacity, codecache_free_space, method_name, compiler_name());
2139   }
2140 
2141   // We need to deallocate any ExceptionCache data.
2142   // Note that we do not need to grab the nmethod lock for this, it
2143   // better be thread safe if we're disposing of it!
2144   ExceptionCache* ec = exception_cache();
2145   while(ec != nullptr) {
2146     ExceptionCache* next = ec->next();
2147     delete ec;
2148     ec = next;
2149   }
2150   if (_pc_desc_container != nullptr) {
2151     delete _pc_desc_container;
2152   }
2153   delete[] _compiled_ic_data;


2154 
2155   if (_immutable_data != blob_end()) {
2156     os::free(_immutable_data);
2157     _immutable_data = blob_end(); // Valid not null address
2158   }
2159   if (unregister_nmethod) {
2160     Universe::heap()->unregister_nmethod(this);
2161   }
2162   CodeCache::unregister_old_nmethod(this);
2163 
2164   JVMCI_ONLY( _metadata_size = 0; )
2165   CodeBlob::purge();
2166 }
2167 
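// Return the oop recorded at the given index; index 0 denotes null. The load
// goes through the nmethod barrier without keeping the object alive.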
2168 oop nmethod::oop_at(int index) const {
2169   if (index == 0) {
2170     return nullptr;
2171   }
2172 
2173   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2174   return bs_nm->oop_load_no_keepalive(this, index);
2175 }

2197         MethodHandles::clean_dependency_context(call_site);
2198       } else {
2199         InstanceKlass* ik = deps.context_type();
2200         if (ik == nullptr) {
2201           continue;  // ignore things like evol_method
2202         }
2203         // During GC, the liveness of the dependee determines which class needs to be updated.
2204         // The GC may clean dependency contexts concurrently and in parallel.
2205         ik->clean_dependency_context();
2206       }
2207     }
2208   }
2209 }
2210 
2211 void nmethod::post_compiled_method(CompileTask* task) {
2212   task->mark_success();
2213   task->set_nm_content_size(content_size());
2214   task->set_nm_insts_size(insts_size());
2215   task->set_nm_total_size(total_size());
2216 






2217   // JVMTI -- compiled method notification (must be done outside lock)
2218   post_compiled_method_load_event();
2219 
2220   if (CompilationLog::log() != nullptr) {
2221     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2222   }
2223 
2224   const DirectiveSet* directive = task->directive();
2225   maybe_print_nmethod(directive);
2226 }
2227 
2228 // ------------------------------------------------------------------
2229 // post_compiled_method_load_event
2230 // new method for install_code() path
2231 // Transfer information from compilation to jvmti
2232 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2233   // This is a bad time for a safepoint.  We don't want
2234   // this nmethod to get unloaded while we're queueing the event.
2235   NoSafepointVerifier nsv;
2236 

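// Consistency checks on this nmethod: it must live in the code cache, be findable
// through its verified entry point, and have well-formed PcDescs, oops, scopes and
// metadata. Native methods and not-entrant nmethods are skipped.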
2925 void nmethod::verify() {
2926   if (is_not_entrant())
2927     return;
2928 
2929   // assert(oopDesc::is_oop(method()), "must be valid");
2930 
2931   ResourceMark rm;
2932 
2933   if (!CodeCache::contains(this)) {
2934     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
2935   }
2936 
2937   if (is_native_method())
2938     return;
2939 
2940   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
2941   if (nm != this) {
2942     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
2943   }
2944 
2945   for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2946     if (! p->verify(this)) {
2947       tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));




2948     }
2949   }
2950 
2951 #ifdef ASSERT
2952 #if INCLUDE_JVMCI
2953   {
2954     // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
2955     ImmutableOopMapSet* oms = oop_maps();
2956     ImplicitExceptionTable implicit_table(this);
2957     for (uint i = 0; i < implicit_table.len(); i++) {
2958       int exec_offset = (int) implicit_table.get_exec_offset(i);
2959       if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
2960         assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
2961         bool found = false;
2962         for (int i = 0, imax = oms->count(); i < imax; i++) {
2963           if (oms->pair_at(i)->pc_offset() == exec_offset) {
2964             found = true;
2965             break;

2966           }

2967         }
2968         assert(found, "missing oopmap");
2969       }
2970     }
2971   }
2972 #endif
2973 #endif

2974 
2975   VerifyOopsClosure voc(this);
2976   oops_do(&voc);
2977   assert(voc.ok(), "embedded oops must be OK");
2978   Universe::heap()->verify_nmethod(this);
2979 
2980   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
2981          nm->method()->external_name(), p2i(_oops_do_mark_link));
2982   verify_scopes();


2983 
2984   CompiledICLocker nm_verify(this);
2985   VerifyMetadataClosure vmc;
2986   metadata_do(&vmc);
2987 }
2988 
2989 
2990 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
2991 
2992   // Verify IC only when nmethod installation is finished.
2993   if (!is_not_installed()) {
2994     if (CompiledICLocker::is_safe(this)) {
2995       if (is_inline_cache) {
2996         CompiledIC_at(this, call_site);
2997       } else {
2998         CompiledDirectCall::at(call_site);
2999       }
3000     } else {
3001       CompiledICLocker ml_verify(this);
3002       if (is_inline_cache) {

3131                                              p2i(nul_chk_table_end()),
3132                                              nul_chk_table_size());
3133   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3134                                              p2i(handler_table_begin()),
3135                                              p2i(handler_table_end()),
3136                                              handler_table_size());
3137   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3138                                              p2i(scopes_pcs_begin()),
3139                                              p2i(scopes_pcs_end()),
3140                                              scopes_pcs_size());
3141   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3142                                              p2i(scopes_data_begin()),
3143                                              p2i(scopes_data_end()),
3144                                              scopes_data_size());
3145 #if INCLUDE_JVMCI
3146   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3147                                              p2i(speculations_begin()),
3148                                              p2i(speculations_end()),
3149                                              speculations_size());
3150 #endif



3151 }
3152 
3153 void nmethod::print_code() {
3154   ResourceMark m;
3155   ttyLocker ttyl;
3156   // Call the specialized decode method of this class.
3157   decode(tty);
3158 }
3159 
3160 #ifndef PRODUCT  // called InstanceKlass methods are available only then. Declared as PRODUCT_RETURN
3161 
3162 void nmethod::print_dependencies_on(outputStream* out) {
3163   ResourceMark rm;
3164   stringStream st;
3165   st.print_cr("Dependencies:");
3166   for (Dependencies::DepStream deps(this); deps.next(); ) {
3167     deps.print_dependency(&st);
3168     InstanceKlass* ctxk = deps.context_type();
3169     if (ctxk != nullptr) {
3170       if (ctxk->is_dependent_nmethod(this)) {

3230   st->print("scopes:");
3231   if (scopes_pcs_begin() < scopes_pcs_end()) {
3232     st->cr();
3233     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3234       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3235         continue;
3236 
3237       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3238       while (sd != nullptr) {
3239         sd->print_on(st, p);  // print output ends with a newline
3240         sd = sd->sender();
3241       }
3242     }
3243   } else {
3244     st->print_cr(" <list empty>");
3245   }
3246 }
3247 #endif
3248 
3249 #ifndef PRODUCT  // RelocIterator does support printing only then.
3250 void nmethod::print_relocations() {
3251   ResourceMark m;       // in case methods get printed via the debugger
3252   tty->print_cr("relocations:");
3253   RelocIterator iter(this);
3254   iter.print_on(tty);
3255 }
3256 #endif
3257 
3258 void nmethod::print_pcs_on(outputStream* st) {
3259   ResourceMark m;       // in case methods get printed via debugger
3260   st->print("pc-bytecode offsets:");
3261   if (scopes_pcs_begin() < scopes_pcs_end()) {
3262     st->cr();
3263     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3264       p->print_on(st, this);  // print output ends with a newline
3265     }
3266   } else {
3267     st->print_cr(" <list empty>");
3268   }
3269 }
3270 
3271 void nmethod::print_handler_table() {
3272   ExceptionHandlerTable(this).print(code_begin());
3273 }
3274 

4050 void nmethod::update_speculation(JavaThread* thread) {
4051   jlong speculation = thread->pending_failed_speculation();
4052   if (speculation != 0) {
4053     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4054     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4055     thread->set_pending_failed_speculation(0);
4056   }
4057 }
4058 
4059 const char* nmethod::jvmci_name() {
4060   if (jvmci_nmethod_data() != nullptr) {
4061     return jvmci_nmethod_data()->name();
4062   }
4063   return nullptr;
4064 }
4065 
4066 bool nmethod::jvmci_skip_profile_deopt() const {
4067   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4068 }
4069 #endif

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/assembler.inline.hpp"
  26 #include "code/aotCodeCache.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/dependencies.hpp"
  30 #include "code/nativeInst.hpp"
  31 #include "code/nmethod.inline.hpp"
  32 #include "code/scopeDesc.hpp"
  33 #include "compiler/abstractCompiler.hpp"
  34 #include "compiler/compilationLog.hpp"
  35 #include "compiler/compileBroker.hpp"
  36 #include "compiler/compileLog.hpp"
  37 #include "compiler/compilerDirectives.hpp"
  38 #include "compiler/compilerOracle.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"

1004              _method->method_holder()->external_name(),
1005              _method->name()->as_C_string(),
1006              _method->signature()->as_C_string(),
1007              compile_id());
1008   }
1009   return check_evol.has_evol_dependency();
1010 }
1011 
1012 int nmethod::total_size() const {
1013   return
1014     consts_size()        +
1015     insts_size()         +
1016     stub_size()          +
1017     scopes_data_size()   +
1018     scopes_pcs_size()    +
1019     handler_table_size() +
1020     nul_chk_table_size();
1021 }
1022 
1023 const char* nmethod::compile_kind() const {
1024   if (is_osr_method()) return "osr";
1025   if (preloaded())     return "AP";
1026   if (is_aot())        return "A";
1027 
1028   if (method() != nullptr && is_native_method()) {
1029     if (method()->is_continuation_native_intrinsic()) {
1030       return "cnt";
1031     }
1032     return "c2n";
1033   }
1034   return nullptr;
1035 }
1036 
1037 const char* nmethod::compiler_name() const {
1038   return compilertype2name(_compiler_type);
1039 }
1040 
1041 #ifdef ASSERT
1042 class CheckForOopsClosure : public OopClosure {
1043   bool _found_oop = false;
1044  public:
1045   virtual void do_oop(oop* o) { _found_oop = true; }
1046   virtual void do_oop(narrowOop* o) { _found_oop = true; }
1047   bool found_oop() { return _found_oop; }

1113     nm = new (native_nmethod_size, allow_NonNMethod_space)
1114     nmethod(method(), compiler_none, native_nmethod_size,
1115             compile_id, &offsets,
1116             code_buffer, frame_size,
1117             basic_lock_owner_sp_offset,
1118             basic_lock_sp_offset,
1119             oop_maps, mutable_data_size);
1120     DEBUG_ONLY( if (allow_NonNMethod_space) assert_no_oops_or_metadata(nm); )
1121     NOT_PRODUCT(if (nm != nullptr) native_nmethod_stats.note_native_nmethod(nm));
1122   }
1123 
1124   if (nm != nullptr) {
1125     // verify nmethod
1126     DEBUG_ONLY(nm->verify();) // might block
1127 
1128     nm->log_new_nmethod();
1129   }
1130   return nm;
1131 }
1132 
1133 void nmethod::record_nmethod_dependency() {
1134   // To make dependency checking during class loading fast, record
1135   // the nmethod dependencies in the classes it is dependent on.
1136   // This allows the dependency checking code to simply walk the
1137   // class hierarchy above the loaded class, checking only nmethods
1138   // which are dependent on those classes.  The slow way is to
1139   // check every nmethod for dependencies which makes it linear in
1140   // the number of methods compiled.  For applications with a lot of
1141   // classes the slow way is too slow.
1142   for (Dependencies::DepStream deps(this); deps.next(); ) {
1143     if (deps.type() == Dependencies::call_site_target_value) {
1144       // CallSite dependencies are managed on per-CallSite instance basis.
1145       oop call_site = deps.argument_oop(0);
1146       MethodHandles::add_dependent_nmethod(call_site, this);
1147     } else {
1148       InstanceKlass* ik = deps.context_type();
1149       if (ik == nullptr) {
1150         continue;  // ignore things like evol_method
1151       }
1152       // record this nmethod as dependent on this klass
1153       ik->add_dependent_nmethod(this);
1154     }
1155   }
1156 }
1157 
1158 nmethod* nmethod::new_nmethod(const methodHandle& method,
1159   int compile_id,
1160   int entry_bci,
1161   CodeOffsets* offsets,
1162   int orig_pc_offset,
1163   DebugInformationRecorder* debug_info,
1164   Dependencies* dependencies,
1165   CodeBuffer* code_buffer, int frame_size,
1166   OopMapSet* oop_maps,
1167   ExceptionHandlerTable* handler_table,
1168   ImplicitExceptionTable* nul_chk_table,
1169   AbstractCompiler* compiler,
1170   CompLevel comp_level
1171   , AOTCodeEntry* aot_code_entry
1172 #if INCLUDE_JVMCI
1173   , char* speculations,
1174   int speculations_len,
1175   JVMCINMethodData* jvmci_data
1176 #endif
1177 )
1178 {
1179   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1180   code_buffer->finalize_oop_references(method);
1181   // create nmethod
1182   nmethod* nm = nullptr;
1183   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod));
1184 
1185   int immutable_data_size =
1186       adjust_pcs_size(debug_info->pcs_size())
1187     + align_up((int)dependencies->size_in_bytes(), oopSize)
1188     + align_up(handler_table->size_in_bytes()    , oopSize)
1189     + align_up(nul_chk_table->size_in_bytes()    , oopSize)
1190 #if INCLUDE_JVMCI
1191     + align_up(speculations_len                  , oopSize)

1195   // First, allocate space for immutable data in C heap.
1196   address immutable_data = nullptr;
1197   if (immutable_data_size > 0) {
1198     immutable_data = (address)os::malloc(immutable_data_size, mtCode);
1199     if (immutable_data == nullptr) {
1200       vm_exit_out_of_memory(immutable_data_size, OOM_MALLOC_ERROR, "nmethod: no space for immutable data");
1201       return nullptr;
1202     }
1203   }
1204 
1205   int mutable_data_size = required_mutable_data_size(code_buffer
1206     JVMCI_ONLY(COMMA (compiler->is_jvmci() ? jvmci_data->size() : 0)));
1207 
1208   {
1209     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1210 
1211     nm = new (nmethod_size, comp_level)
1212     nmethod(method(), compiler->type(), nmethod_size, immutable_data_size, mutable_data_size,
1213             compile_id, entry_bci, immutable_data, offsets, orig_pc_offset,
1214             debug_info, dependencies, code_buffer, frame_size, oop_maps,
1215             handler_table, nul_chk_table, compiler, comp_level, aot_code_entry
1216 #if INCLUDE_JVMCI
1217             , speculations,
1218             speculations_len,
1219             jvmci_data
1220 #endif
1221             );
1222 
1223     if (nm != nullptr) {
1224       nm->record_nmethod_dependency();
1225       NOT_PRODUCT(note_java_nmethod(nm));
1226     }
1227   }
1228   // Do verification and logging outside CodeCache_lock.
1229   if (nm != nullptr) {
1230 
1231 #ifdef ASSERT
1232     LogTarget(Debug, aot, codecache, nmethod) log;
1233     if (log.is_enabled()) {
1234       LogStream out(log);
1235       out.print_cr("== new_nmethod 2");
1236       FlagSetting fs(PrintRelocations, true);
1237       nm->print_on_impl(&out);
1238       nm->decode(&out);
1239     }
1240 #endif
1241 
1242     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1243     DEBUG_ONLY(nm->verify();)
1244     nm->log_new_nmethod();
1245   }
1246   return nm;
1247 }
1248 
1249 nmethod* nmethod::restore(address code_cache_buffer,
1250                           const methodHandle& method,
1251                           int compile_id,
1252                           address reloc_data,
1253                           GrowableArray<Handle>& oop_list,
1254                           GrowableArray<Metadata*>& metadata_list,
1255                           ImmutableOopMapSet* oop_maps,
1256                           address immutable_data,
1257                           GrowableArray<Handle>& reloc_imm_oop_list,
1258                           GrowableArray<Metadata*>& reloc_imm_metadata_list,
1259                           AOTCodeReader* aot_code_reader)
1260 {
1261   CodeBlob::restore(code_cache_buffer, "nmethod", reloc_data, oop_maps);
1262   nmethod* nm = (nmethod*)code_cache_buffer;
1263   nm->set_method(method());
1264   nm->_compile_id = compile_id;
1265   nm->set_immutable_data(immutable_data);
1266   nm->copy_values(&oop_list);
1267   nm->copy_values(&metadata_list);
1268 
1269   aot_code_reader->fix_relocations(nm, &reloc_imm_oop_list, &reloc_imm_metadata_list);
1270 
1271 #ifndef PRODUCT
1272   nm->asm_remarks().init();
1273   aot_code_reader->read_asm_remarks(nm->asm_remarks(), /* use_string_table */ false);
1274   nm->dbg_strings().init();
1275   aot_code_reader->read_dbg_strings(nm->dbg_strings(), /* use_string_table */ false);
1276 #endif
1277 
1278   // Flush the code block
1279   ICache::invalidate_range(nm->code_begin(), nm->code_size());
1280 
1281   // Create cache after PcDesc data is copied - it will be used to initialize cache
1282   nm->_pc_desc_container = new PcDescContainer(nm->scopes_pcs_begin());
1283 
1284   nm->set_aot_code_entry(aot_code_reader->aot_code_entry());
1285 
1286   nm->post_init();
1287   return nm;
1288 }
1289 
1290 nmethod* nmethod::new_nmethod(nmethod* archived_nm,
1291                               const methodHandle& method,
1292                               AbstractCompiler* compiler,
1293                               int compile_id,
1294                               address reloc_data,
1295                               GrowableArray<Handle>& oop_list,
1296                               GrowableArray<Metadata*>& metadata_list,
1297                               ImmutableOopMapSet* oop_maps,
1298                               address immutable_data,
1299                               GrowableArray<Handle>& reloc_imm_oop_list,
1300                               GrowableArray<Metadata*>& reloc_imm_metadata_list,
1301                               AOTCodeReader* aot_code_reader)
1302 {
1303   nmethod* nm = nullptr;
1304   int nmethod_size = archived_nm->size();
1305   // create nmethod
1306   {
1307     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1308     address code_cache_buffer = (address)CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(archived_nm->comp_level()));
1309     if (code_cache_buffer != nullptr) {
1310       nm = archived_nm->restore(code_cache_buffer,
1311                                 method,
1312                                 compile_id,
1313                                 reloc_data,
1314                                 oop_list,
1315                                 metadata_list,
1316                                 oop_maps,
1317                                 immutable_data,
1318                                 reloc_imm_oop_list,
1319                                 reloc_imm_metadata_list,
1320                                 aot_code_reader);
1321       nm->record_nmethod_dependency();
1322       NOT_PRODUCT(note_java_nmethod(nm));
1323     }
1324   }
1325   // Do verification and logging outside CodeCache_lock.
1326   if (nm != nullptr) {
1327 #ifdef ASSERT
1328     LogTarget(Debug, aot, codecache, nmethod) log;
1329     if (log.is_enabled()) {
1330       LogStream out(log);
1331       out.print_cr("== new_nmethod 2");
1332       FlagSetting fs(PrintRelocations, true);
1333       nm->print_on_impl(&out);
1334       nm->decode(&out);
1335     }
1336 #endif
1337     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
1338     DEBUG_ONLY(nm->verify();)
1339     nm->log_new_nmethod();
1340   }
1341   return nm;
1342 }
1343 
1344 // Fill in default values for various fields
1345 void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
1346   // avoid uninitialized fields, even for short time periods
1347   _exception_cache            = nullptr;
1348   _gc_data                    = nullptr;
1349   _oops_do_mark_link          = nullptr;
1350   _compiled_ic_data           = nullptr;
1351 
1352   _is_unloading_state         = 0;
1353   _state                      = not_installed;
1354 
1355   _has_unsafe_access          = 0;
1356   _has_method_handle_invokes  = 0;
1357   _has_wide_vectors           = 0;
1358   _has_monitors               = 0;
1359   _has_scoped_access          = 0;
1360   _has_flushed_dependencies   = 0;
1361   _is_unlinked                = 0;
1362   _load_reported              = 0; // jvmti state
1363   _preloaded                  = 0;
1364   _has_clinit_barriers        = 0;
1365 
1366   _used                       = false;
1367   _deoptimization_status      = not_marked;
1368 
1369   // SECT_CONSTS is first in code buffer so the offset should be 0.
1370   int consts_offset = code_buffer->total_offset_of(code_buffer->consts());
1371   assert(consts_offset == 0, "const_offset: %d", consts_offset);
1372 
1373   _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
1374 
1375   CHECKED_CAST(_entry_offset,              uint16_t, (offsets->value(CodeOffsets::Entry)));
1376   CHECKED_CAST(_verified_entry_offset,     uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
1377 
1378   _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
1379 }
1380 
1381 // Post initialization
1382 void nmethod::post_init() {
1383   clear_unloading_state();
1384 
1385   finalize_relocations();
1386 

1419 
1420     _osr_entry_point         = nullptr;
1421     _pc_desc_container       = nullptr;
1422     _entry_bci               = InvocationEntryBci;
1423     _compile_id              = compile_id;
1424     _comp_level              = CompLevel_none;
1425     _compiler_type           = type;
1426     _orig_pc_offset          = 0;
1427     _num_stack_arg_slots     = 0;
1428 
1429     if (offsets->value(CodeOffsets::Exceptions) != -1) {
1430       // Continuation enter intrinsic
1431       _exception_offset      = code_offset() + offsets->value(CodeOffsets::Exceptions);
1432     } else {
1433       _exception_offset      = 0;
1434     }
1435     // Native wrappers do not have deopt handlers. Make the values
1436     // something that will never match a pc like the nmethod vtable entry
1437     _deopt_handler_offset    = 0;
1438     _deopt_mh_handler_offset = 0;
1439     _aot_code_entry          = nullptr;
1440     _method_profiling_count  = 0;
1441     _unwind_handler_offset   = 0;
1442 
1443     CHECKED_CAST(_oops_size, uint16_t, align_up(code_buffer->total_oop_size(), oopSize));
1444     uint16_t metadata_size;
1445     CHECKED_CAST(metadata_size, uint16_t, align_up(code_buffer->total_metadata_size(), wordSize));
1446     JVMCI_ONLY( _metadata_size = metadata_size; )
1447     assert(_mutable_data_size == _relocation_size + metadata_size,
1448            "wrong mutable data size: %d != %d + %d",
1449            _mutable_data_size, _relocation_size, metadata_size);
1450 
1451     // native wrapper does not have read-only data but we need a unique non-null address
1452     _immutable_data          = blob_end();
1453     _immutable_data_size     = 0;
1454     _nul_chk_table_offset    = 0;
1455     _handler_table_offset    = 0;
1456     _scopes_pcs_offset       = 0;
1457     _scopes_data_offset      = 0;
1458 #if INCLUDE_JVMCI
1459     _speculations_offset     = 0;
1460 #endif

1480     // This is both handled in decode2(), called via print_code() -> decode()
1481     if (PrintNativeNMethods) {
1482       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
1483       print_code();
1484       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1485 #if defined(SUPPORT_DATA_STRUCTS)
1486       if (AbstractDisassembler::show_structs()) {
1487         if (oop_maps != nullptr) {
1488           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
1489           oop_maps->print_on(tty);
1490           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1491         }
1492       }
1493 #endif
1494     } else {
1495       print(); // print the header part only.
1496     }
1497 #if defined(SUPPORT_DATA_STRUCTS)
1498     if (AbstractDisassembler::show_structs()) {
1499       if (PrintRelocations) {
1500         print_relocations_on(tty);
1501         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1502       }
1503     }
1504 #endif
1505     if (xtty != nullptr) {
1506       xtty->tail("print_native_nmethod");
1507     }
1508   }
1509 }
1510 
1511 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
1512   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
1513 }
1514 
1515 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
1516   // Try MethodNonProfiled and MethodProfiled.
1517   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
1518   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
1519   // Try NonNMethod or give up.
1520   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);

1524 nmethod::nmethod(
1525   Method* method,
1526   CompilerType type,
1527   int nmethod_size,
1528   int immutable_data_size,
1529   int mutable_data_size,
1530   int compile_id,
1531   int entry_bci,
1532   address immutable_data,
1533   CodeOffsets* offsets,
1534   int orig_pc_offset,
1535   DebugInformationRecorder* debug_info,
1536   Dependencies* dependencies,
1537   CodeBuffer *code_buffer,
1538   int frame_size,
1539   OopMapSet* oop_maps,
1540   ExceptionHandlerTable* handler_table,
1541   ImplicitExceptionTable* nul_chk_table,
1542   AbstractCompiler* compiler,
1543   CompLevel comp_level
1544   , AOTCodeEntry* aot_code_entry
1545 #if INCLUDE_JVMCI
1546   , char* speculations,
1547   int speculations_len,
1548   JVMCINMethodData* jvmci_data
1549 #endif
1550   )
1551   : CodeBlob("nmethod", CodeBlobKind::Nmethod, code_buffer, nmethod_size, sizeof(nmethod),
1552              offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, mutable_data_size),
1553   _deoptimization_generation(0),
1554   _gc_epoch(CodeCache::gc_epoch()),
1555   _method(method),
1556   _osr_link(nullptr)
1557 {
1558   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
1559   {
1560     DEBUG_ONLY(NoSafepointVerifier nsv;)
1561     assert_locked_or_safepoint(CodeCache_lock);
1562 
1563     init_defaults(code_buffer, offsets);
1564     _aot_code_entry          = aot_code_entry;
1565     _method_profiling_count  = 0;
1566 
1567     _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
1568     _entry_bci       = entry_bci;
1569     _compile_id      = compile_id;
1570     _comp_level      = comp_level;
1571     _compiler_type   = type;
1572     _orig_pc_offset  = orig_pc_offset;
1573 
1574     _num_stack_arg_slots = entry_bci != InvocationEntryBci ? 0 : _method->constMethod()->num_stack_arg_slots();
1575 
1576     set_ctable_begin(header_begin() + content_offset());
1577 
1578 #if INCLUDE_JVMCI
1579     if (compiler->is_jvmci()) {
1580       // JVMCI might not produce any stub sections
1581       if (offsets->value(CodeOffsets::Exceptions) != -1) {
1582         _exception_offset        = code_offset() + offsets->value(CodeOffsets::Exceptions);
1583       } else {
1584         _exception_offset        = -1;
1585       }

1676 #if INCLUDE_JVMCI
1677     // Copy speculations to nmethod
1678     if (speculations_size() != 0) {
1679       memcpy(speculations_begin(), speculations, speculations_len);
1680     }
1681 #endif
1682 
1683     post_init();
1684 
1685     // We use the entry point information to determine whether a method is
1686     // static or non-static.
1687     assert(compiler->is_c2() || compiler->is_jvmci() ||
1688            _method->is_static() == (entry_point() == verified_entry_point()),
1689            "entry points must be the same for static methods and vice versa");
1690   }
1691 }
1692 
1693 // Print a short set of xml attributes to identify this nmethod.  The
1694 // output should be embedded in some other element.
1695 void nmethod::log_identity(xmlStream* log) const {
1696   assert(log->inside_attrs_or_error(), "printing attributes");
1697   log->print(" compile_id='%d'", compile_id());
1698   const char* nm_kind = compile_kind();
1699   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
1700   log->print(" compiler='%s'", compiler_name());
1701   if (TieredCompilation) {
1702     log->print(" compile_level='%d'", comp_level());
1703   }
1704 #if INCLUDE_JVMCI
1705   if (jvmci_nmethod_data() != nullptr) {
1706     const char* jvmci_name = jvmci_nmethod_data()->name();
1707     if (jvmci_name != nullptr) {
1708       log->print(" jvmci_mirror_name='");
1709       log->text("%s", jvmci_name);
1710       log->print("'");
1711     }
1712   }
1713 #endif
1714 }
1715 
1716 
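     // Emits the named section's offset as an xml attribute, but only when the
     // section is non-empty. Illustrative expansion of "LOG_OFFSET(log, insts);":
     //   if (p2i(insts_end()) - p2i(insts_begin()))
     //     log->print(" insts_offset='%zd'", p2i(insts_begin()) - p2i(this));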
1717 #define LOG_OFFSET(log, name)                    \
1718   if (p2i(name##_end()) - p2i(name##_begin())) \
1719     log->print(" " XSTR(name) "_offset='%zd'"    , \
1720                p2i(name##_begin()) - p2i(this))
1721 
1722 

1803       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1804       if (oop_maps() != nullptr) {
1805         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1806         oop_maps()->print_on(tty);
1807         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1808       }
1809     }
1810 #endif
1811   } else {
1812     print(); // print the header part only.
1813   }
1814 
1815 #if defined(SUPPORT_DATA_STRUCTS)
1816   if (AbstractDisassembler::show_structs()) {
1817     methodHandle mh(Thread::current(), _method);
1818     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDebugInfo)) {
1819       print_scopes();
1820       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1821     }
1822     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommandEnum::PrintRelocations)) {
1823       print_relocations_on(tty);
1824       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1825     }
1826     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommandEnum::PrintDependencies)) {
1827       print_dependencies_on(tty);
1828       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1829     }
1830     if (printmethod || PrintExceptionHandlers) {
1831       print_handler_table();
1832       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1833       print_nul_chk_table();
1834       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1835     }
1836 
1837     if (printmethod) {
1838       print_recorded_oops();
1839       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1840       print_recorded_metadata();
1841       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1842     }
1843   }
1844 #endif
1845 
1846   if (xtty != nullptr) {
1847     xtty->tail("print_nmethod");
1848   }
1849 }
1850 
1851 
1852 // Promote one word from an assembly-time handle to a live embedded oop.
1853 inline void nmethod::initialize_immediate_oop(oop* dest, jobject handle) {
1854   if (handle == nullptr ||
1855       // As a special case, IC oops are initialized to 1 or -1.
1856       handle == (jobject) Universe::non_oop_word()) {
1857     *(void**)dest = handle;
1858   } else {
1859     *dest = JNIHandles::resolve_non_null(handle);
1860   }
1861 }
1862 
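     // Copy the oops recorded during compilation (as Handles) into the
     // nmethod's embedded oop section.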
1863 void nmethod::copy_values(GrowableArray<Handle>* array) {
1864   int length = array->length();
1865   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1866   oop* dest = oops_begin();
1867   for (int index = 0 ; index < length; index++) {
1868     dest[index] = array->at(index)();
1869   }
1870 }
1871 
1872 // This overload must have the same name because it is called from a template
1873 void nmethod::copy_values(GrowableArray<jobject>* array) {
1874   int length = array->length();
1875   assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1876   oop* dest = oops_begin();
1877   for (int index = 0 ; index < length; index++) {
1878     initialize_immediate_oop(&dest[index], array->at(index));
1879   }
1880 
1881   // Now we can fix up all the oops in the code.  We need to do this
1882   // in the code because the assembler uses jobjects as placeholders.
1883   // The code and relocations have already been initialized by the
1884   // CodeBlob constructor, so it is valid even at this early point to
1885   // iterate over relocations and patch the code.
1886   fix_oop_relocations(nullptr, nullptr, /*initialize_immediates=*/ true);
1887 }
1888 
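     // Copy the Metadata* values recorded during compilation into the
     // nmethod's metadata section.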
1889 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1890   int length = array->length();

1898 void nmethod::fix_oop_relocations(address begin, address end, bool initialize_immediates) {
1899   // re-patch all oop-bearing instructions, just in case some oops moved
1900   RelocIterator iter(this, begin, end);
1901   while (iter.next()) {
1902     if (iter.type() == relocInfo::oop_type) {
1903       oop_Relocation* reloc = iter.oop_reloc();
1904       if (initialize_immediates && reloc->oop_is_immediate()) {
1905         oop* dest = reloc->oop_addr();
1906         jobject obj = *reinterpret_cast<jobject*>(dest);
1907         initialize_immediate_oop(dest, obj);
1908       }
1909       // Refresh the oop-related bits of this instruction.
1910       reloc->fix_oop_relocation();
1911     } else if (iter.type() == relocInfo::metadata_type) {
1912       metadata_Relocation* reloc = iter.metadata_reloc();
1913       reloc->fix_metadata_relocation();
1914     }
1915   }
1916 }
1917 
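     // Collect every immediate (embedded) oop and Metadata* referenced by this
     // nmethod's relocations into the given lists.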
1918 void nmethod::create_reloc_immediates_list(JavaThread* thread, GrowableArray<Handle>& oop_list, GrowableArray<Metadata*>& metadata_list) {
1919   RelocIterator iter(this);
1920   while (iter.next()) {
1921     if (iter.type() == relocInfo::oop_type) {
1922       oop_Relocation* reloc = iter.oop_reloc();
1923       if (reloc->oop_is_immediate()) {
1924         oop dest = reloc->oop_value();
1925         Handle h(thread, dest);
1926         oop_list.append(h);
1927       }
1928     } else if (iter.type() == relocInfo::metadata_type) {
1929       metadata_Relocation* reloc = iter.metadata_reloc();
1930       if (reloc->metadata_is_immediate()) {
1931         Metadata* m = reloc->metadata_value();
1932         metadata_list.append(m);
1933       }
1934     }
1935   }
1936 }
1937 
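     // Patch the post-call nop at 'pc' so it records the oopmap slot and the
     // pc's offset from the start of the nmethod (see finalize_relocations() below).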
1938 static void install_post_call_nop_displacement(nmethod* nm, address pc) {
1939   NativePostCallNop* nop = nativePostCallNop_at((address) pc);
1940   intptr_t cbaddr = (intptr_t) nm;
1941   intptr_t offset = ((intptr_t) pc) - cbaddr;
1942 
1943   int oopmap_slot = nm->oop_maps()->find_slot_for_offset(int((intptr_t) pc - (intptr_t) nm->code_begin()));
1944   if (oopmap_slot < 0) { // this can happen during asynchronous (non-safepoint) stack walks
1945     log_debug(codecache)("failed to find oopmap for cb: " INTPTR_FORMAT " offset: %d", cbaddr, (int) offset);
1946   } else if (!nop->patch(oopmap_slot, offset)) {
1947     log_debug(codecache)("failed to encode %d %d", oopmap_slot, (int) offset);
1948   }
1949 }
1950 
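     // One-time pass over the relocations once the code has been copied into the
     // code cache: eagerly fill in post-call nop data and gather virtual call
     // sites for further processing.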
1951 void nmethod::finalize_relocations() {
1952   NoSafepointVerifier nsv;
1953 
1954   GrowableArray<NativeMovConstReg*> virtual_call_data;
1955 
1956   // Make sure that post-call nops are filled in with nmethod offsets eagerly, so
1957   // we don't have to race with deoptimization

2084   // be alive during the previous completed marking cycle.
2085   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
2086 }
2087 
2088 void nmethod::inc_decompile_count() {
2089   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
2090   // Could be gated by ProfileTraps, but do not bother...
2091 #if INCLUDE_JVMCI
2092   if (jvmci_skip_profile_deopt()) {
2093     return;
2094   }
2095 #endif
2096   Method* m = method();
2097   if (m == nullptr)  return;
2098   MethodData* mdo = m->method_data();
2099   if (mdo == nullptr)  return;
2100   // There is a benign race here.  See comments in methodData.hpp.
2101   mdo->inc_decompile_count();
2102 }
2103 
2104 void nmethod::inc_method_profiling_count() {
2105   Atomic::inc(&_method_profiling_count);
2106 }
2107 
2108 uint64_t nmethod::method_profiling_count() {
2109   return _method_profiling_count;
2110 }
2111 
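     // Attempt to advance _state to new_state under NMethodState_lock.
     // Transitions are monotonic: moving to the same or a lower state is refused.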
2112 bool nmethod::try_transition(signed char new_state_int) {
2113   signed char new_state = new_state_int;
2114   assert_lock_strong(NMethodState_lock);
2115   signed char old_state = _state;
2116   if (old_state >= new_state) {
2117     // Ensure monotonicity of transitions.
2118     return false;
2119   }
2120   Atomic::store(&_state, new_state);
2121   return true;
2122 }
2123 
2124 void nmethod::invalidate_osr_method() {
2125   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
2126   // Remove from list of active nmethods
2127   if (method() != nullptr) {
2128     method()->method_holder()->remove_osr_nmethod(this);
2129   }
2130 }
2131 

2141     }
2142   }
2143 
2144   ResourceMark rm;
2145   stringStream ss(NEW_RESOURCE_ARRAY(char, 256), 256);
2146   ss.print("made not entrant: %s", invalidation_reason_to_string(invalidation_reason));
2147 
2148   CompileTask::print_ul(this, ss.freeze());
2149   if (PrintCompilation) {
2150     print_on_with_msg(tty, ss.freeze());
2151   }
2152 }
2153 
2154 void nmethod::unlink_from_method() {
2155   if (method() != nullptr) {
2156     method()->unlink_code(this);
2157   }
2158 }
2159 
2160 // Invalidate code
2161 bool nmethod::make_not_entrant(InvalidationReason invalidation_reason, bool keep_aot_entry) {
2162   // This can be called while the system is already at a safepoint, which is OK
2163   NoSafepointVerifier nsv;
2164 
2165   if (is_unloading()) {
2166     // If the nmethod is unloading, then it is already not entrant through
2167     // the nmethod entry barriers. No need to do anything; GC will unload it.
2168     return false;
2169   }
2170 
2171   if (Atomic::load(&_state) == not_entrant) {
2172     // Avoid taking the lock if already in required state.
2173     // This is safe from races because the state is an end-state,
2174     // which the nmethod cannot back out of once entered.
2175     // No need for fencing either.
2176     return false;
2177   }
2178 
2179   {
2180     // Enter critical section.  Does not block for safepoint.
2181     ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

2203     }
2204 
2205     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2206     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
2207       // If nmethod entry barriers are not supported, we won't mark
2208       // nmethods as on-stack when they become on-stack. So we
2209       // degrade to a less accurate flushing strategy, for now.
2210       mark_as_maybe_on_stack();
2211     }
2212 
2213     // Change state
2214     bool success = try_transition(not_entrant);
2215     assert(success, "Transition can't fail");
2216 
2217     // Log the transition once
2218     log_state_change(invalidation_reason);
2219 
2220     // Remove nmethod from method.
2221     unlink_from_method();
2222 
2223     if (!keep_aot_entry) {
2224       // Keep the AOT code if it was simply replaced;
2225       // otherwise, make it not entrant too.
2226       AOTCodeCache::invalidate(_aot_code_entry);
2227     }
2228 
2229     CompileBroker::log_not_entrant(this);
2230   } // leave critical region under NMethodState_lock
2231 
2232 #if INCLUDE_JVMCI
2233   // Invalidating the nmethod mirror can't be done while holding the NMethodState_lock
2234   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
2235   if (nmethod_data != nullptr) {
2236     nmethod_data->invalidate_nmethod_mirror(this, invalidation_reason);
2237   }
2238 #endif
2239 
2240 #ifdef ASSERT
2241   if (is_osr_method() && method() != nullptr) {
2242     // Make sure the osr nmethod is invalidated, i.e. no longer on the list
2243     bool found = method()->method_holder()->remove_osr_nmethod(this);
2244     assert(!found, "osr nmethod should have been invalidated");
2245   }
2246 #endif
2247 
2248   return true;
2249 }

2274     nmethod_data->invalidate_nmethod_mirror(this, is_cold() ?
2275             nmethod::InvalidationReason::UNLOADING_COLD :
2276             nmethod::InvalidationReason::UNLOADING);
2277   }
2278 #endif
2279 
2280   // Post the unload event before flushing, as the jmethodID is still being used
2281   post_compiled_method_unload();
2282 
2283   // Register for flushing when it is safe. For concurrent class unloading,
2284   // that would be after the unloading handshake, and for STW class unloading
2285   // that would be when getting back to the VM thread.
2286   ClassUnloadingContext::context()->register_unlinked_nmethod(this);
2287 }
2288 
2289 void nmethod::purge(bool unregister_nmethod) {
2290 
2291   MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
2292 
2293   // completely deallocate this method
2294   Events::log_nmethod_flush(Thread::current(), "flushing %s nmethod " INTPTR_FORMAT, compile_kind(), p2i(this));
2295 
2296   LogTarget(Debug, codecache) lt;
2297   if (lt.is_enabled()) {
2298     ResourceMark rm;
2299     LogStream ls(lt);
2300     const char* method_name = method()->name()->as_C_string();
2301     const size_t codecache_capacity = CodeCache::capacity()/1024;
2302     const size_t codecache_free_space = CodeCache::unallocated_capacity(CodeCache::get_code_blob_type(this))/1024;
2303     ls.print("Flushing %s nmethod %6d/" INTPTR_FORMAT ", level=%d, cold=%d, epoch=" UINT64_FORMAT ", cold_count=" UINT64_FORMAT ". "
2304               "Cache capacity: %zuKb, free space: %zuKb. method %s (%s)",
2305               compile_kind(), _compile_id, p2i(this), _comp_level, is_cold(), _gc_epoch, CodeCache::cold_gc_count(),
2306               codecache_capacity, codecache_free_space, method_name, compiler_name());
2307   }
2308 
2309   // We need to deallocate any ExceptionCache data.
2310   // Note that we do not need to grab the nmethod lock for this; it
2311   // had better be thread safe if we're disposing of it!
2312   ExceptionCache* ec = exception_cache();
2313   while (ec != nullptr) {
2314     ExceptionCache* next = ec->next();
2315     delete ec;
2316     ec = next;
2317   }
2318   if (_pc_desc_container != nullptr) {
2319     delete _pc_desc_container;
2320   }
2321   if (_compiled_ic_data != nullptr) {
2322     delete[] _compiled_ic_data;
2323   }
2324 
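       // The immutable data lives outside the CodeBlob; free it here unless
       // there is none or it is backed by the AOT code cache.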
2325   if (_immutable_data != data_end() && !AOTCodeCache::is_address_in_aot_cache((address)_oop_maps)) {
2326     os::free(_immutable_data);
2327     _immutable_data = blob_end(); // Valid non-null address
2328   }
2329   if (unregister_nmethod) {
2330     Universe::heap()->unregister_nmethod(this);
2331   }
2332   CodeCache::unregister_old_nmethod(this);
2333 
2334   JVMCI_ONLY( _metadata_size = 0; )
2335   CodeBlob::purge();
2336 }
2337 
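     // Oop indexes are 1-based; index 0 denotes null.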
2338 oop nmethod::oop_at(int index) const {
2339   if (index == 0) {
2340     return nullptr;
2341   }
2342 
2343   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
2344   return bs_nm->oop_load_no_keepalive(this, index);
2345 }

2367         MethodHandles::clean_dependency_context(call_site);
2368       } else {
2369         InstanceKlass* ik = deps.context_type();
2370         if (ik == nullptr) {
2371           continue;  // ignore things like evol_method
2372         }
2373         // During GC, the liveness of the dependee determines which class needs to be updated.
2374         // The GC may clean dependency contexts concurrently and in parallel.
2375         ik->clean_dependency_context();
2376       }
2377     }
2378   }
2379 }
2380 
2381 void nmethod::post_compiled_method(CompileTask* task) {
2382   task->mark_success();
2383   task->set_nm_content_size(content_size());
2384   task->set_nm_insts_size(insts_size());
2385   task->set_nm_total_size(total_size());
2386 
2387   // task->is_aot_load() is true only for loaded AOT code.
2388   // nmethod::_aot_code_entry is set for both loaded and stored AOT code
2389   // so that the entry can be invalidated when the nmethod is deoptimized.
2390   // VerifyAOTCode is an option that prevents AOT code from being stored in the archive.
2391   guarantee((_aot_code_entry != nullptr) || !task->is_aot_load() || VerifyAOTCode, "sanity");
2392 
2393   // JVMTI -- compiled method notification (must be done outside lock)
2394   post_compiled_method_load_event();
2395 
2396   if (CompilationLog::log() != nullptr) {
2397     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
2398   }
2399 
2400   const DirectiveSet* directive = task->directive();
2401   maybe_print_nmethod(directive);
2402 }
2403 
2404 // ------------------------------------------------------------------
2405 // post_compiled_method_load_event
2406 // new method for install_code() path
2407 // Transfer information from compilation to jvmti
2408 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
2409   // This is a bad time for a safepoint.  We don't want
2410   // this nmethod to get unloaded while we're queueing the event.
2411   NoSafepointVerifier nsv;
2412 

3101 void nmethod::verify() {
3102   if (is_not_entrant())
3103     return;
3104 
3105   // assert(oopDesc::is_oop(method()), "must be valid");
3106 
3107   ResourceMark rm;
3108 
3109   if (!CodeCache::contains(this)) {
3110     fatal("nmethod at " INTPTR_FORMAT " not in zone", p2i(this));
3111   }
3112 
3113   if (is_native_method())
3114     return;
3115 
3116   nmethod* nm = CodeCache::find_nmethod(verified_entry_point());
3117   if (nm != this) {
3118     fatal("find_nmethod did not find this nmethod (" INTPTR_FORMAT ")", p2i(this));
3119   }
3120 
3121   // Verification can be triggered during shutdown after the AOTCodeCache is closed.
3122   // If the scopes data is in the AOT code cache, we should avoid verification during shutdown.
3123   if (!is_aot() || AOTCodeCache::is_on()) {
3124     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3125       if (! p->verify(this)) {
3126         tty->print_cr("\t\tin nmethod at " INTPTR_FORMAT " (pcs)", p2i(this));
3127       }
3128     }

3129 
3130 #ifdef ASSERT
3131 #if INCLUDE_JVMCI
3132     {
3133       // Verify that implicit exceptions that deoptimize have a PcDesc and OopMap
3134       ImmutableOopMapSet* oms = oop_maps();
3135       ImplicitExceptionTable implicit_table(this);
3136       for (uint i = 0; i < implicit_table.len(); i++) {
3137         int exec_offset = (int) implicit_table.get_exec_offset(i);
3138         if (implicit_table.get_exec_offset(i) == implicit_table.get_cont_offset(i)) {
3139           assert(pc_desc_at(code_begin() + exec_offset) != nullptr, "missing PcDesc");
3140           bool found = false;
3141           for (int i = 0, imax = oms->count(); i < imax; i++) {
3142             if (oms->pair_at(i)->pc_offset() == exec_offset) {
3143               found = true;
3144               break;
3145             }
3146           }
3147           assert(found, "missing oopmap");
3148         }

3149       }
3150     }

3151 #endif
3152 #endif
3153   }
3154 
3155   VerifyOopsClosure voc(this);
3156   oops_do(&voc);
3157   assert(voc.ok(), "embedded oops must be OK");
3158   Universe::heap()->verify_nmethod(this);
3159 
3160   assert(_oops_do_mark_link == nullptr, "_oops_do_mark_link for %s should be nullptr but is " PTR_FORMAT,
3161          nm->method()->external_name(), p2i(_oops_do_mark_link));
3162   if (!is_aot() || AOTCodeCache::is_on()) {
3163     verify_scopes();
3164   }
3165 
3166   CompiledICLocker nm_verify(this);
3167   VerifyMetadataClosure vmc;
3168   metadata_do(&vmc);
3169 }
3170 
3171 
3172 void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) {
3173 
3174   // Verify IC only when nmethod installation is finished.
3175   if (!is_not_installed()) {
3176     if (CompiledICLocker::is_safe(this)) {
3177       if (is_inline_cache) {
3178         CompiledIC_at(this, call_site);
3179       } else {
3180         CompiledDirectCall::at(call_site);
3181       }
3182     } else {
3183       CompiledICLocker ml_verify(this);
3184       if (is_inline_cache) {

3313                                              p2i(nul_chk_table_end()),
3314                                              nul_chk_table_size());
3315   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3316                                              p2i(handler_table_begin()),
3317                                              p2i(handler_table_end()),
3318                                              handler_table_size());
3319   if (scopes_pcs_size   () > 0) st->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3320                                              p2i(scopes_pcs_begin()),
3321                                              p2i(scopes_pcs_end()),
3322                                              scopes_pcs_size());
3323   if (scopes_data_size  () > 0) st->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3324                                              p2i(scopes_data_begin()),
3325                                              p2i(scopes_data_end()),
3326                                              scopes_data_size());
3327 #if INCLUDE_JVMCI
3328   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
3329                                              p2i(speculations_begin()),
3330                                              p2i(speculations_end()),
3331                                              speculations_size());
3332 #endif
3333   if (AOTCodeCache::is_on() && _aot_code_entry != nullptr) {
3334     _aot_code_entry->print(st);
3335   }
3336 }
3337 
3338 void nmethod::print_code() {
3339   ResourceMark m;
3340   ttyLocker ttyl;
3341   // Call the specialized decode method of this class.
3342   decode(tty);
3343 }
3344 
3345 #ifndef PRODUCT  // The InstanceKlass methods called here are available only then. Declared as PRODUCT_RETURN
3346 
3347 void nmethod::print_dependencies_on(outputStream* out) {
3348   ResourceMark rm;
3349   stringStream st;
3350   st.print_cr("Dependencies:");
3351   for (Dependencies::DepStream deps(this); deps.next(); ) {
3352     deps.print_dependency(&st);
3353     InstanceKlass* ctxk = deps.context_type();
3354     if (ctxk != nullptr) {
3355       if (ctxk->is_dependent_nmethod(this)) {

3415   st->print("scopes:");
3416   if (scopes_pcs_begin() < scopes_pcs_end()) {
3417     st->cr();
3418     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3419       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
3420         continue;
3421 
3422       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
3423       while (sd != nullptr) {
3424         sd->print_on(st, p);  // print output ends with a newline
3425         sd = sd->sender();
3426       }
3427     }
3428   } else {
3429     st->print_cr(" <list empty>");
3430   }
3431 }
3432 #endif
3433 
3434 #ifndef PRODUCT  // RelocIterator supports printing only then.
3435 void nmethod::print_relocations_on(outputStream* st) {
3436   ResourceMark m;       // in case methods get printed via the debugger
3437   st->print_cr("relocations:");
3438   RelocIterator iter(this);
3439   iter.print_on(st);
3440 }
3441 #endif
3442 
3443 void nmethod::print_pcs_on(outputStream* st) {
3444   ResourceMark m;       // in case methods get printed via debugger
3445   st->print("pc-bytecode offsets:");
3446   if (scopes_pcs_begin() < scopes_pcs_end()) {
3447     st->cr();
3448     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
3449       p->print_on(st, this);  // print output ends with a newline
3450     }
3451   } else {
3452     st->print_cr(" <list empty>");
3453   }
3454 }
3455 
3456 void nmethod::print_handler_table() {
3457   ExceptionHandlerTable(this).print(code_begin());
3458 }
3459 

4235 void nmethod::update_speculation(JavaThread* thread) {
4236   jlong speculation = thread->pending_failed_speculation();
4237   if (speculation != 0) {
4238     guarantee(jvmci_nmethod_data() != nullptr, "failed speculation in nmethod without failed speculation list");
4239     jvmci_nmethod_data()->add_failed_speculation(this, speculation);
4240     thread->set_pending_failed_speculation(0);
4241   }
4242 }
4243 
4244 const char* nmethod::jvmci_name() {
4245   if (jvmci_nmethod_data() != nullptr) {
4246     return jvmci_nmethod_data()->name();
4247   }
4248   return nullptr;
4249 }
4250 
4251 bool nmethod::jvmci_skip_profile_deopt() const {
4252   return jvmci_nmethod_data() != nullptr && !jvmci_nmethod_data()->profile_deopt();
4253 }
4254 #endif
4255 
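     // Reset fields that are only meaningful at run time before the nmethod is
     // stored in the AOT code cache.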
4256 void nmethod::prepare_for_archiving_impl() {
4257   CodeBlob::prepare_for_archiving_impl();
4258   _deoptimization_generation = 0;
4259   _gc_epoch = 0;
4260   _method_profiling_count = 0;
4261   _osr_link = nullptr;
4262   _method = nullptr;
4263   _immutable_data = nullptr;
4264   _pc_desc_container = nullptr;
4265   _exception_cache = nullptr;
4266   _gc_data = nullptr;
4267   _oops_do_mark_link = nullptr;
4268   _compiled_ic_data = nullptr;
4269   _osr_entry_point = nullptr;
4270   _compile_id = -1;
4271   _deoptimization_status = not_marked;
4272   _is_unloading_state = 0;
4273   _state = not_installed;
4274 }