
src/hotspot/share/code/nmethod.cpp


  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.inline.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/compiledMethod.inline.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "code/nativeInst.hpp"
  32 #include "code/nmethod.hpp"
  33 #include "code/scopeDesc.hpp"

  34 #include "compiler/abstractCompiler.hpp"
  35 #include "compiler/compilationLog.hpp"
  36 #include "compiler/compileBroker.hpp"
  37 #include "compiler/compileLog.hpp"
  38 #include "compiler/compileTask.hpp"
  39 #include "compiler/compilerDirectives.hpp"
  40 #include "compiler/directivesParser.hpp"
  41 #include "compiler/disassembler.hpp"
  42 #include "compiler/oopMap.inline.hpp"
  43 #include "gc/shared/barrierSet.hpp"
  44 #include "gc/shared/barrierSetNMethod.hpp"
  45 #include "gc/shared/classUnloadingContext.hpp"
  46 #include "gc/shared/collectedHeap.hpp"
  47 #include "interpreter/bytecode.hpp"
  48 #include "jvm.h"
  49 #include "logging/log.hpp"
  50 #include "logging/logStream.hpp"
  51 #include "memory/allocation.inline.hpp"
  52 #include "memory/resourceArea.hpp"
  53 #include "memory/universe.hpp"

 428     handler_table_size() +
 429     nul_chk_table_size();
 430 }
 431 
 432 const char* nmethod::compile_kind() const {
 433   if (is_osr_method())     return "osr";
 434   if (method() != nullptr && is_native_method()) {
 435     if (method()->is_continuation_native_intrinsic()) {
 436       return "cnt";
 437     }
 438     return "c2n";
 439   }
 440   return nullptr;
 441 }
 442 
 443 // Fill in default values for various flag fields
 444 void nmethod::init_defaults() {
 445   _state                      = not_installed;
 446   _has_flushed_dependencies   = 0;
 447   _load_reported              = false; // jvmti state

 448 
 449   _oops_do_mark_link       = nullptr;
 450   _osr_link                = nullptr;
 451 #if INCLUDE_RTM_OPT
 452   _rtm_state               = NoRTM;
 453 #endif
 454 }
 455 
 456 #ifdef ASSERT
 457 class CheckForOopsClosure : public OopClosure {
 458   bool _found_oop = false;
 459  public:
 460   virtual void do_oop(oop* o) { _found_oop = true; }
 461   virtual void do_oop(narrowOop* o) { _found_oop = true; }
 462   bool found_oop() { return _found_oop; }
 463 };
 464 class CheckForMetadataClosure : public MetadataClosure {
 465   bool _found_metadata = false;
 466   Metadata* _ignore = nullptr;
 467  public:

 532     debug_only(nm->verify();) // might block
 533 
 534     nm->log_new_nmethod();
 535   }
 536   return nm;
 537 }
 538 
 539 nmethod* nmethod::new_nmethod(const methodHandle& method,
 540   int compile_id,
 541   int entry_bci,
 542   CodeOffsets* offsets,
 543   int orig_pc_offset,
 544   DebugInformationRecorder* debug_info,
 545   Dependencies* dependencies,
 546   CodeBuffer* code_buffer, int frame_size,
 547   OopMapSet* oop_maps,
 548   ExceptionHandlerTable* handler_table,
 549   ImplicitExceptionTable* nul_chk_table,
 550   AbstractCompiler* compiler,
 551   CompLevel comp_level

 552 #if INCLUDE_JVMCI
 553   , char* speculations,
 554   int speculations_len,
 555   JVMCINMethodData* jvmci_data
 556 #endif
 557 )
 558 {
 559   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
 560   code_buffer->finalize_oop_references(method);
 561   // create nmethod
 562   nmethod* nm = nullptr;
 563 #if INCLUDE_JVMCI
 564   int jvmci_data_size = compiler->is_jvmci() ? jvmci_data->size() : 0;
 565 #endif
 566   int nmethod_size =
 567     CodeBlob::allocation_size(code_buffer, sizeof(nmethod))
 568     + adjust_pcs_size(debug_info->pcs_size())
 569     + align_up((int)dependencies->size_in_bytes(), oopSize)
 570     + align_up(handler_table->size_in_bytes()    , oopSize)
 571     + align_up(nul_chk_table->size_in_bytes()    , oopSize)
 572 #if INCLUDE_JVMCI
 573     + align_up(speculations_len                  , oopSize)
 574     + align_up(jvmci_data_size                   , oopSize)
 575 #endif
 576     + align_up(debug_info->data_size()           , oopSize);
 577   {
 578     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 579 
 580     nm = new (nmethod_size, comp_level)
 581     nmethod(method(), compiler->type(), nmethod_size, compile_id, entry_bci, offsets,
 582             orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
 583             oop_maps,
 584             handler_table,
 585             nul_chk_table,
 586             compiler,
 587             comp_level

 588 #if INCLUDE_JVMCI
 589             , speculations,
 590             speculations_len,
 591             jvmci_data
 592 #endif
 593             );
 594 
 595     if (nm != nullptr) {
 596       // To make dependency checking during class loading fast, record
 597       // the nmethod dependencies in the classes it is dependent on.
 598       // This allows the dependency checking code to simply walk the
 599       // class hierarchy above the loaded class, checking only nmethods
 600       // which are dependent on those classes.  The slow way is to
 601       // check every nmethod for dependencies, which makes it linear in
 602       // the number of methods compiled.  For applications with a lot of
 603       // classes, the slow way is too slow.
 604       for (Dependencies::DepStream deps(nm); deps.next(); ) {
 605         if (deps.type() == Dependencies::call_site_target_value) {
 606           // CallSite dependencies are managed on a per-CallSite instance basis.
 607           oop call_site = deps.argument_oop(0);
 608           MethodHandles::add_dependent_nmethod(call_site, nm);
 609         } else {
 610           InstanceKlass* ik = deps.context_type();
 611           if (ik == nullptr) {
 612             continue;  // ignore things like evol_method
 613           }
 614           // record this nmethod as dependent on this klass
 615           ik->add_dependent_nmethod(nm);
 616         }
 617       }
 618       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
 619     }
 620   }
 621   // Do verification and logging outside CodeCache_lock.
 622   if (nm != nullptr) {
 623     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
 624     DEBUG_ONLY(nm->verify();)
 625     nm->log_new_nmethod();
 626   }
 627   return nm;
 628 }
 629 
 630 // For native wrappers
 631 nmethod::nmethod(
 632   Method* method,
 633   CompilerType type,
 634   int nmethod_size,
 635   int compile_id,
 636   CodeOffsets* offsets,
 637   CodeBuffer* code_buffer,
 638   int frame_size,
 639   ByteSize basic_lock_owner_sp_offset,
 640   ByteSize basic_lock_sp_offset,
 641   OopMapSet* oop_maps )
 642   : CompiledMethod(method, "native nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),

 645   _native_receiver_sp_offset(basic_lock_owner_sp_offset),
 646   _native_basic_lock_sp_offset(basic_lock_sp_offset),
 647   _is_unloading_state(0)
 648 {
 649   {
 650     int scopes_data_offset   = 0;
 651     int deoptimize_offset    = 0;
 652     int deoptimize_mh_offset = 0;
 653 
 654     debug_only(NoSafepointVerifier nsv;)
 655     assert_locked_or_safepoint(CodeCache_lock);
 656 
 657     init_defaults();
 658     _comp_level              = CompLevel_none;
 659     _entry_bci               = InvocationEntryBci;
 660     // We have no exception handler or deopt handler; make the
 661     // values something that will never match a pc, like the nmethod vtable entry
 662     _exception_offset        = 0;
 663     _orig_pc_offset          = 0;
 664     _gc_epoch                = CodeCache::gc_epoch();


 665 
 666     _consts_offset           = content_offset()      + code_buffer->total_offset_of(code_buffer->consts());
 667     _stub_offset             = content_offset()      + code_buffer->total_offset_of(code_buffer->stubs());
 668     _oops_offset             = data_offset();
 669     _metadata_offset         = _oops_offset          + align_up(code_buffer->total_oop_size(), oopSize);
 670     scopes_data_offset       = _metadata_offset      + align_up(code_buffer->total_metadata_size(), wordSize);
 671     _scopes_pcs_offset       = scopes_data_offset;
 672     _dependencies_offset     = _scopes_pcs_offset;
 673     _handler_table_offset    = _dependencies_offset;
 674     _nul_chk_table_offset    = _handler_table_offset;
 675     _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
 676 #if INCLUDE_JVMCI
 677     _speculations_offset     = _nul_chk_table_offset;
 678     _jvmci_data_offset       = _speculations_offset;
 679     _nmethod_end_offset      = _jvmci_data_offset;
 680 #else
 681     _nmethod_end_offset      = _nul_chk_table_offset;
 682 #endif
 683     _compile_id              = compile_id;
 684     _entry_point             = code_begin()          + offsets->value(CodeOffsets::Entry);

 721     // This is both handled in decode2(), called via print_code() -> decode()
 722     if (PrintNativeNMethods) {
 723       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
 724       print_code();
 725       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
 726 #if defined(SUPPORT_DATA_STRUCTS)
 727       if (AbstractDisassembler::show_structs()) {
 728         if (oop_maps != nullptr) {
 729           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
 730           oop_maps->print_on(tty);
 731           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
 732         }
 733       }
 734 #endif
 735     } else {
 736       print(); // print the header part only.
 737     }
 738 #if defined(SUPPORT_DATA_STRUCTS)
 739     if (AbstractDisassembler::show_structs()) {
 740       if (PrintRelocations) {
 741         print_relocations();
 742         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
 743       }
 744     }
 745 #endif
 746     if (xtty != nullptr) {
 747       xtty->tail("print_native_nmethod");
 748     }
 749   }
 750 }
 751 
 752 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
 753   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
 754 }
 755 
 756 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
 757   // Try MethodNonProfiled and MethodProfiled.
 758   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
 759   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
 760   // Try NonNMethod or give up.
 761   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
 762 }
 763 
 764 nmethod::nmethod(
 765   Method* method,
 766   CompilerType type,
 767   int nmethod_size,
 768   int compile_id,
 769   int entry_bci,
 770   CodeOffsets* offsets,
 771   int orig_pc_offset,
 772   DebugInformationRecorder* debug_info,
 773   Dependencies* dependencies,
 774   CodeBuffer *code_buffer,
 775   int frame_size,
 776   OopMapSet* oop_maps,
 777   ExceptionHandlerTable* handler_table,
 778   ImplicitExceptionTable* nul_chk_table,
 779   AbstractCompiler* compiler,
 780   CompLevel comp_level

 781 #if INCLUDE_JVMCI
 782   , char* speculations,
 783   int speculations_len,
 784   JVMCINMethodData* jvmci_data
 785 #endif
 786   )
 787   : CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
 788   _compiled_ic_data(nullptr),
 789   _is_unlinked(false),
 790   _native_receiver_sp_offset(in_ByteSize(-1)),
 791   _native_basic_lock_sp_offset(in_ByteSize(-1)),
 792   _is_unloading_state(0)
 793 {
 794   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
 795   {
 796     debug_only(NoSafepointVerifier nsv;)
 797     assert_locked_or_safepoint(CodeCache_lock);
 798 
 799     _deopt_handler_begin = (address) this;
 800     _deopt_mh_handler_begin = (address) this;
 801 
 802     init_defaults();
 803     _entry_bci               = entry_bci;
 804     _compile_id              = compile_id;
 805     _comp_level              = comp_level;
 806     _orig_pc_offset          = orig_pc_offset;
 807     _gc_epoch                = CodeCache::gc_epoch();


 808 
 809     // Section offsets
 810     _consts_offset           = content_offset()      + code_buffer->total_offset_of(code_buffer->consts());
 811     _stub_offset             = content_offset()      + code_buffer->total_offset_of(code_buffer->stubs());
 812     set_ctable_begin(header_begin() + _consts_offset);
 813     _skipped_instructions_size      = code_buffer->total_skipped_instructions_size();
 814 
 815 #if INCLUDE_JVMCI
 816     if (compiler->is_jvmci()) {
 817       // JVMCI might not produce any stub sections
 818       if (offsets->value(CodeOffsets::Exceptions) != -1) {
 819         _exception_offset        = code_offset()          + offsets->value(CodeOffsets::Exceptions);
 820       } else {
 821         _exception_offset = -1;
 822       }
 823       if (offsets->value(CodeOffsets::Deopt) != -1) {
 824         _deopt_handler_begin       = (address) this + code_offset()          + offsets->value(CodeOffsets::Deopt);
 825       } else {
 826         _deopt_handler_begin = nullptr;
 827       }

 901     nul_chk_table->copy_to(this);
 902 
 903 #if INCLUDE_JVMCI
 904     // Copy speculations to nmethod
 905     if (speculations_size() != 0) {
 906       memcpy(speculations_begin(), speculations, speculations_len);
 907     }
 908 #endif
 909 
 910     // We use the entry point information to find out whether a method is
 911     // static or non-static.
 912     assert(compiler->is_c2() || compiler->is_jvmci() ||
 913            _method->is_static() == (entry_point() == _verified_entry_point),
 914            " entry points must be same for static methods and vice versa");
 915   }
 916 }
 917 
 918 // Print a short set of xml attributes to identify this nmethod.  The
 919 // output should be embedded in some other element.
 920 void nmethod::log_identity(xmlStream* log) const {
 921   log->print(" compile_id='%d'", compile_id());

 922   const char* nm_kind = compile_kind();
 923   if (nm_kind != nullptr)  log->print(" compile_kind='%s'", nm_kind);
 924   log->print(" compiler='%s'", compiler_name());
 925   if (TieredCompilation) {
 926     log->print(" level='%d'", comp_level());
 927   }
 928 #if INCLUDE_JVMCI
 929   if (jvmci_nmethod_data() != nullptr) {
 930     const char* jvmci_name = jvmci_nmethod_data()->name();
 931     if (jvmci_name != nullptr) {
 932       log->print(" jvmci_mirror_name='");
 933       log->text("%s", jvmci_name);
 934       log->print("'");
 935     }
 936   }
 937 #endif
 938 }
 939 
 940 
 941 #define LOG_OFFSET(log, name)                    \
 942   if (p2i(name##_end()) - p2i(name##_begin())) \
 943     log->print(" " XSTR(name) "_offset='" INTX_FORMAT "'"    , \
 944                p2i(name##_begin()) - p2i(this))
 945 
 946 

1027       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1028       if (oop_maps() != nullptr) {
1029         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1030         oop_maps()->print_on(tty);
1031         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1032       }
1033     }
1034 #endif
1035   } else {
1036     print(); // print the header part only.
1037   }
1038 
1039 #if defined(SUPPORT_DATA_STRUCTS)
1040   if (AbstractDisassembler::show_structs()) {
1041     methodHandle mh(Thread::current(), _method);
1042     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommand::PrintDebugInfo)) {
1043       print_scopes();
1044       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1045     }
1046     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommand::PrintRelocations)) {
1047       print_relocations();
1048       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1049     }
1050     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommand::PrintDependencies)) {
1051       print_dependencies_on(tty);
1052       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1053     }
1054     if (printmethod || PrintExceptionHandlers) {
1055       print_handler_table();
1056       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1057       print_nul_chk_table();
1058       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1059     }
1060 
1061     if (printmethod) {
1062       print_recorded_oops();
1063       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1064       print_recorded_metadata();
1065       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1066     }
1067   }

1275   Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1276 }
1277 
1278 bool nmethod::is_maybe_on_stack() {
1279   // If the condition below is true, it means that the nmethod was found to
 1280   // be alive during the previous completed marking cycle.
1281   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1282 }
1283 
1284 void nmethod::inc_decompile_count() {
1285   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1286   // Could be gated by ProfileTraps, but do not bother...
1287   Method* m = method();
1288   if (m == nullptr)  return;
1289   MethodData* mdo = m->method_data();
1290   if (mdo == nullptr)  return;
1291   // There is a benign race here.  See comments in methodData.hpp.
1292   mdo->inc_decompile_count();
1293 }
 1294 
1295 bool nmethod::try_transition(signed char new_state_int) {
1296   signed char new_state = new_state_int;
1297   assert_lock_strong(CompiledMethod_lock);
1298   signed char old_state = _state;
1299   if (old_state >= new_state) {
1300     // Ensure monotonicity of transitions.
1301     return false;
1302   }
1303   Atomic::store(&_state, new_state);
1304   return true;
1305 }
1306 
1307 void nmethod::invalidate_osr_method() {
1308   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1309   // Remove from list of active nmethods
1310   if (method() != nullptr) {
1311     method()->method_holder()->remove_osr_nmethod(this);
1312   }
1313 }
1314 

1320                        os::current_thread_id());
1321       log_identity(xtty);
1322       xtty->stamp();
1323       xtty->end_elem();
1324     }
1325   }
1326 
1327   CompileTask::print_ul(this, "made not entrant");
1328   if (PrintCompilation) {
1329     print_on(tty, "made not entrant");
1330   }
1331 }
1332 
1333 void nmethod::unlink_from_method() {
1334   if (method() != nullptr) {
1335     method()->unlink_code(this);
1336   }
1337 }
1338 
1339 // Invalidate code
1340 bool nmethod::make_not_entrant() {
1341   // This can be called while the system is already at a safepoint which is ok
1342   NoSafepointVerifier nsv;
1343 
1344   if (is_unloading()) {
1345     // If the nmethod is unloading, then it is already not entrant through
1346     // the nmethod entry barriers. No need to do anything; GC will unload it.
1347     return false;
1348   }
1349 
1350   if (Atomic::load(&_state) == not_entrant) {
1351     // Avoid taking the lock if already in required state.
1352     // This is safe from races because the state is an end-state,
1353     // which the nmethod cannot back out of once entered.
1354     // No need for fencing either.
1355     return false;
1356   }
1357 
1358   {
1359     // Enter critical section.  Does not block for safepoint.
1360     ConditionalMutexLocker ml(CompiledMethod_lock, !CompiledMethod_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

1383     }
1384 
1385     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
1386     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
1387       // If nmethod entry barriers are not supported, we won't mark
1388       // nmethods as on-stack when they become on-stack. So we
1389       // degrade to a less accurate flushing strategy, for now.
1390       mark_as_maybe_on_stack();
1391     }
1392 
1393     // Change state
1394     bool success = try_transition(not_entrant);
1395     assert(success, "Transition can't fail");
1396 
1397     // Log the transition once
1398     log_state_change();
1399 
1400     // Remove nmethod from method.
1401     unlink_from_method();
 1402 
1403   } // leave critical region under CompiledMethod_lock
1404 
1405 #if INCLUDE_JVMCI
1406   // Invalidate can't occur while holding the Patching lock
1407   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
1408   if (nmethod_data != nullptr) {
1409     nmethod_data->invalidate_nmethod_mirror(this);
1410   }
1411 #endif
1412 
1413 #ifdef ASSERT
1414   if (is_osr_method() && method() != nullptr) {
1415     // Make sure osr nmethod is invalidated, i.e. not on the list
1416     bool found = method()->method_holder()->remove_osr_nmethod(this);
1417     assert(!found, "osr nmethod should have been invalidated");
1418   }
1419 #endif
1420 
1421   return true;
1422 }

1517         MethodHandles::clean_dependency_context(call_site);
1518       } else {
1519         InstanceKlass* ik = deps.context_type();
1520         if (ik == nullptr) {
1521           continue;  // ignore things like evol_method
1522         }
1523         // During GC liveness of dependee determines class that needs to be updated.
1524         // The GC may clean dependency contexts concurrently and in parallel.
1525         ik->clean_dependency_context();
1526       }
1527     }
1528   }
1529 }
1530 
1531 void nmethod::post_compiled_method(CompileTask* task) {
1532   task->mark_success();
1533   task->set_nm_content_size(content_size());
1534   task->set_nm_insts_size(insts_size());
1535   task->set_nm_total_size(total_size());
 1536 
1537   // JVMTI -- compiled method notification (must be done outside lock)
1538   post_compiled_method_load_event();
1539 
1540   if (CompilationLog::log() != nullptr) {
1541     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
1542   }
1543 
1544   const DirectiveSet* directive = task->directive();
1545   maybe_print_nmethod(directive);
1546 }
1547 
1548 // ------------------------------------------------------------------
1549 // post_compiled_method_load_event
1550 // new method for install_code() path
1551 // Transfer information from compilation to jvmti
1552 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
1553   // This is a bad time for a safepoint.  We don't want
1554   // this nmethod to get unloaded while we're queueing the event.
1555   NoSafepointVerifier nsv;
1556 

2430                                              p2i(dependencies_end()),
2431                                              dependencies_size());
2432   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2433                                              p2i(handler_table_begin()),
2434                                              p2i(handler_table_end()),
2435                                              handler_table_size());
2436   if (nul_chk_table_size() > 0) st->print_cr(" nul chk table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2437                                              p2i(nul_chk_table_begin()),
2438                                              p2i(nul_chk_table_end()),
2439                                              nul_chk_table_size());
2440 #if INCLUDE_JVMCI
2441   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2442                                              p2i(speculations_begin()),
2443                                              p2i(speculations_end()),
2444                                              speculations_size());
2445   if (jvmci_data_size   () > 0) st->print_cr(" JVMCI data     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2446                                              p2i(jvmci_data_begin()),
2447                                              p2i(jvmci_data_end()),
2448                                              jvmci_data_size());
2449 #endif



2450 }
2451 
2452 void nmethod::print_code() {
2453   ResourceMark m;
2454   ttyLocker ttyl;
2455   // Call the specialized decode method of this class.
2456   decode(tty);
2457 }
2458 
 2459 #ifndef PRODUCT  // the InstanceKlass methods called here are available only then. Declared as PRODUCT_RETURN
2460 
2461 void nmethod::print_dependencies_on(outputStream* out) {
2462   ResourceMark rm;
2463   stringStream st;
2464   st.print_cr("Dependencies:");
2465   for (Dependencies::DepStream deps(this); deps.next(); ) {
2466     deps.print_dependency(&st);
2467     InstanceKlass* ctxk = deps.context_type();
2468     if (ctxk != nullptr) {
2469       if (ctxk->is_dependent_nmethod(this)) {

2529   st->print("scopes:");
2530   if (scopes_pcs_begin() < scopes_pcs_end()) {
2531     st->cr();
2532     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2533       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
2534         continue;
2535 
2536       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
2537       while (sd != nullptr) {
2538         sd->print_on(st, p);  // print output ends with a newline
2539         sd = sd->sender();
2540       }
2541     }
2542   } else {
2543     st->print_cr(" <list empty>");
2544   }
2545 }
2546 #endif
2547 
 2548 #ifndef PRODUCT  // RelocIterator supports printing only then.
2549 void nmethod::print_relocations() {
2550   ResourceMark m;       // in case methods get printed via the debugger
2551   tty->print_cr("relocations:");
2552   RelocIterator iter(this);
2553   iter.print();
2554 }
2555 #endif
2556 
2557 void nmethod::print_pcs_on(outputStream* st) {
2558   ResourceMark m;       // in case methods get printed via debugger
2559   st->print("pc-bytecode offsets:");
2560   if (scopes_pcs_begin() < scopes_pcs_end()) {
2561     st->cr();
2562     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2563       p->print_on(st, this);  // print output ends with a newline
2564     }
2565   } else {
2566     st->print_cr(" <list empty>");
2567   }
2568 }
2569 
2570 void nmethod::print_handler_table() {
2571   ExceptionHandlerTable(this).print(code_begin());
2572 }
2573 

2888           else obj->print_value_on(&st);
2889           st.print(")");
2890           return st.as_string();
2891         }
2892         case relocInfo::metadata_type: {
2893           stringStream st;
2894           metadata_Relocation* r = iter.metadata_reloc();
2895           Metadata* obj = r->metadata_value();
2896           st.print("metadata(");
2897           if (obj == nullptr) st.print("nullptr");
2898           else obj->print_value_on(&st);
2899           st.print(")");
2900           return st.as_string();
2901         }
2902         case relocInfo::runtime_call_type:
2903         case relocInfo::runtime_call_w_cp_type: {
2904           stringStream st;
2905           st.print("runtime_call");
2906           CallRelocation* r = (CallRelocation*)iter.reloc();
 2907           address dest = r->destination();
2908           CodeBlob* cb = CodeCache::find_blob(dest);
2909           if (cb != nullptr) {
2910             st.print(" %s", cb->name());
2911           } else {
2912             ResourceMark rm;
2913             const int buflen = 1024;
2914             char* buf = NEW_RESOURCE_ARRAY(char, buflen);
2915             int offset;
2916             if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
2917               st.print(" %s", buf);
2918               if (offset != 0) {
2919                 st.print("+%d", offset);
2920               }
2921             }
2922           }
2923           return st.as_string();
2924         }
2925         case relocInfo::virtual_call_type: {
2926           stringStream st;
2927           st.print_raw("virtual_call");

  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.inline.hpp"
  27 #include "code/codeCache.hpp"
  28 #include "code/compiledIC.hpp"
  29 #include "code/compiledMethod.inline.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "code/nativeInst.hpp"
  32 #include "code/nmethod.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "code/SCCache.hpp"
  35 #include "compiler/abstractCompiler.hpp"
  36 #include "compiler/compilationLog.hpp"
  37 #include "compiler/compileBroker.hpp"
  38 #include "compiler/compileLog.hpp"
  39 #include "compiler/compileTask.hpp"
  40 #include "compiler/compilerDirectives.hpp"
  41 #include "compiler/directivesParser.hpp"
  42 #include "compiler/disassembler.hpp"
  43 #include "compiler/oopMap.inline.hpp"
  44 #include "gc/shared/barrierSet.hpp"
  45 #include "gc/shared/barrierSetNMethod.hpp"
  46 #include "gc/shared/classUnloadingContext.hpp"
  47 #include "gc/shared/collectedHeap.hpp"
  48 #include "interpreter/bytecode.hpp"
  49 #include "jvm.h"
  50 #include "logging/log.hpp"
  51 #include "logging/logStream.hpp"
  52 #include "memory/allocation.inline.hpp"
  53 #include "memory/resourceArea.hpp"
  54 #include "memory/universe.hpp"

 429     handler_table_size() +
 430     nul_chk_table_size();
 431 }
 432 
 433 const char* nmethod::compile_kind() const {
 434   if (is_osr_method())     return "osr";
 435   if (method() != nullptr && is_native_method()) {
 436     if (method()->is_continuation_native_intrinsic()) {
 437       return "cnt";
 438     }
 439     return "c2n";
 440   }
 441   return nullptr;
 442 }
 443 
 444 // Fill in default values for various flag fields
 445 void nmethod::init_defaults() {
 446   _state                      = not_installed;
 447   _has_flushed_dependencies   = 0;
 448   _load_reported              = false; // jvmti state
 449   _used                       = false;
 450 
 451   _oops_do_mark_link       = nullptr;
 452   _osr_link                = nullptr;
 453 #if INCLUDE_RTM_OPT
 454   _rtm_state               = NoRTM;
 455 #endif
 456 }
 457 
 458 #ifdef ASSERT
 459 class CheckForOopsClosure : public OopClosure {
 460   bool _found_oop = false;
 461  public:
 462   virtual void do_oop(oop* o) { _found_oop = true; }
 463   virtual void do_oop(narrowOop* o) { _found_oop = true; }
 464   bool found_oop() { return _found_oop; }
 465 };
 466 class CheckForMetadataClosure : public MetadataClosure {
 467   bool _found_metadata = false;
 468   Metadata* _ignore = nullptr;
 469  public:

 534     debug_only(nm->verify();) // might block
 535 
 536     nm->log_new_nmethod();
 537   }
 538   return nm;
 539 }
 540 
 541 nmethod* nmethod::new_nmethod(const methodHandle& method,
 542   int compile_id,
 543   int entry_bci,
 544   CodeOffsets* offsets,
 545   int orig_pc_offset,
 546   DebugInformationRecorder* debug_info,
 547   Dependencies* dependencies,
 548   CodeBuffer* code_buffer, int frame_size,
 549   OopMapSet* oop_maps,
 550   ExceptionHandlerTable* handler_table,
 551   ImplicitExceptionTable* nul_chk_table,
 552   AbstractCompiler* compiler,
 553   CompLevel comp_level
 554   , SCCEntry* scc_entry
 555 #if INCLUDE_JVMCI
 556   , char* speculations,
 557   int speculations_len,
 558   JVMCINMethodData* jvmci_data
 559 #endif
 560 )
 561 {
 562   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
 563   code_buffer->finalize_oop_references(method);
 564   // create nmethod
 565   nmethod* nm = nullptr;
 566 #if INCLUDE_JVMCI
 567   int jvmci_data_size = compiler->is_jvmci() ? jvmci_data->size() : 0;
 568 #endif
 569   int nmethod_size =
 570     CodeBlob::allocation_size(code_buffer, sizeof(nmethod))
 571     + adjust_pcs_size(debug_info->pcs_size())
 572     + align_up((int)dependencies->size_in_bytes(), oopSize)
 573     + align_up(handler_table->size_in_bytes()    , oopSize)
 574     + align_up(nul_chk_table->size_in_bytes()    , oopSize)
 575 #if INCLUDE_JVMCI
 576     + align_up(speculations_len                  , oopSize)
 577     + align_up(jvmci_data_size                   , oopSize)
 578 #endif
 579     + align_up(debug_info->data_size()           , oopSize);
 580   {
 581     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 582 
 583     nm = new (nmethod_size, comp_level)
 584     nmethod(method(), compiler->type(), nmethod_size, compile_id, entry_bci, offsets,
 585             orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
 586             oop_maps,
 587             handler_table,
 588             nul_chk_table,
 589             compiler,
 590             comp_level
 591             , scc_entry
 592 #if INCLUDE_JVMCI
 593             , speculations,
 594             speculations_len,
 595             jvmci_data
 596 #endif
 597             );
 598 
 599     if (nm != nullptr) {
 600       // To make dependency checking during class loading fast, record
 601       // the nmethod dependencies in the classes it is dependent on.
 602       // This allows the dependency checking code to simply walk the
 603       // class hierarchy above the loaded class, checking only nmethods
 604       // which are dependent on those classes.  The slow way is to
 605       // check every nmethod for dependencies, which makes it linear in
 606       // the number of methods compiled.  For applications with a lot of
 607       // classes, the slow way is too slow.
 608       for (Dependencies::DepStream deps(nm); deps.next(); ) {
 609         if (deps.type() == Dependencies::call_site_target_value) {
 610           // CallSite dependencies are managed on a per-CallSite instance basis.
 611           oop call_site = deps.argument_oop(0);
 612           MethodHandles::add_dependent_nmethod(call_site, nm);
 613         } else {
 614           InstanceKlass* ik = deps.context_type();
 615           if (ik == nullptr) {
 616             continue;  // ignore things like evol_method
 617           }
 618           // record this nmethod as dependent on this klass
 619           ik->add_dependent_nmethod(nm);
 620         }
 621       }
 622       NOT_PRODUCT(if (nm != nullptr)  note_java_nmethod(nm));
 623     }
 624   }
 625   // Do verification and logging outside CodeCache_lock.
 626   if (nm != nullptr) {
 627 
 628 #ifdef ASSERT
 629     LogTarget(Debug, scc, nmethod) log;
 630     if (log.is_enabled()) {
 631       tty->print_cr("== new_nmethod 2");
 632       FlagSetting fs(PrintRelocations, true);
 633       nm->print();
 634       nm->decode(tty);
 635     }
 636 #endif
 637 
 638     // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet.
 639     DEBUG_ONLY(nm->verify();)
 640     nm->log_new_nmethod();
 641   }
 642   return nm;
 643 }
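
The comment block above explains why dependencies are recorded on the classes an nmethod depends on: when a class is loaded, only the nmethods recorded on its supertypes have to be re-checked, instead of every compiled method in the code cache. Below is a minimal standalone sketch of that idea; the types and names (KlassModel, NMethodModel, nmethods_to_recheck) are invented for illustration and are not HotSpot's real data structures.

#include <string>
#include <vector>

// Simplified stand-ins for Klass and nmethod (illustration only).
struct NMethodModel { std::string name; };

struct KlassModel {
  KlassModel* super = nullptr;                    // superclass link
  std::vector<NMethodModel*> dependent_nmethods;  // recorded at nmethod registration time
};

// Fast path sketched in the comment: walk only the class hierarchy above the
// newly loaded class and collect the nmethods recorded there, rather than
// scanning every nmethod for dependencies.
static std::vector<NMethodModel*> nmethods_to_recheck(KlassModel* loaded) {
  std::vector<NMethodModel*> result;
  for (KlassModel* k = loaded->super; k != nullptr; k = k->super) {
    result.insert(result.end(),
                  k->dependent_nmethods.begin(),
                  k->dependent_nmethods.end());
  }
  return result;
}
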
 644 
 645 // For native wrappers
 646 nmethod::nmethod(
 647   Method* method,
 648   CompilerType type,
 649   int nmethod_size,
 650   int compile_id,
 651   CodeOffsets* offsets,
 652   CodeBuffer* code_buffer,
 653   int frame_size,
 654   ByteSize basic_lock_owner_sp_offset,
 655   ByteSize basic_lock_sp_offset,
 656   OopMapSet* oop_maps )
 657   : CompiledMethod(method, "native nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),

 660   _native_receiver_sp_offset(basic_lock_owner_sp_offset),
 661   _native_basic_lock_sp_offset(basic_lock_sp_offset),
 662   _is_unloading_state(0)
 663 {
 664   {
 665     int scopes_data_offset   = 0;
 666     int deoptimize_offset    = 0;
 667     int deoptimize_mh_offset = 0;
 668 
 669     debug_only(NoSafepointVerifier nsv;)
 670     assert_locked_or_safepoint(CodeCache_lock);
 671 
 672     init_defaults();
 673     _comp_level              = CompLevel_none;
 674     _entry_bci               = InvocationEntryBci;
 675     // We have no exception handler or deopt handler; make the
 676     // values something that will never match a pc, like the nmethod vtable entry
 677     _exception_offset        = 0;
 678     _orig_pc_offset          = 0;
 679     _gc_epoch                = CodeCache::gc_epoch();
 680     _scc_entry               = nullptr;
 681     _method_profiling_count  = 0;
 682 
 683     _consts_offset           = content_offset()      + code_buffer->total_offset_of(code_buffer->consts());
 684     _stub_offset             = content_offset()      + code_buffer->total_offset_of(code_buffer->stubs());
 685     _oops_offset             = data_offset();
 686     _metadata_offset         = _oops_offset          + align_up(code_buffer->total_oop_size(), oopSize);
 687     scopes_data_offset       = _metadata_offset      + align_up(code_buffer->total_metadata_size(), wordSize);
 688     _scopes_pcs_offset       = scopes_data_offset;
 689     _dependencies_offset     = _scopes_pcs_offset;
 690     _handler_table_offset    = _dependencies_offset;
 691     _nul_chk_table_offset    = _handler_table_offset;
 692     _skipped_instructions_size = code_buffer->total_skipped_instructions_size();
 693 #if INCLUDE_JVMCI
 694     _speculations_offset     = _nul_chk_table_offset;
 695     _jvmci_data_offset       = _speculations_offset;
 696     _nmethod_end_offset      = _jvmci_data_offset;
 697 #else
 698     _nmethod_end_offset      = _nul_chk_table_offset;
 699 #endif
 700     _compile_id              = compile_id;
 701     _entry_point             = code_begin()          + offsets->value(CodeOffsets::Entry);

 738     // This is both handled in decode2(), called via print_code() -> decode()
 739     if (PrintNativeNMethods) {
 740       tty->print_cr("-------------------------- Assembly (native nmethod) ---------------------------");
 741       print_code();
 742       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
 743 #if defined(SUPPORT_DATA_STRUCTS)
 744       if (AbstractDisassembler::show_structs()) {
 745         if (oop_maps != nullptr) {
 746           tty->print("oop maps:"); // oop_maps->print_on(tty) outputs a cr() at the beginning
 747           oop_maps->print_on(tty);
 748           tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
 749         }
 750       }
 751 #endif
 752     } else {
 753       print(); // print the header part only.
 754     }
 755 #if defined(SUPPORT_DATA_STRUCTS)
 756     if (AbstractDisassembler::show_structs()) {
 757       if (PrintRelocations) {
 758         print_relocations_on(tty);
 759         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
 760       }
 761     }
 762 #endif
 763     if (xtty != nullptr) {
 764       xtty->tail("print_native_nmethod");
 765     }
 766   }
 767 }
 768 
 769 void* nmethod::operator new(size_t size, int nmethod_size, int comp_level) throw () {
 770   return CodeCache::allocate(nmethod_size, CodeCache::get_code_blob_type(comp_level));
 771 }
 772 
 773 void* nmethod::operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw () {
 774   // Try MethodNonProfiled and MethodProfiled.
 775   void* return_value = CodeCache::allocate(nmethod_size, CodeBlobType::MethodNonProfiled);
 776   if (return_value != nullptr || !allow_NonNMethod_space) return return_value;
 777   // Try NonNMethod or give up.
 778   return CodeCache::allocate(nmethod_size, CodeBlobType::NonNMethod);
 779 }
 780 
 781 nmethod::nmethod(
 782   Method* method,
 783   CompilerType type,
 784   int nmethod_size,
 785   int compile_id,
 786   int entry_bci,
 787   CodeOffsets* offsets,
 788   int orig_pc_offset,
 789   DebugInformationRecorder* debug_info,
 790   Dependencies* dependencies,
 791   CodeBuffer *code_buffer,
 792   int frame_size,
 793   OopMapSet* oop_maps,
 794   ExceptionHandlerTable* handler_table,
 795   ImplicitExceptionTable* nul_chk_table,
 796   AbstractCompiler* compiler,
 797   CompLevel comp_level
 798   , SCCEntry* scc_entry
 799 #if INCLUDE_JVMCI
 800   , char* speculations,
 801   int speculations_len,
 802   JVMCINMethodData* jvmci_data
 803 #endif
 804   )
 805   : CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
 806   _compiled_ic_data(nullptr),
 807   _is_unlinked(false),
 808   _native_receiver_sp_offset(in_ByteSize(-1)),
 809   _native_basic_lock_sp_offset(in_ByteSize(-1)),
 810   _is_unloading_state(0)
 811 {
 812   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
 813   {
 814     debug_only(NoSafepointVerifier nsv;)
 815     assert_locked_or_safepoint(CodeCache_lock);
 816 
 817     _deopt_handler_begin = (address) this;
 818     _deopt_mh_handler_begin = (address) this;
 819 
 820     init_defaults();
 821     _entry_bci               = entry_bci;
 822     _compile_id              = compile_id;
 823     _comp_level              = comp_level;
 824     _orig_pc_offset          = orig_pc_offset;
 825     _gc_epoch                = CodeCache::gc_epoch();
 826     _scc_entry               = scc_entry;
 827     _method_profiling_count  = 0;
 828 
 829     // Section offsets
 830     _consts_offset           = content_offset()      + code_buffer->total_offset_of(code_buffer->consts());
 831     _stub_offset             = content_offset()      + code_buffer->total_offset_of(code_buffer->stubs());
 832     set_ctable_begin(header_begin() + _consts_offset);
 833     _skipped_instructions_size      = code_buffer->total_skipped_instructions_size();
 834 
 835 #if INCLUDE_JVMCI
 836     if (compiler->is_jvmci()) {
 837       // JVMCI might not produce any stub sections
 838       if (offsets->value(CodeOffsets::Exceptions) != -1) {
 839         _exception_offset        = code_offset()          + offsets->value(CodeOffsets::Exceptions);
 840       } else {
 841         _exception_offset = -1;
 842       }
 843       if (offsets->value(CodeOffsets::Deopt) != -1) {
 844         _deopt_handler_begin       = (address) this + code_offset()          + offsets->value(CodeOffsets::Deopt);
 845       } else {
 846         _deopt_handler_begin = nullptr;
 847       }

 921     nul_chk_table->copy_to(this);
 922 
 923 #if INCLUDE_JVMCI
 924     // Copy speculations to nmethod
 925     if (speculations_size() != 0) {
 926       memcpy(speculations_begin(), speculations, speculations_len);
 927     }
 928 #endif
 929 
 930     // We use the entry point information to find out whether a method is
 931     // static or non-static.
 932     assert(compiler->is_c2() || compiler->is_jvmci() ||
 933            _method->is_static() == (entry_point() == _verified_entry_point),
 934            " entry points must be same for static methods and vice versa");
 935   }
 936 }
 937 
 938 // Print a short set of xml attributes to identify this nmethod.  The
 939 // output should be embedded in some other element.
 940 void nmethod::log_identity(xmlStream* log) const {
 941   assert(log->inside_attrs_or_error(), "printing attributes");
 942   log->print(" code_compile_id='%d'", compile_id());
 943   const char* nm_kind = compile_kind();
 944   if (nm_kind != nullptr)  log->print(" code_compile_kind='%s'", nm_kind);
 945   log->print(" code_compiler='%s'", compiler_name());
 946   if (TieredCompilation) {
 947     log->print(" code_compile_level='%d'", comp_level());
 948   }
 949 #if INCLUDE_JVMCI
 950   if (jvmci_nmethod_data() != nullptr) {
 951     const char* jvmci_name = jvmci_nmethod_data()->name();
 952     if (jvmci_name != nullptr) {
 953       log->print(" jvmci_mirror_name='");
 954       log->text("%s", jvmci_name);
 955       log->print("'");
 956     }
 957   }
 958 #endif
 959 }
 960 
 961 
 962 #define LOG_OFFSET(log, name)                    \
 963   if (p2i(name##_end()) - p2i(name##_begin())) \
 964     log->print(" " XSTR(name) "_offset='" INTX_FORMAT "'"    , \
 965                p2i(name##_begin()) - p2i(this))
 966 
 967 

1048       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1049       if (oop_maps() != nullptr) {
1050         tty->print("oop maps:"); // oop_maps()->print_on(tty) outputs a cr() at the beginning
1051         oop_maps()->print_on(tty);
1052         tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1053       }
1054     }
1055 #endif
1056   } else {
1057     print(); // print the header part only.
1058   }
1059 
1060 #if defined(SUPPORT_DATA_STRUCTS)
1061   if (AbstractDisassembler::show_structs()) {
1062     methodHandle mh(Thread::current(), _method);
1063     if (printmethod || PrintDebugInfo || CompilerOracle::has_option(mh, CompileCommand::PrintDebugInfo)) {
1064       print_scopes();
1065       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1066     }
1067     if (printmethod || PrintRelocations || CompilerOracle::has_option(mh, CompileCommand::PrintRelocations)) {
1068       print_relocations_on(tty);
1069       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1070     }
1071     if (printmethod || PrintDependencies || CompilerOracle::has_option(mh, CompileCommand::PrintDependencies)) {
1072       print_dependencies_on(tty);
1073       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1074     }
1075     if (printmethod || PrintExceptionHandlers) {
1076       print_handler_table();
1077       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1078       print_nul_chk_table();
1079       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1080     }
1081 
1082     if (printmethod) {
1083       print_recorded_oops();
1084       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1085       print_recorded_metadata();
1086       tty->print_cr("- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ");
1087     }
1088   }

1296   Atomic::store(&_gc_epoch, CodeCache::gc_epoch());
1297 }
1298 
1299 bool nmethod::is_maybe_on_stack() {
1300   // If the condition below is true, it means that the nmethod was found to
 1301   // be alive during the previous completed marking cycle.
1302   return Atomic::load(&_gc_epoch) >= CodeCache::previous_completed_gc_marking_cycle();
1303 }
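
is_maybe_on_stack() compares the epoch stored in _gc_epoch with the previous completed GC marking cycle: an nmethod observed at or after that cycle is conservatively treated as possibly still on a stack. A rough standalone illustration of that comparison follows; the counters and the NMethodModel type are invented, standing in for CodeCache::gc_epoch() and CodeCache::previous_completed_gc_marking_cycle().

#include <atomic>
#include <cstdint>

static std::atomic<uint64_t> current_gc_epoch{10};     // stand-in for CodeCache::gc_epoch()
static uint64_t previous_completed_marking_cycle = 8;  // stand-in for the previous completed cycle

struct NMethodModel {
  std::atomic<uint64_t> gc_epoch{0};

  // Analogue of mark_as_maybe_on_stack(): remember when the nmethod was last seen active.
  void mark_as_maybe_on_stack() { gc_epoch.store(current_gc_epoch.load()); }

  // Analogue of is_maybe_on_stack(): seen at or after the previous completed
  // marking cycle means it may still be on a stack, so keep it around.
  bool is_maybe_on_stack() const { return gc_epoch.load() >= previous_completed_marking_cycle; }
};
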
1304 
1305 void nmethod::inc_decompile_count() {
1306   if (!is_compiled_by_c2() && !is_compiled_by_jvmci()) return;
1307   // Could be gated by ProfileTraps, but do not bother...
1308   Method* m = method();
1309   if (m == nullptr)  return;
1310   MethodData* mdo = m->method_data();
1311   if (mdo == nullptr)  return;
1312   // There is a benign race here.  See comments in methodData.hpp.
1313   mdo->inc_decompile_count();
1314 }
1315 
1316 void nmethod::inc_method_profiling_count() {
1317   Atomic::inc(&_method_profiling_count);
1318 }
1319 
1320 uint64_t nmethod::method_profiling_count() {
1321   return _method_profiling_count;
1322 }
1323 
1324 bool nmethod::try_transition(signed char new_state_int) {
1325   signed char new_state = new_state_int;
1326   assert_lock_strong(CompiledMethod_lock);
1327   signed char old_state = _state;
1328   if (old_state >= new_state) {
1329     // Ensure monotonicity of transitions.
1330     return false;
1331   }
1332   Atomic::store(&_state, new_state);
1333   return true;
1334 }
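
try_transition() only lets the state move forward, which is what makes the lock-free "already not_entrant" check in make_not_entrant() below safe: an end state can never be left again. A small self-contained sketch of that monotonic rule; the state values and the single-threaded usage are assumptions for illustration (the real code requires CompiledMethod_lock).

#include <atomic>
#include <cassert>

// Invented state ordering for illustration; only increasing transitions are allowed.
enum State : signed char { not_installed = 0, in_use = 1, not_entrant = 2 };

struct NMethodModel {
  std::atomic<signed char> state{not_installed};

  // Mirrors the monotonicity rule of nmethod::try_transition(); assumes the
  // caller serializes transitions (HotSpot holds CompiledMethod_lock here).
  bool try_transition(State new_state) {
    signed char old_state = state.load();
    if (old_state >= new_state) {
      return false;            // never move backwards or sideways
    }
    state.store(new_state);
    return true;
  }
};

int main() {
  NMethodModel nm;
  bool a = nm.try_transition(in_use);       // forward transition: accepted
  bool b = nm.try_transition(not_entrant);  // forward transition: accepted
  bool c = nm.try_transition(in_use);       // backward transition: rejected
  assert(a && b && !c);
  return 0;
}
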
1335 
1336 void nmethod::invalidate_osr_method() {
1337   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1338   // Remove from list of active nmethods
1339   if (method() != nullptr) {
1340     method()->method_holder()->remove_osr_nmethod(this);
1341   }
1342 }
1343 

1349                        os::current_thread_id());
1350       log_identity(xtty);
1351       xtty->stamp();
1352       xtty->end_elem();
1353     }
1354   }
1355 
1356   CompileTask::print_ul(this, "made not entrant");
1357   if (PrintCompilation) {
1358     print_on(tty, "made not entrant");
1359   }
1360 }
1361 
1362 void nmethod::unlink_from_method() {
1363   if (method() != nullptr) {
1364     method()->unlink_code(this);
1365   }
1366 }
1367 
1368 // Invalidate code
1369 bool nmethod::make_not_entrant(bool make_not_entrant) {
1370   // This can be called while the system is already at a safepoint which is ok
1371   NoSafepointVerifier nsv;
1372 
1373   if (is_unloading()) {
1374     // If the nmethod is unloading, then it is already not entrant through
1375     // the nmethod entry barriers. No need to do anything; GC will unload it.
1376     return false;
1377   }
1378 
1379   if (Atomic::load(&_state) == not_entrant) {
1380     // Avoid taking the lock if already in required state.
1381     // This is safe from races because the state is an end-state,
1382     // which the nmethod cannot back out of once entered.
1383     // No need for fencing either.
1384     return false;
1385   }
1386 
1387   {
1388     // Enter critical section.  Does not block for safepoint.
1389     ConditionalMutexLocker ml(CompiledMethod_lock, !CompiledMethod_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);

1412     }
1413 
1414     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
1415     if (bs_nm == nullptr || !bs_nm->supports_entry_barrier(this)) {
1416       // If nmethod entry barriers are not supported, we won't mark
1417       // nmethods as on-stack when they become on-stack. So we
1418       // degrade to a less accurate flushing strategy, for now.
1419       mark_as_maybe_on_stack();
1420     }
1421 
1422     // Change state
1423     bool success = try_transition(not_entrant);
1424     assert(success, "Transition can't fail");
1425 
1426     // Log the transition once
1427     log_state_change();
1428 
1429     // Remove nmethod from method.
1430     unlink_from_method();
1431 
1432     if (make_not_entrant) {
 1433       // Keep the cached code if the nmethod was simply replaced;
 1434       // otherwise make the cached code not entrant too.
1435       SCCache::invalidate(_scc_entry);
1436     }
1437 
1438     CompileBroker::log_not_entrant(this);
1439   } // leave critical region under CompiledMethod_lock
1440 
1441 #if INCLUDE_JVMCI
1442   // Invalidate can't occur while holding the Patching lock
1443   JVMCINMethodData* nmethod_data = jvmci_nmethod_data();
1444   if (nmethod_data != nullptr) {
1445     nmethod_data->invalidate_nmethod_mirror(this);
1446   }
1447 #endif
1448 
1449 #ifdef ASSERT
1450   if (is_osr_method() && method() != nullptr) {
1451     // Make sure osr nmethod is invalidated, i.e. not on the list
1452     bool found = method()->method_holder()->remove_osr_nmethod(this);
1453     assert(!found, "osr nmethod should have been invalidated");
1454   }
1455 #endif
1456 
1457   return true;
1458 }
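
make_not_entrant() first tests the terminal not_entrant state without the lock (safe because the state is monotonic and can never be left), then re-checks and performs the transition under CompiledMethod_lock. A minimal sketch of that check-then-lock pattern, assuming invented state constants and a plain std::mutex in place of CompiledMethod_lock:

#include <atomic>
#include <mutex>

struct NMethodModel {
  static constexpr signed char in_use = 1;
  static constexpr signed char not_entrant = 2;

  std::atomic<signed char> state{in_use};
  std::mutex lock;                       // stand-in for CompiledMethod_lock

  bool make_not_entrant() {
    // Racy read is fine: not_entrant is an end state, so reading it means the
    // transition already happened; a stale value only costs an extra lock.
    if (state.load() == not_entrant) {
      return false;
    }
    std::lock_guard<std::mutex> guard(lock);
    if (state.load() == not_entrant) {
      return false;                      // another thread got there first
    }
    state.store(not_entrant);            // the real code also logs, unlinks, etc.
    return true;
  }
};
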

1553         MethodHandles::clean_dependency_context(call_site);
1554       } else {
1555         InstanceKlass* ik = deps.context_type();
1556         if (ik == nullptr) {
1557           continue;  // ignore things like evol_method
1558         }
1559         // During GC liveness of dependee determines class that needs to be updated.
1560         // The GC may clean dependency contexts concurrently and in parallel.
1561         ik->clean_dependency_context();
1562       }
1563     }
1564   }
1565 }
1566 
1567 void nmethod::post_compiled_method(CompileTask* task) {
1568   task->mark_success();
1569   task->set_nm_content_size(content_size());
1570   task->set_nm_insts_size(insts_size());
1571   task->set_nm_total_size(total_size());
1572 
 1573   // task->is_scc() is true only for loaded cached code.
 1574   // nmethod::_scc_entry is set for both loaded and stored cached code
 1575   // so that the entry can be invalidated when the nmethod is deoptimized.
 1576   // There is an option to not store cached code in the archive.
1577   guarantee((_scc_entry != nullptr) || !task->is_scc() || VerifyCachedCode, "sanity");
1578 
1579   // JVMTI -- compiled method notification (must be done outside lock)
1580   post_compiled_method_load_event();
1581 
1582   if (CompilationLog::log() != nullptr) {
1583     CompilationLog::log()->log_nmethod(JavaThread::current(), this);
1584   }
1585 
1586   const DirectiveSet* directive = task->directive();
1587   maybe_print_nmethod(directive);
1588 }
1589 
1590 // ------------------------------------------------------------------
1591 // post_compiled_method_load_event
1592 // new method for install_code() path
1593 // Transfer information from compilation to jvmti
1594 void nmethod::post_compiled_method_load_event(JvmtiThreadState* state) {
1595   // This is a bad time for a safepoint.  We don't want
1596   // this nmethod to get unloaded while we're queueing the event.
1597   NoSafepointVerifier nsv;
1598 

2472                                              p2i(dependencies_end()),
2473                                              dependencies_size());
2474   if (handler_table_size() > 0) st->print_cr(" handler table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2475                                              p2i(handler_table_begin()),
2476                                              p2i(handler_table_end()),
2477                                              handler_table_size());
2478   if (nul_chk_table_size() > 0) st->print_cr(" nul chk table  [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2479                                              p2i(nul_chk_table_begin()),
2480                                              p2i(nul_chk_table_end()),
2481                                              nul_chk_table_size());
2482 #if INCLUDE_JVMCI
2483   if (speculations_size () > 0) st->print_cr(" speculations   [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2484                                              p2i(speculations_begin()),
2485                                              p2i(speculations_end()),
2486                                              speculations_size());
2487   if (jvmci_data_size   () > 0) st->print_cr(" JVMCI data     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2488                                              p2i(jvmci_data_begin()),
2489                                              p2i(jvmci_data_end()),
2490                                              jvmci_data_size());
2491 #endif
2492   if (SCCache::is_on() && _scc_entry != nullptr) {
2493     _scc_entry->print(st);
2494   }
2495 }
2496 
2497 void nmethod::print_code() {
2498   ResourceMark m;
2499   ttyLocker ttyl;
2500   // Call the specialized decode method of this class.
2501   decode(tty);
2502 }
2503 
 2504 #ifndef PRODUCT  // the InstanceKlass methods called here are available only then. Declared as PRODUCT_RETURN
2505 
2506 void nmethod::print_dependencies_on(outputStream* out) {
2507   ResourceMark rm;
2508   stringStream st;
2509   st.print_cr("Dependencies:");
2510   for (Dependencies::DepStream deps(this); deps.next(); ) {
2511     deps.print_dependency(&st);
2512     InstanceKlass* ctxk = deps.context_type();
2513     if (ctxk != nullptr) {
2514       if (ctxk->is_dependent_nmethod(this)) {

2574   st->print("scopes:");
2575   if (scopes_pcs_begin() < scopes_pcs_end()) {
2576     st->cr();
2577     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2578       if (p->scope_decode_offset() == DebugInformationRecorder::serialized_null)
2579         continue;
2580 
2581       ScopeDesc* sd = scope_desc_at(p->real_pc(this));
2582       while (sd != nullptr) {
2583         sd->print_on(st, p);  // print output ends with a newline
2584         sd = sd->sender();
2585       }
2586     }
2587   } else {
2588     st->print_cr(" <list empty>");
2589   }
2590 }
2591 #endif
2592 
 2593 #ifndef PRODUCT  // RelocIterator supports printing only then.
2594 void nmethod::print_relocations_on(outputStream* st) {
2595   ResourceMark m;       // in case methods get printed via the debugger
2596   st->print_cr("relocations:");
2597   RelocIterator iter(this);
2598   iter.print_on(st);
2599 }
2600 #endif
2601 
2602 void nmethod::print_pcs_on(outputStream* st) {
2603   ResourceMark m;       // in case methods get printed via debugger
2604   st->print("pc-bytecode offsets:");
2605   if (scopes_pcs_begin() < scopes_pcs_end()) {
2606     st->cr();
2607     for (PcDesc* p = scopes_pcs_begin(); p < scopes_pcs_end(); p++) {
2608       p->print_on(st, this);  // print output ends with a newline
2609     }
2610   } else {
2611     st->print_cr(" <list empty>");
2612   }
2613 }
2614 
2615 void nmethod::print_handler_table() {
2616   ExceptionHandlerTable(this).print(code_begin());
2617 }
2618 

2933           else obj->print_value_on(&st);
2934           st.print(")");
2935           return st.as_string();
2936         }
2937         case relocInfo::metadata_type: {
2938           stringStream st;
2939           metadata_Relocation* r = iter.metadata_reloc();
2940           Metadata* obj = r->metadata_value();
2941           st.print("metadata(");
2942           if (obj == nullptr) st.print("nullptr");
2943           else obj->print_value_on(&st);
2944           st.print(")");
2945           return st.as_string();
2946         }
2947         case relocInfo::runtime_call_type:
2948         case relocInfo::runtime_call_w_cp_type: {
2949           stringStream st;
2950           st.print("runtime_call");
2951           CallRelocation* r = (CallRelocation*)iter.reloc();
2952           address dest = r->destination();
2953           if (StubRoutines::contains(dest)) {
2954             StubCodeDesc* desc = StubCodeDesc::desc_for(dest);
2955             if (desc == nullptr) {
2956               desc = StubCodeDesc::desc_for(dest + frame::pc_return_offset);
2957             }
2958             if (desc != nullptr) {
2959               st.print(" Stub::%s", desc->name());
2960               return st.as_string();
2961             }
2962           }
2963           CodeBlob* cb = CodeCache::find_blob(dest);
2964           if (cb != nullptr) {
2965             st.print(" %s", cb->name());
2966           } else {
2967             ResourceMark rm;
2968             const int buflen = 1024;
2969             char* buf = NEW_RESOURCE_ARRAY(char, buflen);
2970             int offset;
2971             if (os::dll_address_to_function_name(dest, buf, buflen, &offset)) {
2972               st.print(" %s", buf);
2973               if (offset != 0) {
2974                 st.print("+%d", offset);
2975               }
2976             }
2977           }
2978           return st.as_string();
2979         }
2980         case relocInfo::virtual_call_type: {
2981           stringStream st;
2982           st.print_raw("virtual_call");