
src/hotspot/share/code/nmethod.hpp

*** 43,10 ***
--- 43,11 ---
  class ImplicitExceptionTable;
  class JvmtiThreadState;
  class MetadataClosure;
  class NativeCallWrapper;
  class OopIterateClosure;
+ class SCCEntry;
  class ScopeDesc;
  class xmlStream;
  
  // This class is used internally by nmethods, to cache
  // exception/pc/handler information.

*** 173,10 ***
--- 174,13 ---
    // Used to track in which deoptimize handshake this method will be deoptimized.
    uint64_t  _deoptimization_generation;
  
    uint64_t  _gc_epoch;
  
+   // Profiling counter used to figure out the hottest nmethods to record into CDS
+   volatile uint64_t _method_profiling_count;
+ 
    Method*   _method;
  
    // To reduce header size union fields which usages do not overlap.
    union {
      // To support simple linked-list chaining of nmethods:

*** 258,10 ***
--- 262,14 ---
  
    int          _compile_id;            // which compilation made this nmethod
    CompLevel    _comp_level;            // compilation level (s1)
    CompilerType _compiler_type;         // which compiler made this nmethod (u1)
  
+   SCCEntry* _scc_entry;
+ 
+   bool _used; // has this nmethod ever been invoked?
+ 
    // Local state used to keep track of whether unloading is happening or not
    volatile uint8_t _is_unloading_state;
  
    // Protected by NMethodState_lock
    volatile signed char _state;         // {not_installed, in_use, not_entrant}

*** 272,11 ***
            _has_wide_vectors:1,         // Preserve wide vectors at safepoints
            _has_monitors:1,             // Fastpath monitor detection for continuations
          _has_scoped_access:1,        // used for shared scope closure (scopedMemoryAccess.cpp)
            _has_flushed_dependencies:1, // Used for maintenance of dependencies (under CodeCache_lock)
            _is_unlinked:1,              // mark during class unloading
!           _load_reported:1;            // used by jvmti to track if an event has been posted for this nmethod
  
    enum DeoptimizationStatus : u1 {
      not_marked,
      deoptimize,
      deoptimize_noupdate,
--- 280,13 ---
            _has_wide_vectors:1,         // Preserve wide vectors at safepoints
            _has_monitors:1,             // Fastpath monitor detection for continuations
          _has_scoped_access:1,        // used for shared scope closure (scopedMemoryAccess.cpp)
            _has_flushed_dependencies:1, // Used for maintenance of dependencies (under CodeCache_lock)
            _is_unlinked:1,              // mark during class unloading
!           _load_reported:1,            // used by jvmti to track if an event has been posted for this nmethod
+           _preloaded:1,
+           _has_clinit_barriers:1;
  
    enum DeoptimizationStatus : u1 {
      not_marked,
      deoptimize,
      deoptimize_noupdate,

*** 324,10 ***
--- 334,11 ---
            OopMapSet* oop_maps,
            ExceptionHandlerTable* handler_table,
            ImplicitExceptionTable* nul_chk_table,
            AbstractCompiler* compiler,
            CompLevel comp_level
+           , SCCEntry* scc_entry
  #if INCLUDE_JVMCI
            , char* speculations = nullptr,
            int speculations_len = 0,
            JVMCINMethodData* jvmci_data = nullptr
  #endif

*** 482,10 ***
--- 493,11 ---
                                OopMapSet* oop_maps,
                                ExceptionHandlerTable* handler_table,
                                ImplicitExceptionTable* nul_chk_table,
                                AbstractCompiler* compiler,
                                CompLevel comp_level
+                               , SCCEntry* scc_entry
  #if INCLUDE_JVMCI
                                , char* speculations = nullptr,
                                int speculations_len = 0,
                                JVMCINMethodData* jvmci_data = nullptr
  #endif

*** 622,19 ***
    // Heuristically deduce an nmethod isn't worth keeping around
    bool is_cold();
    bool is_unloading();
    void do_unloading(bool unloading_occurred);
  
    bool make_in_use() {
      return try_transition(in_use);
    }
    // Make the nmethod non entrant. The nmethod will continue to be
    // alive.  It is used when an uncommon trap happens.  Returns true
    // if this thread changed the state of the nmethod or false if
    // another thread performed the transition.
!   bool  make_not_entrant();
!   bool  make_not_used()    { return make_not_entrant(); }
  
    bool  is_marked_for_deoptimization() const { return deoptimization_status() != not_marked; }
    bool  has_been_deoptimized() const { return deoptimization_status() == deoptimize_done; }
    void  set_deoptimized_done();
  
--- 634,22 ---
    // Heuristically deduce an nmethod isn't worth keeping around
    bool is_cold();
    bool is_unloading();
    void do_unloading(bool unloading_occurred);
  
+   void inc_method_profiling_count();
+   uint64_t method_profiling_count();
+ 
    bool make_in_use() {
      return try_transition(in_use);
    }
    // Make the nmethod non entrant. The nmethod will continue to be
    // alive.  It is used when an uncommon trap happens.  Returns true
    // if this thread changed the state of the nmethod or false if
    // another thread performed the transition.
!   bool  make_not_entrant(bool make_not_entrant = true);
!   bool  make_not_used() { return make_not_entrant(false); }
  
    bool  is_marked_for_deoptimization() const { return deoptimization_status() != not_marked; }
    bool  has_been_deoptimized() const { return deoptimization_status() == deoptimize_done; }
    void  set_deoptimized_done();
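A minimal sketch, assuming HotSpot's Atomic API (runtime/atomic.hpp), of how the two profiling-counter declarations above could be backed in nmethod.cpp. The .cpp side of the change is not shown on this page, so the bodies below are illustrative only:

  #include "runtime/atomic.hpp"

  void nmethod::inc_method_profiling_count() {
    // Callers may race, so bump the counter atomically.
    Atomic::inc(&_method_profiling_count);
  }

  uint64_t nmethod::method_profiling_count() {
    // A plain load is sufficient: the value only feeds a "hottest nmethods" ranking.
    return _method_profiling_count;
  }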
  

*** 672,10 ***
--- 687,16 ---
    void  set_has_method_handle_invokes(bool z)     { _has_method_handle_invokes = z; }
  
    bool  has_wide_vectors() const                  { return _has_wide_vectors; }
    void  set_has_wide_vectors(bool z)              { _has_wide_vectors = z; }
  
+   bool  has_clinit_barriers() const               { return _has_clinit_barriers; }
+   void  set_has_clinit_barriers(bool z)           { _has_clinit_barriers = z; }
+ 
+   bool  preloaded() const                         { return _preloaded; }
+   void  set_preloaded(bool z)                     { _preloaded = z; }
+ 
    bool  has_flushed_dependencies() const          { return _has_flushed_dependencies; }
    void  set_has_flushed_dependencies(bool z)      {
      assert(!has_flushed_dependencies(), "should only happen once");
      _has_flushed_dependencies = z;
    }
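A hypothetical illustration of how the two new flag accessors might be consulted together; the helper name is invented for this sketch and the retirement policy is an assumption, not something stated in this diff:

  // Hypothetical helper, not part of this change.
  static void maybe_retire_after_clinit(nmethod* nm) {
    if (nm->preloaded() && nm->has_clinit_barriers()) {
      // Assumed policy: once the holder class has been initialized, a
      // barrier-free compilation can replace this preloaded version,
      // so make the barrier-carrying one not entrant.
      nm->make_not_entrant();
    }
  }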

*** 891,10 ***
--- 912,16 ---
    void copy_scopes_pcs(PcDesc* pcs, int count);
    void copy_scopes_data(address buffer, int size);
  
    int orig_pc_offset() { return _orig_pc_offset; }
  
+   SCCEntry* scc_entry() const { return _scc_entry; }
+   bool is_scc() const { return scc_entry() != nullptr; }
+ 
+   bool     used() const { return _used; }
+   void set_used()       { _used = true; }
+ 
    // Post successful compilation
    void post_compiled_method(CompileTask* task);
  
    // jvmti support:
    void post_compiled_method_load_event(JvmtiThreadState* state = nullptr);
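A small usage sketch showing how the new scc_entry()/is_scc() and used()/set_used() queries compose with the profiling counter added earlier in this change; the helper below is invented for illustration and is not part of the patch:

  // Hypothetical helper: note the first execution of an nmethod that was
  // materialized from an SCC entry.
  static void note_first_invocation(nmethod* nm) {
    if (nm->is_scc() && !nm->used()) {
      nm->set_used();                    // remember that the cached code ran at least once
      nm->inc_method_profiling_count();  // feed the "hottest nmethods" heuristic
    }
  }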

*** 916,11 ***
    void print(outputStream* st) const;
    void print_code();
  
  #if defined(SUPPORT_DATA_STRUCTS)
    // print output in opt build for disassembler library
!   void print_relocations()                        PRODUCT_RETURN;
    void print_pcs_on(outputStream* st);
    void print_scopes() { print_scopes_on(tty); }
    void print_scopes_on(outputStream* st)          PRODUCT_RETURN;
    void print_value_on(outputStream* st) const override;
    void print_handler_table();
--- 943,11 ---
    void print(outputStream* st) const;
    void print_code();
  
  #if defined(SUPPORT_DATA_STRUCTS)
    // print output in opt build for disassembler library
!   void print_relocations_on(outputStream* st)     PRODUCT_RETURN;
    void print_pcs_on(outputStream* st);
    void print_scopes() { print_scopes_on(tty); }
    void print_scopes_on(outputStream* st)          PRODUCT_RETURN;
    void print_value_on(outputStream* st) const override;
    void print_handler_table();