src/hotspot/share/oops/method.hpp

  74  private:
  75   // If you add a new field that points to any metaspace object, you
  76   // must add this field to Method::metaspace_pointers_do().
  77   ConstMethod*      _constMethod;                // Method read-only data.
  78   MethodData*       _method_data;
  79   MethodCounters*   _method_counters;
  80   AccessFlags       _access_flags;               // Access flags
  81   int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
  82                                                  // note: can have vtables with >2**16 elements (because of inheritance)
  83   u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)
  84 
  85   // Flags
  86   enum Flags {
  87     _caller_sensitive      = 1 << 0,
  88     _force_inline          = 1 << 1,
  89     _dont_inline           = 1 << 2,
  90     _hidden                = 1 << 3,
  91     _has_injected_profile  = 1 << 4,
  92     _running_emcp          = 1 << 5,
  93     _intrinsic_candidate   = 1 << 6,
  94     _reserved_stack_access = 1 << 7
  95   };
  96   mutable u2 _flags;
  97 
  98   JFR_ONLY(DEFINE_TRACE_FLAG;)
  99 
 100 #ifndef PRODUCT
 101   int               _compiled_invocation_count;  // Number of nmethod invocations so far (for perf. debugging)
 102 #endif
 103   // Entry point for calling both from and to the interpreter.
 104   address _i2i_entry;           // All-args-on-stack calling convention
 105   // Entry point for calling from compiled code, to compiled code if it exists
 106   // or else the interpreter.
 107   volatile address _from_compiled_entry;        // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
 108   // The entry point for calling both from and to compiled code is
 109   // "_code->entry_point()".  Because of tiered compilation and de-opt, this
 110   // field can come and go.  It can transition from NULL to not-null at any
 111   // time (whenever a compile completes).  It can transition from not-null to
 112   // NULL only at safepoints (because of a de-opt).
 113   CompiledMethod* volatile _code;                       // Points to the corresponding piece of native code
 114   volatile address           _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
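
Illustrative sketch (not part of the file): the entry-point caches above obey the invariants spelled out in their "Cache of:" comments. A minimal illustration reusing the accessor names from those comments (hypothetical helper, not the actual method.cpp implementation):

    static address expected_from_compiled_entry(const Method* m) {
      CompiledMethod* code = m->code();            // racy: deopt may clear it at a safepoint
      return (code != NULL) ? code->entry_point()  // compiled code installed
                            : m->adapter()->get_c2i_entry();  // fall back to the c2i adapter
    }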
 115 
 116 #if INCLUDE_AOT && defined(TIERED)
 117   CompiledMethod* _aot_code;
 118 #endif
 119 
 120   // Constructor
 121   Method(ConstMethod* xconst, AccessFlags access_flags);
 122  public:
 123 
 124   static Method* allocate(ClassLoaderData* loader_data,
 125                           int byte_code_size,
 126                           AccessFlags access_flags,
 127                           InlineTableSizes* sizes,
 128                           ConstMethod::MethodType method_type,
 129                           TRAPS);
 130 
 131   // CDS and vtbl checking can create an empty Method to get vtbl pointer.
 132   Method(){}
 133 
 134   bool is_method() const volatile { return true; }
 135 
 136   void restore_unshareable_info(TRAPS);
 137 
 138   // accessors for instance variables
 139 
 140   ConstMethod* constMethod() const             { return _constMethod; }
 141   void set_constMethod(ConstMethod* xconst)    { _constMethod = xconst; }
 142 
 143 
 144   static address make_adapters(const methodHandle& mh, TRAPS);
 145   address from_compiled_entry() const;
 146   address from_compiled_entry_no_trampoline() const;
 147   address from_interpreted_entry() const;
 148 
 149   // access flag
 150   AccessFlags access_flags() const               { return _access_flags;  }
 151   void set_access_flags(AccessFlags flags)       { _access_flags = flags; }
 152 
 153   // name
 154   Symbol* name() const                           { return constants()->symbol_at(name_index()); }
 155   int name_index() const                         { return constMethod()->name_index();         }
 156   void set_name_index(int index)                 { constMethod()->set_name_index(index);       }
 157 
 158   // signature
 159   Symbol* signature() const                      { return constants()->symbol_at(signature_index()); }
 160   int signature_index() const                    { return constMethod()->signature_index();         }
 161   void set_signature_index(int index)            { constMethod()->set_signature_index(index);       }
 162 
 163   // generics support
 164   Symbol* generic_signature() const              { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
 165   int generic_signature_index() const            { return constMethod()->generic_signature_index(); }
 166   void set_generic_signature_index(int index)    { constMethod()->set_generic_signature_index(index); }
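
Illustrative usage (not part of the file): constant-pool index 0 serves as the "no Signature attribute" sentinel, so generic_signature() can return NULL. Assuming a Method* m in hand:

    Symbol* gs = m->generic_signature();
    if (gs != NULL) {
      tty->print_cr("generic signature: %s", gs->as_C_string());
    }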


 444     if (TieredCompilation) ShouldNotReachHere();
 445     MethodCounters* mcs = get_method_counters(CHECK_0);
 446     return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
 447   }
 448 #endif
 449 
 450 #ifndef PRODUCT
 451   int  compiled_invocation_count() const         { return _compiled_invocation_count;  }
 452   void set_compiled_invocation_count(int count)  { _compiled_invocation_count = count; }
 453 #else
 454   // for PrintMethodData in a product build
 455   int  compiled_invocation_count() const         { return 0;  }
 456 #endif // not PRODUCT
 457 
 458   // Clear (non-shared space) pointers which could not be relevant
 459   // if this (shared) method were mapped into another JVM.
 460   void remove_unshareable_info();
 461 
 462   // nmethod/verified compiler entry
 463   address verified_code_entry();
 464   bool check_code() const;      // Not inline to avoid circular ref
 465   CompiledMethod* volatile code() const;
 466 
 467   // Locks CompiledMethod_lock if not held.
 468   void unlink_code(CompiledMethod *compare);
 469   // Locks CompiledMethod_lock if not held.
 470   void unlink_code();
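
Illustrative usage (not part of the file), assuming the compare overload is a guarded clear that only detaches a matching CompiledMethod:

    m->unlink_code(cm);   // assumed: no-op unless m's code still refers to cm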
 471 
 472 private:
 473   // Either called with CompiledMethod_lock held or from constructor.
 474   void clear_code();
 475 
 476 public:
 477   static void set_code(const methodHandle& mh, CompiledMethod* code);
 478   void set_adapter_entry(AdapterHandlerEntry* adapter) {
 479     constMethod()->set_adapter_entry(adapter);
 480   }
 481   void set_adapter_trampoline(AdapterHandlerEntry** trampoline) {
 482     constMethod()->set_adapter_trampoline(trampoline);
 483   }
 484   void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
 485     constMethod()->update_adapter_trampoline(adapter);
 486   }
 487   void set_from_compiled_entry(address entry) {
 488     _from_compiled_entry =  entry;
 489   }
 490 
 491   address get_i2c_entry();
 492   address get_c2i_entry();
 493   address get_c2i_unverified_entry();
 494   AdapterHandlerEntry* adapter() const {
 495     return constMethod()->adapter();
 496   }
 497   // setup entry points
 498   void link_method(const methodHandle& method, TRAPS);
 499   // clear entry points. Used by sharing code during dump time
 500   void unlink_method() NOT_CDS_RETURN;
 501 
 502   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
 503   virtual MetaspaceObj::Type type() const { return MethodType; }
 504 
 505   // vtable index
 506   enum VtableIndexFlag {
 507     // Valid vtable indexes are non-negative (>= 0).
 508     // These few negative values are used as sentinels.
 509     itable_index_max        = -10, // first itable index, growing downward
 510     pending_itable_index    = -9,  // itable index will be assigned
 511     invalid_vtable_index    = -4,  // distinct from any valid vtable index
 512     garbage_vtable_index    = -3,  // not yet linked; no vtable layout yet
 513     nonvirtual_vtable_index = -2   // there is no need for vtable dispatch


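Hedged sketch (the real accessors are elided from this hunk): itable indexes are assumed to share _vtable_index with the sentinels above by growing downward from itable_index_max, i.e. a valid itable index i is stored as itable_index_max - i.

    // Illustrative encoding helpers, assumed from the comments above:
    static int encode_itable_index(int i)       { return Method::itable_index_max - i; }
    static int decode_itable_index(int encoded) { return Method::itable_index_max - encoded; }
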
 586 
 587   // localvariable table
 588   bool has_localvariable_table() const
 589                           { return constMethod()->has_localvariable_table(); }
 590   int localvariable_table_length() const
 591                         { return constMethod()->localvariable_table_length(); }
 592   LocalVariableTableElement* localvariable_table_start() const
 593                          { return constMethod()->localvariable_table_start(); }
 594 
 595   bool has_linenumber_table() const
 596                               { return constMethod()->has_linenumber_table(); }
 597   u_char* compressed_linenumber_table() const
 598                        { return constMethod()->compressed_linenumber_table(); }
 599 
 600   // method holder (the Klass* holding this method)
 601   InstanceKlass* method_holder() const         { return constants()->pool_holder(); }
 602 
 603   void compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)
 604   Symbol* klass_name() const;                    // returns the name of the method holder
 605   BasicType result_type() const;                 // type of the method result
 606   bool is_returning_oop() const                  { BasicType r = result_type(); return (r == T_OBJECT || r == T_ARRAY); }
 607   bool is_returning_fp() const                   { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }
 608 
 609   // Checked exceptions thrown by this method (resolved to mirrors)
 610   objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
 611 
 612   // Access flags
 613   bool is_public() const                         { return access_flags().is_public();      }
 614   bool is_private() const                        { return access_flags().is_private();     }
 615   bool is_protected() const                      { return access_flags().is_protected();   }
 616   bool is_package_private() const                { return !is_public() && !is_private() && !is_protected(); }
 617   bool is_static() const                         { return access_flags().is_static();      }
 618   bool is_final() const                          { return access_flags().is_final();       }
 619   bool is_synchronized() const                   { return access_flags().is_synchronized();}
 620   bool is_native() const                         { return access_flags().is_native();      }
 621   bool is_abstract() const                       { return access_flags().is_abstract();    }
 622   bool is_strict() const                         { return access_flags().is_strict();      }
 623   bool is_synthetic() const                      { return access_flags().is_synthetic();   }
 624 
 625   // returns true if contains only return operation
 626   bool is_empty_method() const;
 627 


 660   void set_has_monitor_bytecodes()               { _access_flags.set_has_monitor_bytecodes(); }
 661 
 662   // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
 663   // properly nest in the method. It might return false, even though they actually nest properly, since the
 664   // information has not been computed yet.
 665   bool guaranteed_monitor_matching() const       { return access_flags().is_monitor_matching(); }
 666   void set_guaranteed_monitor_matching()         { _access_flags.set_monitor_matching(); }
 667 
 668   // returns true if the method is an accessor function (setter/getter).
 669   bool is_accessor() const;
 670 
 671   // returns true if the method is a getter
 672   bool is_getter() const;
 673 
 674   // returns true if the method is a setter
 675   bool is_setter() const;
 676 
 677   // returns true if the method does nothing but return a constant of primitive type
 678   bool is_constant_getter() const;
 679 
 680   // returns true if the method is an initializer (<init> or <clinit>).
 681   bool is_initializer() const;
 682 
 683   // returns true if the method is static OR if the classfile version < 51
 684   bool has_valid_initializer_flags() const;
 685 
 686   // returns true if the method name is <clinit> and the method has
 687   // valid static initializer flags.
 688   bool is_static_initializer() const;
 689 
 690   // returns true if the method name is <init>
 691   bool is_object_initializer() const;
 692 
 693   // compiled code support
 694   // NOTE: code() is inherently racy as deopt can be clearing code
 695   // simultaneously. Use with caution.
 696   bool has_compiled_code() const;
 697 
 698 #ifdef TIERED
 699   bool has_aot_code() const                      { return aot_code() != NULL; }
 700 #endif
 701 
 702   bool needs_clinit_barrier() const;
 703 
 704   // sizing
 705   static int header_size()                       {
 706     return align_up((int)sizeof(Method), wordSize) / wordSize;
 707   }
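
Worked example with illustrative numbers only: if wordSize == 8 and sizeof(Method) == 88, header_size() returns align_up(88, 8) / 8 == 11 words.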
 708   static int size(bool is_native);
 709   int size() const                               { return method_size(); }
 710 #if INCLUDE_SERVICES
 711   void collect_statistics(KlassSizeStats *sz) const;
 712 #endif
 713   void log_touched(TRAPS);
 714   static void print_touched_methods(outputStream* out);
 715 
 716   // interpreter support
 717   static ByteSize const_offset()                 { return byte_offset_of(Method, _constMethod       ); }
 718   static ByteSize access_flags_offset()          { return byte_offset_of(Method, _access_flags      ); }
 719   static ByteSize from_compiled_offset()         { return byte_offset_of(Method, _from_compiled_entry); }
 720   static ByteSize code_offset()                  { return byte_offset_of(Method, _code); }
 721   static ByteSize method_data_offset()           {
 722     return byte_offset_of(Method, _method_data);
 723   }
 724   static ByteSize method_counters_offset()       {
 725     return byte_offset_of(Method, _method_counters);
 726   }
 727 #ifndef PRODUCT
 728   static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
 729 #endif // not PRODUCT
 730   static ByteSize native_function_offset()       { return in_ByteSize(sizeof(Method));                 }
 731   static ByteSize from_interpreted_offset()      { return byte_offset_of(Method, _from_interpreted_entry ); }
 732   static ByteSize interpreter_entry_offset()     { return byte_offset_of(Method, _i2i_entry ); }
 733   static ByteSize signature_handler_offset()     { return in_ByteSize(sizeof(Method) + wordSize);      }
 734   static ByteSize itable_index_offset()          { return byte_offset_of(Method, _vtable_index ); }
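
Illustrative note (not part of the file): native_function_offset() and signature_handler_offset() above are computed from sizeof(Method) rather than byte_offset_of because, for native methods, two trailing words (the native entry and the signature handler) live immediately after the fixed Method layout; compare size(bool is_native) above. A hypothetical accessor for the first trailing word:

    static address* native_function_slot(Method* m) {
      assert(m->is_native(), "trailing words exist only for native methods");
      return (address*)((intptr_t)m + in_bytes(Method::native_function_offset()));
    }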
 735 
 736   // for code generation
 737   static int method_data_offset_in_bytes()       { return offset_of(Method, _method_data); }
 738   static int intrinsic_id_offset_in_bytes()      { return offset_of(Method, _intrinsic_id); }
 739   static int intrinsic_id_size_in_bytes()        { return sizeof(u2); }
 740 
 741   // Static methods that are used to implement member methods where an exposed this pointer
 742   // is needed due to possible GCs
 743   static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS);
 744 
 745   // Returns the byte code index from the byte code pointer
 746   int     bci_from(address bcp) const;
 747   address bcp_from(int bci) const;
 748   address bcp_from(address bcp) const;
 749   int validate_bci_from_bcp(address bcp) const;
 750   int validate_bci(int bci) const;
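
Minimal sketch of the bci/bcp arithmetic (not part of the file), assuming code_base() returns the start of the method's bytecodes as in ConstMethod:

    static address bcp_of(const Method* m, int bci) {
      assert(bci >= 0 && bci < m->code_size(), "illegal bci");
      return m->code_base() + bci;   // a bcp is just a bounds-checked offset from code_base()
    }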
 751 
 752   // Returns the line number for a bci if debugging information for the method is provided;
 753   // -1 is returned otherwise.
 754   int line_number_from_bci(int bci) const;
 755 
 756   // Reflection support
 757   bool is_overridden_in(Klass* k) const;
 758 
 759   // Stack walking support
 760   bool is_ignored_by_security_stack_walk() const;


 898   bool intrinsic_candidate() {
 899     return (_flags & _intrinsic_candidate) != 0;
 900   }
 901   void set_intrinsic_candidate(bool x) {
 902     _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
 903   }
 904 
 905   bool has_injected_profile() {
 906     return (_flags & _has_injected_profile) != 0;
 907   }
 908   void set_has_injected_profile(bool x) {
 909     _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
 910   }
 911 
 912   bool has_reserved_stack_access() {
 913     return (_flags & _reserved_stack_access) != 0;
 914   }
 915 
 916   void set_has_reserved_stack_access(bool x) {
 917     _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
 918   }
 919 
 920   JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
 921 
 922   ConstMethod::MethodType method_type() const {
 923       return _constMethod->method_type();
 924   }
 925   bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
 926 
 927   // On-stack replacement support
 928   bool has_osr_nmethod(int level, bool match_level) {
 929    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
 930   }
 931 
 932   int mark_osr_nmethods() {
 933     return method_holder()->mark_osr_nmethods(this);
 934   }
 935 
 936   nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
 937     return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);

=== patched version of the same region (new file) follows ===

  74  private:
  75   // If you add a new field that points to any metaspace object, you
  76   // must add this field to Method::metaspace_pointers_do().
  77   ConstMethod*      _constMethod;                // Method read-only data.
  78   MethodData*       _method_data;
  79   MethodCounters*   _method_counters;
  80   AccessFlags       _access_flags;               // Access flags
  81   int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
  82                                                  // note: can have vtables with >2**16 elements (because of inheritance)
  83   u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)
  84 
  85   // Flags
  86   enum Flags {
  87     _caller_sensitive      = 1 << 0,
  88     _force_inline          = 1 << 1,
  89     _dont_inline           = 1 << 2,
  90     _hidden                = 1 << 3,
  91     _has_injected_profile  = 1 << 4,
  92     _running_emcp          = 1 << 5,
  93     _intrinsic_candidate   = 1 << 6,
  94     _reserved_stack_access = 1 << 7,
  95     _scalarized_args       = 1 << 8,
  96     _c1_needs_stack_repair = 1 << 9,
  97     _c2_needs_stack_repair = 1 << 10
  98   };
  99   mutable u2 _flags;
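
Note: the three added bits (_scalarized_args, _c1_needs_stack_repair, _c2_needs_stack_repair) extend the mask to 1 << 10, so all eleven flags still fit in the 16-bit u2 _flags field.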
 100 
 101   JFR_ONLY(DEFINE_TRACE_FLAG;)
 102 
 103 #ifndef PRODUCT
 104   int               _compiled_invocation_count;  // Number of nmethod invocations so far (for perf. debugging)
 105 #endif
 106   // Entry point for calling both from and to the interpreter.
 107   address _i2i_entry;           // All-args-on-stack calling convention
 108   // Entry point for calling from compiled code, to compiled code if it exists
 109   // or else the interpreter.
 110   volatile address _from_compiled_entry;          // Cache of: _code ? _code->verified_entry_point()          : _adapter->c2i_entry()
 111   volatile address _from_compiled_value_ro_entry; // Cache of: _code ? _code->verified_value_ro_entry_point() : _adapter->c2i_value_ro_entry()
 112   volatile address _from_compiled_value_entry;    // Cache of: _code ? _code->verified_value_entry_point()    : _adapter->c2i_value_entry()
 113   // The entry point for calling both from and to compiled code is
 114   // "_code->entry_point()".  Because of tiered compilation and de-opt, this
 115   // field can come and go.  It can transition from NULL to not-null at any
 116   // time (whenever a compile completes).  It can transition from not-null to
 117   // NULL only at safepoints (because of a de-opt).
 118   CompiledMethod* volatile _code;                       // Points to the corresponding piece of native code
 119   volatile address           _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
 120   int _max_vt_buffer; // max number of VT buffer chunks to use before recycling
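
Hedged sketch (not part of the file) of how the added caches line up with their "Cache of:" comments; "ro" is assumed to mean the receiver stays a reference while the other value-type arguments are scalarized:

    // _from_compiled_entry          : all value-type args passed by reference
    // _from_compiled_value_entry    : value-type args passed scalarized
    // _from_compiled_value_ro_entry : scalarized, except the receiver
    static address expected_from_compiled_value_entry(const Method* m) {
      CompiledMethod* code = m->code();   // racy, exactly as for _from_compiled_entry
      return (code != NULL) ? code->verified_value_entry_point()
                            : m->adapter()->get_c2i_value_entry();
    }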
 121 
 122 #if INCLUDE_AOT && defined(TIERED)
 123   CompiledMethod* _aot_code;
 124 #endif
 125 
 126   // Constructor
 127   Method(ConstMethod* xconst, AccessFlags access_flags);
 128  public:
 129 
 130   static Method* allocate(ClassLoaderData* loader_data,
 131                           int byte_code_size,
 132                           AccessFlags access_flags,
 133                           InlineTableSizes* sizes,
 134                           ConstMethod::MethodType method_type,
 135                           TRAPS);
 136 
 137   // CDS and vtbl checking can create an empty Method to get vtbl pointer.
 138   Method(){}
 139 
 140   bool is_method() const volatile { return true; }
 141 
 142   void restore_unshareable_info(TRAPS);
 143 
 144   // accessors for instance variables
 145 
 146   ConstMethod* constMethod() const             { return _constMethod; }
 147   void set_constMethod(ConstMethod* xconst)    { _constMethod = xconst; }
 148 
 149 
 150   static address make_adapters(const methodHandle& mh, TRAPS);
 151   address from_compiled_entry() const;
 152   address from_compiled_entry_no_trampoline(bool caller_is_c1) const;
 153   address from_interpreted_entry() const;
 154 
 155   // access flag
 156   AccessFlags access_flags() const               { return _access_flags;  }
 157   void set_access_flags(AccessFlags flags)       { _access_flags = flags; }
 158 
 159   // name
 160   Symbol* name() const                           { return constants()->symbol_at(name_index()); }
 161   int name_index() const                         { return constMethod()->name_index();         }
 162   void set_name_index(int index)                 { constMethod()->set_name_index(index);       }
 163 
 164   // signature
 165   Symbol* signature() const                      { return constants()->symbol_at(signature_index()); }
 166   int signature_index() const                    { return constMethod()->signature_index();         }
 167   void set_signature_index(int index)            { constMethod()->set_signature_index(index);       }
 168 
 169   // generics support
 170   Symbol* generic_signature() const              { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
 171   int generic_signature_index() const            { return constMethod()->generic_signature_index(); }
 172   void set_generic_signature_index(int index)    { constMethod()->set_generic_signature_index(index); }


 450     if (TieredCompilation) ShouldNotReachHere();
 451     MethodCounters* mcs = get_method_counters(CHECK_0);
 452     return (mcs == NULL) ? 0 : mcs->increment_interpreter_invocation_count();
 453   }
 454 #endif
 455 
 456 #ifndef PRODUCT
 457   int  compiled_invocation_count() const         { return _compiled_invocation_count;  }
 458   void set_compiled_invocation_count(int count)  { _compiled_invocation_count = count; }
 459 #else
 460   // for PrintMethodData in a product build
 461   int  compiled_invocation_count() const         { return 0;  }
 462 #endif // not PRODUCT
 463 
 464   // Clear (non-shared space) pointers which could not be relevant
 465   // if this (shared) method were mapped into another JVM.
 466   void remove_unshareable_info();
 467 
 468   // nmethod/verified compiler entry
 469   address verified_code_entry();
 470   address verified_value_code_entry();
 471   address verified_value_ro_code_entry();
 472   bool check_code() const;      // Not inline to avoid circular ref
 473   CompiledMethod* volatile code() const;
 474 
 475   // Locks CompiledMethod_lock if not held.
 476   void unlink_code(CompiledMethod *compare);
 477   // Locks CompiledMethod_lock if not held.
 478   void unlink_code();
 479 
 480 private:
 481   // Either called with CompiledMethod_lock held or from constructor.
 482   void clear_code();
 483 
 484 public:
 485   static void set_code(const methodHandle& mh, CompiledMethod* code);
 486   void set_adapter_entry(AdapterHandlerEntry* adapter) {
 487     constMethod()->set_adapter_entry(adapter);
 488   }
 489   void set_adapter_trampoline(AdapterHandlerEntry** trampoline) {
 490     constMethod()->set_adapter_trampoline(trampoline);
 491   }
 492   void update_adapter_trampoline(AdapterHandlerEntry* adapter) {
 493     constMethod()->update_adapter_trampoline(adapter);
 494   }
 495   void set_from_compiled_entry(address entry) {
 496     _from_compiled_entry =  entry;
 497   }
 498 
 499   address get_i2c_entry();
 500   address get_c2i_entry();
 501   address get_c2i_value_entry();
 502   address get_c2i_unverified_entry();
 503   address get_c2i_unverified_value_entry();
 504   AdapterHandlerEntry* adapter() const {
 505     return constMethod()->adapter();
 506   }
 507   // setup entry points
 508   void link_method(const methodHandle& method, TRAPS);
 509   // clear entry points. Used by sharing code during dump time
 510   void unlink_method() NOT_CDS_RETURN;
 511 
 512   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
 513   virtual MetaspaceObj::Type type() const { return MethodType; }
 514 
 515   // vtable index
 516   enum VtableIndexFlag {
 517     // Valid vtable indexes are non-negative (>= 0).
 518     // These few negative values are used as sentinels.
 519     itable_index_max        = -10, // first itable index, growing downward
 520     pending_itable_index    = -9,  // itable index will be assigned
 521     invalid_vtable_index    = -4,  // distinct from any valid vtable index
 522     garbage_vtable_index    = -3,  // not yet linked; no vtable layout yet
 523     nonvirtual_vtable_index = -2   // there is no need for vtable dispatch


 596 
 597   // localvariable table
 598   bool has_localvariable_table() const
 599                           { return constMethod()->has_localvariable_table(); }
 600   int localvariable_table_length() const
 601                         { return constMethod()->localvariable_table_length(); }
 602   LocalVariableTableElement* localvariable_table_start() const
 603                          { return constMethod()->localvariable_table_start(); }
 604 
 605   bool has_linenumber_table() const
 606                               { return constMethod()->has_linenumber_table(); }
 607   u_char* compressed_linenumber_table() const
 608                        { return constMethod()->compressed_linenumber_table(); }
 609 
 610   // method holder (the Klass* holding this method)
 611   InstanceKlass* method_holder() const         { return constants()->pool_holder(); }
 612 
 613   void compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)
 614   Symbol* klass_name() const;                    // returns the name of the method holder
 615   BasicType result_type() const;                 // type of the method result
 616   bool may_return_oop() const                    { BasicType r = result_type(); return (r == T_OBJECT || r == T_ARRAY ||  r == T_VALUETYPE); }
 617   ValueKlass* returned_value_type(Thread* thread) const;
 618 
 619   // Checked exceptions thrown by this method (resolved to mirrors)
 620   objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
 621 
 622   // Access flags
 623   bool is_public() const                         { return access_flags().is_public();      }
 624   bool is_private() const                        { return access_flags().is_private();     }
 625   bool is_protected() const                      { return access_flags().is_protected();   }
 626   bool is_package_private() const                { return !is_public() && !is_private() && !is_protected(); }
 627   bool is_static() const                         { return access_flags().is_static();      }
 628   bool is_final() const                          { return access_flags().is_final();       }
 629   bool is_synchronized() const                   { return access_flags().is_synchronized();}
 630   bool is_native() const                         { return access_flags().is_native();      }
 631   bool is_abstract() const                       { return access_flags().is_abstract();    }
 632   bool is_strict() const                         { return access_flags().is_strict();      }
 633   bool is_synthetic() const                      { return access_flags().is_synthetic();   }
 634 
 635   // returns true if contains only return operation
 636   bool is_empty_method() const;
 637 


 670   void set_has_monitor_bytecodes()               { _access_flags.set_has_monitor_bytecodes(); }
 671 
 672   // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
 673   // properly nest in the method. It might return false, even though they actually nest properly, since the
 674   // information has not been computed yet.
 675   bool guaranteed_monitor_matching() const       { return access_flags().is_monitor_matching(); }
 676   void set_guaranteed_monitor_matching()         { _access_flags.set_monitor_matching(); }
 677 
 678   // returns true if the method is an accessor function (setter/getter).
 679   bool is_accessor() const;
 680 
 681   // returns true if the method is a getter
 682   bool is_getter() const;
 683 
 684   // returns true if the method is a setter
 685   bool is_setter() const;
 686 
 687   // returns true if the method does nothing but return a constant of primitive type
 688   bool is_constant_getter() const;
 689 
 690   // returns true if the method name is <clinit> and the method has
 691   // valid static initializer flags.
 692   bool is_class_initializer() const;
 693 
 694   // returns true if the method name is <init> and the method is not a static factory
 695   bool is_object_constructor() const;
 696 
 697   // returns true if the method is an object constructor or class initializer
 698   // (non-static <init> or <clinit>), but false for factories (static <init>).
 699   bool is_object_constructor_or_class_initializer() const;
 700 
 701   // returns true if the method name is <init> and the method is static
 702   bool is_static_init_factory() const;
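
Hedged summary of the four predicates above:

    //   name       static?   predicate
    //   <clinit>   yes       is_class_initializer()
    //   <init>     no        is_object_constructor()
    //   <init>     yes       is_static_init_factory()
    // is_object_constructor_or_class_initializer() covers the first two rows.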
 703 
 704   // compiled code support
 705   // NOTE: code() is inherently racy as deopt can be clearing code
 706   // simultaneously. Use with caution.
 707   bool has_compiled_code() const;
 708 
 709 #ifdef TIERED
 710   bool has_aot_code() const                      { return aot_code() != NULL; }
 711 #endif
 712 
 713   bool needs_clinit_barrier() const;
 714 
 715   // sizing
 716   static int header_size()                       {
 717     return align_up((int)sizeof(Method), wordSize) / wordSize;
 718   }
 719   static int size(bool is_native);
 720   int size() const                               { return method_size(); }
 721 #if INCLUDE_SERVICES
 722   void collect_statistics(KlassSizeStats *sz) const;
 723 #endif
 724   void log_touched(TRAPS);
 725   static void print_touched_methods(outputStream* out);
 726 
 727   // interpreter support
 728   static ByteSize const_offset()                 { return byte_offset_of(Method, _constMethod       ); }
 729   static ByteSize access_flags_offset()          { return byte_offset_of(Method, _access_flags      ); }
 730   static ByteSize from_compiled_offset()         { return byte_offset_of(Method, _from_compiled_entry); }
 731   static ByteSize from_compiled_value_offset()   { return byte_offset_of(Method, _from_compiled_value_entry); }
 732   static ByteSize from_compiled_value_ro_offset(){ return byte_offset_of(Method, _from_compiled_value_ro_entry); }
 733   static ByteSize code_offset()                  { return byte_offset_of(Method, _code); }
 734   static ByteSize flags_offset()                 { return byte_offset_of(Method, _flags); }
 735   static ByteSize method_data_offset()           {
 736     return byte_offset_of(Method, _method_data);
 737   }
 738   static ByteSize method_counters_offset()       {
 739     return byte_offset_of(Method, _method_counters);
 740   }
 741 #ifndef PRODUCT
 742   static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
 743 #endif // not PRODUCT
 744   static ByteSize native_function_offset()       { return in_ByteSize(sizeof(Method));                 }
 745   static ByteSize from_interpreted_offset()      { return byte_offset_of(Method, _from_interpreted_entry ); }
 746   static ByteSize interpreter_entry_offset()     { return byte_offset_of(Method, _i2i_entry ); }
 747   static ByteSize signature_handler_offset()     { return in_ByteSize(sizeof(Method) + wordSize);      }
 748   static ByteSize itable_index_offset()          { return byte_offset_of(Method, _vtable_index ); }
 749 
 750   // for code generation
 751   static int method_data_offset_in_bytes()       { return offset_of(Method, _method_data); }
 752   static int intrinsic_id_offset_in_bytes()      { return offset_of(Method, _intrinsic_id); }
 753   static int intrinsic_id_size_in_bytes()        { return sizeof(u2); }
 754 
 755   static ByteSize max_vt_buffer_offset()         { return byte_offset_of(Method, _max_vt_buffer); }
 756 
 757   // Static methods that are used to implement member methods where an exposed this pointer
 758   // is needed due to possible GCs
 759   static objArrayHandle resolved_checked_exceptions_impl(Method* method, TRAPS);
 760 
 761   // Returns the byte code index from the byte code pointer
 762   int     bci_from(address bcp) const;
 763   address bcp_from(int bci) const;
 764   address bcp_from(address bcp) const;
 765   int validate_bci_from_bcp(address bcp) const;
 766   int validate_bci(int bci) const;
 767 
 768   // Returns the line number for a bci if debugging information for the method is provided;
 769   // -1 is returned otherwise.
 770   int line_number_from_bci(int bci) const;
 771 
 772   // Reflection support
 773   bool is_overridden_in(Klass* k) const;
 774 
 775   // Stack walking support
 776   bool is_ignored_by_security_stack_walk() const;


 914   bool intrinsic_candidate() {
 915     return (_flags & _intrinsic_candidate) != 0;
 916   }
 917   void set_intrinsic_candidate(bool x) {
 918     _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
 919   }
 920 
 921   bool has_injected_profile() {
 922     return (_flags & _has_injected_profile) != 0;
 923   }
 924   void set_has_injected_profile(bool x) {
 925     _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
 926   }
 927 
 928   bool has_reserved_stack_access() {
 929     return (_flags & _reserved_stack_access) != 0;
 930   }
 931 
 932   void set_has_reserved_stack_access(bool x) {
 933     _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
 934   }
 935 
 936   bool has_scalarized_args() {
 937     return (_flags & _scalarized_args) != 0;
 938   }
 939 
 940   void set_has_scalarized_args(bool x) {
 941     _flags = x ? (_flags | _scalarized_args) : (_flags & ~_scalarized_args);
 942   }
 943 
 944   bool c1_needs_stack_repair() {
 945     return (_flags & _c1_needs_stack_repair) != 0;
 946   }
 947 
 948   bool c2_needs_stack_repair() {
 949     return (_flags & _c2_needs_stack_repair) != 0;
 950   }
 951 
 952   void set_c1_needs_stack_repair(bool x) {
 953     _flags = x ? (_flags | _c1_needs_stack_repair) : (_flags & ~_c1_needs_stack_repair);
 954   }
 955 
 956   void set_c2_needs_stack_repair(bool x) {
 957     _flags = x ? (_flags | _c2_needs_stack_repair) : (_flags & ~_c2_needs_stack_repair);
 958   }
 959 
 960   JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
 961 
 962   ConstMethod::MethodType method_type() const {
 963       return _constMethod->method_type();
 964   }
 965   bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
 966 
 967   // On-stack replacement support
 968   bool has_osr_nmethod(int level, bool match_level) {
 969    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
 970   }
 971 
 972   int mark_osr_nmethods() {
 973     return method_holder()->mark_osr_nmethods(this);
 974   }
 975 
 976   nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
 977     return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);