
src/hotspot/share/oops/method.hpp (old version, followed by the updated version below)

  43 #endif
  44 
  45 
  46 // A Method represents a Java method.
  47 //
  48 // Note that most applications load thousands of methods, so keeping the size of this
  49 // class small has a big impact on footprint.
  50 //
  51 // Note that native_function and signature_handler have to be at fixed offsets
  52 // (required by the interpreter)
  53 //
  54 //  Method embedded field layout (after declared fields):
  55 //   [EMBEDDED native_function       (present only if native) ]
  56 //   [EMBEDDED signature_handler     (present only if native) ]
  57 
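A minimal sketch of how the embedded slots described above can be reached: since native_function and signature_handler live directly after the declared fields, their addresses follow the end of the Method object itself. The accessor names and types below are illustrative, not the file's actual API.

// Illustrative sketch only; assumes HotSpot's 'address' typedef and a complete Method type.
static address* native_function_slot(Method* m) {
  // first embedded word sits immediately past the declared fields
  return (address*)(m + 1);
}
static address* signature_handler_slot(Method* m) {
  // second embedded word follows the first
  return native_function_slot(m) + 1;
}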
  58 class CheckedExceptionElement;
  59 class LocalVariableTableElement;
  60 class AdapterHandlerEntry;
  61 class MethodData;
  62 class MethodCounters;

  63 class ConstMethod;
  64 class InlineTableSizes;
  65 class nmethod;
  66 class InterpreterOopMap;

  67 
  68 class Method : public Metadata {
  69  friend class VMStructs;
  70  friend class JVMCIVMStructs;
  71  friend class MethodTest;
  72  private:
  73   // If you add a new field that points to any metaspace object, you
  74   // must add this field to Method::metaspace_pointers_do().
  75   ConstMethod*      _constMethod;                // Method read-only data.
  76   MethodData*       _method_data;
  77   MethodCounters*   _method_counters;
  78   AdapterHandlerEntry* _adapter;
  79   AccessFlags       _access_flags;               // Access flags
  80   int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
  81   MethodFlags       _flags;
  82 
  83   u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)
  84 
  85   JFR_ONLY(DEFINE_TRACE_FLAG;)
  86 
  87 #ifndef PRODUCT
  88   int64_t _compiled_invocation_count;
  89 
  90   Symbol* _name;
  91 #endif
  92   // Entry point for calling both from and to the interpreter.
  93   address _i2i_entry;           // All-args-on-stack calling convention
  94   // Entry point for calling from compiled code, to compiled code if it exists
  95   // or else the interpreter.
  96   volatile address _from_compiled_entry;     // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  97   // The entry point for calling both from and to compiled code is
  98   // "_code->entry_point()".  Because of tiered compilation and de-opt, this
  99   // field can come and go.  It can transition from null to not-null at any
 100   // time (whenever a compile completes).  It can transition from not-null to
 101   // null only at safepoints (because of a de-opt).
 102   nmethod* volatile _code;                   // Points to the corresponding piece of native code
 103   volatile address  _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
 104 
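A sketch of the caching rule the comments above spell out: the compiled-entry cache resolves to the nmethod's entry point when compiled code exists and to the c2i adapter otherwise, while the interpreted-entry cache resolves to the i2c adapter or the interpreter entry. Illustrative only; the adapter accessor names are assumed from the wrappers declared further down.

// Illustrative restatement of the cache comments above (not the real update sites).
static address resolve_from_compiled_entry(nmethod* code, AdapterHandlerEntry* adapter) {
  return (code != nullptr) ? code->entry_point() : adapter->get_c2i_entry();
}
static address resolve_from_interpreted_entry(nmethod* code, AdapterHandlerEntry* adapter,
                                              address i2i_entry) {
  return (code != nullptr) ? adapter->get_i2c_entry() : i2i_entry;
}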
 105   // Constructor
 106   Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
 107  public:
 108 
 109   static Method* allocate(ClassLoaderData* loader_data,
 110                           int byte_code_size,
 111                           AccessFlags access_flags,
 112                           InlineTableSizes* sizes,
 113                           ConstMethod::MethodType method_type,
 114                           Symbol* name,
 115                           TRAPS);
 116 
 117   // CDS and vtbl checking can create an empty Method to get vtbl pointer.
 118   Method(){}
 119 
 120   virtual bool is_method() const { return true; }
 121 
 122 #if INCLUDE_CDS
 123   void remove_unshareable_info();
 124   void restore_unshareable_info(TRAPS);

 125 #endif
 126 
 127   // accessors for instance variables
 128 
 129   ConstMethod* constMethod() const             { return _constMethod; }
 130   void set_constMethod(ConstMethod* xconst)    { _constMethod = xconst; }
 131 
 132 
 133   static address make_adapters(const methodHandle& mh, TRAPS);
 134   address from_compiled_entry() const;
 135   address from_interpreted_entry() const;
 136 
 137   // access flag
 138   AccessFlags access_flags() const               { return _access_flags;  }
 139   void set_access_flags(AccessFlags flags)       { _access_flags = flags; }
 140 
 141   // name
 142   Symbol* name() const                           { return constants()->symbol_at(name_index()); }
 143   u2 name_index() const                          { return constMethod()->name_index();         }
 144   void set_name_index(int index)                 { constMethod()->set_name_index(index);       }

 297   // constraint classes are loaded if necessary. Note that this may
 298   // throw an exception if loading of the constraint classes causes
 299   // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
 300   // If an exception is thrown, returns the bci of the
 301   // exception handler which caused the exception to be thrown, which
 302   // is needed for proper retries. See, for example,
 303   // InterpreterRuntime::exception_handler_for_exception.
 304   static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
 305 
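Because the lookup above can itself throw while loading constraint classes, a caller has to check for a pending exception and use the returned bci to restart the search. A hedged sketch of that contract (the caller name is hypothetical; see InterpreterRuntime::exception_handler_for_exception for the real retry logic):

// Illustrative caller only.
static int handler_bci_with_retry(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS) {
  int bci = Method::fast_exception_handler_bci_for(mh, ex_klass, throw_bci, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    // A constraint class failed to load (e.g. IllegalAccessError, OutOfMemoryError).
    // 'bci' identifies the handler whose lookup threw; a real caller swaps in the
    // new exception and restarts the search from this bci.
  }
  return bci;
}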
 306   static bool register_native(Klass* k,
 307                               Symbol* name,
 308                               Symbol* signature,
 309                               address entry,
 310                               TRAPS);
 311 
 312   // method data access
 313   MethodData* method_data() const              {
 314     return _method_data;
 315   }
 316 
 317   // mark an exception handler as entered (used to prune dead catch blocks in C2)
 318   void set_exception_handler_entered(int handler_bci);
 319 
 320   MethodCounters* method_counters() const {
 321     return _method_counters;
 322   }
 323 
 324   void clear_method_counters() {
 325     _method_counters = nullptr;
 326   }
 327 
 328   bool init_method_counters(MethodCounters* counters);
 329 
 330   inline int prev_event_count() const;
 331   inline void set_prev_event_count(int count);
 332   inline jlong prev_time() const;
 333   inline void set_prev_time(jlong time);
 334   inline float rate() const;
 335   inline void set_rate(float rate);
 336 
 337   int invocation_count() const;
 338   int backedge_count() const;
 339 
 340   bool was_executed_more_than(int n);
 341   bool was_never_executed()                     { return !was_executed_more_than(0);  }
 342 
 343   static void build_profiling_method_data(const methodHandle& method, TRAPS);
 344 
 345   static MethodCounters* build_method_counters(Thread* current, Method* m);
 346 
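The pairing of method_counters(), init_method_counters() and build_method_counters() suggests the usual lazy-allocation pattern: read the field, and only build counters when none are installed yet. A sketch of a hypothetical caller, assuming build_method_counters() may return null on allocation failure:

// Illustrative lazy-allocation pattern only; not an actual call site.
static MethodCounters* counters_for(Thread* current, Method* m) {
  MethodCounters* mcs = m->method_counters();
  if (mcs == nullptr) {
    // Allocates and installs counters; a racing thread may win the install,
    // and null may come back if allocation fails.
    mcs = Method::build_method_counters(current, m);
  }
  return mcs;
}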
 347   int interpreter_invocation_count()            { return invocation_count();          }
 348 
 349 #ifndef PRODUCT
 350   int64_t  compiled_invocation_count() const    { return _compiled_invocation_count;}
 351   void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
 352 #else
 353   // for PrintMethodData in a product build
 354   int64_t  compiled_invocation_count() const    { return 0; }
 355 #endif // not PRODUCT
 356 
 357   // nmethod/verified compiler entry
 358   address verified_code_entry();
 359   bool check_code() const;      // Not inline to avoid circular ref
 360   nmethod* code() const;
 361 
 362   // Locks NMethodState_lock if not held.
 363   void unlink_code(nmethod *compare);
 364   // Locks NMethodState_lock if not held.
 365   void unlink_code();
 366 
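"Locks NMethodState_lock if not held" implies a conditional-locking shape: take the lock only when the current thread does not already own it. A sketch under that assumption (the real implementation may use a different locker helper, and clear_code() stays private):

// Illustrative shape only.
static void unlink_code_sketch(Method* m, nmethod* compare) {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self());
  if (m->code() == compare) {
    // only the expected nmethod is unlinked; real callers go through unlink_code()
  }
}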
 367 private:
 368   // Either called with NMethodState_lock held or from constructor.
 369   void clear_code();
 370 
 371   void clear_method_data() {
 372     _method_data = nullptr;
 373   }
 374 
 375 public:
 376   static void set_code(const methodHandle& mh, nmethod* code);
 377   void set_adapter_entry(AdapterHandlerEntry* adapter) {
 378     _adapter = adapter;
 379   }
 380   void set_from_compiled_entry(address entry) {
 381     _from_compiled_entry =  entry;
 382   }
 383 
 384   address get_i2c_entry();
 385   address get_c2i_entry();
 386   address get_c2i_unverified_entry();
 387   address get_c2i_no_clinit_check_entry();
 388   AdapterHandlerEntry* adapter() const {
 389     return _adapter;
 390   }
 391   // setup entry points
 392   void link_method(const methodHandle& method, TRAPS);
 393   // clear entry points. Used by sharing code during dump time
 394   void unlink_method() NOT_CDS_RETURN;
 395   void remove_unshareable_flags() NOT_CDS_RETURN;
 396 
 397   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
 398   virtual MetaspaceObj::Type type() const { return MethodType; }
 399 
 400   // vtable index
 401   enum VtableIndexFlag {
 402     // Valid vtable indexes are non-negative (>= 0).
 403     // These few negative values are used as sentinels.

 578 
 579   // returns true if the method is an initializer (<init> or <clinit>).
 580   bool is_initializer() const;
 581 
 582   // returns true if the method is static OR if the classfile version < 51
 583   bool has_valid_initializer_flags() const;
 584 
 585   // returns true if the method name is <clinit> and the method has
 586   // valid static initializer flags.
 587   bool is_static_initializer() const;
 588 
 589   // returns true if the method name is <init>
 590   bool is_object_initializer() const;
 591 
 592   // compiled code support
 593   // NOTE: code() is inherently racy as deopt can be clearing code
 594   // simultaneously. Use with caution.
 595   bool has_compiled_code() const;
 596 
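Since deoptimization can clear the code at any moment, callers should read code() once into a local and work on that snapshot rather than re-reading the field. A minimal sketch:

// Read the racy field once and act on the snapshot.
static void maybe_use_compiled(Method* m) {
  nmethod* nm = m->code();          // single racy read
  if (nm != nullptr) {
    address entry = nm->entry_point();
    // use 'entry'; do not call m->code() again and assume the same answer
    (void)entry;
  }
}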
 597   bool needs_clinit_barrier() const;

 598 
 599   // sizing
 600   static int header_size()                       {
 601     return align_up((int)sizeof(Method), wordSize) / wordSize;
 602   }
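A worked example of the formula above, assuming a 64-bit VM (wordSize == 8) and a purely hypothetical sizeof(Method):

//   sizeof(Method) == 88:  align_up(88, 8) / 8  ==  88 / 8  ==  11 words
//   sizeof(Method) == 90:  align_up(90, 8) / 8  ==  96 / 8  ==  12 words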
 603   static int size(bool is_native);
 604   int size() const                               { return method_size(); }
 605   void log_touched(Thread* current);
 606   static void print_touched_methods(outputStream* out);
 607 
 608   // interpreter support
 609   static ByteSize const_offset()                 { return byte_offset_of(Method, _constMethod       ); }
 610   static ByteSize access_flags_offset()          { return byte_offset_of(Method, _access_flags      ); }
 611   static ByteSize from_compiled_offset()         { return byte_offset_of(Method, _from_compiled_entry); }
 612   static ByteSize code_offset()                  { return byte_offset_of(Method, _code); }
 613 
 614   static ByteSize method_counters_offset()       {
 615     return byte_offset_of(Method, _method_counters);
 616   }
 617 #ifndef PRODUCT

src/hotspot/share/oops/method.hpp (updated version)

  43 #endif
  44 
  45 
  46 // A Method represents a Java method.
  47 //
  48 // Note that most applications load thousands of methods, so keeping the size of this
  49 // class small has a big impact on footprint.
  50 //
  51 // Note that native_function and signature_handler have to be at fixed offsets
  52 // (required by the interpreter)
  53 //
  54 //  Method embedded field layout (after declared fields):
  55 //   [EMBEDDED native_function       (present only if native) ]
  56 //   [EMBEDDED signature_handler     (present only if native) ]
  57 
  58 class CheckedExceptionElement;
  59 class LocalVariableTableElement;
  60 class AdapterHandlerEntry;
  61 class MethodData;
  62 class MethodCounters;
  63 class MethodTrainingData;
  64 class ConstMethod;
  65 class InlineTableSizes;
  66 class nmethod;
  67 class InterpreterOopMap;
  68 class SCCEntry;
  69 
  70 class Method : public Metadata {
  71  friend class VMStructs;
  72  friend class JVMCIVMStructs;
  73  friend class MethodTest;
  74  private:
  75   // If you add a new field that points to any metaspace object, you
  76   // must add this field to Method::metaspace_pointers_do().
  77   ConstMethod*      _constMethod;                // Method read-only data.
  78   MethodData*       _method_data;
  79   MethodCounters*   _method_counters;
  80   AdapterHandlerEntry* _adapter;
  81   AccessFlags       _access_flags;               // Access flags
  82   int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
  83   MethodFlags       _flags;
  84 
  85   u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)
  86 
  87   JFR_ONLY(DEFINE_TRACE_FLAG;)
  88 
  89 #ifndef PRODUCT
  90   int64_t _compiled_invocation_count;
  91 
  92   Symbol* _name;
  93 #endif
  94   // Entry point for calling both from and to the interpreter.
  95   address _i2i_entry;           // All-args-on-stack calling convention
  96   // Entry point for calling from compiled code, to compiled code if it exists
  97   // or else the interpreter.
  98   volatile address _from_compiled_entry;     // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  99   // The entry point for calling both from and to compiled code is
 100   // "_code->entry_point()".  Because of tiered compilation and de-opt, this
 101   // field can come and go.  It can transition from null to not-null at any
 102   // time (whenever a compile completes).  It can transition from not-null to
 103   // null only at safepoints (because of a de-opt).
 104   nmethod* volatile _code;                   // Points to the corresponding piece of native code
 105   volatile address  _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
 106 
 107   nmethod*  _preload_code;  // preloaded SCCache code
 108   SCCEntry* _scc_entry;     // SCCache entry for pre-loading code
 109 
 110   // Constructor
 111   Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
 112  public:
 113 
 114   static Method* allocate(ClassLoaderData* loader_data,
 115                           int byte_code_size,
 116                           AccessFlags access_flags,
 117                           InlineTableSizes* sizes,
 118                           ConstMethod::MethodType method_type,
 119                           Symbol* name,
 120                           TRAPS);
 121 
 122   // CDS and vtbl checking can create an empty Method to get vtbl pointer.
 123   Method(){}
 124 
 125   virtual bool is_method() const { return true; }
 126 
 127 #if INCLUDE_CDS
 128   void remove_unshareable_info();
 129   void restore_unshareable_info(TRAPS);
 130   static void restore_archived_method_handle_intrinsic(methodHandle m, TRAPS);
 131 #endif
 132 
 133   // accessors for instance variables
 134 
 135   ConstMethod* constMethod() const             { return _constMethod; }
 136   void set_constMethod(ConstMethod* xconst)    { _constMethod = xconst; }
 137 
 138 
 139   static address make_adapters(const methodHandle& mh, TRAPS);
 140   address from_compiled_entry() const;
 141   address from_interpreted_entry() const;
 142 
 143   // access flag
 144   AccessFlags access_flags() const               { return _access_flags;  }
 145   void set_access_flags(AccessFlags flags)       { _access_flags = flags; }
 146 
 147   // name
 148   Symbol* name() const                           { return constants()->symbol_at(name_index()); }
 149   u2 name_index() const                          { return constMethod()->name_index();         }
 150   void set_name_index(int index)                 { constMethod()->set_name_index(index);       }

 303   // constraint classes are loaded if necessary. Note that this may
 304   // throw an exception if loading of the constraint classes causes
 305   // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
 306   // If an exception is thrown, returns the bci of the
 307   // exception handler which caused the exception to be thrown, which
 308   // is needed for proper retries. See, for example,
 309   // InterpreterRuntime::exception_handler_for_exception.
 310   static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
 311 
 312   static bool register_native(Klass* k,
 313                               Symbol* name,
 314                               Symbol* signature,
 315                               address entry,
 316                               TRAPS);
 317 
 318   // method data access
 319   MethodData* method_data() const              {
 320     return _method_data;
 321   }
 322 
 323   void set_method_data(MethodData* data);
 324 
 325   MethodTrainingData* training_data_or_null() const;
 326   bool init_training_data(MethodTrainingData* tdata);
 327 
 328   // mark an exception handler as entered (used to prune dead catch blocks in C2)
 329   void set_exception_handler_entered(int handler_bci);
 330 
 331   MethodCounters* method_counters() const {
 332     return _method_counters;
 333   }
 334 
 335   void clear_method_counters() {
 336     _method_counters = nullptr;
 337   }
 338 
 339   bool init_method_counters(MethodCounters* counters);
 340 
 341   inline int prev_event_count() const;
 342   inline void set_prev_event_count(int count);
 343   inline jlong prev_time() const;
 344   inline void set_prev_time(jlong time);
 345   inline float rate() const;
 346   inline void set_rate(float rate);
 347 
 348   int invocation_count() const;
 349   int backedge_count() const;
 350 
 351   bool was_executed_more_than(int n);
 352   bool was_never_executed()                     { return !was_executed_more_than(0);  }
 353 
 354   static void build_profiling_method_data(const methodHandle& method, TRAPS);
 355   static bool install_training_method_data(const methodHandle& method);
 356   static MethodCounters* build_method_counters(Thread* current, Method* m);
 357 
 358   int interpreter_invocation_count()            { return invocation_count();          }
 359 
 360 #ifndef PRODUCT
 361   int64_t  compiled_invocation_count() const    { return _compiled_invocation_count;}
 362   void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
 363 #else
 364   // for PrintMethodData in a product build
 365   int64_t  compiled_invocation_count() const    { return 0; }
 366 #endif // not PRODUCT
 367 
 368   // nmethod/verified compiler entry
 369   address verified_code_entry();
 370   bool check_code() const;      // Not inline to avoid circular ref
 371   nmethod* code() const;
 372 
 373   // Locks NMethodState_lock if not held.
 374   void unlink_code(nmethod *compare);
 375   // Locks NMethodState_lock if not held.
 376   void unlink_code();
 377 
 378 private:
 379   // Either called with NMethodState_lock held or from constructor.
 380   void clear_code();
 381 
 382   void clear_method_data() {
 383     _method_data = nullptr;
 384   }
 385 
 386 public:
 387   static void set_code(const methodHandle& mh, nmethod* code);
 388   void set_adapter_entry(AdapterHandlerEntry* adapter) {
 389     _adapter = adapter;
 390   }
 391   void set_from_compiled_entry(address entry) {
 392     _from_compiled_entry =  entry;
 393   }
 394 
 395   void set_preload_code(nmethod* code) {
 396     _preload_code = code;
 397   }
 398   void set_scc_entry(SCCEntry* entry) {
 399     _scc_entry = entry;
 400   }
 401   SCCEntry* scc_entry() const {
 402     return _scc_entry;
 403   }
 404 
 405   address get_i2c_entry();
 406   address get_c2i_entry();
 407   address get_c2i_unverified_entry();
 408   address get_c2i_no_clinit_check_entry();
 409   AdapterHandlerEntry* adapter() const {
 410     return _adapter;
 411   }
 412   // setup entry points
 413   void link_method(const methodHandle& method, TRAPS);
 414   // clear entry points. Used by sharing code during dump time
 415   void unlink_method() NOT_CDS_RETURN;
 416   void remove_unshareable_flags() NOT_CDS_RETURN;
 417 
 418   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
 419   virtual MetaspaceObj::Type type() const { return MethodType; }
 420 
 421   // vtable index
 422   enum VtableIndexFlag {
 423     // Valid vtable indexes are non-negative (>= 0).
 424     // These few negative values are used as sentinels.

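A sketch of how such sentinels are typically told apart from real slots: valid indexes are non-negative, so any negative value can only be one of the sentinel markers. The enum members and values below are placeholders for illustration, not the actual VtableIndexFlag constants.

// Illustrative only.
enum ExampleVtableSentinel : int {
  example_invalid_vtable_index    = -4,   // placeholder value
  example_nonvirtual_vtable_index = -2    // placeholder: method never dispatched via vtable
};
static inline bool example_has_vtable_index(int vtable_index) {
  return vtable_index >= 0;               // valid vtable indexes are non-negative
}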
 599 
 600   // returns true if the method is an initializer (<init> or <clinit>).
 601   bool is_initializer() const;
 602 
 603   // returns true if the method is static OR if the classfile version < 51
 604   bool has_valid_initializer_flags() const;
 605 
 606   // returns true if the method name is <clinit> and the method has
 607   // valid static initializer flags.
 608   bool is_static_initializer() const;
 609 
 610   // returns true if the method name is <init>
 611   bool is_object_initializer() const;
 612 
 613   // compiled code support
 614   // NOTE: code() is inherently racy as deopt can be clearing code
 615   // simultaneously. Use with caution.
 616   bool has_compiled_code() const;
 617 
 618   bool needs_clinit_barrier() const;
 619   bool code_has_clinit_barriers() const;
 620 
 621   // sizing
 622   static int header_size()                       {
 623     return align_up((int)sizeof(Method), wordSize) / wordSize;
 624   }
 625   static int size(bool is_native);
 626   int size() const                               { return method_size(); }
 627   void log_touched(Thread* current);
 628   static void print_touched_methods(outputStream* out);
 629 
 630   // interpreter support
 631   static ByteSize const_offset()                 { return byte_offset_of(Method, _constMethod       ); }
 632   static ByteSize access_flags_offset()          { return byte_offset_of(Method, _access_flags      ); }
 633   static ByteSize from_compiled_offset()         { return byte_offset_of(Method, _from_compiled_entry); }
 634   static ByteSize code_offset()                  { return byte_offset_of(Method, _code); }
 635 
 636   static ByteSize method_counters_offset()       {
 637     return byte_offset_of(Method, _method_counters);
 638   }
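These ByteSize accessors exist so the interpreter and generated stubs can load Method fields at fixed offsets instead of calling C++ accessors. A sketch of consuming such an offset from C++ (illustrative; generated code would use these through the platform MacroAssembler instead):

// Illustrative only.
static address load_from_compiled_entry_raw(Method* m) {
  int off = in_bytes(Method::from_compiled_offset());
  return *(address volatile*)((char*)m + off);
}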
 639 #ifndef PRODUCT