< prev index next >

src/hotspot/share/oops/method.hpp

Print this page
*** 23,10 ***
--- 23,11 ---
   */
  
  #ifndef SHARE_OOPS_METHOD_HPP
  #define SHARE_OOPS_METHOD_HPP
  
+ #include "classfile/vmSymbols.hpp"
  #include "code/compressedStream.hpp"
  #include "compiler/compilerDefinitions.hpp"
  #include "interpreter/invocationCounter.hpp"
  #include "oops/annotations.hpp"
  #include "oops/constantPool.hpp"

*** 88,11 ***
      _dont_inline           = 1 << 2,
      _hidden                = 1 << 3,
      _has_injected_profile  = 1 << 4,
      _intrinsic_candidate   = 1 << 5,
      _reserved_stack_access = 1 << 6,
!     _scoped                = 1 << 7
    };
    mutable u2 _flags;
  
    JFR_ONLY(DEFINE_TRACE_FLAG;)
  
--- 89,13 ---
      _dont_inline           = 1 << 2,
      _hidden                = 1 << 3,
      _has_injected_profile  = 1 << 4,
      _intrinsic_candidate   = 1 << 5,
      _reserved_stack_access = 1 << 6,
!     _scoped                = 1 << 7,
+     _changes_current_thread = 1 << 8,
+     _jvmti_mount_transition = 1 << 9,
    };
    mutable u2 _flags;
  
    JFR_ONLY(DEFINE_TRACE_FLAG;)
  

*** 110,10 ***
--- 113,12 ---
    // time (whenever a compile completes).  It can transition from not-null to
    // NULL only at safepoints (because of a de-opt).
    CompiledMethod* volatile _code;                       // Points to the corresponding piece of native code
    volatile address           _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
  
+   int _num_stack_arg_slots;
+ 
    // Constructor
    Method(ConstMethod* xconst, AccessFlags access_flags);
   public:
  
    static Method* allocate(ClassLoaderData* loader_data,

*** 463,10 ***
--- 468,13 ---
    // setup entry points
    void link_method(const methodHandle& method, TRAPS);
    // clear entry points. Used by sharing code during dump time
    void unlink_method() NOT_CDS_RETURN;
  
+   // The number of argument register slots that the compiled method uses on the stack.
+   int num_stack_arg_slots() const { return _num_stack_arg_slots;  };
+ 
    virtual void metaspace_pointers_do(MetaspaceClosure* iter);
    virtual MetaspaceObj::Type type() const { return MethodType; }
  
    // vtable index
    enum VtableIndexFlag {

*** 718,10 ***
--- 726,17 ---
    bool is_compiled_lambda_form() const;             // intrinsic_id() == vmIntrinsics::_compiledLambdaForm
    bool has_member_arg() const;                      // intrinsic_id() == vmIntrinsics::_linkToSpecial, etc.
    static methodHandle make_method_handle_intrinsic(vmIntrinsicID iid, // _invokeBasic, _linkToVirtual
                                                     Symbol* signature, //anything at all
                                                     TRAPS);
+ 
+ 
+   // Continuation
+   bool is_continuation_enter_intrinsic() const { return intrinsic_id() == vmIntrinsics::_Continuation_enterSpecial; }
+ 
+   bool is_special_native_intrinsic() const { return is_method_handle_intrinsic() || is_continuation_enter_intrinsic(); }
+ 
    static Klass* check_non_bcp_klass(Klass* klass);
  
    enum {
      // How many extra stack entries for invokedynamic
      extra_stack_entries_for_jsr292 = 1

*** 742,10 ***
--- 757,12 ---
    void set_is_deleted()                             { _access_flags.set_is_deleted(); }
  
    bool on_stack() const                             { return access_flags().on_stack(); }
    void set_on_stack(const bool value);
  
+   void record_marking_cycle();
+ 
    // see the definition in Method*.cpp for the gory details
    bool should_not_be_cached() const;
  
    // JVMTI Native method prefixing support:
    bool is_prefixed_native() const                   { return access_flags().is_prefixed_native(); }

*** 826,10 ***
--- 843,24 ---
    }
    void set_dont_inline(bool x) {
      _flags = x ? (_flags | _dont_inline) : (_flags & ~_dont_inline);
    }
  
+   bool changes_current_thread() {
+     return (_flags & _changes_current_thread) != 0;
+   }
+   void set_changes_current_thread(bool x) {
+     _flags = x ? (_flags | _changes_current_thread) : (_flags & ~_changes_current_thread);
+   }
+ 
+   bool jvmti_mount_transition() {
+     return (_flags & _jvmti_mount_transition) != 0;
+   }
+   void set_jvmti_mount_transition(bool x) {
+     _flags = x ? (_flags | _jvmti_mount_transition) : (_flags & ~_jvmti_mount_transition);
+   }
+ 
    bool is_hidden() const {
      return (_flags & _hidden) != 0;
    }
  
    void set_hidden(bool x) {

*** 940,15 ***
  
  // Return true if not all classes referenced in the signature, including the return type, have been loaded
    static bool has_unloaded_classes_in_signature(const methodHandle& m, TRAPS);
  
    // Printing
!   void print_short_name(outputStream* st = tty); // prints as klassname::methodname; Exposed so field engineers can debug VM
  #if INCLUDE_JVMTI
!   void print_name(outputStream* st = tty); // prints as "virtual void foo(int)"; exposed for -Xlog:redefine+class
  #else
!   void print_name(outputStream* st = tty)        PRODUCT_RETURN; // prints as "virtual void foo(int)"
  #endif
  
    typedef int (*method_comparator_func)(Method* a, Method* b);
  
    // Helper routine used for method sorting
--- 971,15 ---
  
  // Return true if not all classes referenced in the signature, including the return type, have been loaded
    static bool has_unloaded_classes_in_signature(const methodHandle& m, TRAPS);
  
    // Printing
!   void print_short_name(outputStream* st = tty) const; // prints as klassname::methodname; Exposed so field engineers can debug VM
  #if INCLUDE_JVMTI
!   void print_name(outputStream* st = tty) const; // prints as "virtual void foo(int)"; exposed for -Xlog:redefine+class
  #else
!   void print_name(outputStream* st = tty) const  PRODUCT_RETURN; // prints as "virtual void foo(int)"
  #endif
  
    typedef int (*method_comparator_func)(Method* a, Method* b);
  
    // Helper routine used for method sorting

*** 988,10 ***
--- 1019,12 ---
   private:
  
    // Inlined elements
    address* native_function_addr() const          { assert(is_native(), "must be native"); return (address*) (this+1); }
    address* signature_handler_addr() const        { return native_function_addr() + 1; }
+ 
+   void set_num_stack_arg_slots();
  };
  
  
  // Utility class for compressing line number tables
  
< prev index next >