  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod*      _constMethod;                // Method read-only data.
  MethodData*       _method_data;                // Profiling data; lazily built, see build_profiling_method_data().
  MethodCounters*   _method_counters;            // Invocation/backedge counters; lazily built, see build_method_counters().
  AdapterHandlerEntry* _adapter;                 // i2c/c2i adapters; see set_adapter_entry() and the get_*_entry() accessors.
  AccessFlags       _access_flags;               // Access flags
  int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
                                                 // note: can have vtables with >2**16 elements (because of inheritance)
  u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)

  // Flags: single-bit properties of this method, packed into _flags below.
  // Accessors of matching names appear further down in the class.
  enum Flags {
    _caller_sensitive       = 1 << 0,
    _force_inline           = 1 << 1,
    _dont_inline            = 1 << 2,
    _hidden                 = 1 << 3,
    _has_injected_profile   = 1 << 4,
    _intrinsic_candidate    = 1 << 5,
    _reserved_stack_access  = 1 << 6,
    _scoped                 = 1 << 7,
    _changes_current_thread = 1 << 8,
    _jvmti_mount_transition = 1 << 9,
  };
  mutable u2 _flags;   // OR of Flags bits (10 bits used, fits in u2).
99
  JFR_ONLY(DEFINE_TRACE_FLAG;)                   // JFR trace flag, present only in JFR builds.

#ifndef PRODUCT
  int64_t _compiled_invocation_count;            // Times the compiled code was entered (debug builds only).

  Symbol* _name;                                 // NOTE(review): presumably the method's name Symbol, retained for
                                                 // debugging; product code resolves the name via name() — confirm.
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry;           // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry;         // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from NULL to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // NULL only at safepoints (because of a de-opt).
  CompiledMethod* volatile _code;                // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry;      // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
119
  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
 public:

  // Allocates a new Method (and its ConstMethod, sized by byte_code_size and
  // sizes) in loader_data's metaspace. Can safepoint/throw (TRAPS).
  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          Symbol* name,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){}

  // Type test.
  virtual bool is_method() const { return true; }

#if INCLUDE_CDS
  // CDS archive support: strip/restore state that cannot be shared across JVMs.
  void remove_unshareable_info();
  void restore_unshareable_info(TRAPS);
#endif

  // accessors for instance variables

  ConstMethod* constMethod() const { return _constMethod; }
  void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }


  static address make_adapters(const methodHandle& mh, TRAPS);
  address from_compiled_entry() const;
  address from_interpreted_entry() const;
151
  // access flag
  AccessFlags access_flags() const { return _access_flags; }
  void set_access_flags(AccessFlags flags) { _access_flags = flags; }

  // name: stored as an index into the constant pool
  Symbol* name() const { return constants()->symbol_at(name_index()); }
  int name_index() const { return constMethod()->name_index(); }
  void set_name_index(int index) { constMethod()->set_name_index(index); }

  // signature: stored as an index into the constant pool
  Symbol* signature() const { return constants()->symbol_at(signature_index()); }
  int signature_index() const { return constMethod()->signature_index(); }
  void set_signature_index(int index) { constMethod()->set_signature_index(index); }

  // generics support: index 0 means "no generic signature"
  Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
  int generic_signature_index() const { return constMethod()->generic_signature_index(); }
  void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
411
  bool was_executed_more_than(int n);
  bool was_never_executed() { return !was_executed_more_than(0); }

  static void build_profiling_method_data(const methodHandle& method, TRAPS);

  static MethodCounters* build_method_counters(Thread* current, Method* m);

  // Same value as invocation_count().
  int interpreter_invocation_count() { return invocation_count(); }

#ifndef PRODUCT
  int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
  void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
#else
  // for PrintMethodData in a product build
  int64_t compiled_invocation_count() const { return 0; }
#endif // not PRODUCT
428
  // nmethod/verified compiler entry
  address verified_code_entry();
  bool check_code() const;      // Not inline to avoid circular ref
  CompiledMethod* volatile code() const;

  // Locks CompiledMethod_lock if not held.
  void unlink_code(CompiledMethod *compare);
  // Locks CompiledMethod_lock if not held.
  void unlink_code();

 private:
  // Either called with CompiledMethod_lock held or from constructor.
  void clear_code();

 public:
  static void set_code(const methodHandle& mh, CompiledMethod* code);
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    _adapter = adapter;
  }
  // Setter for the cached _from_compiled_entry field declared above.
  void set_from_compiled_entry(address entry) {
    _from_compiled_entry = entry;
  }

  // i2c/c2i adapter entry points.
  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_unverified_entry();
  address get_c2i_no_clinit_check_entry();
  AdapterHandlerEntry* adapter() const {
    return _adapter;
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
  void unlink_method() NOT_CDS_RETURN;

  // the number of argument reg slots that the compiled method uses on the stack.
  int num_stack_arg_slots() const { return constMethod()->num_stack_arg_slots(); }

  // Metaspace walking support; see the field-declaration comment at the top of this class.
  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodType; }
469
470 // vtable index
471 enum VtableIndexFlag {
472 // Valid vtable indexes are non-negative (>= 0).
473 // These few negative values are used as sentinels.
474 itable_index_max = -10, // first itable index, growing downward
548
  // localvariable table (all delegate to the read-only ConstMethod)
  bool has_localvariable_table() const
                          { return constMethod()->has_localvariable_table(); }
  int localvariable_table_length() const
                          { return constMethod()->localvariable_table_length(); }
  LocalVariableTableElement* localvariable_table_start() const
                          { return constMethod()->localvariable_table_start(); }

  bool has_linenumber_table() const
                          { return constMethod()->has_linenumber_table(); }
  u_char* compressed_linenumber_table() const
                          { return constMethod()->compressed_linenumber_table(); }

  // method holder (the Klass* holding this method)
  InstanceKlass* method_holder() const { return constants()->pool_holder(); }

  Symbol* klass_name() const; // returns the name of the method holder
  BasicType result_type() const { return constMethod()->result_type(); }
  bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
  bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }

  // Checked exceptions thrown by this method (resolved to mirrors)
  objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }

  // Access flags (queried on the cached _access_flags word)
  bool is_public() const                { return access_flags().is_public();      }
  bool is_private() const               { return access_flags().is_private();     }
  bool is_protected() const             { return access_flags().is_protected();   }
  bool is_package_private() const       { return !is_public() && !is_private() && !is_protected(); }
  bool is_static() const                { return access_flags().is_static();      }
  bool is_final() const                 { return access_flags().is_final();       }
  bool is_synchronized() const          { return access_flags().is_synchronized();}
  bool is_native() const                { return access_flags().is_native();      }
  bool is_abstract() const              { return access_flags().is_abstract();    }
  bool is_synthetic() const             { return access_flags().is_synthetic();   }

  // returns true if contains only return operation
  bool is_empty_method() const;
587
588 // returns true if this is a vanilla constructor
  void set_has_monitor_bytecodes() { _access_flags.set_has_monitor_bytecodes(); }

  // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
  // properly nest in the method. It might return false, even though they actually nest properly, since the info
  // has not been computed yet.
  bool guaranteed_monitor_matching() const { return access_flags().is_monitor_matching(); }
  void set_guaranteed_monitor_matching() { _access_flags.set_monitor_matching(); }

  // returns true if the method is an accessor function (setter/getter).
  bool is_accessor() const;

  // returns true if the method is a getter
  bool is_getter() const;

  // returns true if the method is a setter
  bool is_setter() const;

  // returns true if the method does nothing but return a constant of primitive type
  bool is_constant_getter() const;

  // returns true if the method is an initializer (<init> or <clinit>).
  bool is_initializer() const;

  // returns true if the method is static OR if the classfile version < 51
  bool has_valid_initializer_flags() const;

  // returns true if the method name is <clinit> and the method has
  // valid static initializer flags.
  bool is_static_initializer() const;

  // returns true if the method name is <init>
  bool is_object_initializer() const;

  // compiled code support
  // NOTE: code() is inherently racy as deopt can be clearing code
  // simultaneously. Use with caution.
  bool has_compiled_code() const;

  bool needs_clinit_barrier() const;

  // sizing
  // Size of the Method object header, in words.
  static int header_size() {
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const { return method_size(); }
  void log_touched(Thread* current);
  static void print_touched_methods(outputStream* out);
671
  // interpreter support: field offsets used by generated code (ByteSize-typed)
  static ByteSize const_offset()                 { return byte_offset_of(Method, _constMethod        ); }
  static ByteSize access_flags_offset()          { return byte_offset_of(Method, _access_flags       ); }
  static ByteSize from_compiled_offset()         { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize code_offset()                  { return byte_offset_of(Method, _code); }
  static ByteSize method_data_offset()           {
    return byte_offset_of(Method, _method_data);
  }
  static ByteSize method_counters_offset()       {
    return byte_offset_of(Method, _method_counters);
  }
#ifndef PRODUCT
  static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
#endif // not PRODUCT
  static ByteSize native_function_offset()       { return in_ByteSize(sizeof(Method)); }            // stored past the end of the Method object
  static ByteSize from_interpreted_offset()      { return byte_offset_of(Method, _from_interpreted_entry ); }
  static ByteSize interpreter_entry_offset()     { return byte_offset_of(Method, _i2i_entry ); }
  static ByteSize signature_handler_offset()     { return in_ByteSize(sizeof(Method) + wordSize); } // one word after the native function
  static ByteSize itable_index_offset()          { return byte_offset_of(Method, _vtable_index ); } // itable index shares storage with _vtable_index

  // for code generation
  static int method_data_offset_in_bytes()       { return offset_of(Method, _method_data); }
  static int intrinsic_id_offset_in_bytes()      { return offset_of(Method, _intrinsic_id); }
  static int intrinsic_id_size_in_bytes()        { return sizeof(u2); }
696
876 }
877 void set_intrinsic_candidate(bool x) {
878 _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
879 }
880
881 bool has_injected_profile() {
882 return (_flags & _has_injected_profile) != 0;
883 }
884 void set_has_injected_profile(bool x) {
885 _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
886 }
887
888 bool has_reserved_stack_access() {
889 return (_flags & _reserved_stack_access) != 0;
890 }
891
892 void set_has_reserved_stack_access(bool x) {
893 _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
894 }
895
  JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)   // accessor for the JFR trace flag declared above

  ConstMethod::MethodType method_type() const {
    return _constMethod->method_type();
  }
  bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }

  // On-stack replacement support (all delegate to the method holder's OSR list)
  bool has_osr_nmethod(int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
  }

  int mark_osr_nmethods() {
    return method_holder()->mark_osr_nmethods(this);
  }

  nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
  }
|
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod*      _constMethod;                // Method read-only data.
  MethodData*       _method_data;                // Profiling data; lazily built, see build_profiling_method_data().
  MethodCounters*   _method_counters;            // Invocation/backedge counters; lazily built, see build_method_counters().
  AdapterHandlerEntry* _adapter;                 // i2c/c2i adapters; see set_adapter_entry() and the get_*_entry() accessors.
  AccessFlags       _access_flags;               // Access flags
  int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
                                                 // note: can have vtables with >2**16 elements (because of inheritance)
  u2                _intrinsic_id;               // vmSymbols::intrinsic_id (0 == _none)

  // Flags: single-bit properties of this method, packed into _flags below.
  // Accessors of matching names appear further down in the class.
  enum Flags {
    _caller_sensitive       = 1 << 0,
    _force_inline           = 1 << 1,
    _dont_inline            = 1 << 2,
    _hidden                 = 1 << 3,
    _has_injected_profile   = 1 << 4,
    _intrinsic_candidate    = 1 << 5,
    _reserved_stack_access  = 1 << 6,
    _scalarized_args        = 1 << 7,
    _scalarized_return      = 1 << 8,
    _c1_needs_stack_repair  = 1 << 9,
    _c2_needs_stack_repair  = 1 << 10,
    _scoped                 = 1 << 11,
    _changes_current_thread = 1 << 12,
    _jvmti_mount_transition = 1 << 13,
  };
  mutable u2 _flags;   // OR of Flags bits (14 bits used, still fits in u2).
103
  JFR_ONLY(DEFINE_TRACE_FLAG;)                   // JFR trace flag, present only in JFR builds.

#ifndef PRODUCT
  int64_t _compiled_invocation_count;            // Times the compiled code was entered (debug builds only).

  Symbol* _name;                                 // NOTE(review): presumably the method's name Symbol, retained for
                                                 // debugging; product code resolves the name via name() — confirm.
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry;           // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry;           // Cache of: _code ? _code->verified_entry_point() : _adapter->c2i_entry()
  volatile address _from_compiled_inline_ro_entry; // Cache of: _code ? _code->verified_inline_ro_entry_point() : _adapter->c2i_inline_ro_entry()
  volatile address _from_compiled_inline_entry;    // Cache of: _code ? _code->verified_inline_entry_point() : _adapter->c2i_inline_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from NULL to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // NULL only at safepoints (because of a de-opt).
  CompiledMethod* volatile _code;                  // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry;        // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
125
  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
 public:

  // Allocates a new Method (and its ConstMethod, sized by byte_code_size and
  // sizes) in loader_data's metaspace. Can safepoint/throw (TRAPS).
  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          Symbol* name,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){}

  // Type test.
  virtual bool is_method() const { return true; }

#if INCLUDE_CDS
  // CDS archive support: strip/restore state that cannot be shared across JVMs.
  void remove_unshareable_info();
  void restore_unshareable_info(TRAPS);
#endif

  // accessors for instance variables

  ConstMethod* constMethod() const { return _constMethod; }
  void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }


  static address make_adapters(const methodHandle& mh, TRAPS);
  address from_compiled_entry() const;
  address from_compiled_inline_ro_entry() const;
  address from_compiled_inline_entry() const;
  address from_interpreted_entry() const;

  // access flag
  AccessFlags access_flags() const { return _access_flags; }
  void set_access_flags(AccessFlags flags) { _access_flags = flags; }

  // name: stored as an index into the constant pool
  Symbol* name() const { return constants()->symbol_at(name_index()); }
  int name_index() const { return constMethod()->name_index(); }
  void set_name_index(int index) { constMethod()->set_name_index(index); }

  // signature: stored as an index into the constant pool
  Symbol* signature() const { return constants()->symbol_at(signature_index()); }
  int signature_index() const { return constMethod()->signature_index(); }
  void set_signature_index(int index) { constMethod()->set_signature_index(index); }

  // generics support: index 0 means "no generic signature"
  Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : (Symbol*)NULL); }
  int generic_signature_index() const { return constMethod()->generic_signature_index(); }
  void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
419
  bool was_executed_more_than(int n);
  bool was_never_executed() { return !was_executed_more_than(0); }

  static void build_profiling_method_data(const methodHandle& method, TRAPS);

  static MethodCounters* build_method_counters(Thread* current, Method* m);

  // Same value as invocation_count().
  int interpreter_invocation_count() { return invocation_count(); }

#ifndef PRODUCT
  int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
  void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
#else
  // for PrintMethodData in a product build
  int64_t compiled_invocation_count() const { return 0; }
#endif // not PRODUCT
436
  // nmethod/verified compiler entry
  address verified_code_entry();
  address verified_inline_code_entry();
  address verified_inline_ro_code_entry();
  bool check_code() const;      // Not inline to avoid circular ref
  CompiledMethod* volatile code() const;

  // Locks CompiledMethod_lock if not held.
  void unlink_code(CompiledMethod *compare);
  // Locks CompiledMethod_lock if not held.
  void unlink_code();

 private:
  // Either called with CompiledMethod_lock held or from constructor.
  void clear_code();

 public:
  static void set_code(const methodHandle& mh, CompiledMethod* code);
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    _adapter = adapter;
  }
  // Setters for the cached compiled-entry fields declared above.
  void set_from_compiled_entry(address entry) {
    _from_compiled_entry = entry;
  }
  void set_from_compiled_inline_ro_entry(address entry) {
    _from_compiled_inline_ro_entry = entry;
  }
  void set_from_compiled_inline_entry(address entry) {
    _from_compiled_inline_entry = entry;
  }

  // i2c/c2i adapter entry points (including the inline-type variants).
  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_inline_entry();
  address get_c2i_unverified_entry();
  address get_c2i_unverified_inline_entry();
  address get_c2i_no_clinit_check_entry();
  AdapterHandlerEntry* adapter() const {
    return _adapter;
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
  void unlink_method() NOT_CDS_RETURN;

  // the number of argument reg slots that the compiled method uses on the stack.
  int num_stack_arg_slots() const { return constMethod()->num_stack_arg_slots(); }

  // Metaspace walking support; see the field-declaration comment at the top of this class.
  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodType; }
487
488 // vtable index
489 enum VtableIndexFlag {
490 // Valid vtable indexes are non-negative (>= 0).
491 // These few negative values are used as sentinels.
492 itable_index_max = -10, // first itable index, growing downward
566
  // localvariable table (all delegate to the read-only ConstMethod)
  bool has_localvariable_table() const
                          { return constMethod()->has_localvariable_table(); }
  int localvariable_table_length() const
                          { return constMethod()->localvariable_table_length(); }
  LocalVariableTableElement* localvariable_table_start() const
                          { return constMethod()->localvariable_table_start(); }

  bool has_linenumber_table() const
                          { return constMethod()->has_linenumber_table(); }
  u_char* compressed_linenumber_table() const
                          { return constMethod()->compressed_linenumber_table(); }

  // method holder (the Klass* holding this method)
  InstanceKlass* method_holder() const { return constants()->pool_holder(); }

  Symbol* klass_name() const; // returns the name of the method holder
  BasicType result_type() const { return constMethod()->result_type(); }
  bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
  // NOTE(review): presumably returns the InlineKlass when this method returns
  // an inline type, otherwise NULL — confirm in method.cpp.
  InlineKlass* returns_inline_type(Thread* thread) const;

  // Checked exceptions thrown by this method (resolved to mirrors)
  objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }

  // Access flags (queried on the cached _access_flags word)
  bool is_public() const                { return access_flags().is_public();      }
  bool is_private() const               { return access_flags().is_private();     }
  bool is_protected() const             { return access_flags().is_protected();   }
  bool is_package_private() const       { return !is_public() && !is_private() && !is_protected(); }
  bool is_static() const                { return access_flags().is_static();      }
  bool is_final() const                 { return access_flags().is_final();       }
  bool is_synchronized() const          { return access_flags().is_synchronized();}
  bool is_native() const                { return access_flags().is_native();      }
  bool is_abstract() const              { return access_flags().is_abstract();    }
  bool is_synthetic() const             { return access_flags().is_synthetic();   }

  // returns true if contains only return operation
  bool is_empty_method() const;
605
606 // returns true if this is a vanilla constructor
  void set_has_monitor_bytecodes() { _access_flags.set_has_monitor_bytecodes(); }

  // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
  // properly nest in the method. It might return false, even though they actually nest properly, since the info
  // has not been computed yet.
  bool guaranteed_monitor_matching() const { return access_flags().is_monitor_matching(); }
  void set_guaranteed_monitor_matching() { _access_flags.set_monitor_matching(); }

  // returns true if the method is an accessor function (setter/getter).
  bool is_accessor() const;

  // returns true if the method is a getter
  bool is_getter() const;

  // returns true if the method is a setter
  bool is_setter() const;

  // returns true if the method does nothing but return a constant of primitive type
  bool is_constant_getter() const;

  // returns true if the method name is <clinit> and the method has
  // valid static initializer flags.
  bool is_class_initializer() const;

  // returns true if the method name is <init> and the method is not a static factory
  bool is_object_constructor() const;

  // returns true if the method is an object constructor or class initializer
  // (non-static <init> or <clinit>), but false for factories (static <vnew>).
  bool is_object_constructor_or_class_initializer() const;

  // returns true if the method name is <vnew> and the method is static
  bool is_static_vnew_factory() const;

  // compiled code support
  // NOTE: code() is inherently racy as deopt can be clearing code
  // simultaneously. Use with caution.
  bool has_compiled_code() const;

  bool needs_clinit_barrier() const;

  // sizing
  // Size of the Method object header, in words.
  static int header_size() {
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const { return method_size(); }
  void log_touched(Thread* current);
  static void print_touched_methods(outputStream* out);
690
  // interpreter support: field offsets used by generated code (ByteSize-typed)
  static ByteSize const_offset()                  { return byte_offset_of(Method, _constMethod        ); }
  static ByteSize access_flags_offset()           { return byte_offset_of(Method, _access_flags       ); }
  static ByteSize from_compiled_offset()          { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize from_compiled_inline_offset()   { return byte_offset_of(Method, _from_compiled_inline_entry); }
  static ByteSize from_compiled_inline_ro_offset(){ return byte_offset_of(Method, _from_compiled_inline_ro_entry); }
  static ByteSize code_offset()                   { return byte_offset_of(Method, _code); }
  static ByteSize flags_offset()                  { return byte_offset_of(Method, _flags); }
  static ByteSize method_data_offset()            {
    return byte_offset_of(Method, _method_data);
  }
  static ByteSize method_counters_offset()        {
    return byte_offset_of(Method, _method_counters);
  }
#ifndef PRODUCT
  static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
#endif // not PRODUCT
  static ByteSize native_function_offset()        { return in_ByteSize(sizeof(Method)); }            // stored past the end of the Method object
  static ByteSize from_interpreted_offset()       { return byte_offset_of(Method, _from_interpreted_entry ); }
  static ByteSize interpreter_entry_offset()      { return byte_offset_of(Method, _i2i_entry ); }
  static ByteSize signature_handler_offset()      { return in_ByteSize(sizeof(Method) + wordSize); } // one word after the native function
  static ByteSize itable_index_offset()           { return byte_offset_of(Method, _vtable_index ); } // itable index shares storage with _vtable_index

  // for code generation
  static int method_data_offset_in_bytes()        { return offset_of(Method, _method_data); }
  static int intrinsic_id_offset_in_bytes()       { return offset_of(Method, _intrinsic_id); }
  static int intrinsic_id_size_in_bytes()         { return sizeof(u2); }
718
898 }
899 void set_intrinsic_candidate(bool x) {
900 _flags = x ? (_flags | _intrinsic_candidate) : (_flags & ~_intrinsic_candidate);
901 }
902
903 bool has_injected_profile() {
904 return (_flags & _has_injected_profile) != 0;
905 }
906 void set_has_injected_profile(bool x) {
907 _flags = x ? (_flags | _has_injected_profile) : (_flags & ~_has_injected_profile);
908 }
909
910 bool has_reserved_stack_access() {
911 return (_flags & _reserved_stack_access) != 0;
912 }
913
914 void set_has_reserved_stack_access(bool x) {
915 _flags = x ? (_flags | _reserved_stack_access) : (_flags & ~_reserved_stack_access);
916 }
917
918 bool has_scalarized_args() const {
919 return (_flags & _scalarized_args) != 0;
920 }
921
922 void set_has_scalarized_args(bool x) {
923 _flags = x ? (_flags | _scalarized_args) : (_flags & ~_scalarized_args);
924 }
925
926 bool has_scalarized_return() const {
927 return (_flags & _scalarized_return) != 0;
928 }
929
930 void set_has_scalarized_return(bool x) {
931 _flags = x ? (_flags | _scalarized_return) : (_flags & ~_scalarized_return);
932 }
933
934 bool is_scalarized_arg(int idx) const;
935
936 bool c1_needs_stack_repair() {
937 return (_flags & _c1_needs_stack_repair) != 0;
938 }
939
940 bool c2_needs_stack_repair() {
941 return (_flags & _c2_needs_stack_repair) != 0;
942 }
943
944 void set_c1_needs_stack_repair(bool x) {
945 _flags = x ? (_flags | _c1_needs_stack_repair) : (_flags & ~_c1_needs_stack_repair);
946 }
947
948 void set_c2_needs_stack_repair(bool x) {
949 _flags = x ? (_flags | _c2_needs_stack_repair) : (_flags & ~_c2_needs_stack_repair);
950 }
951
  JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)   // accessor for the JFR trace flag declared above

  ConstMethod::MethodType method_type() const {
    return _constMethod->method_type();
  }
  bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }

  // On-stack replacement support (all delegate to the method holder's OSR list)
  bool has_osr_nmethod(int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
  }

  int mark_osr_nmethods() {
    return method_holder()->mark_osr_nmethods(this);
  }

  nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
    return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
  }
|