// NOTE(review): every line in this chunk carries a leading display-line number
// ("76", "77", ...) left over from extraction; it is not part of the original code.
//
// VM-internal per-method metadata fields: profiling data, invocation counters,
// the i2c/c2i adapter, access flags, and the cached call entry points.
// (Appears to be the field section of HotSpot's Method class — TODO confirm.)
76 MethodData* _method_data;
77 MethodCounters* _method_counters;
78 AdapterHandlerEntry* _adapter;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 MethodFlags _flags;
82
83 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
84
85 JFR_ONLY(DEFINE_TRACE_FLAG;)
86
// Debug-build-only fields: a compiled-invocation counter and a cached name
// symbol; both compiled out in PRODUCT builds.
87 #ifndef PRODUCT
88 int64_t _compiled_invocation_count;
89
90 Symbol* _name;
91 #endif
92 // Entry point for calling both from and to the interpreter.
93 address _i2i_entry; // All-args-on-stack calling convention
94 // Entry point for calling from compiled code, to compiled code if it exists
95 // or else the interpreter.
96 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
97 // The entry point for calling both from and to compiled code is
98 // "_code->entry_point()". Because of tiered compilation and de-opt, this
99 // field can come and go. It can transition from null to not-null at any
100 // time (whenever a compile completes). It can transition from not-null to
101 // null only at safepoints (because of a de-opt).
102 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
103 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
104
105 // Constructor
// Private constructor; instances are created via the static allocate() below.
106 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
107 public:
108
// Factory: allocates a Method (and its ConstMethod) in the given
// ClassLoaderData's metaspace. TRAPS: may throw (e.g. OOM) to the caller.
109 static Method* allocate(ClassLoaderData* loader_data,
110 int byte_code_size,
111 AccessFlags access_flags,
112 InlineTableSizes* sizes,
113 ConstMethod::MethodType method_type,
114 Symbol* name,
115 TRAPS);
116
117 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
118 Method(){}
119
120 virtual bool is_method() const { return true; }
121
// CDS (class-data sharing) archive support: strip/restore state that cannot
// live in the shared archive. Declarations only; bodies are elsewhere.
122 #if INCLUDE_CDS
123 void remove_unshareable_info();
124 void restore_unshareable_info(TRAPS);
125 #endif
126
127 // accessors for instance variables
128
129 ConstMethod* constMethod() const { return _constMethod; }
130 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
131
132
133 static address make_adapters(const methodHandle& mh, TRAPS);
134 address from_compiled_entry() const;
135 address from_interpreted_entry() const;
136
137 // access flag
138 AccessFlags access_flags() const { return _access_flags; }
139 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
140
141 // name
// Name/signature are stored as constant-pool indices on the ConstMethod;
// the Symbol* accessors resolve them through the constant pool.
142 Symbol* name() const { return constants()->symbol_at(name_index()); }
143 u2 name_index() const { return constMethod()->name_index(); }
144 void set_name_index(int index) { constMethod()->set_name_index(index); }
145
146 // signature
147 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
148 u2 signature_index() const { return constMethod()->signature_index(); }
149 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
150
151 // generics support
// Index 0 means "no generic signature"; returns nullptr in that case.
152 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
153 u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
// NOTE(review): the jump in residual line numbers (154 -> 339) indicates an
// elided region of the original header between the previous span and here.
339
340 bool was_executed_more_than(int n);
341 bool was_never_executed() { return !was_executed_more_than(0); }
342
343 static void build_profiling_method_data(const methodHandle& method, TRAPS);
344
345 static MethodCounters* build_method_counters(Thread* current, Method* m);
346
// Delegates to invocation_count(), which is declared outside this view.
347 int interpreter_invocation_count() { return invocation_count(); }
348
349 #ifndef PRODUCT
350 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
// NOTE(review): setter takes an int while the field is int64_t — values above
// INT_MAX cannot be set through this API; presumably intentional, verify.
351 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
352 #else
353 // for PrintMethodData in a product build
354 int64_t compiled_invocation_count() const { return 0; }
355 #endif // not PRODUCT
356
357 // nmethod/verified compiler entry
358 address verified_code_entry();
359 bool check_code() const; // Not inline to avoid circular ref
360 CompiledMethod* code() const;
361
362 // Locks CompiledMethod_lock if not held.
363 void unlink_code(CompiledMethod *compare);
364 // Locks CompiledMethod_lock if not held.
365 void unlink_code();
366
367 private:
368 // Either called with CompiledMethod_lock held or from constructor.
369 void clear_code();
370
371 void clear_method_data() {
372 _method_data = nullptr;
373 }
374
375 public:
376 static void set_code(const methodHandle& mh, CompiledMethod* code);
377 void set_adapter_entry(AdapterHandlerEntry* adapter) {
378 _adapter = adapter;
379 }
380 void set_from_compiled_entry(address entry) {
381 _from_compiled_entry = entry;
382 }
383
// Adapter entry-point lookups (i2c = interpreter-to-compiled,
// c2i = compiled-to-interpreter); declarations only.
384 address get_i2c_entry();
385 address get_c2i_entry();
386 address get_c2i_unverified_entry();
387 address get_c2i_no_clinit_check_entry();
388 AdapterHandlerEntry* adapter() const {
389 return _adapter;
390 }
391 // setup entry points
392 void link_method(const methodHandle& method, TRAPS);
393 // clear entry points. Used by sharing code during dump time
394 void unlink_method() NOT_CDS_RETURN;
395 void remove_unshareable_flags() NOT_CDS_RETURN;
396
397 // the number of argument reg slots that the compiled method uses on the stack.
// When rounded, the slot count is aligned up to an even number.
398 int num_stack_arg_slots(bool rounded = true) const {
399 return rounded ? align_up(constMethod()->num_stack_arg_slots(), 2) : constMethod()->num_stack_arg_slots(); }
400
401 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
402 virtual MetaspaceObj::Type type() const { return MethodType; }
403
404 // vtable index
// NOTE(review): this enum is cut off here by the extraction; its enumerators
// are outside the visible span.
405 enum VtableIndexFlag {
406 // Valid vtable indexes are non-negative (>= 0).
// NOTE(review): residual numbering jumps 406 -> 482 — another elided region.
482
483 // localvariable table
// Table accessors below all delegate to the ConstMethod, which owns the data.
484 bool has_localvariable_table() const
485 { return constMethod()->has_localvariable_table(); }
486 u2 localvariable_table_length() const
487 { return constMethod()->localvariable_table_length(); }
488 LocalVariableTableElement* localvariable_table_start() const
489 { return constMethod()->localvariable_table_start(); }
490
491 bool has_linenumber_table() const
492 { return constMethod()->has_linenumber_table(); }
493 u_char* compressed_linenumber_table() const
494 { return constMethod()->compressed_linenumber_table(); }
495
496 // method holder (the Klass* holding this method)
497 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
498
499 Symbol* klass_name() const; // returns the name of the method holder
500 BasicType result_type() const { return constMethod()->result_type(); }
501 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
502 bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }
503
504 // Checked exceptions thrown by this method (resolved to mirrors)
505 objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
506
507 // Access flags
508 bool is_public() const { return access_flags().is_public(); }
509 bool is_private() const { return access_flags().is_private(); }
510 bool is_protected() const { return access_flags().is_protected(); }
// Package-private = default access: none of public/private/protected set.
511 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
512 bool is_static() const { return access_flags().is_static(); }
513 bool is_final() const { return access_flags().is_final(); }
514 bool is_synchronized() const { return access_flags().is_synchronized();}
515 bool is_native() const { return access_flags().is_native(); }
516 bool is_abstract() const { return access_flags().is_abstract(); }
517 bool is_synthetic() const { return access_flags().is_synthetic(); }
518
519 // returns true if contains only return operation
520 bool is_empty_method() const;
521
522 // returns true if this is a vanilla constructor
// NOTE(review): numbering jumps 522 -> 561 — the declaration this comment
// describes was elided by the extraction.
561 bool has_monitors() const { return is_synchronized() || has_monitor_bytecodes(); }
562
563 // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
564 // properly nest in the method. It might return false, even though they actually nest properly, since the info.
565 // has not been computed yet.
566 bool guaranteed_monitor_matching() const { return monitor_matching(); }
567 void set_guaranteed_monitor_matching() { set_monitor_matching(); }
568
569 // returns true if the method is an accessor function (setter/getter).
570 bool is_accessor() const;
571
572 // returns true if the method is a getter
573 bool is_getter() const;
574
575 // returns true if the method is a setter
576 bool is_setter() const;
577
578 // returns true if the method does nothing but return a constant of primitive type
579 bool is_constant_getter() const;
580
581 // returns true if the method is an initializer (<init> or <clinit>).
582 bool is_initializer() const;
583
584 // returns true if the method is static OR if the classfile version < 51
585 bool has_valid_initializer_flags() const;
586
587 // returns true if the method name is <clinit> and the method has
588 // valid static initializer flags.
589 bool is_static_initializer() const;
590
591 // returns true if the method name is <init>
592 bool is_object_initializer() const;
593
594 // compiled code support
595 // NOTE: code() is inherently racy as deopt can be clearing code
596 // simultaneously. Use with caution.
597 bool has_compiled_code() const;
598
599 bool needs_clinit_barrier() const;
600
601 // sizing
// Size of the Method header in words, rounded up to a whole word.
602 static int header_size() {
603 return align_up((int)sizeof(Method), wordSize) / wordSize;
604 }
605 static int size(bool is_native);
606 int size() const { return method_size(); }
607 void log_touched(Thread* current);
608 static void print_touched_methods(outputStream* out);
609
610 // interpreter support
// Field byte-offsets used by the interpreter and compilers to address
// Method fields directly from generated code.
611 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
612 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
613 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
614 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
615
616 static ByteSize method_counters_offset() {
617 return byte_offset_of(Method, _method_counters);
618 }
619 #ifndef PRODUCT
620 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
621 #endif // not PRODUCT
// The two offsets below address storage appended AFTER the Method object
// (sizeof(Method) and sizeof(Method)+wordSize), not declared fields —
// presumably native-method extra words; verify against the allocation code.
622 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
623 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
624 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
625 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
626 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }
627
628 // for code generation
629 static ByteSize method_data_offset() { return byte_offset_of(Method, _method_data); }
630 static ByteSize intrinsic_id_offset() { return byte_offset_of(Method, _intrinsic_id); }
631 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
632
633 // Static methods that are used to implement member methods where an exposed this pointer
634 // is needed due to possible GCs
// NOTE(review): numbering jumps 634 -> 753 — elided region.
753 void set_changes_current_thread() { constMethod()->set_changes_current_thread(); }
754
755 bool jvmti_mount_transition() const { return constMethod()->jvmti_mount_transition(); }
756 void set_jvmti_mount_transition() { constMethod()->set_jvmti_mount_transition(); }
757
758 bool is_hidden() const { return constMethod()->is_hidden(); }
759 void set_is_hidden() { constMethod()->set_is_hidden(); }
760
761 bool is_scoped() const { return constMethod()->is_scoped(); }
762 void set_scoped() { constMethod()->set_is_scoped(); }
763
764 bool intrinsic_candidate() const { return constMethod()->intrinsic_candidate(); }
765 void set_intrinsic_candidate() { constMethod()->set_intrinsic_candidate(); }
766
767 bool has_injected_profile() const { return constMethod()->has_injected_profile(); }
768 void set_has_injected_profile() { constMethod()->set_has_injected_profile(); }
769
770 bool has_reserved_stack_access() const { return constMethod()->reserved_stack_access(); }
771 void set_has_reserved_stack_access() { constMethod()->set_reserved_stack_access(); }
772
773 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
774
775 ConstMethod::MethodType method_type() const {
776 return _constMethod->method_type();
777 }
778 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
779
780 // On-stack replacement support
// InvocationEntryBci denotes the method-entry (non-OSR) bci sentinel here.
781 bool has_osr_nmethod(int level, bool match_level) {
782 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != nullptr;
783 }
784
785 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
786 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
787 }
788
789 // Find if klass for method is loaded
790 bool is_klass_loaded_by_klass_index(int klass_index) const;
791 bool is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved = false) const;
792
|
// NOTE(review): this span (after the "|" separator) is a SECOND version of the
// same class interior, extended with inline-type ("inline_ro"/"inline") entry
// points — presumably a Valhalla-derived variant; confirm against the branch.
// Leading display-line numbers are extraction residue, not code.
76 MethodData* _method_data;
77 MethodCounters* _method_counters;
78 AdapterHandlerEntry* _adapter;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 MethodFlags _flags;
82
83 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
84
85 JFR_ONLY(DEFINE_TRACE_FLAG;)
86
87 #ifndef PRODUCT
88 int64_t _compiled_invocation_count;
89
90 Symbol* _name;
91 #endif
92 // Entry point for calling both from and to the interpreter.
93 address _i2i_entry; // All-args-on-stack calling convention
94 // Entry point for calling from compiled code, to compiled code if it exists
95 // or else the interpreter.
96 volatile address _from_compiled_entry; // Cache of: _code ? _code->verified_entry_point() : _adapter->c2i_entry()
// Additional cached entries for calling conventions with scalarized inline-
// type arguments (ro = "read only"/non-scalarized receiver variant — verify).
97 volatile address _from_compiled_inline_ro_entry; // Cache of: _code ? _code->verified_inline_ro_entry_point() : _adapter->c2i_inline_ro_entry()
98 volatile address _from_compiled_inline_entry; // Cache of: _code ? _code->verified_inline_entry_point() : _adapter->c2i_inline_entry()
99 // The entry point for calling both from and to compiled code is
100 // "_code->entry_point()". Because of tiered compilation and de-opt, this
101 // field can come and go. It can transition from null to not-null at any
102 // time (whenever a compile completes). It can transition from not-null to
103 // null only at safepoints (because of a de-opt).
104 CompiledMethod* volatile _code; // Points to the corresponding piece of native code
105 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
106
107 // Constructor
// Private constructor; instances are created via the static allocate() below.
108 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
109 public:
110
// Factory: allocates a Method (and its ConstMethod) in the given
// ClassLoaderData's metaspace. TRAPS: may throw (e.g. OOM) to the caller.
111 static Method* allocate(ClassLoaderData* loader_data,
112 int byte_code_size,
113 AccessFlags access_flags,
114 InlineTableSizes* sizes,
115 ConstMethod::MethodType method_type,
116 Symbol* name,
117 TRAPS);
118
119 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
120 Method(){}
121
122 virtual bool is_method() const { return true; }
123
124 #if INCLUDE_CDS
125 void remove_unshareable_info();
126 void restore_unshareable_info(TRAPS);
127 #endif
128
129 // accessors for instance variables
130
131 ConstMethod* constMethod() const { return _constMethod; }
132 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
133
134
135 static address make_adapters(const methodHandle& mh, TRAPS);
136 address from_compiled_entry() const;
// Inline-type calling-convention entry accessors (absent in the baseline
// version of this header above the "|" separator).
137 address from_compiled_inline_ro_entry() const;
138 address from_compiled_inline_entry() const;
139 address from_interpreted_entry() const;
140
141 // access flag
142 AccessFlags access_flags() const { return _access_flags; }
143 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
144
145 // name
// Name/signature resolve constant-pool indices held by the ConstMethod.
146 Symbol* name() const { return constants()->symbol_at(name_index()); }
147 u2 name_index() const { return constMethod()->name_index(); }
148 void set_name_index(int index) { constMethod()->set_name_index(index); }
149
150 // signature
151 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
152 u2 signature_index() const { return constMethod()->signature_index(); }
153 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
154
155 // generics support
// Index 0 means "no generic signature"; returns nullptr in that case.
156 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
157 u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
158
// NOTE(review): residual numbering jumps 158 -> 343 — elided region.
343
344 bool was_executed_more_than(int n);
345 bool was_never_executed() { return !was_executed_more_than(0); }
346
347 static void build_profiling_method_data(const methodHandle& method, TRAPS);
348
349 static MethodCounters* build_method_counters(Thread* current, Method* m);
350
// Delegates to invocation_count(), which is declared outside this view.
351 int interpreter_invocation_count() { return invocation_count(); }
352
353 #ifndef PRODUCT
354 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
// NOTE(review): setter takes an int while the field is int64_t — values above
// INT_MAX cannot be set through this API; presumably intentional, verify.
355 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
356 #else
357 // for PrintMethodData in a product build
358 int64_t compiled_invocation_count() const { return 0; }
359 #endif // not PRODUCT
360
361 // nmethod/verified compiler entry
362 address verified_code_entry();
// Inline-type variants of the verified entry (not in the baseline version).
363 address verified_inline_code_entry();
364 address verified_inline_ro_code_entry();
365 bool check_code() const; // Not inline to avoid circular ref
366 CompiledMethod* code() const;
367
368 // Locks CompiledMethod_lock if not held.
369 void unlink_code(CompiledMethod *compare);
370 // Locks CompiledMethod_lock if not held.
371 void unlink_code();
372
373 private:
374 // Either called with CompiledMethod_lock held or from constructor.
375 void clear_code();
376
377 void clear_method_data() {
378 _method_data = nullptr;
379 }
380
381 public:
382 static void set_code(const methodHandle& mh, CompiledMethod* code);
383 void set_adapter_entry(AdapterHandlerEntry* adapter) {
384 _adapter = adapter;
385 }
386 void set_from_compiled_entry(address entry) {
387 _from_compiled_entry = entry;
388 }
// Setters for the inline-type entry caches declared in the field section.
389 void set_from_compiled_inline_ro_entry(address entry) {
390 _from_compiled_inline_ro_entry = entry;
391 }
392 void set_from_compiled_inline_entry(address entry) {
393 _from_compiled_inline_entry = entry;
394 }
395
396 address get_i2c_entry();
397 address get_c2i_entry();
398 address get_c2i_inline_entry();
399 address get_c2i_unverified_entry();
400 address get_c2i_unverified_inline_entry();
401 address get_c2i_no_clinit_check_entry();
402 AdapterHandlerEntry* adapter() const {
403 return _adapter;
404 }
405 // setup entry points
406 void link_method(const methodHandle& method, TRAPS);
407 // clear entry points. Used by sharing code during dump time
408 void unlink_method() NOT_CDS_RETURN;
409 void remove_unshareable_flags() NOT_CDS_RETURN;
410
411 // the number of argument reg slots that the compiled method uses on the stack.
// When rounded, the slot count is aligned up to an even number.
412 int num_stack_arg_slots(bool rounded = true) const {
413 return rounded ? align_up(constMethod()->num_stack_arg_slots(), 2) : constMethod()->num_stack_arg_slots(); }
414
415 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
416 virtual MetaspaceObj::Type type() const { return MethodType; }
417
418 // vtable index
// NOTE(review): enum cut off by the extraction; enumerators not visible.
419 enum VtableIndexFlag {
420 // Valid vtable indexes are non-negative (>= 0).
// NOTE(review): residual numbering jumps 420 -> 496 — elided region.
496
497 // localvariable table
// Table accessors below all delegate to the ConstMethod, which owns the data.
498 bool has_localvariable_table() const
499 { return constMethod()->has_localvariable_table(); }
500 u2 localvariable_table_length() const
501 { return constMethod()->localvariable_table_length(); }
502 LocalVariableTableElement* localvariable_table_start() const
503 { return constMethod()->localvariable_table_start(); }
504
505 bool has_linenumber_table() const
506 { return constMethod()->has_linenumber_table(); }
507 u_char* compressed_linenumber_table() const
508 { return constMethod()->compressed_linenumber_table(); }
509
510 // method holder (the Klass* holding this method)
511 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
512
513 Symbol* klass_name() const; // returns the name of the method holder
514 BasicType result_type() const { return constMethod()->result_type(); }
515 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
// Replaces the baseline's is_returning_fp(); returns the InlineKlass of the
// return type — semantics not visible here, declaration only.
516 InlineKlass* returns_inline_type(Thread* thread) const;
517
518 // Checked exceptions thrown by this method (resolved to mirrors)
519 objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
520
521 // Access flags
522 bool is_public() const { return access_flags().is_public(); }
523 bool is_private() const { return access_flags().is_private(); }
524 bool is_protected() const { return access_flags().is_protected(); }
// Package-private = default access: none of public/private/protected set.
525 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
526 bool is_static() const { return access_flags().is_static(); }
527 bool is_final() const { return access_flags().is_final(); }
528 bool is_synchronized() const { return access_flags().is_synchronized();}
529 bool is_native() const { return access_flags().is_native(); }
530 bool is_abstract() const { return access_flags().is_abstract(); }
531 bool is_synthetic() const { return access_flags().is_synthetic(); }
532
533 // returns true if contains only return operation
534 bool is_empty_method() const;
535
536 // returns true if this is a vanilla constructor
// NOTE(review): numbering jumps 536 -> 575 — the declaration this comment
// describes was elided by the extraction.
575 bool has_monitors() const { return is_synchronized() || has_monitor_bytecodes(); }
576
577 // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
578 // properly nest in the method. It might return false, even though they actually nest properly, since the info.
579 // has not been computed yet.
580 bool guaranteed_monitor_matching() const { return monitor_matching(); }
581 void set_guaranteed_monitor_matching() { set_monitor_matching(); }
582
583 // returns true if the method is an accessor function (setter/getter).
584 bool is_accessor() const;
585
586 // returns true if the method is a getter
587 bool is_getter() const;
588
589 // returns true if the method is a setter
590 bool is_setter() const;
591
592 // returns true if the method does nothing but return a constant of primitive type
593 bool is_constant_getter() const;
594
595 // returns true if the method name is <clinit> and the method has
596 // valid static initializer flags.
// Renamed from the baseline's is_static_initializer()/is_object_initializer().
597 bool is_class_initializer() const;
598
599 // returns true if the method name is <init>
600 bool is_object_constructor() const;
601
602 // compiled code support
603 // NOTE: code() is inherently racy as deopt can be clearing code
604 // simultaneously. Use with caution.
605 bool has_compiled_code() const;
606
607 bool needs_clinit_barrier() const;
608
609 // sizing
// Size of the Method header in words, rounded up to a whole word.
610 static int header_size() {
611 return align_up((int)sizeof(Method), wordSize) / wordSize;
612 }
613 static int size(bool is_native);
614 int size() const { return method_size(); }
615 void log_touched(Thread* current);
616 static void print_touched_methods(outputStream* out);
617
618 // interpreter support
// Field byte-offsets used by the interpreter and compilers to address
// Method fields directly from generated code.
619 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
620 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
621 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
// Offsets for the inline-type entry caches (not in the baseline version).
622 static ByteSize from_compiled_inline_offset() { return byte_offset_of(Method, _from_compiled_inline_entry); }
623 static ByteSize from_compiled_inline_ro_offset(){ return byte_offset_of(Method, _from_compiled_inline_ro_entry); }
624 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
625 static ByteSize flags_offset() { return byte_offset_of(Method, _flags); }
626
627 static ByteSize method_counters_offset() {
628 return byte_offset_of(Method, _method_counters);
629 }
630 #ifndef PRODUCT
631 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
632 #endif // not PRODUCT
// The two offsets below address storage appended AFTER the Method object
// (sizeof(Method) and sizeof(Method)+wordSize), not declared fields —
// presumably native-method extra words; verify against the allocation code.
633 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
634 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
635 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
636 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
637 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }
638
639 // for code generation
640 static ByteSize method_data_offset() { return byte_offset_of(Method, _method_data); }
641 static ByteSize intrinsic_id_offset() { return byte_offset_of(Method, _intrinsic_id); }
642 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
643
644 // Static methods that are used to implement member methods where an exposed this pointer
645 // is needed due to possible GCs
// NOTE(review): numbering jumps 645 -> 764 — elided region.
764 void set_changes_current_thread() { constMethod()->set_changes_current_thread(); }
765
766 bool jvmti_mount_transition() const { return constMethod()->jvmti_mount_transition(); }
767 void set_jvmti_mount_transition() { constMethod()->set_jvmti_mount_transition(); }
768
769 bool is_hidden() const { return constMethod()->is_hidden(); }
770 void set_is_hidden() { constMethod()->set_is_hidden(); }
771
772 bool is_scoped() const { return constMethod()->is_scoped(); }
773 void set_scoped() { constMethod()->set_is_scoped(); }
774
775 bool intrinsic_candidate() const { return constMethod()->intrinsic_candidate(); }
776 void set_intrinsic_candidate() { constMethod()->set_intrinsic_candidate(); }
777
778 bool has_injected_profile() const { return constMethod()->has_injected_profile(); }
779 void set_has_injected_profile() { constMethod()->set_has_injected_profile(); }
780
781 bool has_reserved_stack_access() const { return constMethod()->reserved_stack_access(); }
782 void set_has_reserved_stack_access() { constMethod()->set_reserved_stack_access(); }
783
// Inline-type/calling-convention support flags, all delegated to ConstMethod
// (none of these exist in the baseline version above the "|" separator).
784 bool is_scalarized_arg(int idx) const;
785
786 bool c1_needs_stack_repair() const { return constMethod()->c1_needs_stack_repair(); }
787 void set_c1_needs_stack_repair() { constMethod()->set_c1_needs_stack_repair(); }
788
789 bool c2_needs_stack_repair() const { return constMethod()->c2_needs_stack_repair(); }
790 void set_c2_needs_stack_repair() { constMethod()->set_c2_needs_stack_repair(); }
791
792 bool mismatch() const { return constMethod()->mismatch(); }
793 void set_mismatch() { constMethod()->set_mismatch(); }
794
795 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
796
797 ConstMethod::MethodType method_type() const {
798 return _constMethod->method_type();
799 }
800 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
801
802 // On-stack replacement support
// InvocationEntryBci denotes the method-entry (non-OSR) bci sentinel here.
803 bool has_osr_nmethod(int level, bool match_level) {
804 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != nullptr;
805 }
806
807 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
808 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
809 }
810
811 // Find if klass for method is loaded
812 bool is_klass_loaded_by_klass_index(int klass_index) const;
813 bool is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved = false) const;
814
|