76 MethodData* _method_data;
77 MethodCounters* _method_counters;
78 AdapterHandlerEntry* _adapter;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 MethodFlags _flags;
82
83 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
84
85 JFR_ONLY(DEFINE_TRACE_FLAG;)
86
87 #ifndef PRODUCT
88 int64_t _compiled_invocation_count;
89
90 Symbol* _name;
91 #endif
92 // Entry point for calling both from and to the interpreter.
93 address _i2i_entry; // All-args-on-stack calling convention
94 // Entry point for calling from compiled code, to compiled code if it exists
95 // or else the interpreter.
96 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
97 // The entry point for calling both from and to compiled code is
98 // "_code->entry_point()". Because of tiered compilation and de-opt, this
99 // field can come and go. It can transition from null to not-null at any
100 // time (whenever a compile completes). It can transition from not-null to
101 // null only at safepoints (because of a de-opt).
102 nmethod* volatile _code; // Points to the corresponding piece of native code
103 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
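// Illustrative sketch (not part of the original header): how the entry-point caches above
// are typically refreshed, mirroring the cache formulas given in the comments. The real
// update logic lives in method.cpp (link_method()/set_code()/clear_code()); 'm' below is
// an assumed Method* for illustration.
//
//   if (m->code() != nullptr) {
//     // A compile finished: compiled callers jump straight to the nmethod; interpreted
//     // callers reach it through the i2c adapter.
//     m->set_from_compiled_entry(m->code()->entry_point());
//   } else {
//     // No nmethod (or it was deoptimized): compiled callers fall back to the c2i
//     // adapter, which re-enters the interpreter via _i2i_entry.
//     m->set_from_compiled_entry(m->get_c2i_entry());
//   }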
104
105 // Constructor
106 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
107 public:
108
109 static Method* allocate(ClassLoaderData* loader_data,
110 int byte_code_size,
111 AccessFlags access_flags,
112 InlineTableSizes* sizes,
113 ConstMethod::MethodType method_type,
114 Symbol* name,
115 TRAPS);
116
117 // CDS and vtbl checking can create an empty Method to get the vtbl pointer.
118 Method(){}
119
120 virtual bool is_method() const { return true; }
121
122 #if INCLUDE_CDS
123 void remove_unshareable_info();
124 void restore_unshareable_info(TRAPS);
125 #endif
126
127 // accessors for instance variables
128
129 ConstMethod* constMethod() const { return _constMethod; }
130 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
131
132
133 static address make_adapters(const methodHandle& mh, TRAPS);
134 address from_compiled_entry() const;
135 address from_interpreted_entry() const;
136
137 // access flag
138 AccessFlags access_flags() const { return _access_flags; }
139 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
140
141 // name
142 Symbol* name() const { return constants()->symbol_at(name_index()); }
143 u2 name_index() const { return constMethod()->name_index(); }
144 void set_name_index(int index) { constMethod()->set_name_index(index); }
145
146 // signature
147 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
148 u2 signature_index() const { return constMethod()->signature_index(); }
149 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
150
151 // generics support
152 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
153 u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
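// Illustrative sketch: generic_signature() returns nullptr when the method has no Signature
// attribute (index 0), so callers are expected to null-check it. 'm' and the printing
// helpers (tty, Symbol::print_value_on) are the usual HotSpot names, assumed here.
//
//   Symbol* gsig = m->generic_signature();
//   if (gsig != nullptr) {
//     gsig->print_value_on(tty);
//   }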
154
339
340 bool was_executed_more_than(int n);
341 bool was_never_executed() { return !was_executed_more_than(0); }
342
343 static void build_profiling_method_data(const methodHandle& method, TRAPS);
344
345 static MethodCounters* build_method_counters(Thread* current, Method* m);
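// Illustrative sketch: profiling metadata is allocated lazily. A caller that needs
// MethodData (e.g. before a profiled/tiered compile) typically does something like the
// following; method_data() is the usual accessor, not shown in this excerpt, and 'mh'
// is an assumed methodHandle in a context that may allocate metadata (TRAPS).
//
//   if (mh->method_data() == nullptr) {
//     Method::build_profiling_method_data(mh, CHECK);
//   }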
346
347 int interpreter_invocation_count() { return invocation_count(); }
348
349 #ifndef PRODUCT
350 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
351 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
352 #else
353 // for PrintMethodData in a product build
354 int64_t compiled_invocation_count() const { return 0; }
355 #endif // not PRODUCT
356
357 // nmethod/verified compiler entry
358 address verified_code_entry();
359 bool check_code() const; // Not inline to avoid circular ref
360 nmethod* code() const;
361
362 // Locks NMethodState_lock if not held.
363 void unlink_code(nmethod *compare);
364 // Locks NMethodState_lock if not held.
365 void unlink_code();
366
367 private:
368 // Either called with NMethodState_lock held or from constructor.
369 void clear_code();
370
371 void clear_method_data() {
372 _method_data = nullptr;
373 }
374
375 public:
376 static void set_code(const methodHandle& mh, nmethod* code);
377 void set_adapter_entry(AdapterHandlerEntry* adapter) {
378 _adapter = adapter;
379 }
380 void set_from_compiled_entry(address entry) {
381 _from_compiled_entry = entry;
382 }
383
384 address get_i2c_entry();
385 address get_c2i_entry();
386 address get_c2i_unverified_entry();
387 address get_c2i_no_clinit_check_entry();
388 AdapterHandlerEntry* adapter() const {
389 return _adapter;
390 }
391 // set up entry points
392 void link_method(const methodHandle& method, TRAPS);
393 // clear entry points. Used by sharing code during dump time
394 void unlink_method() NOT_CDS_RETURN;
395 void remove_unshareable_flags() NOT_CDS_RETURN;
396
397 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
398 virtual MetaspaceObj::Type type() const { return MethodType; }
399
400 // vtable index
401 enum VtableIndexFlag {
402 // Valid vtable indexes are non-negative (>= 0).
403 // These few negative values are used as sentinels.
404 itable_index_max = -10, // first itable index, growing downward
405 pending_itable_index = -9, // itable index will be assigned
406 invalid_vtable_index = -4, // distinct from any valid vtable index
480
481 // localvariable table
482 bool has_localvariable_table() const
483 { return constMethod()->has_localvariable_table(); }
484 u2 localvariable_table_length() const
485 { return constMethod()->localvariable_table_length(); }
486 LocalVariableTableElement* localvariable_table_start() const
487 { return constMethod()->localvariable_table_start(); }
488
489 bool has_linenumber_table() const
490 { return constMethod()->has_linenumber_table(); }
491 u_char* compressed_linenumber_table() const
492 { return constMethod()->compressed_linenumber_table(); }
493
494 // method holder (the Klass* holding this method)
495 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
496
497 Symbol* klass_name() const; // returns the name of the method holder
498 BasicType result_type() const { return constMethod()->result_type(); }
499 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
500 bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }
501
502 // Checked exceptions thrown by this method (resolved to mirrors)
503 objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
504
505 // Access flags
506 bool is_public() const { return access_flags().is_public(); }
507 bool is_private() const { return access_flags().is_private(); }
508 bool is_protected() const { return access_flags().is_protected(); }
509 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
510 bool is_static() const { return access_flags().is_static(); }
511 bool is_final() const { return access_flags().is_final(); }
512 bool is_synchronized() const { return access_flags().is_synchronized();}
513 bool is_native() const { return access_flags().is_native(); }
514 bool is_abstract() const { return access_flags().is_abstract(); }
515 bool is_synthetic() const { return access_flags().is_synthetic(); }
516
517 // returns true if the method contains only a return operation
518 bool is_empty_method() const;
519
520 // returns true if this is a vanilla constructor
559 bool has_monitors() const { return is_synchronized() || has_monitor_bytecodes(); }
560
561 // Monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit
562 // bytecodes properly nest in the method. It might return false even though they actually nest
563 // properly, simply because the information has not been computed yet.
564 bool guaranteed_monitor_matching() const { return monitor_matching(); }
565 void set_guaranteed_monitor_matching() { set_monitor_matching(); }
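// Illustrative note: because the answer is conservative, 'false' means "unknown", not
// "mismatched", so consumers only act on a positive answer:
//
//   if (m->guaranteed_monitor_matching()) {
//     // safe to assume monitorenter/monitorexit pairs nest properly in this method
//   }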
566
567 // returns true if the method is an accessor function (setter/getter).
568 bool is_accessor() const;
569
570 // returns true if the method is a getter
571 bool is_getter() const;
572
573 // returns true if the method is a setter
574 bool is_setter() const;
575
576 // returns true if the method does nothing but return a constant of primitive type
577 bool is_constant_getter() const;
578
579 // returns true if the method is static OR if the classfile version < 51
580 bool has_valid_initializer_flags() const;
581
582 // returns true if the method name is <clinit> and the method has
583 // valid static initializer flags.
584 bool is_static_initializer() const;
585
586 // returns true if the method name is <init>
587 bool is_object_initializer() const;
588
589 // compiled code support
590 // NOTE: code() is inherently racy as deopt can be clearing code
591 // simultaneously. Use with caution.
592 bool has_compiled_code() const;
593
594 bool needs_clinit_barrier() const;
595
596 // sizing
597 static int header_size() {
598 return align_up((int)sizeof(Method), wordSize) / wordSize;
599 }
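// Worked example (illustrative numbers only): with wordSize == 8 and sizeof(Method) == 88,
// header_size() == align_up(88, 8) / 8 == 11 words.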
600 static int size(bool is_native);
601 int size() const { return method_size(); }
602 void log_touched(Thread* current);
603 static void print_touched_methods(outputStream* out);
604
605 // interpreter support
606 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
607 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
608 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
609 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
610
611 static ByteSize method_counters_offset() {
612 return byte_offset_of(Method, _method_counters);
613 }
614 #ifndef PRODUCT
615 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
616 #endif // not PRODUCT
617 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
618 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
619 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
620 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
621 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }
622
623 // for code generation
624 static ByteSize method_data_offset() { return byte_offset_of(Method, _method_data); }
625 static ByteSize intrinsic_id_offset() { return byte_offset_of(Method, _intrinsic_id); }
626 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
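// Illustrative sketch: these offsets exist so interpreter/adapter stubs can reach Method
// fields from generated code. An x86-style MacroAssembler fragment might look like the
// following; register choices and the '__' shorthand are illustrative, not taken from
// the real stubs.
//
//   // rbx holds a Method*; jump to its from-compiled entry point:
//   __ movptr(rax, Address(rbx, in_bytes(Method::from_compiled_offset())));
//   __ jmp(rax);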
627
628 // Static methods that are used to implement member methods where an exposed this pointer
629 // is needed due to possible GCs
747 void set_changes_current_thread() { constMethod()->set_changes_current_thread(); }
748
749 bool jvmti_mount_transition() const { return constMethod()->jvmti_mount_transition(); }
750 void set_jvmti_mount_transition() { constMethod()->set_jvmti_mount_transition(); }
751
752 bool is_hidden() const { return constMethod()->is_hidden(); }
753 void set_is_hidden() { constMethod()->set_is_hidden(); }
754
755 bool is_scoped() const { return constMethod()->is_scoped(); }
756 void set_scoped() { constMethod()->set_is_scoped(); }
757
758 bool intrinsic_candidate() const { return constMethod()->intrinsic_candidate(); }
759 void set_intrinsic_candidate() { constMethod()->set_intrinsic_candidate(); }
760
761 bool has_injected_profile() const { return constMethod()->has_injected_profile(); }
762 void set_has_injected_profile() { constMethod()->set_has_injected_profile(); }
763
764 bool has_reserved_stack_access() const { return constMethod()->reserved_stack_access(); }
765 void set_has_reserved_stack_access() { constMethod()->set_reserved_stack_access(); }
766
767 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
768
769 ConstMethod::MethodType method_type() const {
770 return _constMethod->method_type();
771 }
772 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
773
774 // On-stack replacement support
775 bool has_osr_nmethod(int level, bool match_level) {
776 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != nullptr;
777 }
778
779 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
780 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
781 }
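// Illustrative sketch: an OSR request typically checks for an existing OSR nmethod at the
// loop's bci before asking for a new compile. 'm', 'bci' and the CompLevel_any argument
// are assumptions for illustration; the actual policy lives elsewhere.
//
//   nmethod* osr = m->lookup_osr_nmethod_for(bci, CompLevel_any, false /* match_level */);
//   if (osr != nullptr) {
//     // continue execution in the OSR nmethod at 'bci'
//   }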
782
783 // Find if klass for method is loaded
784 bool is_klass_loaded_by_klass_index(int klass_index) const;
785 bool is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved = false) const;
786
// ---------------------------------------------------------------------------------
// Second listing: the same region of the header from the variant with inline-type
// support (note the additional *_inline_* entry points, returns_inline_type(), and
// the scalarized-argument / stack-repair accessors below).
// ---------------------------------------------------------------------------------
76 MethodData* _method_data;
77 MethodCounters* _method_counters;
78 AdapterHandlerEntry* _adapter;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 MethodFlags _flags;
82
83 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
84
85 JFR_ONLY(DEFINE_TRACE_FLAG;)
86
87 #ifndef PRODUCT
88 int64_t _compiled_invocation_count;
89
90 Symbol* _name;
91 #endif
92 // Entry point for calling both from and to the interpreter.
93 address _i2i_entry; // All-args-on-stack calling convention
94 // Entry point for calling from compiled code, to compiled code if it exists
95 // or else the interpreter.
96 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
97 volatile address _from_compiled_inline_ro_entry; // Cache of: _code ? _code->verified_inline_ro_entry_point() : _adapter->c2i_inline_ro_entry()
98 volatile address _from_compiled_inline_entry; // Cache of: _code ? _code->verified_inline_entry_point() : _adapter->c2i_inline_entry()
99 // The entry point for calling both from and to compiled code is
100 // "_code->entry_point()". Because of tiered compilation and de-opt, this
101 // field can come and go. It can transition from null to not-null at any
102 // time (whenever a compile completes). It can transition from not-null to
103 // null only at safepoints (because of a de-opt).
104 nmethod* volatile _code; // Points to the corresponding piece of native code
105 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
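// Illustrative sketch (inline-type variant only): the two extra caches follow the same
// pattern as _from_compiled_entry, just for the additional compiled entry points named
// in their comments. Mirroring those formulas, with 'm' an assumed Method*:
//
//   if (m->code() != nullptr) {
//     m->set_from_compiled_inline_entry(m->code()->verified_inline_entry_point());
//     m->set_from_compiled_inline_ro_entry(m->code()->verified_inline_ro_entry_point());
//   } else {
//     m->set_from_compiled_inline_entry(m->get_c2i_inline_entry());
//     // the _ro cache falls back to the corresponding c2i inline_ro adapter entry
//   }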
106
107 // Constructor
108 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
109 public:
110
111 static Method* allocate(ClassLoaderData* loader_data,
112 int byte_code_size,
113 AccessFlags access_flags,
114 InlineTableSizes* sizes,
115 ConstMethod::MethodType method_type,
116 Symbol* name,
117 TRAPS);
118
119 // CDS and vtbl checking can create an empty Method to get the vtbl pointer.
120 Method(){}
121
122 virtual bool is_method() const { return true; }
123
124 #if INCLUDE_CDS
125 void remove_unshareable_info();
126 void restore_unshareable_info(TRAPS);
127 #endif
128
129 // accessors for instance variables
130
131 ConstMethod* constMethod() const { return _constMethod; }
132 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
133
134
135 static address make_adapters(const methodHandle& mh, TRAPS);
136 address from_compiled_entry() const;
137 address from_compiled_inline_ro_entry() const;
138 address from_compiled_inline_entry() const;
139 address from_interpreted_entry() const;
140
141 // access flag
142 AccessFlags access_flags() const { return _access_flags; }
143 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
144
145 // name
146 Symbol* name() const { return constants()->symbol_at(name_index()); }
147 u2 name_index() const { return constMethod()->name_index(); }
148 void set_name_index(int index) { constMethod()->set_name_index(index); }
149
150 // signature
151 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
152 u2 signature_index() const { return constMethod()->signature_index(); }
153 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
154
155 // generics support
156 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
157 u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
158
343
344 bool was_executed_more_than(int n);
345 bool was_never_executed() { return !was_executed_more_than(0); }
346
347 static void build_profiling_method_data(const methodHandle& method, TRAPS);
348
349 static MethodCounters* build_method_counters(Thread* current, Method* m);
350
351 int interpreter_invocation_count() { return invocation_count(); }
352
353 #ifndef PRODUCT
354 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
355 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
356 #else
357 // for PrintMethodData in a product build
358 int64_t compiled_invocation_count() const { return 0; }
359 #endif // not PRODUCT
360
361 // nmethod/verified compiler entry
362 address verified_code_entry();
363 address verified_inline_code_entry();
364 address verified_inline_ro_code_entry();
365 bool check_code() const; // Not inline to avoid circular ref
366 nmethod* code() const;
367
368 // Locks NMethodState_lock if not held.
369 void unlink_code(nmethod *compare);
370 // Locks NMethodState_lock if not held.
371 void unlink_code();
372
373 private:
374 // Either called with NMethodState_lock held or from constructor.
375 void clear_code();
376
377 void clear_method_data() {
378 _method_data = nullptr;
379 }
380
381 public:
382 static void set_code(const methodHandle& mh, nmethod* code);
383 void set_adapter_entry(AdapterHandlerEntry* adapter) {
384 _adapter = adapter;
385 }
386 void set_from_compiled_entry(address entry) {
387 _from_compiled_entry = entry;
388 }
389 void set_from_compiled_inline_ro_entry(address entry) {
390 _from_compiled_inline_ro_entry = entry;
391 }
392 void set_from_compiled_inline_entry(address entry) {
393 _from_compiled_inline_entry = entry;
394 }
395
396 address get_i2c_entry();
397 address get_c2i_entry();
398 address get_c2i_inline_entry();
399 address get_c2i_unverified_entry();
400 address get_c2i_unverified_inline_entry();
401 address get_c2i_no_clinit_check_entry();
402 AdapterHandlerEntry* adapter() const {
403 return _adapter;
404 }
405 // set up entry points
406 void link_method(const methodHandle& method, TRAPS);
407 // clear entry points. Used by sharing code during dump time
408 void unlink_method() NOT_CDS_RETURN;
409 void remove_unshareable_flags() NOT_CDS_RETURN;
410
411 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
412 virtual MetaspaceObj::Type type() const { return MethodType; }
413
414 // vtable index
415 enum VtableIndexFlag {
416 // Valid vtable indexes are non-negative (>= 0).
417 // These few negative values are used as sentinels.
418 itable_index_max = -10, // first itable index, growing downward
419 pending_itable_index = -9, // itable index will be assigned
420 invalid_vtable_index = -4, // distinct from any valid vtable index
494
495 // localvariable table
496 bool has_localvariable_table() const
497 { return constMethod()->has_localvariable_table(); }
498 u2 localvariable_table_length() const
499 { return constMethod()->localvariable_table_length(); }
500 LocalVariableTableElement* localvariable_table_start() const
501 { return constMethod()->localvariable_table_start(); }
502
503 bool has_linenumber_table() const
504 { return constMethod()->has_linenumber_table(); }
505 u_char* compressed_linenumber_table() const
506 { return constMethod()->compressed_linenumber_table(); }
507
508 // method holder (the Klass* holding this method)
509 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
510
511 Symbol* klass_name() const; // returns the name of the method holder
512 BasicType result_type() const { return constMethod()->result_type(); }
513 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
514 InlineKlass* returns_inline_type(Thread* thread) const;
515
516 // Checked exceptions thrown by this method (resolved to mirrors)
517 objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
518
519 // Access flags
520 bool is_public() const { return access_flags().is_public(); }
521 bool is_private() const { return access_flags().is_private(); }
522 bool is_protected() const { return access_flags().is_protected(); }
523 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
524 bool is_static() const { return access_flags().is_static(); }
525 bool is_final() const { return access_flags().is_final(); }
526 bool is_synchronized() const { return access_flags().is_synchronized();}
527 bool is_native() const { return access_flags().is_native(); }
528 bool is_abstract() const { return access_flags().is_abstract(); }
529 bool is_synthetic() const { return access_flags().is_synthetic(); }
530
531 // returns true if the method contains only a return operation
532 bool is_empty_method() const;
533
534 // returns true if this is a vanilla constructor
573 bool has_monitors() const { return is_synchronized() || has_monitor_bytecodes(); }
574
575 // Monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit
576 // bytecodes properly nest in the method. It might return false even though they actually nest
577 // properly, simply because the information has not been computed yet.
578 bool guaranteed_monitor_matching() const { return monitor_matching(); }
579 void set_guaranteed_monitor_matching() { set_monitor_matching(); }
580
581 // returns true if the method is an accessor function (setter/getter).
582 bool is_accessor() const;
583
584 // returns true if the method is a getter
585 bool is_getter() const;
586
587 // returns true if the method is a setter
588 bool is_setter() const;
589
590 // returns true if the method does nothing but return a constant of primitive type
591 bool is_constant_getter() const;
592
593 // returns true if the method name is <clinit> and the method has
594 // valid static initializer flags.
595 bool is_class_initializer() const;
596
597 // returns true if the method name is <init>
598 bool is_object_constructor() const;
599
600 // compiled code support
601 // NOTE: code() is inherently racy as deopt can be clearing code
602 // simultaneously. Use with caution.
603 bool has_compiled_code() const;
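// Illustrative sketch: because code() is racy (see NOTE above), callers sample it once into
// a local and validate that local rather than re-reading code() repeatedly. is_in_use() is
// the usual nmethod liveness check, assumed here.
//
//   nmethod* nm = m->code();     // single racy read; deopt may clear _code underneath us
//   if (nm != nullptr && nm->is_in_use()) {
//     // use 'nm' (e.g. nm->entry_point()); do not re-read m->code() and assume it matches
//   }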
604
605 bool needs_clinit_barrier() const;
606
607 // sizing
608 static int header_size() {
609 return align_up((int)sizeof(Method), wordSize) / wordSize;
610 }
611 static int size(bool is_native);
612 int size() const { return method_size(); }
613 void log_touched(Thread* current);
614 static void print_touched_methods(outputStream* out);
615
616 // interpreter support
617 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
618 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
619 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
620 static ByteSize from_compiled_inline_offset() { return byte_offset_of(Method, _from_compiled_inline_entry); }
621 static ByteSize from_compiled_inline_ro_offset() { return byte_offset_of(Method, _from_compiled_inline_ro_entry); }
622 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
623 static ByteSize flags_offset() { return byte_offset_of(Method, _flags); }
624
625 static ByteSize method_counters_offset() {
626 return byte_offset_of(Method, _method_counters);
627 }
628 #ifndef PRODUCT
629 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
630 #endif // not PRODUCT
631 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
632 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
633 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
634 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
635 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }
636
637 // for code generation
638 static ByteSize method_data_offset() { return byte_offset_of(Method, _method_data); }
639 static ByteSize intrinsic_id_offset() { return byte_offset_of(Method, _intrinsic_id); }
640 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
641
642 // Static methods that are used to implement member methods where an exposed this pointer
643 // is needed due to possible GCs
761 void set_changes_current_thread() { constMethod()->set_changes_current_thread(); }
762
763 bool jvmti_mount_transition() const { return constMethod()->jvmti_mount_transition(); }
764 void set_jvmti_mount_transition() { constMethod()->set_jvmti_mount_transition(); }
765
766 bool is_hidden() const { return constMethod()->is_hidden(); }
767 void set_is_hidden() { constMethod()->set_is_hidden(); }
768
769 bool is_scoped() const { return constMethod()->is_scoped(); }
770 void set_scoped() { constMethod()->set_is_scoped(); }
771
772 bool intrinsic_candidate() const { return constMethod()->intrinsic_candidate(); }
773 void set_intrinsic_candidate() { constMethod()->set_intrinsic_candidate(); }
774
775 bool has_injected_profile() const { return constMethod()->has_injected_profile(); }
776 void set_has_injected_profile() { constMethod()->set_has_injected_profile(); }
777
778 bool has_reserved_stack_access() const { return constMethod()->reserved_stack_access(); }
779 void set_has_reserved_stack_access() { constMethod()->set_reserved_stack_access(); }
780
781 bool is_scalarized_arg(int idx) const;
782
783 bool c1_needs_stack_repair() const { return constMethod()->c1_needs_stack_repair(); }
784 void set_c1_needs_stack_repair() { constMethod()->set_c1_needs_stack_repair(); }
785
786 bool c2_needs_stack_repair() const { return constMethod()->c2_needs_stack_repair(); }
787 void set_c2_needs_stack_repair() { constMethod()->set_c2_needs_stack_repair(); }
788
789 bool mismatch() const { return constMethod()->mismatch(); }
790 void set_mismatch() { constMethod()->set_mismatch(); }
791
792 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
793
794 ConstMethod::MethodType method_type() const {
795 return _constMethod->method_type();
796 }
797 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
798
799 // On-stack replacement support
800 bool has_osr_nmethod(int level, bool match_level) {
801 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != nullptr;
802 }
803
804 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
805 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
806 }
807
808 // Find if klass for method is loaded
809 bool is_klass_loaded_by_klass_index(int klass_index) const;
810 bool is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved = false) const;
811