75 MethodData* _method_data;
76 MethodCounters* _method_counters;
77 AdapterHandlerEntry* _adapter;
78 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
79 AccessFlags _access_flags; // Access flags
80 MethodFlags _flags;
81
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 JFR_ONLY(DEFINE_TRACE_FLAG;)
85
86 #ifndef PRODUCT
87 int64_t _compiled_invocation_count;
88
89 Symbol* _name;
90 #endif
91 // Entry point for calling both from and to the interpreter.
92 address _i2i_entry; // All-args-on-stack calling convention
93 // Entry point for calling from compiled code, to compiled code if it exists
94 // or else the interpreter.
95 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
96 // The entry point for calling both from and to compiled code is
97 // "_code->entry_point()". Because of tiered compilation and de-opt, this
98 // field can come and go. It can transition from null to not-null at any
99 // time (whenever a compile completes). It can transition from not-null to
100 // null only at safepoints (because of a de-opt).
101 nmethod* volatile _code; // Points to the corresponding piece of native code
102 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
103
104 // Constructor
105 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
106 public:
107
108 static Method* allocate(ClassLoaderData* loader_data,
109 int byte_code_size,
110 AccessFlags access_flags,
111 InlineTableSizes* sizes,
112 ConstMethod::MethodType method_type,
113 Symbol* name,
114 TRAPS);
115
116 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
117 Method(){}  // empty body: every field is left uninitialized — only the vtable pointer of the resulting object is meaningful
118
119 virtual bool is_method() const { return true; }
120
121 #if INCLUDE_CDS
122 void remove_unshareable_info();
123 void restore_unshareable_info(TRAPS);
124 static void restore_archived_method_handle_intrinsic(methodHandle m, TRAPS);
125 #endif
126
127 // accessors for instance variables
128
129 ConstMethod* constMethod() const { return _constMethod; }
130 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
131
132
133 static address make_adapters(const methodHandle& mh, TRAPS);
134 address from_compiled_entry() const;
135 address from_interpreted_entry() const;
136
137 // access flag
138 AccessFlags access_flags() const { return _access_flags; }
139 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
140
141 // name
142 Symbol* name() const { return constants()->symbol_at(name_index()); }
143 u2 name_index() const { return constMethod()->name_index(); }
144 void set_name_index(int index) { constMethod()->set_name_index(index); }
145
146 // signature
147 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
148 u2 signature_index() const { return constMethod()->signature_index(); }
149 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
150
151 // generics support
152 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
153 u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
154
339
340 bool was_executed_more_than(int n);
341 bool was_never_executed() { return !was_executed_more_than(0); }
342
343 static void build_profiling_method_data(const methodHandle& method, TRAPS);
344
345 static MethodCounters* build_method_counters(Thread* current, Method* m);
346
347 inline int interpreter_invocation_count() const;
348
349 #ifndef PRODUCT
350 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
351 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
352 #else
353 // for PrintMethodData in a product build
354 int64_t compiled_invocation_count() const { return 0; }
355 #endif // not PRODUCT
356
357 // nmethod/verified compiler entry
358 address verified_code_entry();
359 bool check_code() const; // Not inline to avoid circular ref
360 nmethod* code() const;
361
362 // Locks NMethodState_lock if not held.
363 void unlink_code(nmethod *compare);
364 // Locks NMethodState_lock if not held.
365 void unlink_code();
366
367 private:
368 // Either called with NMethodState_lock held or from constructor.
369 void clear_code();
370
371 void clear_method_data() {   // only nulls the pointer; the MethodData object itself is not freed here
372 _method_data = nullptr;
373 }
374
375 public:
376 static void set_code(const methodHandle& mh, nmethod* code);
377 void set_adapter_entry(AdapterHandlerEntry* adapter) {   // plain setter, no synchronization
378 _adapter = adapter;
379 }
380 void set_from_compiled_entry(address entry) {   // plain store to a volatile field (see _from_compiled_entry declaration)
381 _from_compiled_entry = entry;
382 }
383
384 address get_i2c_entry();
385 address get_c2i_entry();
386 address get_c2i_unverified_entry();
387 address get_c2i_no_clinit_check_entry();
388 AdapterHandlerEntry* adapter() const {   // accessor for the entry installed via set_adapter_entry()
389 return _adapter;
390 }
391 // setup entry points
392 void link_method(const methodHandle& method, TRAPS);
393 // clear entry points. Used by sharing code during dump time
394 void unlink_method() NOT_CDS_RETURN;
395 void remove_unshareable_flags() NOT_CDS_RETURN;
396
397 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
398 virtual MetaspaceObj::Type type() const { return MethodType; }
399
400 // vtable index
401 enum VtableIndexFlag {
402 // Valid vtable indexes are non-negative (>= 0).
403 // These few negative values are used as sentinels.
404 itable_index_max = -10, // first itable index, growing downward
405 pending_itable_index = -9, // itable index will be assigned
406 invalid_vtable_index = -4, // distinct from any valid vtable index
480
481 // localvariable table
482 bool has_localvariable_table() const
483 { return constMethod()->has_localvariable_table(); }
484 u2 localvariable_table_length() const
485 { return constMethod()->localvariable_table_length(); }
486 LocalVariableTableElement* localvariable_table_start() const
487 { return constMethod()->localvariable_table_start(); }
488
489 bool has_linenumber_table() const
490 { return constMethod()->has_linenumber_table(); }
491 u_char* compressed_linenumber_table() const
492 { return constMethod()->compressed_linenumber_table(); }
493
494 // method holder (the Klass* holding this method)
495 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
496
497 Symbol* klass_name() const; // returns the name of the method holder
498 BasicType result_type() const { return constMethod()->result_type(); }
499 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
500 bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }
501
502 // Checked exceptions thrown by this method (resolved to mirrors)
503 objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
504
505 // Access flags
506 bool is_public() const { return access_flags().is_public(); }
507 bool is_private() const { return access_flags().is_private(); }
508 bool is_protected() const { return access_flags().is_protected(); }
509 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
510 bool is_static() const { return access_flags().is_static(); }
511 bool is_final() const { return access_flags().is_final(); }
512 bool is_synchronized() const { return access_flags().is_synchronized();}
513 bool is_native() const { return access_flags().is_native(); }
514 bool is_abstract() const { return access_flags().is_abstract(); }
515 bool is_synthetic() const { return access_flags().is_synthetic(); }
516
517 // returns true if contains only return operation
518 bool is_empty_method() const;
519
520 // returns true if this is a vanilla constructor
559 bool has_monitors() const { return is_synchronized() || has_monitor_bytecodes(); }
560
561 // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
562 // properly nest in the method. It might return false, even though they actually nest properly, since the info.
563 // has not been computed yet.
564 bool guaranteed_monitor_matching() const { return monitor_matching(); }
565 void set_guaranteed_monitor_matching() { set_monitor_matching(); }
566
567 // returns true if the method is an accessor function (setter/getter).
568 bool is_accessor() const;
569
570 // returns true if the method is a getter
571 bool is_getter() const;
572
573 // returns true if the method is a setter
574 bool is_setter() const;
575
576 // returns true if the method does nothing but return a constant of primitive type
577 bool is_constant_getter() const;
578
579 // returns true if the method is static OR if the classfile version < 51
580 bool has_valid_initializer_flags() const;
581
582 // returns true if the method name is <clinit> and the method has
583 // valid static initializer flags.
584 bool is_static_initializer() const;
585
586 // returns true if the method name is <init>
587 bool is_object_initializer() const;
588
589 // returns true if the method name is wait0
590 bool is_object_wait0() const;
591
592 // compiled code support
593 // NOTE: code() is inherently racy as deopt can be clearing code
594 // simultaneously. Use with caution.
595 bool has_compiled_code() const;
596
597 bool needs_clinit_barrier() const;
598
599 // sizing
600 static int header_size() {   // sizeof(Method) rounded up to wordSize, expressed in words
601 return align_up((int)sizeof(Method), wordSize) / wordSize;
602 }
603 static int size(bool is_native);
604 int size() const { return method_size(); }   // delegates to method_size(); presumably also in words, matching header_size() — confirm
605 void log_touched(Thread* current);
606 static void print_touched_methods(outputStream* out);
607
608 // interpreter support
609 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
610 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
611 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
612 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
613
614 static ByteSize method_counters_offset() {
615 return byte_offset_of(Method, _method_counters);
616 }
617 #ifndef PRODUCT
618 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
619 #endif // not PRODUCT
620 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
621 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
622 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
623 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
624 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }
625
626 // for code generation
627 static ByteSize method_data_offset() { return byte_offset_of(Method, _method_data); }
628 static ByteSize intrinsic_id_offset() { return byte_offset_of(Method, _intrinsic_id); }
629 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
630
631 // Static methods that are used to implement member methods where an exposed this pointer
632 // is needed due to possible GCs
753 void set_jvmti_hide_events() { constMethod()->set_jvmti_hide_events(); }
754
755 bool jvmti_mount_transition() const { return constMethod()->jvmti_mount_transition(); }
756 void set_jvmti_mount_transition() { constMethod()->set_jvmti_mount_transition(); }
757
758 bool is_hidden() const { return constMethod()->is_hidden(); }
759 void set_is_hidden() { constMethod()->set_is_hidden(); }
760
761 bool is_scoped() const { return constMethod()->is_scoped(); }
762 void set_scoped() { constMethod()->set_is_scoped(); }
763
764 bool intrinsic_candidate() const { return constMethod()->intrinsic_candidate(); }
765 void set_intrinsic_candidate() { constMethod()->set_intrinsic_candidate(); }
766
767 bool has_injected_profile() const { return constMethod()->has_injected_profile(); }
768 void set_has_injected_profile() { constMethod()->set_has_injected_profile(); }
769
770 bool has_reserved_stack_access() const { return constMethod()->reserved_stack_access(); }
771 void set_has_reserved_stack_access() { constMethod()->set_reserved_stack_access(); }
772
773 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
774
775 ConstMethod::MethodType method_type() const {
776 return _constMethod->method_type();
777 }
778 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
779
780 // On-stack replacement support
781 bool has_osr_nmethod(int level, bool match_level) {   // true if the holder has an OSR nmethod for this method at the method-entry bci
782 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != nullptr;
783 }
784
785 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {   // same lookup, at an arbitrary bci; returns the nmethod or null
786 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
787 }
788
789 // Find if klass for method is loaded
790 bool is_klass_loaded_by_klass_index(int klass_index) const;
791 bool is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved = false) const;
792
// ---- second excerpt of the same header region follows (variant adding inline-type entry points, e.g. _from_compiled_inline_entry) ----
75 MethodData* _method_data;
76 MethodCounters* _method_counters;
77 AdapterHandlerEntry* _adapter;
78 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
79 AccessFlags _access_flags; // Access flags
80 MethodFlags _flags;
81
82 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
83
84 JFR_ONLY(DEFINE_TRACE_FLAG;)
85
86 #ifndef PRODUCT
87 int64_t _compiled_invocation_count;
88
89 Symbol* _name;
90 #endif
91 // Entry point for calling both from and to the interpreter.
92 address _i2i_entry; // All-args-on-stack calling convention
93 // Entry point for calling from compiled code, to compiled code if it exists
94 // or else the interpreter.
95 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
96 volatile address _from_compiled_inline_ro_entry; // Cache of: _code ? _code->verified_inline_ro_entry_point() : _adapter->c2i_inline_ro_entry()
97 volatile address _from_compiled_inline_entry; // Cache of: _code ? _code->verified_inline_entry_point() : _adapter->c2i_inline_entry()
98 // The entry point for calling both from and to compiled code is
99 // "_code->entry_point()". Because of tiered compilation and de-opt, this
100 // field can come and go. It can transition from null to not-null at any
101 // time (whenever a compile completes). It can transition from not-null to
102 // null only at safepoints (because of a de-opt).
103 nmethod* volatile _code; // Points to the corresponding piece of native code
104 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
105
106 // Constructor
107 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
108 public:
109
110 static Method* allocate(ClassLoaderData* loader_data,
111 int byte_code_size,
112 AccessFlags access_flags,
113 InlineTableSizes* sizes,
114 ConstMethod::MethodType method_type,
115 Symbol* name,
116 TRAPS);
117
118 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
119 Method(){}  // empty body: every field is left uninitialized — only the vtable pointer of the resulting object is meaningful
120
121 virtual bool is_method() const { return true; }
122
123 #if INCLUDE_CDS
124 void remove_unshareable_info();
125 void restore_unshareable_info(TRAPS);
126 static void restore_archived_method_handle_intrinsic(methodHandle m, TRAPS);
127 #endif
128
129 // accessors for instance variables
130
131 ConstMethod* constMethod() const { return _constMethod; }
132 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
133
134
135 static address make_adapters(const methodHandle& mh, TRAPS);
136 address from_compiled_entry() const;
137 address from_compiled_inline_ro_entry() const;
138 address from_compiled_inline_entry() const;
139 address from_interpreted_entry() const;
140
141 // access flag
142 AccessFlags access_flags() const { return _access_flags; }
143 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
144
145 // name
146 Symbol* name() const { return constants()->symbol_at(name_index()); }
147 u2 name_index() const { return constMethod()->name_index(); }
148 void set_name_index(int index) { constMethod()->set_name_index(index); }
149
150 // signature
151 Symbol* signature() const { return constants()->symbol_at(signature_index()); }
152 u2 signature_index() const { return constMethod()->signature_index(); }
153 void set_signature_index(int index) { constMethod()->set_signature_index(index); }
154
155 // generics support
156 Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? constants()->symbol_at(idx) : nullptr); }
157 u2 generic_signature_index() const { return constMethod()->generic_signature_index(); }
158
343
344 bool was_executed_more_than(int n);
345 bool was_never_executed() { return !was_executed_more_than(0); }
346
347 static void build_profiling_method_data(const methodHandle& method, TRAPS);
348
349 static MethodCounters* build_method_counters(Thread* current, Method* m);
350
351 inline int interpreter_invocation_count() const;
352
353 #ifndef PRODUCT
354 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
355 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
356 #else
357 // for PrintMethodData in a product build
358 int64_t compiled_invocation_count() const { return 0; }
359 #endif // not PRODUCT
360
361 // nmethod/verified compiler entry
362 address verified_code_entry();
363 address verified_inline_code_entry();
364 address verified_inline_ro_code_entry();
365 bool check_code() const; // Not inline to avoid circular ref
366 nmethod* code() const;
367
368 // Locks NMethodState_lock if not held.
369 void unlink_code(nmethod *compare);
370 // Locks NMethodState_lock if not held.
371 void unlink_code();
372
373 private:
374 // Either called with NMethodState_lock held or from constructor.
375 void clear_code();
376
377 void clear_method_data() {   // only nulls the pointer; the MethodData object itself is not freed here
378 _method_data = nullptr;
379 }
380
381 public:
382 static void set_code(const methodHandle& mh, nmethod* code);
383 void set_adapter_entry(AdapterHandlerEntry* adapter) {   // plain setter, no synchronization
384 _adapter = adapter;
385 }
386 void set_from_compiled_entry(address entry) {   // plain store to a volatile field (see _from_compiled_entry declaration)
387 _from_compiled_entry = entry;
388 }
389 void set_from_compiled_inline_ro_entry(address entry) {   // volatile store; inline-type "ro" variant cache (see field comment)
390 _from_compiled_inline_ro_entry = entry;
391 }
392 void set_from_compiled_inline_entry(address entry) {   // volatile store; inline-type variant cache (see field comment)
393 _from_compiled_inline_entry = entry;
394 }
395
396 address get_i2c_entry();
397 address get_c2i_entry();
398 address get_c2i_inline_entry();
399 address get_c2i_unverified_entry();
400 address get_c2i_unverified_inline_entry();
401 address get_c2i_no_clinit_check_entry();
402 AdapterHandlerEntry* adapter() const {   // accessor for the entry installed via set_adapter_entry()
403 return _adapter;
404 }
405 // setup entry points
406 void link_method(const methodHandle& method, TRAPS);
407 // clear entry points. Used by sharing code during dump time
408 void unlink_method() NOT_CDS_RETURN;
409 void remove_unshareable_flags() NOT_CDS_RETURN;
410
411 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
412 virtual MetaspaceObj::Type type() const { return MethodType; }
413
414 // vtable index
415 enum VtableIndexFlag {
416 // Valid vtable indexes are non-negative (>= 0).
417 // These few negative values are used as sentinels.
418 itable_index_max = -10, // first itable index, growing downward
419 pending_itable_index = -9, // itable index will be assigned
420 invalid_vtable_index = -4, // distinct from any valid vtable index
494
495 // localvariable table
496 bool has_localvariable_table() const
497 { return constMethod()->has_localvariable_table(); }
498 u2 localvariable_table_length() const
499 { return constMethod()->localvariable_table_length(); }
500 LocalVariableTableElement* localvariable_table_start() const
501 { return constMethod()->localvariable_table_start(); }
502
503 bool has_linenumber_table() const
504 { return constMethod()->has_linenumber_table(); }
505 u_char* compressed_linenumber_table() const
506 { return constMethod()->compressed_linenumber_table(); }
507
508 // method holder (the Klass* holding this method)
509 InstanceKlass* method_holder() const { return constants()->pool_holder(); }
510
511 Symbol* klass_name() const; // returns the name of the method holder
512 BasicType result_type() const { return constMethod()->result_type(); }
513 bool is_returning_oop() const { BasicType r = result_type(); return is_reference_type(r); }
514 InlineKlass* returns_inline_type(Thread* thread) const;
515
516 // Checked exceptions thrown by this method (resolved to mirrors)
517 objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
518
519 // Access flags
520 bool is_public() const { return access_flags().is_public(); }
521 bool is_private() const { return access_flags().is_private(); }
522 bool is_protected() const { return access_flags().is_protected(); }
523 bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
524 bool is_static() const { return access_flags().is_static(); }
525 bool is_final() const { return access_flags().is_final(); }
526 bool is_synchronized() const { return access_flags().is_synchronized();}
527 bool is_native() const { return access_flags().is_native(); }
528 bool is_abstract() const { return access_flags().is_abstract(); }
529 bool is_synthetic() const { return access_flags().is_synthetic(); }
530
531 // returns true if contains only return operation
532 bool is_empty_method() const;
533
534 // returns true if this is a vanilla constructor
573 bool has_monitors() const { return is_synchronized() || has_monitor_bytecodes(); }
574
575 // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
576 // properly nest in the method. It might return false, even though they actually nest properly, since the info.
577 // has not been computed yet.
578 bool guaranteed_monitor_matching() const { return monitor_matching(); }
579 void set_guaranteed_monitor_matching() { set_monitor_matching(); }
580
581 // returns true if the method is an accessor function (setter/getter).
582 bool is_accessor() const;
583
584 // returns true if the method is a getter
585 bool is_getter() const;
586
587 // returns true if the method is a setter
588 bool is_setter() const;
589
590 // returns true if the method does nothing but return a constant of primitive type
591 bool is_constant_getter() const;
592
593 // returns true if the method name is <clinit> and the method has
594 // valid static initializer flags.
595 bool is_class_initializer() const;
596
597 // returns true if the method name is <init>
598 bool is_object_constructor() const;
599
600 // returns true if the method name is wait0
601 bool is_object_wait0() const;
602
603 // compiled code support
604 // NOTE: code() is inherently racy as deopt can be clearing code
605 // simultaneously. Use with caution.
606 bool has_compiled_code() const;
607
608 bool needs_clinit_barrier() const;
609
610 // sizing
611 static int header_size() {   // sizeof(Method) rounded up to wordSize, expressed in words
612 return align_up((int)sizeof(Method), wordSize) / wordSize;
613 }
614 static int size(bool is_native);
615 int size() const { return method_size(); }   // delegates to method_size(); presumably also in words, matching header_size() — confirm
616 void log_touched(Thread* current);
617 static void print_touched_methods(outputStream* out);
618
619 // interpreter support
620 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
621 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
622 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
623 static ByteSize from_compiled_inline_offset() { return byte_offset_of(Method, _from_compiled_inline_entry); }
624 static ByteSize from_compiled_inline_ro_offset(){ return byte_offset_of(Method, _from_compiled_inline_ro_entry); }
625 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
626 static ByteSize flags_offset() { return byte_offset_of(Method, _flags); }
627
628 static ByteSize method_counters_offset() {
629 return byte_offset_of(Method, _method_counters);
630 }
631 #ifndef PRODUCT
632 static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(Method, _compiled_invocation_count); }
633 #endif // not PRODUCT
634 static ByteSize native_function_offset() { return in_ByteSize(sizeof(Method)); }
635 static ByteSize from_interpreted_offset() { return byte_offset_of(Method, _from_interpreted_entry ); }
636 static ByteSize interpreter_entry_offset() { return byte_offset_of(Method, _i2i_entry ); }
637 static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(Method) + wordSize); }
638 static ByteSize itable_index_offset() { return byte_offset_of(Method, _vtable_index ); }
639
640 // for code generation
641 static ByteSize method_data_offset() { return byte_offset_of(Method, _method_data); }
642 static ByteSize intrinsic_id_offset() { return byte_offset_of(Method, _intrinsic_id); }
643 static int intrinsic_id_size_in_bytes() { return sizeof(u2); }
644
645 // Static methods that are used to implement member methods where an exposed this pointer
646 // is needed due to possible GCs
767 void set_jvmti_hide_events() { constMethod()->set_jvmti_hide_events(); }
768
769 bool jvmti_mount_transition() const { return constMethod()->jvmti_mount_transition(); }
770 void set_jvmti_mount_transition() { constMethod()->set_jvmti_mount_transition(); }
771
772 bool is_hidden() const { return constMethod()->is_hidden(); }
773 void set_is_hidden() { constMethod()->set_is_hidden(); }
774
775 bool is_scoped() const { return constMethod()->is_scoped(); }
776 void set_scoped() { constMethod()->set_is_scoped(); }
777
778 bool intrinsic_candidate() const { return constMethod()->intrinsic_candidate(); }
779 void set_intrinsic_candidate() { constMethod()->set_intrinsic_candidate(); }
780
781 bool has_injected_profile() const { return constMethod()->has_injected_profile(); }
782 void set_has_injected_profile() { constMethod()->set_has_injected_profile(); }
783
784 bool has_reserved_stack_access() const { return constMethod()->reserved_stack_access(); }
785 void set_has_reserved_stack_access() { constMethod()->set_reserved_stack_access(); }
786
787 bool is_scalarized_arg(int idx) const;
788
789 bool c1_needs_stack_repair() const { return constMethod()->c1_needs_stack_repair(); }
790 void set_c1_needs_stack_repair() { constMethod()->set_c1_needs_stack_repair(); }
791
792 bool c2_needs_stack_repair() const { return constMethod()->c2_needs_stack_repair(); }
793 void set_c2_needs_stack_repair() { constMethod()->set_c2_needs_stack_repair(); }
794
795 bool mismatch() const { return constMethod()->mismatch(); }
796 void set_mismatch() { constMethod()->set_mismatch(); }
797
798 JFR_ONLY(DEFINE_TRACE_FLAG_ACCESSOR;)
799
800 ConstMethod::MethodType method_type() const {
801 return _constMethod->method_type();
802 }
803 bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
804
805 // On-stack replacement support
806 bool has_osr_nmethod(int level, bool match_level) {   // true if the holder has an OSR nmethod for this method at the method-entry bci
807 return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != nullptr;
808 }
809
810 nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {   // same lookup, at an arbitrary bci; returns the nmethod or null
811 return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
812 }
813
814 // Find if klass for method is loaded
815 bool is_klass_loaded_by_klass_index(int klass_index) const;
816 bool is_klass_loaded(int refinfo_index, Bytecodes::Code bc, bool must_be_resolved = false) const;
817
// ---- end of second excerpt ----