42 #endif
43
44
45 // A Method represents a Java method.
46 //
47 // Note that most applications load thousands of methods, so keeping the size of this
48 // class small has a big impact on footprint.
49 //
50 // Note that native_function and signature_handler have to be at fixed offsets
51 // (required by the interpreter)
52 //
53 // Method embedded field layout (after declared fields):
54 // [EMBEDDED native_function (present only if native) ]
55 // [EMBEDDED signature_handler (present only if native) ]
56
57 class CheckedExceptionElement;
58 class LocalVariableTableElement;
59 class AdapterHandlerEntry;
60 class MethodData;
61 class MethodCounters;
62 class ConstMethod;
63 class InlineTableSizes;
64 class nmethod;
65 class InterpreterOopMap;
66
class Method : public Metadata {
 friend class VMStructs;
 friend class JVMCIVMStructs;
 friend class MethodTest;
 private:
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod* _constMethod; // Method read-only data.
  MethodData* _method_data; // Profiling data; may be null until built (see build_profiling_method_data).
  MethodCounters* _method_counters; // Invocation/backedge counters; lazily created, may be null.
  AdapterHandlerEntry* _adapter; // i2c/c2i adapters for this method's signature (see set_adapter_entry).
  int _vtable_index; // vtable index of this method (see VtableIndexFlag)
  AccessFlags _access_flags; // Access flags
  MethodFlags _flags; // Additional per-method status flags.

  u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)

  JFR_ONLY(DEFINE_TRACE_FLAG;) // JFR trace flag; present only in JFR-enabled builds.

#ifndef PRODUCT
  // Debug-build-only counter; see compiled_invocation_count() below.
  int64_t _compiled_invocation_count;

  // Debug-build-only copy of the method name — presumably kept so the name is
  // reachable even without going through _constMethod; verify against ctor.
  Symbol* _name;
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry; // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from null to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // null only at safepoints (because of a de-opt).
  nmethod* volatile _code; // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
103
  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
 public:

  // Allocates and constructs a Method (with its ConstMethod sized for
  // byte_code_size bytecodes and the given inline tables) in the given
  // loader's metaspace; may throw (TRAPS) on allocation failure.
  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          Symbol* name,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){} // Intentionally leaves all fields uninitialized.

  virtual bool is_method() const { return true; }

#if INCLUDE_CDS
  // Archive (CDS) support: strip/restore state that cannot be shared.
  void remove_unshareable_info();
  void restore_unshareable_info(TRAPS);
  static void restore_archived_method_handle_intrinsic(methodHandle m, TRAPS);
#endif
126
  // accessors for instance variables

  ConstMethod* constMethod() const { return _constMethod; }
  void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }


  // Presumably creates the i2c/c2i adapter entries for mh (cf. _adapter);
  // may throw (TRAPS) — confirm against AdapterHandlerLibrary callers.
  static address make_adapters(const methodHandle& mh, TRAPS);
  address from_compiled_entry() const;
  address from_interpreted_entry() const;

  // access flag
  AccessFlags access_flags() const { return _access_flags; }
  void set_access_flags(AccessFlags flags) { _access_flags = flags; }

  // name
  Symbol* name() const { return constants()->symbol_at(name_index()); }
  // constraint classes are loaded if necessary. Note that this may
  // throw an exception if loading of the constraint classes causes
  // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
  // If an exception is thrown, returns the bci of the
  // exception handler which caused the exception to be thrown, which
  // is needed for proper retries. See, for example,
  // InterpreterRuntime::exception_handler_for_exception.
  static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);

  // Presumably binds 'entry' as the native implementation of the method
  // identified by name/signature in klass k (JNI RegisterNatives path);
  // return value semantics — verify against callers.
  static bool register_native(Klass* k,
                              Symbol* name,
                              Symbol* signature,
                              address entry,
                              TRAPS);
311
  // method data access
  MethodData* method_data() const {
    return _method_data;
  }

  // mark an exception handler as entered (used to prune dead catch blocks in C2)
  void set_exception_handler_entered(int handler_bci);

  MethodCounters* method_counters() const {
    return _method_counters;
  }

  void clear_method_counters() {
    _method_counters = nullptr;
  }

  // Presumably installs 'counters' into _method_counters; exact return
  // semantics (e.g. losing an install race) — confirm in method.cpp.
  bool init_method_counters(MethodCounters* counters);

  // Forwarders to MethodCounters state; defined out-of-line (inline header).
  inline int prev_event_count() const;
  inline void set_prev_event_count(int count);
  inline jlong prev_time() const;
  inline void set_prev_time(jlong time);
  inline float rate() const;
  inline void set_rate(float rate);

  inline int invocation_count() const;
  inline int backedge_count() const;

  bool was_executed_more_than(int n);
  bool was_never_executed() { return !was_executed_more_than(0); }

  static void build_profiling_method_data(const methodHandle& method, TRAPS);

  static MethodCounters* build_method_counters(Thread* current, Method* m);

  inline int interpreter_invocation_count() const;
348
349 #ifndef PRODUCT
350 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
351 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
352 #else
353 // for PrintMethodData in a product build
354 int64_t compiled_invocation_count() const { return 0; }
355 #endif // not PRODUCT
356
  // nmethod/verified compiler entry
  address verified_code_entry();
  bool check_code() const; // Not inline to avoid circular ref
  nmethod* code() const;

  // Locks NMethodState_lock if not held.
  // Presumably clears _code only if it currently equals 'compare' — confirm.
  void unlink_code(nmethod *compare);
  // Locks NMethodState_lock if not held.
  void unlink_code();

 private:
  // Either called with NMethodState_lock held or from constructor.
  void clear_code();

  void clear_method_data() {
    _method_data = nullptr;
  }
374
 public:
  // Installs 'code' as the method's compiled code (static: may need a handle
  // across safepoints).
  static void set_code(const methodHandle& mh, nmethod* code);
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    _adapter = adapter;
  }
  void set_from_compiled_entry(address entry) {
    _from_compiled_entry = entry;
  }

  // Adapter entry points (interpreted<->compiled transitions).
  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_unverified_entry();
  address get_c2i_no_clinit_check_entry();
  AdapterHandlerEntry* adapter() const {
    return _adapter;
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
  void unlink_method() NOT_CDS_RETURN;
  void remove_unshareable_flags() NOT_CDS_RETURN;

  // Metadata overrides: iterate metaspace pointers / report object type.
  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodType; }
399
400 // vtable index
401 enum VtableIndexFlag {
402 // Valid vtable indexes are non-negative (>= 0).
403 // These few negative values are used as sentinels.
578
  // returns true if the method is static OR if the classfile version < 51
  bool has_valid_initializer_flags() const;

  // returns true if the method name is <clinit> and the method has
  // valid static initializer flags.
  bool is_static_initializer() const;

  // returns true if the method name is <init>
  bool is_object_initializer() const;

  // returns true if the method name is wait0
  bool is_object_wait0() const;

  // compiled code support
  // NOTE: code() is inherently racy as deopt can be clearing code
  // simultaneously. Use with caution.
  bool has_compiled_code() const;

  bool needs_clinit_barrier() const;

  // sizing
  static int header_size() {
    // Size of the fixed (non-embedded) part of Method, in words, rounded up.
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const { return method_size(); }
  void log_touched(Thread* current);
  static void print_touched_methods(outputStream* out);
607
  // interpreter support
  // Field offsets used from generated interpreter/stub code; keep in sync
  // with the field layout above.
  static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
  static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
  static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize code_offset() { return byte_offset_of(Method, _code); }

  static ByteSize method_counters_offset() {
    return byte_offset_of(Method, _method_counters);
  }
617 #ifndef PRODUCT
|
42 #endif
43
44
45 // A Method represents a Java method.
46 //
47 // Note that most applications load thousands of methods, so keeping the size of this
48 // class small has a big impact on footprint.
49 //
50 // Note that native_function and signature_handler have to be at fixed offsets
51 // (required by the interpreter)
52 //
53 // Method embedded field layout (after declared fields):
54 // [EMBEDDED native_function (present only if native) ]
55 // [EMBEDDED signature_handler (present only if native) ]
56
57 class CheckedExceptionElement;
58 class LocalVariableTableElement;
59 class AdapterHandlerEntry;
60 class MethodData;
61 class MethodCounters;
62 class MethodTrainingData;
63 class ConstMethod;
64 class InlineTableSizes;
65 class nmethod;
66 class InterpreterOopMap;
67 class SCCEntry;
68
class Method : public Metadata {
 friend class VMStructs;
 friend class JVMCIVMStructs;
 friend class MethodTest;
 private:
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod* _constMethod; // Method read-only data.
  MethodData* _method_data; // Profiling data; may be null until built (see build_profiling_method_data).
  MethodCounters* _method_counters; // Invocation/backedge counters; lazily created, may be null.
  AdapterHandlerEntry* _adapter; // i2c/c2i adapters for this method's signature (see set_adapter_entry).
  int _vtable_index; // vtable index of this method (see VtableIndexFlag)
  AccessFlags _access_flags; // Access flags
  MethodFlags _flags; // Additional per-method status flags.

  u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)

  JFR_ONLY(DEFINE_TRACE_FLAG;) // JFR trace flag; present only in JFR-enabled builds.

#ifndef PRODUCT
  // Debug-build-only counter; see compiled_invocation_count() below.
  int64_t _compiled_invocation_count;

  // Debug-build-only copy of the method name — presumably kept so the name is
  // reachable even without going through _constMethod; verify against ctor.
  Symbol* _name;
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry; // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from null to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // null only at safepoints (because of a de-opt).
  nmethod* volatile _code; // Points to the corresponding piece of native code
  volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry

  nmethod* _preload_code; // preloaded SCCache code
  SCCEntry* _scc_entry; // SCCache entry for pre-loading code
108
109 // Constructor
110 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
111 public:
112
113 static Method* allocate(ClassLoaderData* loader_data,
114 int byte_code_size,
115 AccessFlags access_flags,
116 InlineTableSizes* sizes,
117 ConstMethod::MethodType method_type,
118 Symbol* name,
119 TRAPS);
120
121 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
122 Method(){}
123
124 virtual bool is_method() const { return true; }
125
#if INCLUDE_CDS
  // Archive (CDS) support: strip/restore state that cannot be shared.
  void remove_unshareable_info();
  // Re-establishes the adapter entry after loading from the archive; may
  // throw (TRAPS) — confirm exact semantics in method.cpp.
  void restore_adapter(TRAPS);
  void restore_unshareable_info(TRAPS);
  static void restore_archived_method_handle_intrinsic(methodHandle m, TRAPS);
#endif
132
133 // accessors for instance variables
134
135 ConstMethod* constMethod() const { return _constMethod; }
136 void set_constMethod(ConstMethod* xconst) { _constMethod = xconst; }
137
138
139 static address make_adapters(const methodHandle& mh, TRAPS);
140 address from_compiled_entry() const;
141 address from_interpreted_entry() const;
142
143 // access flag
144 AccessFlags access_flags() const { return _access_flags; }
145 void set_access_flags(AccessFlags flags) { _access_flags = flags; }
146
147 // name
148 Symbol* name() const { return constants()->symbol_at(name_index()); }
303 // constraint classes are loaded if necessary. Note that this may
304 // throw an exception if loading of the constraint classes causes
305 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
306 // If an exception is thrown, returns the bci of the
307 // exception handler which caused the exception to be thrown, which
308 // is needed for proper retries. See, for example,
309 // InterpreterRuntime::exception_handler_for_exception.
310 static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
311
312 static bool register_native(Klass* k,
313 Symbol* name,
314 Symbol* signature,
315 address entry,
316 TRAPS);
317
  // method data access
  MethodData* method_data() const {
    return _method_data;
  }

  void set_method_data(MethodData* data);

  // Training-data support (recorded profile for later reuse); may return
  // null when no training data exists — confirm against MethodTrainingData.
  MethodTrainingData* training_data_or_null() const;
  bool init_training_data(MethodTrainingData* td);

  // mark an exception handler as entered (used to prune dead catch blocks in C2)
  void set_exception_handler_entered(int handler_bci);

  MethodCounters* method_counters() const {
    return _method_counters;
  }

  void clear_method_counters() {
    _method_counters = nullptr;
  }

  // Presumably installs 'counters' into _method_counters; exact return
  // semantics (e.g. losing an install race) — confirm in method.cpp.
  bool init_method_counters(MethodCounters* counters);
340
341 inline int prev_event_count() const;
342 inline void set_prev_event_count(int count);
343 inline jlong prev_time() const;
344 inline void set_prev_time(jlong time);
345 inline float rate() const;
346 inline void set_rate(float rate);
347
348 inline int invocation_count() const;
349 inline int backedge_count() const;
350
351 bool was_executed_more_than(int n);
352 bool was_never_executed() { return !was_executed_more_than(0); }
353
354 static void build_profiling_method_data(const methodHandle& method, TRAPS);
355 static bool install_training_method_data(const methodHandle& method);
356 static MethodCounters* build_method_counters(Thread* current, Method* m);
357
358 inline int interpreter_invocation_count() const;
359
360 #ifndef PRODUCT
361 int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
362 void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
363 #else
364 // for PrintMethodData in a product build
365 int64_t compiled_invocation_count() const { return 0; }
366 #endif // not PRODUCT
367
368 // nmethod/verified compiler entry
369 address verified_code_entry();
370 bool check_code() const; // Not inline to avoid circular ref
371 nmethod* code() const;
372
373 // Locks NMethodState_lock if not held.
374 void unlink_code(nmethod *compare);
375 // Locks NMethodState_lock if not held.
376 void unlink_code();
377
378 private:
379 // Either called with NMethodState_lock held or from constructor.
380 void clear_code();
381
382 void clear_method_data() {
383 _method_data = nullptr;
384 }
385
386 public:
387 static void set_code(const methodHandle& mh, nmethod* code);
388 void set_adapter_entry(AdapterHandlerEntry* adapter) {
389 _adapter = adapter;
390 }
391 void set_from_compiled_entry(address entry) {
392 _from_compiled_entry = entry;
393 }
394
  // SCCache support: accessors for pre-loaded code and its cache entry
  // (cf. _preload_code / _scc_entry above).
  void set_preload_code(nmethod* code) {
    _preload_code = code;
  }
  void set_scc_entry(SCCEntry* entry) {
    _scc_entry = entry;
  }
  SCCEntry* scc_entry() const {
    return _scc_entry;
  }
404
405 address get_i2c_entry();
406 address get_c2i_entry();
407 address get_c2i_unverified_entry();
408 address get_c2i_no_clinit_check_entry();
409 AdapterHandlerEntry* adapter() const {
410 return _adapter;
411 }
412 // setup entry points
413 void link_method(const methodHandle& method, TRAPS);
414 // clear entry points. Used by sharing code during dump time
415 void unlink_method() NOT_CDS_RETURN;
416 void remove_unshareable_flags() NOT_CDS_RETURN;
417
418 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
419 virtual MetaspaceObj::Type type() const { return MethodType; }
420
421 // vtable index
422 enum VtableIndexFlag {
423 // Valid vtable indexes are non-negative (>= 0).
424 // These few negative values are used as sentinels.
599
600 // returns true if the method is static OR if the classfile version < 51
601 bool has_valid_initializer_flags() const;
602
603 // returns true if the method name is <clinit> and the method has
604 // valid static initializer flags.
605 bool is_static_initializer() const;
606
607 // returns true if the method name is <init>
608 bool is_object_initializer() const;
609
610 // returns true if the method name is wait0
611 bool is_object_wait0() const;
612
  // compiled code support
  // NOTE: code() is inherently racy as deopt can be clearing code
  // simultaneously. Use with caution.
  bool has_compiled_code() const;

  bool needs_clinit_barrier() const;
  // Presumably true when the installed code still contains class-init
  // barriers (compiled before the holder finished <clinit>) — confirm.
  bool code_has_clinit_barriers() const;
620
621 // sizing
622 static int header_size() {
623 return align_up((int)sizeof(Method), wordSize) / wordSize;
624 }
625 static int size(bool is_native);
626 int size() const { return method_size(); }
627 void log_touched(Thread* current);
628 static void print_touched_methods(outputStream* out);
629
630 // interpreter support
631 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
632 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
633 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
634 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
635
636 static ByteSize method_counters_offset() {
637 return byte_offset_of(Method, _method_counters);
638 }
639 #ifndef PRODUCT
|