43 #endif
44
45
46 // A Method represents a Java method.
47 //
48 // Note that most applications load thousands of methods, so keeping the size of this
49 // class small has a big impact on footprint.
50 //
51 // Note that native_function and signature_handler have to be at fixed offsets
52 // (required by the interpreter)
53 //
54 // Method embedded field layout (after declared fields):
55 // [EMBEDDED native_function (present only if native) ]
56 // [EMBEDDED signature_handler (present only if native) ]
57
58 class CheckedExceptionElement;
59 class LocalVariableTableElement;
60 class AdapterHandlerEntry;
61 class MethodData;
62 class MethodCounters;
63 class ConstMethod;
64 class InlineTableSizes;
65 class nmethod;
66 class InterpreterOopMap;
67
68 class Method : public Metadata {
69 friend class VMStructs;
70 friend class JVMCIVMStructs;
71 friend class MethodTest;
72 private:
73 // If you add a new field that points to any metaspace object, you
74 // must add this field to Method::metaspace_pointers_do().
75 ConstMethod* _constMethod; // Method read-only data.
76 MethodData* _method_data;
77 MethodCounters* _method_counters;
78 AdapterHandlerEntry* _adapter;
79 AccessFlags _access_flags; // Access flags
80 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
81 MethodFlags _flags;
82
83 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
84
85 JFR_ONLY(DEFINE_TRACE_FLAG;)
86
87 #ifndef PRODUCT
88 int64_t _compiled_invocation_count;
89
90 Symbol* _name;
91 #endif
92 // Entry point for calling both from and to the interpreter.
93 address _i2i_entry; // All-args-on-stack calling convention
94 // Entry point for calling from compiled code, to compiled code if it exists
95 // or else the interpreter.
96 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
97 // The entry point for calling both from and to compiled code is
98 // "_code->entry_point()". Because of tiered compilation and de-opt, this
99 // field can come and go. It can transition from null to not-null at any
100 // time (whenever a compile completes). It can transition from not-null to
101 // null only at safepoints (because of a de-opt).
102 nmethod* volatile _code; // Points to the corresponding piece of native code
103 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
104
105 // Constructor
106 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
107 public:
108
109 static Method* allocate(ClassLoaderData* loader_data,
110 int byte_code_size,
111 AccessFlags access_flags,
112 InlineTableSizes* sizes,
113 ConstMethod::MethodType method_type,
114 Symbol* name,
115 TRAPS);
116
117 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
118 Method(){}
119
120 virtual bool is_method() const { return true; }
121
122 #if INCLUDE_CDS
123 void remove_unshareable_info();
124 void restore_unshareable_info(TRAPS);
298 // constraint classes are loaded if necessary. Note that this may
299 // throw an exception if loading of the constraint classes causes
300 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
301 // If an exception is thrown, returns the bci of the
302 // exception handler which caused the exception to be thrown, which
303 // is needed for proper retries. See, for example,
304 // InterpreterRuntime::exception_handler_for_exception.
305 static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
306
307 static bool register_native(Klass* k,
308 Symbol* name,
309 Symbol* signature,
310 address entry,
311 TRAPS);
312
313 // method data access
314   MethodData* method_data() const {   // may be nullptr; cleared via clear_method_data()
315     return _method_data;              // plain load, no synchronization in this accessor
316   }
317
318 // mark an exception handler as entered (used to prune dead catch blocks in C2)
319 void set_exception_handler_entered(int handler_bci);
320
321   MethodCounters* method_counters() const {   // may be nullptr; cleared via clear_method_counters()
322     return _method_counters;                  // plain load, no synchronization in this accessor
323   }
324
325   void clear_method_counters() {   // drops the pointer only; does not free the MethodCounters
326     _method_counters = nullptr;
327   }
328
329 bool init_method_counters(MethodCounters* counters);
330
331 inline int prev_event_count() const;
332 inline void set_prev_event_count(int count);
333 inline jlong prev_time() const;
334 inline void set_prev_time(jlong time);
335 inline float rate() const;
336 inline void set_rate(float rate);
337
338 int invocation_count() const;
339 int backedge_count() const;
340
341 bool was_executed_more_than(int n);
342 bool was_never_executed() { return !was_executed_more_than(0); }
343
344 static void build_profiling_method_data(const methodHandle& method, TRAPS);
345
346 static MethodCounters* build_method_counters(Thread* current, Method* m);
347
348 int interpreter_invocation_count() { return invocation_count(); }
349
350 #ifndef PRODUCT
351   // Backed by _compiled_invocation_count, which exists only in non-product builds
352   // (presumably counts invocations of compiled code — field name only; confirm at call sites).
353   int64_t  compiled_invocation_count() const { return _compiled_invocation_count;}
354   // Note: takes an int and widens to int64_t, so callers cannot set values beyond int range.
355   void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
356 #else
357   // for PrintMethodData in a product build
358   int64_t compiled_invocation_count() const { return 0; }
359 #endif // not PRODUCT
357
358 // nmethod/verified compiler entry
359 address verified_code_entry();
360 bool check_code() const; // Not inline to avoid circular ref
361 nmethod* code() const;
362
363 // Locks NMethodState_lock if not held.
364 void unlink_code(nmethod *compare);
365 // Locks NMethodState_lock if not held.
366 void unlink_code();
367
368 private:
369 // Either called with NMethodState_lock held or from constructor.
370 void clear_code();
371
372   void clear_method_data() {   // drops the pointer only; does not free the MethodData
373     _method_data = nullptr;
374   }
375
376 public:
377 static void set_code(const methodHandle& mh, nmethod* code);
378   void set_adapter_entry(AdapterHandlerEntry* adapter) {   // install i2c/c2i adapter (see get_i2c_entry() etc.)
379     _adapter = adapter;
380   }
381   void set_from_compiled_entry(address entry) {   // updates the cache: _code ? _code->entry_point() : _adapter->c2i_entry()
382     _from_compiled_entry = entry;                 // field is volatile; store is a simple assignment
383   }
384
385 address get_i2c_entry();
386 address get_c2i_entry();
387 address get_c2i_unverified_entry();
388 address get_c2i_no_clinit_check_entry();
389   AdapterHandlerEntry* adapter() const {   // raw accessor for the adapter installed by set_adapter_entry()
390     return _adapter;
391   }
392 // setup entry points
393 void link_method(const methodHandle& method, TRAPS);
394 // clear entry points. Used by sharing code during dump time
395 void unlink_method() NOT_CDS_RETURN;
396 void remove_unshareable_flags() NOT_CDS_RETURN;
397
398 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
399 virtual MetaspaceObj::Type type() const { return MethodType; }
400
401 // vtable index
402 enum VtableIndexFlag {
403 // Valid vtable indexes are non-negative (>= 0).
404 // These few negative values are used as sentinels.
579
580 // returns true if the method is static OR if the classfile version < 51
581 bool has_valid_initializer_flags() const;
582
583 // returns true if the method name is <clinit> and the method has
584 // valid static initializer flags.
585 bool is_static_initializer() const;
586
587 // returns true if the method name is <init>
588 bool is_object_initializer() const;
589
590 // returns true if the method name is wait0
591 bool is_object_wait0() const;
592
593 // compiled code support
594 // NOTE: code() is inherently racy as deopt can be clearing code
595 // simultaneously. Use with caution.
596 bool has_compiled_code() const;
597
598 bool needs_clinit_barrier() const;
599
600 // sizing
601   static int header_size() {   // size of the fixed (declared-field) part of Method, in words, rounded up
602     return align_up((int)sizeof(Method), wordSize) / wordSize;
603   }
604 static int size(bool is_native);
605 int size() const { return method_size(); }
606 void log_touched(Thread* current);
607 static void print_touched_methods(outputStream* out);
608
609 // interpreter support
610 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
611 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
612 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
613 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
614
615   static ByteSize method_counters_offset() {       // byte offset of _method_counters, for generated code
616     return byte_offset_of(Method, _method_counters);
617   }
618 #ifndef PRODUCT
|
43 #endif
44
45
46 // A Method represents a Java method.
47 //
48 // Note that most applications load thousands of methods, so keeping the size of this
49 // class small has a big impact on footprint.
50 //
51 // Note that native_function and signature_handler have to be at fixed offsets
52 // (required by the interpreter)
53 //
54 // Method embedded field layout (after declared fields):
55 // [EMBEDDED native_function (present only if native) ]
56 // [EMBEDDED signature_handler (present only if native) ]
57
58 class CheckedExceptionElement;
59 class LocalVariableTableElement;
60 class AdapterHandlerEntry;
61 class MethodData;
62 class MethodCounters;
63 class MethodTrainingData;
64 class ConstMethod;
65 class InlineTableSizes;
66 class nmethod;
67 class InterpreterOopMap;
68 class SCCEntry;
69
70 class Method : public Metadata {
71 friend class VMStructs;
72 friend class JVMCIVMStructs;
73 friend class MethodTest;
74 private:
75 // If you add a new field that points to any metaspace object, you
76 // must add this field to Method::metaspace_pointers_do().
77 ConstMethod* _constMethod; // Method read-only data.
78 MethodData* _method_data;
79 MethodCounters* _method_counters;
80 AdapterHandlerEntry* _adapter;
81 AccessFlags _access_flags; // Access flags
82 int _vtable_index; // vtable index of this method (see VtableIndexFlag)
83 MethodFlags _flags;
84
85 u2 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
86
87 JFR_ONLY(DEFINE_TRACE_FLAG;)
88
89 #ifndef PRODUCT
90 int64_t _compiled_invocation_count;
91
92 Symbol* _name;
93 #endif
94 // Entry point for calling both from and to the interpreter.
95 address _i2i_entry; // All-args-on-stack calling convention
96 // Entry point for calling from compiled code, to compiled code if it exists
97 // or else the interpreter.
98 volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
99 // The entry point for calling both from and to compiled code is
100 // "_code->entry_point()". Because of tiered compilation and de-opt, this
101 // field can come and go. It can transition from null to not-null at any
102 // time (whenever a compile completes). It can transition from not-null to
103 // null only at safepoints (because of a de-opt).
104 nmethod* volatile _code; // Points to the corresponding piece of native code
105 volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
106
107 nmethod* _preload_code; // preloaded SCCache code
108 SCCEntry* _scc_entry; // SCCache entry for pre-loading code
109
110 // Constructor
111 Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
112 public:
113
114 static Method* allocate(ClassLoaderData* loader_data,
115 int byte_code_size,
116 AccessFlags access_flags,
117 InlineTableSizes* sizes,
118 ConstMethod::MethodType method_type,
119 Symbol* name,
120 TRAPS);
121
122 // CDS and vtbl checking can create an empty Method to get vtbl pointer.
123 Method(){}
124
125 virtual bool is_method() const { return true; }
126
127 #if INCLUDE_CDS
128 void remove_unshareable_info();
129 void restore_unshareable_info(TRAPS);
303 // constraint classes are loaded if necessary. Note that this may
304 // throw an exception if loading of the constraint classes causes
305 // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
306 // If an exception is thrown, returns the bci of the
307 // exception handler which caused the exception to be thrown, which
308 // is needed for proper retries. See, for example,
309 // InterpreterRuntime::exception_handler_for_exception.
310 static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
311
312 static bool register_native(Klass* k,
313 Symbol* name,
314 Symbol* signature,
315 address entry,
316 TRAPS);
317
318 // method data access
319   MethodData* method_data() const {   // may be nullptr; cleared via clear_method_data()
320     return _method_data;              // plain load, no synchronization in this accessor
321   }
322
323 void set_method_data(MethodData* data);
324
325 MethodTrainingData* training_data_or_null() const;
326 bool init_training_data(MethodTrainingData* tdata);
327
328 // mark an exception handler as entered (used to prune dead catch blocks in C2)
329 void set_exception_handler_entered(int handler_bci);
330
331   MethodCounters* method_counters() const {   // may be nullptr; cleared via clear_method_counters()
332     return _method_counters;                  // plain load, no synchronization in this accessor
333   }
334
335   void clear_method_counters() {   // drops the pointer only; does not free the MethodCounters
336     _method_counters = nullptr;
337   }
338
339 bool init_method_counters(MethodCounters* counters);
340
341 inline int prev_event_count() const;
342 inline void set_prev_event_count(int count);
343 inline jlong prev_time() const;
344 inline void set_prev_time(jlong time);
345 inline float rate() const;
346 inline void set_rate(float rate);
347
348 int invocation_count() const;
349 int backedge_count() const;
350
351 bool was_executed_more_than(int n);
352 bool was_never_executed() { return !was_executed_more_than(0); }
353
354 static void build_profiling_method_data(const methodHandle& method, TRAPS);
355 static bool install_training_method_data(const methodHandle& method);
356 static MethodCounters* build_method_counters(Thread* current, Method* m);
357
358 int interpreter_invocation_count() { return invocation_count(); }
359
360 #ifndef PRODUCT
361   // Backed by _compiled_invocation_count, which exists only in non-product builds
362   // (presumably counts invocations of compiled code — field name only; confirm at call sites).
363   int64_t compiled_invocation_count() const { return _compiled_invocation_count;}
364   // Note: takes an int and widens to int64_t, so callers cannot set values beyond int range.
365   void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
366 #else
367   // for PrintMethodData in a product build
368   int64_t compiled_invocation_count() const { return 0; }
369 #endif // not PRODUCT
367
368 // nmethod/verified compiler entry
369 address verified_code_entry();
370 bool check_code() const; // Not inline to avoid circular ref
371 nmethod* code() const;
372
373 // Locks NMethodState_lock if not held.
374 void unlink_code(nmethod *compare);
375 // Locks NMethodState_lock if not held.
376 void unlink_code();
377
378 private:
379 // Either called with NMethodState_lock held or from constructor.
380 void clear_code();
381
382   void clear_method_data() {   // drops the pointer only; does not free the MethodData
383     _method_data = nullptr;
384   }
385
386 public:
387 static void set_code(const methodHandle& mh, nmethod* code);
388   void set_adapter_entry(AdapterHandlerEntry* adapter) {   // install i2c/c2i adapter (see get_i2c_entry() etc.)
389     _adapter = adapter;
390   }
391   void set_from_compiled_entry(address entry) {   // updates the cache: _code ? _code->entry_point() : _adapter->c2i_entry()
392     _from_compiled_entry = entry;                 // field is volatile; store is a simple assignment
393   }
394
395   void set_preload_code(nmethod* code) {   // install preloaded SCCache code (see _preload_code)
396     _preload_code = code;
397   }
398   void set_scc_entry(SCCEntry* entry) {   // install the SCCache entry used for pre-loading code (see _scc_entry)
399     _scc_entry = entry;
400   }
401   SCCEntry* scc_entry() const {   // raw accessor; set via set_scc_entry()
402     return _scc_entry;
403   }
404
405 address get_i2c_entry();
406 address get_c2i_entry();
407 address get_c2i_unverified_entry();
408 address get_c2i_no_clinit_check_entry();
409   AdapterHandlerEntry* adapter() const {   // raw accessor for the adapter installed by set_adapter_entry()
410     return _adapter;
411   }
412 // setup entry points
413 void link_method(const methodHandle& method, TRAPS);
414 // clear entry points. Used by sharing code during dump time
415 void unlink_method() NOT_CDS_RETURN;
416 void remove_unshareable_flags() NOT_CDS_RETURN;
417
418 virtual void metaspace_pointers_do(MetaspaceClosure* iter);
419 virtual MetaspaceObj::Type type() const { return MethodType; }
420
421 // vtable index
422 enum VtableIndexFlag {
423 // Valid vtable indexes are non-negative (>= 0).
424 // These few negative values are used as sentinels.
599
600 // returns true if the method is static OR if the classfile version < 51
601 bool has_valid_initializer_flags() const;
602
603 // returns true if the method name is <clinit> and the method has
604 // valid static initializer flags.
605 bool is_static_initializer() const;
606
607 // returns true if the method name is <init>
608 bool is_object_initializer() const;
609
610 // returns true if the method name is wait0
611 bool is_object_wait0() const;
612
613 // compiled code support
614 // NOTE: code() is inherently racy as deopt can be clearing code
615 // simultaneously. Use with caution.
616 bool has_compiled_code() const;
617
618 bool needs_clinit_barrier() const;
619 bool code_has_clinit_barriers() const;
620
621 // sizing
622   static int header_size() {   // size of the fixed (declared-field) part of Method, in words, rounded up
623     return align_up((int)sizeof(Method), wordSize) / wordSize;
624   }
625 static int size(bool is_native);
626 int size() const { return method_size(); }
627 void log_touched(Thread* current);
628 static void print_touched_methods(outputStream* out);
629
630 // interpreter support
631 static ByteSize const_offset() { return byte_offset_of(Method, _constMethod ); }
632 static ByteSize access_flags_offset() { return byte_offset_of(Method, _access_flags ); }
633 static ByteSize from_compiled_offset() { return byte_offset_of(Method, _from_compiled_entry); }
634 static ByteSize code_offset() { return byte_offset_of(Method, _code); }
635
636   static ByteSize method_counters_offset() {       // byte offset of _method_counters, for generated code
637     return byte_offset_of(Method, _method_counters);
638   }
639 #ifndef PRODUCT
|