src/hotspot/share/code/nmethod.hpp
(old version of the file, followed by the new version)

 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_CODE_NMETHOD_HPP
 26 #define SHARE_CODE_NMETHOD_HPP
 27 
 28 #include "code/compiledMethod.hpp"
 29 
 30 class CompiledICData;
 31 class CompileTask;
 32 class DepChange;
 33 class DirectiveSet;
 34 class DebugInformationRecorder;
 35 class JvmtiThreadState;
 36 class OopIterateClosure;

 37 
 38 // nmethods (native methods) are the compiled code versions of Java methods.
 39 //
 40 // An nmethod contains:
 41 //  - header                 (the nmethod structure)
 42 //  [Relocation]
 43 //  - relocation information
 44 //  - constant part          (doubles, longs and floats used in nmethod)
 45 //  - oop table
 46 //  [Code]
 47 //  - code body
 48 //  - exception handler
 49 //  - stub code
 50 //  [Debugging information]
 51 //  - oop array
 52 //  - data array
 53 //  - pcs
 54 //  [Exception handler table]
 55 //  - handler entry point array
 56 //  [Implicit Null Pointer exception table]

 58 //  [Speculations]
 59 //  - encoded speculations array
 60 //  [JVMCINMethodData]
 61 //  - meta data for JVMCI compiled nmethod
 62 
 63 #if INCLUDE_JVMCI
 64 class FailedSpeculation;
 65 class JVMCINMethodData;
 66 #endif
 67 
 68 class nmethod : public CompiledMethod {
 69   friend class VMStructs;
 70   friend class JVMCIVMStructs;
 71   friend class CodeCache;  // scavengable oops
 72   friend class JVMCINMethodData;
 73 
 74  private:
 75 
 76   uint64_t  _gc_epoch;
 77 



 78   // To support simple linked-list chaining of nmethods:
 79   nmethod*  _osr_link;         // from InstanceKlass::osr_nmethods_head
 80 
 81   // STW two-phase nmethod root processing helpers.
 82   //
 83   // When determining liveness of a given nmethod to do code cache unloading,
 84   // some collectors need to do different things depending on whether the nmethods
 85   // need to absolutely be kept alive during root processing; "strong"ly reachable
 86   // nmethods are known to be kept alive at root processing, but the liveness of
 87   // "weak"ly reachable ones is to be determined later.
 88   //
 89   // We want to allow strong and weak processing of nmethods by different threads
 90   // at the same time without heavy synchronization. Additional constraints are
 91   // to make sure that every nmethod is processed a minimal amount of time, and
 92   // nmethods themselves are always iterated at most once at a particular time.
 93   //
 94   // Note that strong processing work must be a superset of weak processing work
 95   // for this code to work.
 96   //
 97   // We store state and claim information in the _oops_do_mark_link member, using

220 #if INCLUDE_JVMCI
221   int _speculations_offset;
222   int _jvmci_data_offset;
223 #endif
224   int _nmethod_end_offset;
225 
226   int code_offset() const { return int(code_begin() - header_begin()); }
227 
228   // location in frame (offset from sp) where deopt can store the original
229   // pc during a deopt.
230   int _orig_pc_offset;
231 
232   int _compile_id;                           // which compilation made this nmethod
233 
234 #if INCLUDE_RTM_OPT
235   // RTM state at compile time. Used during deoptimization to decide
236   // whether to restart collecting RTM locking abort statistics.
237   RTMState _rtm_state;
238 #endif
239 


240   // These are used for compiled synchronized native methods to
241   // locate the owner and stack slot for the BasicLock. They are
242   // needed because there is no debug information for compiled native
243   // wrappers and the oop maps are insufficient to allow
244   // frame::retrieve_receiver() to work. Currently they are expected
245   // to be byte offsets from the Java stack pointer for maximum code
246   // sharing between platforms. JVMTI's GetLocalInstance() uses these
247   // offsets to find the receiver for non-static native wrapper frames.
248   ByteSize _native_receiver_sp_offset;
249   ByteSize _native_basic_lock_sp_offset;
250 
251   CompLevel _comp_level;               // compilation level
252 
253   // Local state used to keep track of whether unloading is happening or not
254   volatile uint8_t _is_unloading_state;
255 
256   // protected by CodeCache_lock
257   bool _has_flushed_dependencies;      // Used for maintenance of dependencies (CodeCache_lock)
258 
259   // used by jvmti to track if an event has been posted for this nmethod.
260   bool _load_reported;
261 


262   // Protected by CompiledMethod_lock
263   volatile signed char _state;         // {not_installed, in_use, not_used, not_entrant}
264 
265   int _skipped_instructions_size;
266 
267   // For native wrappers
268   nmethod(Method* method,
269           CompilerType type,
270           int nmethod_size,
271           int compile_id,
272           CodeOffsets* offsets,
273           CodeBuffer *code_buffer,
274           int frame_size,
275           ByteSize basic_lock_owner_sp_offset, /* synchronized natives only */
276           ByteSize basic_lock_sp_offset,       /* synchronized natives only */
277           OopMapSet* oop_maps);
278 
279   // Creation support
280   nmethod(Method* method,
281           CompilerType type,
282           int nmethod_size,
283           int compile_id,
284           int entry_bci,
285           CodeOffsets* offsets,
286           int orig_pc_offset,
287           DebugInformationRecorder *recorder,
288           Dependencies* dependencies,
289           CodeBuffer *code_buffer,
290           int frame_size,
291           OopMapSet* oop_maps,
292           ExceptionHandlerTable* handler_table,
293           ImplicitExceptionTable* nul_chk_table,
294           AbstractCompiler* compiler,
295           CompLevel comp_level

296 #if INCLUDE_JVMCI
297           , char* speculations = nullptr,
298           int speculations_len = 0,
299           JVMCINMethodData* jvmci_data = nullptr
300 #endif
301           );
302 
303   // helper methods
304   void* operator new(size_t size, int nmethod_size, int comp_level) throw();
305   // For method handle intrinsics: Try MethodNonProfiled, MethodProfiled and NonNMethod.
306   // Attention: Only allow NonNMethod space for special nmethods which don't need to be
307   // findable by nmethod iterators! In particular, they must not contain oops!
308   void* operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw();
309 
310   const char* reloc_string_for(u_char* begin, u_char* end);
311 
312   bool try_transition(signed char new_state);
313 
314   // Returns true if this thread changed the state of the nmethod or
315   // false if another thread performed the transition.

327   int data_offset() const                     { return _data_offset; }
328 
329   address header_end() const                  { return (address)    header_begin() + header_size(); }
330 
331  public:
332   // create nmethod with entry_bci
333   static nmethod* new_nmethod(const methodHandle& method,
334                               int compile_id,
335                               int entry_bci,
336                               CodeOffsets* offsets,
337                               int orig_pc_offset,
338                               DebugInformationRecorder* recorder,
339                               Dependencies* dependencies,
340                               CodeBuffer *code_buffer,
341                               int frame_size,
342                               OopMapSet* oop_maps,
343                               ExceptionHandlerTable* handler_table,
344                               ImplicitExceptionTable* nul_chk_table,
345                               AbstractCompiler* compiler,
346                               CompLevel comp_level

347 #if INCLUDE_JVMCI
348                               , char* speculations = nullptr,
349                               int speculations_len = 0,
350                               JVMCINMethodData* jvmci_data = nullptr
351 #endif
352   );
353 
354   // Only used for unit tests.
355   nmethod()
356     : CompiledMethod(),
357       _native_receiver_sp_offset(in_ByteSize(-1)),
358       _native_basic_lock_sp_offset(in_ByteSize(-1)),
359       _is_unloading_state(0) {}
360 
361 
362   static nmethod* new_native_nmethod(const methodHandle& method,
363                                      int compile_id,
364                                      CodeBuffer *code_buffer,
365                                      int vep_offset,
366                                      int frame_complete,

429   bool scopes_pcs_contains   (PcDesc* addr) const { return scopes_pcs_begin   () <= addr && addr < scopes_pcs_end   (); }
430 
431   // entry points
432   address entry_point() const                     { return _entry_point;             } // normal entry point
433   address verified_entry_point() const            { return _verified_entry_point;    } // if klass is correct
434 
435   // flag accessing and manipulation
436   bool  is_not_installed() const                  { return _state == not_installed; }
437   bool  is_in_use() const                         { return _state <= in_use; }
438   bool  is_not_entrant() const                    { return _state == not_entrant; }
439 
440   void clear_unloading_state();
441   // Heuristically deduce whether an nmethod is no longer worth keeping around
442   bool is_cold();
443   virtual bool is_unloading();
444   virtual void do_unloading(bool unloading_occurred);
445 
446   bool is_unlinked() const                        { return _is_unlinked; }
447   void set_is_unlinked()                          { assert(!_is_unlinked, "already unlinked"); _is_unlinked = true; }
448 



449 #if INCLUDE_RTM_OPT
450   // rtm state accessing and manipulating
451   RTMState  rtm_state() const                     { return _rtm_state; }
452   void set_rtm_state(RTMState state)              { _rtm_state = state; }
453 #endif
454 
455   bool make_in_use() {
456     return try_transition(in_use);
457   }
458   // Make the nmethod non entrant. The nmethod will continue to be
459   // alive.  It is used when an uncommon trap happens.  Returns true
460   // if this thread changed the state of the nmethod or false if
461   // another thread performed the transition.
462   bool  make_not_entrant();
463   bool  make_not_used()    { return make_not_entrant(); }
464 
465   int get_state() const {
466     return _state;
467   }
468 
469   bool has_dependencies()                         { return dependencies_size() != 0; }
470   void print_dependencies_on(outputStream* out) PRODUCT_RETURN;
471   void flush_dependencies();
472   bool has_flushed_dependencies()                 { return _has_flushed_dependencies; }
473   void set_has_flushed_dependencies()             {
474     assert(!has_flushed_dependencies(), "should only happen once");
475     _has_flushed_dependencies = 1;
476   }
477 
478   int   comp_level() const                        { return _comp_level; }
479 
480   void unlink_from_method();
481 
482   // Support for oops in scopes and relocs:
483   // Note: index 0 is reserved for null.

580 
581   static void oops_do_marking_prologue();
582   static void oops_do_marking_epilogue();
583 
584  private:
585   ScopeDesc* scope_desc_in(address begin, address end);
586 
587   address* orig_pc_addr(const frame* fr);
588 
589   // used by jvmti to track if the load event has been reported
590   bool  load_reported() const                     { return _load_reported; }
591   void  set_load_reported()                       { _load_reported = true; }
592 
593  public:
594   // copying of debugging information
595   void copy_scopes_pcs(PcDesc* pcs, int count);
596   void copy_scopes_data(address buffer, int size);
597 
598   int orig_pc_offset() { return _orig_pc_offset; }
599 






600   // Post successful compilation
601   void post_compiled_method(CompileTask* task);
602 
603   // jvmti support:
604   void post_compiled_method_load_event(JvmtiThreadState* state = nullptr);
605 
606   // verify operations
607   void verify();
608   void verify_scopes();
609   void verify_interrupt_point(address interrupt_point, bool is_inline_cache);
610 
611   // Disassemble this nmethod with additional debug information, e.g. information about blocks.
612   void decode2(outputStream* st) const;
613   void print_constant_pool(outputStream* st);
614 
615   // Avoid hiding of parent's 'decode(outputStream*)' method.
616   void decode(outputStream* st) const { decode2(st); } // just delegate here.
617 
618   // printing support
619   void print()                          const;

 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_CODE_NMETHOD_HPP
 26 #define SHARE_CODE_NMETHOD_HPP
 27 
 28 #include "code/compiledMethod.hpp"
 29 
 30 class CompiledICData;
 31 class CompileTask;
 32 class DepChange;
 33 class DirectiveSet;
 34 class DebugInformationRecorder;
 35 class JvmtiThreadState;
 36 class OopIterateClosure;
 37 class SCCEntry;
 38 
 39 // nmethods (native methods) are the compiled code versions of Java methods.
 40 //
 41 // An nmethod contains:
 42 //  - header                 (the nmethod structure)
 43 //  [Relocation]
 44 //  - relocation information
 45 //  - constant part          (doubles, longs and floats used in nmethod)
 46 //  - oop table
 47 //  [Code]
 48 //  - code body
 49 //  - exception handler
 50 //  - stub code
 51 //  [Debugging information]
 52 //  - oop array
 53 //  - data array
 54 //  - pcs
 55 //  [Exception handler table]
 56 //  - handler entry point array
 57 //  [Implicit Null Pointer exception table]

 59 //  [Speculations]
 60 //  - encoded speculations array
 61 //  [JVMCINMethodData]
 62 //  - meta data for JVMCI compiled nmethod
 63 
 64 #if INCLUDE_JVMCI
 65 class FailedSpeculation;
 66 class JVMCINMethodData;
 67 #endif
 68 
 69 class nmethod : public CompiledMethod {
 70   friend class VMStructs;
 71   friend class JVMCIVMStructs;
 72   friend class CodeCache;  // scavengable oops
 73   friend class JVMCINMethodData;
 74 
 75  private:
 76 
 77   uint64_t  _gc_epoch;
 78 
 79   // Profiling counter used to figure out the hottest nmethods to record into CDS
 80   volatile uint64_t _method_profiling_count;
 81 
 82   // To support simple linked-list chaining of nmethods:
 83   nmethod*  _osr_link;         // from InstanceKlass::osr_nmethods_head
 84 
 85   // STW two-phase nmethod root processing helpers.
 86   //
 87   // When determining liveness of a given nmethod to do code cache unloading,
 88   // some collectors need to do different things depending on whether the nmethods
 89   // need to absolutely be kept alive during root processing; "strong"ly reachable
 90   // nmethods are known to be kept alive at root processing, but the liveness of
 91   // "weak"ly reachable ones is to be determined later.
 92   //
 93   // We want to allow strong and weak processing of nmethods by different threads
 94   // at the same time without heavy synchronization. Additional constraints are
 95   // to make sure that every nmethod is processed a minimal amount of time, and
 96   // nmethods themselves are always iterated at most once at a particular time.
 97   //
 98   // Note that strong processing work must be a superset of weak processing work
 99   // for this code to work.
100   //
101   // We store state and claim information in the _oops_do_mark_link member, using

224 #if INCLUDE_JVMCI
225   int _speculations_offset;
226   int _jvmci_data_offset;
227 #endif
228   int _nmethod_end_offset;
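  // --- Reviewer sketch (illustrative only, not part of this change) ---------
  // The bracketed sections in the layout comment at the top of this file are
  // laid out contiguously after the header; each boundary is recovered by
  // adding one of the stored byte offsets above to header_begin(). The
  // accessors below are written out only to show the pattern -- the real
  // accessors are declared further down in this file (the JVMCI fields are
  // guarded by INCLUDE_JVMCI):
  //
  //   address speculations_begin() const { return header_begin() + _speculations_offset; }
  //   address speculations_end()   const { return header_begin() + _jvmci_data_offset;   }
  //   address jvmci_data_begin()   const { return header_begin() + _jvmci_data_offset;   }
  //   address jvmci_data_end()     const { return header_begin() + _nmethod_end_offset;  }
  // --------------------------------------------------------------------------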
229 
230   int code_offset() const { return int(code_begin() - header_begin()); }
231 
232   // location in frame (offset from sp) where deopt can store the original
233   // pc during a deopt.
234   int _orig_pc_offset;
235 
236   int _compile_id;                           // which compilation made this nmethod
237 
238 #if INCLUDE_RTM_OPT
239   // RTM state at compile time. Used during deoptimization to decide
240   // whether to restart collecting RTM locking abort statistics.
241   RTMState _rtm_state;
242 #endif
243 
244   SCCEntry* _scc_entry;
245 
246   // These are used for compiled synchronized native methods to
247   // locate the owner and stack slot for the BasicLock. They are
248   // needed because there is no debug information for compiled native
249   // wrappers and the oop maps are insufficient to allow
250   // frame::retrieve_receiver() to work. Currently they are expected
251   // to be byte offsets from the Java stack pointer for maximum code
252   // sharing between platforms. JVMTI's GetLocalInstance() uses these
253   // offsets to find the receiver for non-static native wrapper frames.
254   ByteSize _native_receiver_sp_offset;
255   ByteSize _native_basic_lock_sp_offset;
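  // --- Reviewer sketch (illustrative only, not part of this change) ---------
  // How a consumer such as JVMTI's GetLocalInstance can use these offsets to
  // find the receiver of a synchronized native wrapper frame. The accessor
  // native_receiver_sp_offset() is assumed here; it is declared elsewhere in
  // this file.
  //
  //   oop receiver_of_native_wrapper(const frame* fr, const nmethod* nm) {
  //     // byte offset relative to the Java stack pointer of the wrapper frame
  //     address owner_addr = (address) fr->sp() + in_bytes(nm->native_receiver_sp_offset());
  //     return *(oop*) owner_addr;
  //   }
  // --------------------------------------------------------------------------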
256 
257   CompLevel _comp_level;               // compilation level
258 
259   // Local state used to keep track of whether unloading is happening or not
260   volatile uint8_t _is_unloading_state;
261 
262   // protected by CodeCache_lock
263   bool _has_flushed_dependencies;      // Used for maintenance of dependencies (CodeCache_lock)
264 
265   // used by jvmti to track if an event has been posted for this nmethod.
266   bool _load_reported;
267 
268   bool _used; // has this nmethod ever been invoked?
269 
270   // Protected by CompiledMethod_lock
271   volatile signed char _state;         // {not_installed, in_use, not_used, not_entrant}
272 
273   int _skipped_instructions_size;
274 
275   // For native wrappers
276   nmethod(Method* method,
277           CompilerType type,
278           int nmethod_size,
279           int compile_id,
280           CodeOffsets* offsets,
281           CodeBuffer *code_buffer,
282           int frame_size,
283           ByteSize basic_lock_owner_sp_offset, /* synchronized natives only */
284           ByteSize basic_lock_sp_offset,       /* synchronized natives only */
285           OopMapSet* oop_maps);
286 
287   // Creation support
288   nmethod(Method* method,
289           CompilerType type,
290           int nmethod_size,
291           int compile_id,
292           int entry_bci,
293           CodeOffsets* offsets,
294           int orig_pc_offset,
295           DebugInformationRecorder *recorder,
296           Dependencies* dependencies,
297           CodeBuffer *code_buffer,
298           int frame_size,
299           OopMapSet* oop_maps,
300           ExceptionHandlerTable* handler_table,
301           ImplicitExceptionTable* nul_chk_table,
302           AbstractCompiler* compiler,
303           CompLevel comp_level
304           , SCCEntry* scc_entry
305 #if INCLUDE_JVMCI
306           , char* speculations = nullptr,
307           int speculations_len = 0,
308           JVMCINMethodData* jvmci_data = nullptr
309 #endif
310           );
311 
312   // helper methods
313   void* operator new(size_t size, int nmethod_size, int comp_level) throw();
314   // For method handle intrinsics: Try MethodNonProfiled, MethodProfiled and NonNMethod.
315   // Attention: Only allow NonNMethod space for special nmethods which don't need to be
316   // findable by nmethod iterators! In particular, they must not contain oops!
317   void* operator new(size_t size, int nmethod_size, bool allow_NonNMethod_space) throw();
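  // --- Reviewer sketch (illustrative only, not part of this change) ---------
  // Typical shape of a call to the placement operator new above from a
  // factory such as new_nmethod(); the storage comes from the CodeCache, so
  // on exhaustion the result is nullptr rather than an exception. The size
  // computation is abbreviated here.
  //
  //   int nmethod_size = CodeBlob::allocation_size(code_buffer, sizeof(nmethod)) + /* tables */ ...;
  //   nmethod* nm = new (nmethod_size, comp_level) nmethod(method(), type, nmethod_size, ...);
  //   if (nm == nullptr) {
  //     // CodeCache full: caller must bail out and let the compilation be retried later
  //   }
  // --------------------------------------------------------------------------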
318 
319   const char* reloc_string_for(u_char* begin, u_char* end);
320 
321   bool try_transition(signed char new_state);
322 
323   // Returns true if this thread changed the state of the nmethod or
324   // false if another thread performed the transition.

336   int data_offset() const                     { return _data_offset; }
337 
338   address header_end() const                  { return (address)    header_begin() + header_size(); }
339 
340  public:
341   // create nmethod with entry_bci
342   static nmethod* new_nmethod(const methodHandle& method,
343                               int compile_id,
344                               int entry_bci,
345                               CodeOffsets* offsets,
346                               int orig_pc_offset,
347                               DebugInformationRecorder* recorder,
348                               Dependencies* dependencies,
349                               CodeBuffer *code_buffer,
350                               int frame_size,
351                               OopMapSet* oop_maps,
352                               ExceptionHandlerTable* handler_table,
353                               ImplicitExceptionTable* nul_chk_table,
354                               AbstractCompiler* compiler,
355                               CompLevel comp_level
356                               , SCCEntry* scc_entry
357 #if INCLUDE_JVMCI
358                               , char* speculations = nullptr,
359                               int speculations_len = 0,
360                               JVMCINMethodData* jvmci_data = nullptr
361 #endif
362   );
363 
364   // Only used for unit tests.
365   nmethod()
366     : CompiledMethod(),
367       _native_receiver_sp_offset(in_ByteSize(-1)),
368       _native_basic_lock_sp_offset(in_ByteSize(-1)),
369       _is_unloading_state(0) {}
370 
371 
372   static nmethod* new_native_nmethod(const methodHandle& method,
373                                      int compile_id,
374                                      CodeBuffer *code_buffer,
375                                      int vep_offset,
376                                      int frame_complete,

439   bool scopes_pcs_contains   (PcDesc* addr) const { return scopes_pcs_begin   () <= addr && addr < scopes_pcs_end   (); }
440 
441   // entry points
442   address entry_point() const                     { return _entry_point;             } // normal entry point
443   address verified_entry_point() const            { return _verified_entry_point;    } // if klass is correct
444 
445   // flag accessing and manipulation
446   bool  is_not_installed() const                  { return _state == not_installed; }
447   bool  is_in_use() const                         { return _state <= in_use; }
448   bool  is_not_entrant() const                    { return _state == not_entrant; }
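  // Reviewer note (assumption, illustrative only): is_in_use() above relies on
  // the ordering of the state values listed for _state, roughly
  //
  //   enum { not_installed = -1, in_use = 0, not_used = 1, not_entrant = 2 };  // in compiledMethod.hpp
  //
  // so "_state <= in_use" covers both a freshly created (not yet installed)
  // nmethod and one that is installed and entrant.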
449 
450   void clear_unloading_state();
451   // Heuristically deduce whether an nmethod is no longer worth keeping around
452   bool is_cold();
453   virtual bool is_unloading();
454   virtual void do_unloading(bool unloading_occurred);
455 
456   bool is_unlinked() const                        { return _is_unlinked; }
457   void set_is_unlinked()                          { assert(!_is_unlinked, "already unlinked"); _is_unlinked = true; }
458 
459   void inc_method_profiling_count();
460   uint64_t method_profiling_count();
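  // --- Reviewer sketch (illustrative only, not part of this change) ---------
  // One plausible implementation of the two declarations above, assuming the
  // counter only needs lock-free, approximately ordered updates:
  //
  //   void nmethod::inc_method_profiling_count() {
  //     Atomic::add(&_method_profiling_count, (uint64_t)1);
  //   }
  //   uint64_t nmethod::method_profiling_count() {
  //     return Atomic::load(&_method_profiling_count);
  //   }
  // --------------------------------------------------------------------------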
461 
462 #if INCLUDE_RTM_OPT
463   // rtm state accessing and manipulating
464   RTMState  rtm_state() const                     { return _rtm_state; }
465   void set_rtm_state(RTMState state)              { _rtm_state = state; }
466 #endif
467 
468   bool make_in_use() {
469     return try_transition(in_use);
470   }
471   // Make the nmethod non entrant. The nmethod will continue to be
472   // alive.  It is used when an uncommon trap happens.  Returns true
473   // if this thread changed the state of the nmethod or false if
474   // another thread performed the transition.
475   bool  make_not_entrant(bool make_not_entrant = true);
476   bool  make_not_used() { return make_not_entrant(false); }
477 
478   int get_state() const {
479     return _state;
480   }
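  // --- Reviewer sketch (illustrative only, not part of this change) ---------
  // The transitions above all funnel through try_transition(); a minimal
  // sketch of the compare-and-swap it amounts to, under the assumption that
  // states only move "forward" and that callers retry or give up on failure:
  //
  //   bool nmethod::try_transition(signed char new_state) {
  //     signed char old_state = Atomic::load(&_state);
  //     if (old_state >= new_state) {
  //       return false;  // an equal or stronger transition already happened
  //     }
  //     // true only if this thread performed the transition
  //     return Atomic::cmpxchg(&_state, old_state, new_state) == old_state;
  //   }
  // --------------------------------------------------------------------------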
481 
482   bool has_dependencies()                         { return dependencies_size() != 0; }
483   void print_dependencies_on(outputStream* out) PRODUCT_RETURN;
484   void flush_dependencies();
485   bool has_flushed_dependencies()                 { return _has_flushed_dependencies; }
486   void set_has_flushed_dependencies()             {
487     assert(!has_flushed_dependencies(), "should only happen once");
488     _has_flushed_dependencies = 1;
489   }
490 
491   int   comp_level() const                        { return _comp_level; }
492 
493   void unlink_from_method();
494 
495   // Support for oops in scopes and relocs:
496   // Note: index 0 is reserved for null.

593 
594   static void oops_do_marking_prologue();
595   static void oops_do_marking_epilogue();
596 
597  private:
598   ScopeDesc* scope_desc_in(address begin, address end);
599 
600   address* orig_pc_addr(const frame* fr);
601 
602   // used by jvmti to track if the load event has been reported
603   bool  load_reported() const                     { return _load_reported; }
604   void  set_load_reported()                       { _load_reported = true; }
605 
606  public:
607   // copying of debugging information
608   void copy_scopes_pcs(PcDesc* pcs, int count);
609   void copy_scopes_data(address buffer, int size);
610 
611   int orig_pc_offset() { return _orig_pc_offset; }
612 
613   SCCEntry* scc_entry() const { return _scc_entry; }
614   bool is_scc() const { return scc_entry() != nullptr; }
615 
616   bool     used() const { return _used; }
617   void set_used()       { _used = true; }
618 
619   // Post successful compilation
620   void post_compiled_method(CompileTask* task);
621 
622   // jvmti support:
623   void post_compiled_method_load_event(JvmtiThreadState* state = nullptr);
624 
625   // verify operations
626   void verify();
627   void verify_scopes();
628   void verify_interrupt_point(address interrupt_point, bool is_inline_cache);
629 
630   // Disassemble this nmethod with additional debug information, e.g. information about blocks.
631   void decode2(outputStream* st) const;
632   void print_constant_pool(outputStream* st);
633 
634   // Avoid hiding of parent's 'decode(outputStream*)' method.
635   void decode(outputStream* st) const { decode2(st); } // just delegate here.
636 
637   // printing support
638   void print()                          const;