src/hotspot/share/code/compiledMethod.cpp (old version)

 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "code/compiledIC.hpp"
 27 #include "code/compiledMethod.inline.hpp"
 28 #include "code/exceptionHandlerTable.hpp"
 29 #include "code/scopeDesc.hpp"
 30 #include "code/codeCache.hpp"
 31 #include "code/icBuffer.hpp"
 32 #include "gc/shared/barrierSet.hpp"
 33 #include "gc/shared/barrierSetNMethod.hpp"
 34 #include "gc/shared/gcBehaviours.hpp"
 35 #include "interpreter/bytecode.inline.hpp"
 36 #include "logging/log.hpp"
 37 #include "logging/logTag.hpp"
 38 #include "memory/resourceArea.hpp"
 39 #include "oops/compiledICHolder.inline.hpp"
 40 #include "oops/klass.inline.hpp"
 41 #include "oops/methodData.hpp"
 42 #include "oops/method.inline.hpp"

 43 #include "prims/methodHandles.hpp"
 44 #include "runtime/atomic.hpp"
 45 #include "runtime/deoptimization.hpp"


 46 #include "runtime/handles.inline.hpp"
 47 #include "runtime/mutexLocker.hpp"
 48 #include "runtime/sharedRuntime.hpp"
 49 
 50 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
 51                                int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
 52                                bool caller_must_gc_arguments)
 53   : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
 54     _mark_for_deoptimization_status(not_marked),
 55     _method(method),
 56     _gc_data(NULL)
 57 {
 58   init_defaults();
 59 }
 60 
 61 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, int size,
 62                                int header_size, CodeBuffer* cb, int frame_complete_offset, int frame_size,
 63                                OopMapSet* oop_maps, bool caller_must_gc_arguments)
 64   : CodeBlob(name, type, CodeBlobLayout((address) this, size, header_size, cb), cb,
 65              frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
 66     _mark_for_deoptimization_status(not_marked),
 67     _method(method),
 68     _gc_data(NULL)
 69 {
 70   init_defaults();
 71 }
 72 
 73 void CompiledMethod::init_defaults() {
 74   { // avoid uninitialized fields, even for short time periods
 75     _scopes_data_begin          = NULL;
 76     _deopt_handler_begin        = NULL;
 77     _deopt_mh_handler_begin     = NULL;
 78     _exception_cache            = NULL;
 79   }
 80   _has_unsafe_access          = 0;
 81   _has_method_handle_invokes  = 0;
 82   _has_wide_vectors           = 0;

 83 }
 84 
 85 bool CompiledMethod::is_method_handle_return(address return_pc) {
 86   if (!has_method_handle_invokes())  return false;
 87   PcDesc* pd = pc_desc_at(return_pc);
 88   if (pd == NULL)
 89     return false;
 90   return pd->is_method_handle_invoke();
 91 }
 92 
 93 // Returns a string version of the method state.
 94 const char* CompiledMethod::state() const {
 95   int state = get_state();
 96   switch (state) {
 97   case not_installed:
 98     return "not installed";
 99   case in_use:
100     return "in use";
101   case not_used:
102     return "not_used";
103   case not_entrant:
104     return "not_entrant";
105   case zombie:
106     return "zombie";
107   case unloaded:
108     return "unloaded";
109   default:
110     fatal("unexpected method state: %d", state);
111     return NULL;
112   }
113 }
114 
115 //-----------------------------------------------------------------------------
116 void CompiledMethod::mark_for_deoptimization(bool inc_recompile_counts) {

117   MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
118                  Mutex::_no_safepoint_check_flag);
119   _mark_for_deoptimization_status = (inc_recompile_counts ? deoptimize : deoptimize_noupdate);
120 }
121 
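The ternary passed to MutexLocker above is a reentrancy guard: when the current thread already owns CompiledMethod_lock, NULL is passed and the locker constructs as a no-op, so the function is safe to call with or without the lock held. A minimal standalone sketch of the idiom, using illustrative stand-ins (OwnerTrackingMutex, MaybeMutexLocker) rather than HotSpot's real Mutex and MutexLocker:

    #include <mutex>
    #include <thread>

    // Illustrative stand-in for HotSpot's Mutex (not the real class): it
    // remembers its owner so a caller can ask "do I already hold this?".
    class OwnerTrackingMutex {
      std::mutex _m;
      std::thread::id _owner;
    public:
      void lock()   { _m.lock(); _owner = std::this_thread::get_id(); }
      void unlock() { _owner = std::thread::id(); _m.unlock(); }
      // Simplified for the sketch: a thread only compares against its own
      // id, so a stale value read here compares unequal, which is correct.
      bool owned_by_self() const { return _owner == std::this_thread::get_id(); }
    };

    // Like MutexLocker: a null mutex means "caller already holds it,
    // acquire nothing" -- exactly what the ternary in the code above selects.
    class MaybeMutexLocker {
      OwnerTrackingMutex* _m;
    public:
      explicit MaybeMutexLocker(OwnerTrackingMutex* m) : _m(m) {
        if (_m != nullptr) _m->lock();
      }
      ~MaybeMutexLocker() {
        if (_m != nullptr) _m->unlock();
      }
    };

    OwnerTrackingMutex g_lock;

    void mark_for_deopt_sketch() {
      // Mirrors: MutexLocker ml(lock->owned_by_self() ? NULL : lock, ...);
      MaybeMutexLocker ml(g_lock.owned_by_self() ? nullptr : &g_lock);
      // ... update the deoptimization status under the lock ...
    }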
122 //-----------------------------------------------------------------------------
123 
124 ExceptionCache* CompiledMethod::exception_cache_acquire() const {
125   return Atomic::load_acquire(&_exception_cache);
126 }
127 
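The load-acquire above pairs with a release-store performed when a new cache head is published; writers additionally serialize on ExceptionCache_lock, as the assert in add_exception_cache_entry below shows. A hedged sketch of the same publication pattern with std::atomic, where Node is a made-up type standing in for ExceptionCache:

    #include <atomic>

    struct Node {
      int   payload;
      Node* next;
    };

    std::atomic<Node*> g_head{nullptr};

    // Writer (assumed externally serialized, as ExceptionCache_lock does
    // for HotSpot's cache): fully construct the node, then publish with
    // release so its initialization happens-before any acquire load that
    // observes it.
    void push_front(int value) {
      Node* n = new Node{value, g_head.load(std::memory_order_relaxed)};
      g_head.store(n, std::memory_order_release);
    }

    // Reader: the analogue of Atomic::load_acquire(&_exception_cache).
    Node* head_acquire() {
      return g_head.load(std::memory_order_acquire);
    }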
128 void CompiledMethod::add_exception_cache_entry(ExceptionCache* new_entry) {
129   assert(ExceptionCache_lock->owned_by_self(),"Must hold the ExceptionCache_lock");
130   assert(new_entry != NULL,"Must be non null");
131   assert(new_entry->next() == NULL, "Must be null");
132 
133   for (;;) {
134     ExceptionCache *ec = exception_cache();
135     if (ec != NULL) {
136       Klass* ex_klass = ec->exception_type();

302   return new ScopeDesc(this, pd);
303 }
304 
305 address CompiledMethod::oops_reloc_begin() const {
306   // If the method is not entrant or zombie then a JMP is plastered over the
307   // first few bytes.  If an oop in the old code was there, that oop
308   // should not get GC'd.  Skip the first few bytes of oops on
309   // not-entrant methods.
310   if (frame_complete_offset() != CodeOffsets::frame_never_safe &&
311       code_begin() + frame_complete_offset() >
312       verified_entry_point() + NativeJump::instruction_size)
313   {
314     // If we have a frame_complete_offset after the native jump, then there
315     // is no point trying to look for oops before that. This is a requirement
316     // for being allowed to scan oops concurrently.
317     return code_begin() + frame_complete_offset();
318   }
319 
 320   // It is not safe to read oops concurrently using entry barriers if their
 321   // location depends on whether the nmethod is entrant or not.
322   assert(BarrierSet::barrier_set()->barrier_set_nmethod() == NULL, "Not safe oop scan");
323 
324   address low_boundary = verified_entry_point();
325   if (!is_in_use() && is_nmethod()) {
326     low_boundary += NativeJump::instruction_size;
327     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
328     // This means that the low_boundary is going to be a little too high.
329     // This shouldn't matter, since oops of non-entrant methods are never used.
330     // In fact, why are we bothering to look at oops in a non-entrant method??
331   }
332   return low_boundary;
333 }
334 
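The boundary logic above exists because making a method not entrant patches a jump over the verified entry point, so any oop immediates in those first bytes are gone and must not be visited. Stripped of the frame_complete special case, the computation is plain pointer arithmetic; a sketch with an illustrative jump size (the real value is NativeJump::instruction_size):

    typedef unsigned char* address;

    const int kPatchedJumpSize = 5;  // illustrative: x86 jmp rel32 is 5 bytes

    // Oops living in the bytes overwritten by the not-entrant jump must not
    // be visited, so the embedded-oop scan starts past them when not in use.
    address oops_scan_start(address verified_entry, bool in_use) {
      return in_use ? verified_entry : verified_entry + kPatchedJumpSize;
    }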
335 int CompiledMethod::verify_icholder_relocations() {
336   ResourceMark rm;
337   int count = 0;
338 
339   RelocIterator iter(this);
340   while(iter.next()) {
341     if (iter.type() == relocInfo::virtual_call_type) {
342       if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc(), this)) {
343         CompiledIC *ic = CompiledIC_at(&iter);
344         if (TraceCompiledIC) {
345           tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
346           ic->print();
347         }
348         assert(ic->cached_icholder() != NULL, "must be non-NULL");
349         count++;
350       }
351     }
352   }
353 
354   return count;
355 }
356 
 357 // Method that knows how to preserve outgoing arguments at a call. It must be
 358 // called with a frame corresponding to a Java invoke.
359 void CompiledMethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
360   if (method() != NULL && !method()->is_native()) {
361     address pc = fr.pc();
362     SimpleScopeDesc ssd(this, pc);
363     if (ssd.is_optimized_linkToNative()) return; // call was replaced
364     Bytecode_invoke call(methodHandle(Thread::current(), ssd.method()), ssd.bci());
365     bool has_receiver = call.has_receiver();
366     bool has_appendix = call.has_appendix();
367     Symbol* signature = call.signature();
368 
369     // The method attached by JIT-compilers should be used, if present.
 370     // Bytecode can be inaccurate in that case.
371     Method* callee = attached_method_before_pc(pc);
372     if (callee != NULL) {
373       has_receiver = !(callee->access_flags().is_static());
374       has_appendix = false;
375       signature = callee->signature();
376     }
377 
 378     fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);

379   }
380 }
381 
382 Method* CompiledMethod::attached_method(address call_instr) {
383   assert(code_contains(call_instr), "not part of the nmethod");
384   RelocIterator iter(this, call_instr, call_instr + 1);
385   while (iter.next()) {
386     if (iter.addr() == call_instr) {
387       switch(iter.type()) {
388         case relocInfo::static_call_type:      return iter.static_call_reloc()->method_value();
389         case relocInfo::opt_virtual_call_type: return iter.opt_virtual_call_reloc()->method_value();
390         case relocInfo::virtual_call_type:     return iter.virtual_call_reloc()->method_value();
391         default:                               break;
392       }
393     }
394   }
395   return NULL; // not found
396 }
397 
398 Method* CompiledMethod::attached_method_before_pc(address pc) {

566   if (!cleanup_inline_caches_impl(unloading_occurred, false)) {
567     return false;
568   }
569 
570 #ifdef ASSERT
571   // Check that the metadata embedded in the nmethod is alive
572   CheckClass check_class;
573   metadata_do(&check_class);
574 #endif
575   return true;
576 }
577 
578 void CompiledMethod::run_nmethod_entry_barrier() {
579   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
580   if (bs_nm != NULL) {
 581     // We want to keep an invariant that nmethods found through iteration of a Thread's
 582     // stack at safepoints have gone through an entry barrier and are not armed.
 583     // Running this nmethod entry barrier here makes the nmethod behave
 584     // like any other nmethod found on the stack of a thread (fewer surprises).
585     nmethod* nm = as_nmethod_or_null();
586     if (nm != NULL) {
587       bool alive = bs_nm->nmethod_entry_barrier(nm);
588       assert(alive, "should be alive");
589     }
590   }
591 }
592 
593 void CompiledMethod::cleanup_inline_caches(bool clean_all) {
594   for (;;) {
595     ICRefillVerifier ic_refill_verifier;
596     { CompiledICLocker ic_locker(this);
597       if (cleanup_inline_caches_impl(false, clean_all)) {
598         return;
599       }
600     }
601     // Call this nmethod entry barrier from the sweeper.
602     run_nmethod_entry_barrier();
603     InlineCacheBuffer::refill_ic_stubs();
604   }
605 }
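cleanup_inline_caches_impl returns false when it exhausts the supply of IC stubs partway through a pass, so the loop above drops the CompiledICLocker, replenishes the stub buffer, and retries from scratch. The control flow is a generic attempt-or-refill retry loop; a hedged sketch with illustrative names, not HotSpot's API:

    #include <functional>

    // Shape of the loop above: attempt the whole operation; on resource
    // exhaustion (attempt returns false), replenish outside the lock, retry.
    void retry_until_done(const std::function<bool()>& attempt,
                          const std::function<void()>& refill) {
      for (;;) {
        if (attempt()) {
          return;          // finished without exhausting the resource
        }
        refill();          // like InlineCacheBuffer::refill_ic_stubs()
      }
    }

    // Usage sketch (names are illustrative):
    //   retry_until_done([&] { return try_clean_all_inline_caches(); },
    //                    [&] { refill_stub_buffer(); });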
 606 
 607 // Called to clean up after class unloading for live nmethods and, from the
 608 // sweeper, for all methods.
609 bool CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
610   assert(CompiledICLocker::is_safe(this), "mt unsafe call");
611   ResourceMark rm;
612 
613   // Find all calls in an nmethod and clear the ones that point to non-entrant,
614   // zombie and unloaded nmethods.
615   RelocIterator iter(this, oops_reloc_begin());
616   bool is_in_static_stub = false;
617   while(iter.next()) {
618 
619     switch (iter.type()) {
620 
621     case relocInfo::virtual_call_type:
622       if (unloading_occurred) {
623         // If class unloading occurred we first clear ICs where the cached metadata
624         // is referring to an unloaded klass or method.
625         if (!clean_ic_if_metadata_is_dead(CompiledIC_at(&iter))) {
626           return false;

src/hotspot/share/code/compiledMethod.cpp (new version)

 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "code/compiledIC.hpp"
 27 #include "code/compiledMethod.inline.hpp"
 28 #include "code/exceptionHandlerTable.hpp"
 29 #include "code/scopeDesc.hpp"
 30 #include "code/codeCache.hpp"
 31 #include "code/icBuffer.hpp"
 32 #include "gc/shared/barrierSet.hpp"
 33 #include "gc/shared/barrierSetNMethod.hpp"
 34 #include "gc/shared/gcBehaviours.hpp"
 35 #include "interpreter/bytecode.inline.hpp"
 36 #include "logging/log.hpp"
 37 #include "logging/logTag.hpp"
 38 #include "memory/resourceArea.hpp"
 39 #include "oops/compiledICHolder.inline.hpp"
 40 #include "oops/klass.inline.hpp"
 41 #include "oops/methodData.hpp"
 42 #include "oops/method.inline.hpp"
 43 #include "oops/weakHandle.inline.hpp"
 44 #include "prims/methodHandles.hpp"
 45 #include "runtime/atomic.hpp"
 46 #include "runtime/deoptimization.hpp"
 47 #include "runtime/frame.inline.hpp"
 48 #include "runtime/jniHandles.inline.hpp"
 49 #include "runtime/handles.inline.hpp"
 50 #include "runtime/mutexLocker.hpp"
 51 #include "runtime/sharedRuntime.hpp"
 52 
 53 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
 54                                int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
 55                                bool caller_must_gc_arguments, bool compiled)
 56   : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments, compiled),
 57     _mark_for_deoptimization_status(not_marked),
 58     _method(method),
 59     _gc_data(NULL)
 60 {
 61   init_defaults();
 62 }
 63 
 64 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, int size,
 65                                int header_size, CodeBuffer* cb, int frame_complete_offset, int frame_size,
 66                                OopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled)
 67   : CodeBlob(name, type, CodeBlobLayout((address) this, size, header_size, cb), cb,
 68              frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments, compiled),
 69     _mark_for_deoptimization_status(not_marked),
 70     _method(method),
 71     _gc_data(NULL)
 72 {
 73   init_defaults();
 74 }
 75 
 76 void CompiledMethod::init_defaults() {
 77   { // avoid uninitialized fields, even for short time periods
 78     _scopes_data_begin          = NULL;
 79     _deopt_handler_begin        = NULL;
 80     _deopt_mh_handler_begin     = NULL;
 81     _exception_cache            = NULL;
 82   }
 83   _has_unsafe_access          = 0;
 84   _has_method_handle_invokes  = 0;
 85   _has_wide_vectors           = 0;
 86   _has_monitors               = 0;
 87 }
 88 
 89 bool CompiledMethod::is_method_handle_return(address return_pc) {
 90   if (!has_method_handle_invokes())  return false;
 91   PcDesc* pd = pc_desc_at(return_pc);
 92   if (pd == NULL)
 93     return false;
 94   return pd->is_method_handle_invoke();
 95 }
 96 
 97 // Returns a string version of the method state.
 98 const char* CompiledMethod::state() const {
 99   int state = get_state();
100   switch (state) {
101   case not_installed:
102     return "not installed";
103   case in_use:
104     return "in use";
105   case not_used:
106     return "not_used";
107   case not_entrant:
108     return "not_entrant";
109   case zombie:
110     return "zombie";
111   case unloaded:
112     return "unloaded";
113   default:
114     fatal("unexpected method state: %d", state);
115     return NULL;
116   }
117 }
118 
119 //-----------------------------------------------------------------------------
120 void CompiledMethod::mark_for_deoptimization(bool inc_recompile_counts) {
121   // assert (can_be_deoptimized(), ""); // in some places we check before marking, in others not.
122   MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
123                  Mutex::_no_safepoint_check_flag);
124   _mark_for_deoptimization_status = (inc_recompile_counts ? deoptimize : deoptimize_noupdate);
125 }
126 
127 //-----------------------------------------------------------------------------
128 
129 ExceptionCache* CompiledMethod::exception_cache_acquire() const {
130   return Atomic::load_acquire(&_exception_cache);
131 }
132 
133 void CompiledMethod::add_exception_cache_entry(ExceptionCache* new_entry) {
134   assert(ExceptionCache_lock->owned_by_self(),"Must hold the ExceptionCache_lock");
135   assert(new_entry != NULL,"Must be non null");
136   assert(new_entry->next() == NULL, "Must be null");
137 
138   for (;;) {
139     ExceptionCache *ec = exception_cache();
140     if (ec != NULL) {
141       Klass* ex_klass = ec->exception_type();

307   return new ScopeDesc(this, pd);
308 }
309 
310 address CompiledMethod::oops_reloc_begin() const {
311   // If the method is not entrant or zombie then a JMP is plastered over the
312   // first few bytes.  If an oop in the old code was there, that oop
313   // should not get GC'd.  Skip the first few bytes of oops on
314   // not-entrant methods.
315   if (frame_complete_offset() != CodeOffsets::frame_never_safe &&
316       code_begin() + frame_complete_offset() >
317       verified_entry_point() + NativeJump::instruction_size)
318   {
319     // If we have a frame_complete_offset after the native jump, then there
320     // is no point trying to look for oops before that. This is a requirement
321     // for being allowed to scan oops concurrently.
322     return code_begin() + frame_complete_offset();
323   }
324 
 325   // It is not safe to read oops concurrently using entry barriers if their
 326   // location depends on whether the nmethod is entrant or not.
327   // assert(BarrierSet::barrier_set()->barrier_set_nmethod() == NULL, "Not safe oop scan");
328 
329   address low_boundary = verified_entry_point();
330   if (!is_in_use() && is_nmethod()) {
331     low_boundary += NativeJump::instruction_size;
332     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
333     // This means that the low_boundary is going to be a little too high.
334     // This shouldn't matter, since oops of non-entrant methods are never used.
335     // In fact, why are we bothering to look at oops in a non-entrant method??
336   }
337   return low_boundary;
338 }
339 
340 int CompiledMethod::verify_icholder_relocations() {
341   ResourceMark rm;
342   int count = 0;
343 
344   RelocIterator iter(this);
345   while(iter.next()) {
346     if (iter.type() == relocInfo::virtual_call_type) {
347       if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc(), this)) {
348         CompiledIC *ic = CompiledIC_at(&iter);
349         if (TraceCompiledIC) {
350           tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
351           ic->print();
352         }
353         assert(ic->cached_icholder() != NULL, "must be non-NULL");
354         count++;
355       }
356     }
357   }
358 
359   return count;
360 }
361 
 362 // Method that knows how to preserve outgoing arguments at a call. It must be
 363 // called with a frame corresponding to a Java invoke.
364 void CompiledMethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
365   if (method() != NULL) {
366     // handle the case of an anchor explicitly set in continuation code that doesn't have a callee
367     JavaThread* thread = reg_map->thread();
368     if (thread->has_last_Java_frame() && fr.sp() == thread->last_Java_sp()) {
369       // if (!method()->is_native()) fr.print_on(tty);
370       // assert (method()->is_native(), "");
 371       return;

372     }
373 
374     if (!method()->is_native()) {
375       address pc = fr.pc();
376       bool has_receiver, has_appendix;
377       Symbol* signature;
378 
379       // The method attached by JIT-compilers should be used, if present.
 380     // Bytecode can be inaccurate in that case.
381       Method* callee = attached_method_before_pc(pc);
382       if (callee != NULL) {
383         has_receiver = !(callee->access_flags().is_static());
384         has_appendix = false;
385         signature    = callee->signature();
386       } else {
387         SimpleScopeDesc ssd(this, pc);
388         if (ssd.is_optimized_linkToNative()) return; // call was replaced
389         Bytecode_invoke call(methodHandle(Thread::current(), ssd.method()), ssd.bci());
390         has_receiver = call.has_receiver();
391         has_appendix = call.has_appendix();
392         signature    = call.signature();
393       }
394 
395       fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
396     } else if (method()->is_continuation_enter_intrinsic()) {
397       // This method only calls Continuation.enter()
398       Symbol* signature = vmSymbols::continuationEnter_signature();
399       fr.oops_compiled_arguments_do(signature, false, false, reg_map, f);
400     }
401   }
402 }
403 
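The restructured method above needs three facts about the call site: whether it passes a receiver, whether it passes an appendix, and the callee's signature, because together these determine which outgoing argument slots can hold oops for oops_compiled_arguments_do to visit. A hedged sketch of the signature half of that job, walking a JVM method descriptor and classifying each parameter as reference or primitive; this is an illustration, not HotSpot's SignatureIterator:

    #include <cstdio>

    // Classify each parameter in a JVM method descriptor as a reference
    // (L...; or any array) or a primitive -- the slots an OopClosure would
    // care about. Long/double slot doubling is ignored for brevity.
    void report_reference_params(const char* sig, bool has_receiver) {
      int index = 0;
      if (has_receiver) {
        std::printf("param %d: receiver (reference)\n", index++);
      }
      const char* p = sig + 1;                     // skip '('
      while (*p != ')') {
        bool is_ref = false;
        while (*p == '[') { p++; is_ref = true; }  // arrays are references
        if (*p == 'L') {                           // object type: L...;
          is_ref = true;
          while (*p != ';') p++;
        }
        p++;                                       // past base char or ';'
        std::printf("param %d: %s\n", index++,
                    is_ref ? "reference" : "primitive");
      }
    }

    int main() {
      // int, String, long[] -> primitive, reference, reference
      report_reference_params("(ILjava/lang/String;[J)V", /*has_receiver=*/true);
      return 0;
    }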
404 Method* CompiledMethod::attached_method(address call_instr) {
405   assert(code_contains(call_instr), "not part of the nmethod");
406   RelocIterator iter(this, call_instr, call_instr + 1);
407   while (iter.next()) {
408     if (iter.addr() == call_instr) {
409       switch(iter.type()) {
410         case relocInfo::static_call_type:      return iter.static_call_reloc()->method_value();
411         case relocInfo::opt_virtual_call_type: return iter.opt_virtual_call_reloc()->method_value();
412         case relocInfo::virtual_call_type:     return iter.virtual_call_reloc()->method_value();
413         default:                               break;
414       }
415     }
416   }
417   return NULL; // not found
418 }
419 
420 Method* CompiledMethod::attached_method_before_pc(address pc) {

588   if (!cleanup_inline_caches_impl(unloading_occurred, false)) {
589     return false;
590   }
591 
592 #ifdef ASSERT
593   // Check that the metadata embedded in the nmethod is alive
594   CheckClass check_class;
595   metadata_do(&check_class);
596 #endif
597   return true;
598 }
599 
600 void CompiledMethod::run_nmethod_entry_barrier() {
601   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
602   if (bs_nm != NULL) {
 603     // We want to keep an invariant that nmethods found through iteration of a Thread's
 604     // stack at safepoints have gone through an entry barrier and are not armed.
 605     // Running this nmethod entry barrier here makes the nmethod behave
 606     // like any other nmethod found on the stack of a thread (fewer surprises).
607     nmethod* nm = as_nmethod_or_null();
608     if (nm != NULL && bs_nm->is_armed(nm)) {
609       bool alive = bs_nm->nmethod_entry_barrier(nm);
610       assert(alive, "should be alive");
611     }
612   }
613 }
614 
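Unlike the older version of this method, the barrier body above runs only when bs_nm->is_armed(nm) reports the nmethod as still armed, so repeated calls are cheap once the nmethod has been healed and disarmed. A hedged sketch of that check-then-disarm shape, using made-up stand-in types rather than nmethod and BarrierSetNMethod:

    #include <cassert>

    // Made-up stand-ins for nmethod and BarrierSetNMethod.
    struct FakeNMethod { bool armed = true; };

    struct FakeNMethodBarrier {
      bool is_armed(const FakeNMethod* nm) const { return nm->armed; }
      bool entry_barrier(FakeNMethod* nm) {
        // ... heal the nmethod's oops, then disarm so subsequent callers
        // take the cheap is_armed() fast path ...
        nm->armed = false;
        return true;  // "alive"
      }
    };

    void run_entry_barrier(FakeNMethodBarrier* bs, FakeNMethod* nm) {
      if (bs != nullptr && nm != nullptr && bs->is_armed(nm)) {
        bool alive = bs->entry_barrier(nm);
        assert(alive && "should be alive");
        (void)alive;
      }
    }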
615 void CompiledMethod::cleanup_inline_caches(bool clean_all) {
616   for (;;) {
617     ICRefillVerifier ic_refill_verifier;
618     { CompiledICLocker ic_locker(this);
619       if (cleanup_inline_caches_impl(false, clean_all)) {
620         return;
621       }
622     }
623     // Call this nmethod entry barrier from the sweeper.
624     run_nmethod_entry_barrier();
625     InlineCacheBuffer::refill_ic_stubs();
626   }
627 }
628 
629 address* CompiledMethod::orig_pc_addr(const frame* fr) {
630   return (address*) ((address)fr->unextended_sp() + orig_pc_offset());
631 }
632 
 633 // Called to clean up after class unloading for live nmethods and, from the
 634 // sweeper, for all methods.
635 bool CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
636   assert(CompiledICLocker::is_safe(this), "mt unsafe call");
637   ResourceMark rm;
638 
639   // Find all calls in an nmethod and clear the ones that point to non-entrant,
640   // zombie and unloaded nmethods.
641   RelocIterator iter(this, oops_reloc_begin());
642   bool is_in_static_stub = false;
643   while(iter.next()) {
644 
645     switch (iter.type()) {
646 
647     case relocInfo::virtual_call_type:
648       if (unloading_occurred) {
649         // If class unloading occurred we first clear ICs where the cached metadata
650         // is referring to an unloaded klass or method.
651         if (!clean_ic_if_metadata_is_dead(CompiledIC_at(&iter))) {
652           return false;