
src/hotspot/share/code/compiledMethod.cpp

@@ -38,33 +38,36 @@
  #include "memory/resourceArea.hpp"
  #include "oops/compiledICHolder.inline.hpp"
  #include "oops/klass.inline.hpp"
  #include "oops/methodData.hpp"
  #include "oops/method.inline.hpp"
+ #include "oops/weakHandle.inline.hpp"
  #include "prims/methodHandles.hpp"
  #include "runtime/atomic.hpp"
  #include "runtime/deoptimization.hpp"
+ #include "runtime/frame.inline.hpp"
+ #include "runtime/jniHandles.inline.hpp"
  #include "runtime/handles.inline.hpp"
  #include "runtime/mutexLocker.hpp"
  #include "runtime/sharedRuntime.hpp"
  
  CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
                                 int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
-                                bool caller_must_gc_arguments)
-   : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
+                                bool caller_must_gc_arguments, bool compiled)
+   : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments, compiled),
      _mark_for_deoptimization_status(not_marked),
      _method(method),
      _gc_data(NULL)
  {
    init_defaults();
  }
  
  CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, int size,
                                 int header_size, CodeBuffer* cb, int frame_complete_offset, int frame_size,
-                                OopMapSet* oop_maps, bool caller_must_gc_arguments)
+                                OopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled)
    : CodeBlob(name, type, CodeBlobLayout((address) this, size, header_size, cb), cb,
-              frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
+              frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments, compiled),
      _mark_for_deoptimization_status(not_marked),
      _method(method),
      _gc_data(NULL)
  {
    init_defaults();

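For context on the constructor changes above: the new boolean parameter compiled is simply threaded through to the CodeBlob base class, which records it. Below is a minimal, self-contained sketch of that forwarding pattern; the class and member names are simplified stand-ins for illustration, not the real HotSpot declarations.

    #include <cstdio>

    // Simplified stand-ins for CodeBlob/CompiledMethod, showing only how the
    // 'compiled' flag is forwarded from the subclass to the base class.
    class CodeBlobSketch {
      bool _is_compiled;  // assumption: the base class stores the flag
    public:
      explicit CodeBlobSketch(bool compiled) : _is_compiled(compiled) {}
      bool is_compiled() const { return _is_compiled; }
    };

    class CompiledMethodSketch : public CodeBlobSketch {
    public:
      // Mirrors the diff: each CompiledMethod constructor passes 'compiled'
      // straight down to the CodeBlob constructor.
      explicit CompiledMethodSketch(bool compiled) : CodeBlobSketch(compiled) {}
    };

    int main() {
      CompiledMethodSketch m(true);
      printf("is_compiled: %d\n", m.is_compiled());
      return 0;
    }
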
@@ -78,10 +81,11 @@
      _exception_cache            = NULL;
    }
    _has_unsafe_access          = 0;
    _has_method_handle_invokes  = 0;
    _has_wide_vectors           = 0;
+   _has_monitors               = 0;
  }
  
  bool CompiledMethod::is_method_handle_return(address return_pc) {
    if (!has_method_handle_invokes())  return false;
    PcDesc* pd = pc_desc_at(return_pc);

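The new _has_monitors flag added to init_defaults() follows the same one-bit pattern as the neighboring _has_unsafe_access and _has_wide_vectors fields. A hedged sketch of that bit-flag idiom follows; the accessor names are assumptions modeled on the existing flags, and the real declarations live in compiledMethod.hpp.

    #include <cstdio>

    // Sketch of the one-bit _has_* flag idiom used by CompiledMethod.
    class FlagsSketch {
      unsigned int _has_unsafe_access:1;
      unsigned int _has_method_handle_invokes:1;
      unsigned int _has_wide_vectors:1;
      unsigned int _has_monitors:1;   // the flag added in this change
    public:
      FlagsSketch()
        : _has_unsafe_access(0), _has_method_handle_invokes(0),
          _has_wide_vectors(0), _has_monitors(0) {}
      // Assumed accessor pair, modeled on the existing flags.
      bool has_monitors() const { return _has_monitors; }
      void set_has_monitors(bool z) { _has_monitors = z; }
    };

    int main() {
      FlagsSketch f;
      f.set_has_monitors(true);   // e.g. when the compiled code contains monitorenter
      printf("has_monitors: %d\n", f.has_monitors());
      return 0;
    }
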
@@ -112,10 +116,11 @@
    }
  }
  
  //-----------------------------------------------------------------------------
  void CompiledMethod::mark_for_deoptimization(bool inc_recompile_counts) {
+   // assert(can_be_deoptimized(), ""); // some callers check can_be_deoptimized() before marking, others do not.
    MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
                   Mutex::_no_safepoint_check_flag);
    _mark_for_deoptimization_status = (inc_recompile_counts ? deoptimize : deoptimize_noupdate);
  }
  

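The locking line in mark_for_deoptimization() uses a common HotSpot idiom: when the current thread already owns CompiledMethod_lock, NULL is passed to MutexLocker, which then does nothing, making the critical section effectively reentrant. A self-contained toy sketch of the idiom follows (single-threaded stand-ins, not the real Mutex/MutexLocker):

    // Toy lock that remembers whether it is held (single-threaded model).
    struct SketchMutex {
      bool _locked = false;
      bool owned_by_self() const { return _locked; }
      void lock()   { _locked = true; }
      void unlock() { _locked = false; }
    };

    // Scoped locker that is a no-op when handed a null lock.
    struct SketchMutexLocker {
      SketchMutex* _m;
      explicit SketchMutexLocker(SketchMutex* m) : _m(m) { if (_m) _m->lock(); }
      ~SketchMutexLocker() { if (_m) _m->unlock(); }
    };

    SketchMutex CompiledMethod_lock_sketch;

    void mark_sketch() {
      // Same shape as the diff: skip locking if this thread already holds it.
      SketchMutexLocker ml(CompiledMethod_lock_sketch.owned_by_self()
                           ? nullptr : &CompiledMethod_lock_sketch);
      // ... update _mark_for_deoptimization_status under the lock ...
    }

    int main() {
      mark_sketch();                        // acquires and releases the lock
      CompiledMethod_lock_sketch.lock();
      mark_sketch();                        // already owned: the locker is a no-op
      CompiledMethod_lock_sketch.unlock();
      return 0;
    }
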
@@ -317,11 +322,11 @@
      return code_begin() + frame_complete_offset();
    }
  
  // It is not safe to read oops concurrently using entry barriers if their
  // location depends on whether the nmethod is entrant or not.
-   assert(BarrierSet::barrier_set()->barrier_set_nmethod() == NULL, "Not safe oop scan");
+   // assert(BarrierSet::barrier_set()->barrier_set_nmethod() == NULL, "Not safe oop scan");
  
    address low_boundary = verified_entry_point();
    if (!is_in_use() && is_nmethod()) {
      low_boundary += NativeJump::instruction_size;
      // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.

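For context on the boundary computation above: making an nmethod not-entrant patches a jump over its verified entry point, so a scan of the code must start past the patched bytes. A toy sketch of that shift follows; the instruction size here is made up, while the real value is the platform-specific NativeJump::instruction_size.

    #include <cstdio>

    typedef unsigned char* address;

    const int kJumpPatchSize = 5;  // made-up; the real size is platform-specific

    address scan_low_boundary(address verified_entry, bool in_use, bool is_nmethod) {
      address low = verified_entry;
      if (!in_use && is_nmethod) {
        // Not-entrant nmethods have a jump patched at the entry; skip over it.
        low += kJumpPatchSize;
      }
      return low;
    }

    int main() {
      unsigned char code[32] = {0};
      printf("delta = %td\n", scan_low_boundary(code, false, true) - code);
      return 0;
    }
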
@@ -355,29 +360,46 @@
  }
  
  // Method that knows how to preserve outgoing arguments at call. This method must be
  // called with a frame corresponding to a Java invoke
  void CompiledMethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
-   if (method() != NULL && !method()->is_native()) {
-     address pc = fr.pc();
-     SimpleScopeDesc ssd(this, pc);
-     if (ssd.is_optimized_linkToNative()) return; // call was replaced
-     Bytecode_invoke call(methodHandle(Thread::current(), ssd.method()), ssd.bci());
-     bool has_receiver = call.has_receiver();
-     bool has_appendix = call.has_appendix();
-     Symbol* signature = call.signature();
- 
-     // The method attached by JIT-compilers should be used, if present.
-     // Bytecode can be inaccurate in such case.
-     Method* callee = attached_method_before_pc(pc);
-     if (callee != NULL) {
-       has_receiver = !(callee->access_flags().is_static());
-       has_appendix = false;
-       signature = callee->signature();
+   if (method() != NULL) {
+     // handle the case of an anchor explicitly set in continuation code that doesn't have a callee
+     JavaThread* thread = reg_map->thread();
+     if (thread->has_last_Java_frame() && fr.sp() == thread->last_Java_sp()) {
+       // no callee arguments to preserve for such a frame
+       return;
      }
  
-     fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
+     if (!method()->is_native()) {
+       address pc = fr.pc();
+       bool has_receiver, has_appendix;
+       Symbol* signature;
+ 
+       // The method attached by JIT-compilers should be used, if present.
+       // Bytecode can be inaccurate in such a case.
+       Method* callee = attached_method_before_pc(pc);
+       if (callee != NULL) {
+         has_receiver = !(callee->access_flags().is_static());
+         has_appendix = false;
+         signature    = callee->signature();
+       } else {
+         SimpleScopeDesc ssd(this, pc);
+         if (ssd.is_optimized_linkToNative()) return; // call was replaced
+         Bytecode_invoke call(methodHandle(Thread::current(), ssd.method()), ssd.bci());
+         has_receiver = call.has_receiver();
+         has_appendix = call.has_appendix();
+         signature    = call.signature();
+       }
+ 
+       fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
+     } else if (method()->is_continuation_enter_intrinsic()) {
+       // This method only calls Continuation.enter()
+       Symbol* signature = vmSymbols::continuationEnter_signature();
+       fr.oops_compiled_arguments_do(signature, false, false, reg_map, f);
+     }
    }
  }
  
  Method* CompiledMethod::attached_method(address call_instr) {
    assert(code_contains(call_instr), "not part of the nmethod");

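The rewritten body above chooses the outgoing-argument shape from one of two sources: the callee Method* that the JIT attached at the call site (exact; receiver iff non-static, never an appendix), falling back to decoding the invoke bytecode, which can be less precise. A simplified, self-contained sketch of that decision shape follows; every type and function here is a hypothetical stand-in.

    #include <cstdio>

    // Hypothetical summary of what either source tells us about the call.
    struct CallShape {
      bool        has_receiver;
      bool        has_appendix;
      const char* signature;
    };

    // Pretend lookup of a JIT-attached callee at a call pc; false if none.
    bool attached_callee_at(const void* pc, bool* is_static, const char** sig) {
      (void)pc; (void)is_static; (void)sig;
      return false;  // no attached method in this sketch
    }

    // Pretend decoding of the invoke bytecode at the caller's bci.
    CallShape decode_invoke_bytecode() {
      return { /*has_receiver=*/true, /*has_appendix=*/false, "(I)V" };
    }

    CallShape resolve_call_shape(const void* pc) {
      bool is_static; const char* sig;
      if (attached_callee_at(pc, &is_static, &sig)) {
        // Attached method wins: receiver iff non-static, never an appendix.
        return { !is_static, false, sig };
      }
      return decode_invoke_bytecode();  // fallback, possibly less accurate
    }

    int main() {
      CallShape cs = resolve_call_shape(nullptr);
      printf("receiver=%d appendix=%d sig=%s\n",
             cs.has_receiver, cs.has_appendix, cs.signature);
      return 0;
    }
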
@@ -581,11 +603,11 @@
    // We want to maintain the invariant that nmethods found by iterating a Thread's
    // frames at safepoints have gone through an entry barrier and are not armed.
    // By calling the nmethod entry barrier here, this nmethod plays along and acts
    // like any other nmethod found on the stack of a thread (fewer surprises).
      nmethod* nm = as_nmethod_or_null();
-     if (nm != NULL) {
+     if (nm != NULL && bs_nm->is_armed(nm)) {
        bool alive = bs_nm->nmethod_entry_barrier(nm);
        assert(alive, "should be alive");
      }
    }
  }

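The added is_armed() guard means the entry barrier is only executed for nmethods that have not already been disarmed by a previous barrier hit. A toy model of that arm/disarm protocol follows, assuming the usual semantics that a successful barrier disarms the nmethod; all names are simplified stand-ins.

    #include <cassert>
    #include <cstdio>

    // Toy model: an nmethod stays "armed" until its entry barrier has run.
    struct NMethodSketch { bool armed = true; };

    struct BarrierSketch {
      bool is_armed(const NMethodSketch* nm) const { return nm->armed; }
      // Returns true if the nmethod is still usable; disarms as a side effect.
      bool nmethod_entry_barrier(NMethodSketch* nm) {
        // ... a real barrier would fix oops / bump epochs here ...
        nm->armed = false;
        return true;
      }
    };

    void run_entry_barrier(BarrierSketch* bs_nm, NMethodSketch* nm) {
      // Same shape as the diff: skip the barrier when already disarmed.
      if (nm != nullptr && bs_nm->is_armed(nm)) {
        bool alive = bs_nm->nmethod_entry_barrier(nm);
        assert(alive && "should be alive");
        (void)alive;
      }
    }

    int main() {
      BarrierSketch bs;
      NMethodSketch nm;
      run_entry_barrier(&bs, &nm);   // runs the barrier and disarms
      run_entry_barrier(&bs, &nm);   // no-op: already disarmed
      printf("armed=%d\n", nm.armed);
      return 0;
    }
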
@@ -602,10 +624,14 @@
      run_nmethod_entry_barrier();
      InlineCacheBuffer::refill_ic_stubs();
    }
  }
  
+ address* CompiledMethod::orig_pc_addr(const frame* fr) {
+   return (address*) ((address)fr->unextended_sp() + orig_pc_offset());
+ }
+ 
  // Called to clean up after class unloading for live nmethods and from the sweeper
  // for all methods.
  bool CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
    assert(CompiledICLocker::is_safe(this), "mt unsafe call");
    ResourceMark rm;
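On the new orig_pc_addr() helper added above: it returns the address of the stack slot, at a fixed per-nmethod offset from the frame's unextended SP, where the original return pc is stashed when a frame is patched for deoptimization. A standalone sketch of the pointer arithmetic follows; the offset value and frame layout are made up for illustration.

    #include <cstdio>

    typedef unsigned char* address;

    // Pretend frame: just an unextended SP pointing into a fake stack area.
    struct FrameSketch {
      address _unextended_sp;
      address unextended_sp() const { return _unextended_sp; }
    };

    const int kOrigPcOffset = 16;  // made-up; the real offset is per-nmethod

    // Same arithmetic as the diff: slot address = unextended_sp + offset.
    address* orig_pc_addr_sketch(const FrameSketch* fr) {
      return (address*)(fr->unextended_sp() + kOrigPcOffset);
    }

    int main() {
      alignas(alignof(address)) unsigned char fake_stack[64] = {0};
      FrameSketch fr = { fake_stack };
      // A deopt patcher would store the original return pc into the slot...
      *orig_pc_addr_sketch(&fr) = (address)0x1234;
      // ...and the stack walker later reads it back from the same slot.
      printf("orig pc = %p\n", (void*)*orig_pc_addr_sketch(&fr));
      return 0;
    }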