
src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp

@@ -26,10 +26,11 @@
  #ifndef _WINDOWS
  #include "alloca.h"
  #endif
  #include "asm/macroAssembler.hpp"
  #include "asm/macroAssembler.inline.hpp"
+ #include "code/compiledIC.hpp"
  #include "code/debugInfoRec.hpp"
  #include "code/icBuffer.hpp"
  #include "code/nativeInst.hpp"
  #include "code/vtableStubs.hpp"
  #include "compiler/oopMap.hpp"

@@ -940,10 +941,12 @@
          __ movdbl(r_1->as_XMMRegister(), Address(saved_sp, next_off));
        }
      }
    }
  
+   __ push_cont_fastpath(r15_thread); // Set JavaThread::_cont_fastpath to the sp of the oldest interpreted frame we know about
+ 
    // 6243940 We might end up in handle_wrong_method if
    // the callee is deoptimized as we race thru here. If that
    // happens we don't want to take a safepoint because the
    // caller frame will look interpreted and arguments are now
    // "compiled" so it is much better to make this transition

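The __ push_cont_fastpath(r15_thread) call added above maintains JavaThread::_cont_fastpath,
the watermark the continuation freeze code is assumed to consult when deciding whether an
interpreted frame sits on the stack being frozen. A minimal sketch of what the macro
assembler might emit for it (the real implementation lives in macroAssembler_x86.cpp and
may differ):

// Sketch only, assuming _cont_fastpath holds the sp of the oldest interpreted
// frame recorded so far (or 0 if none).
void MacroAssembler::push_cont_fastpath(Register java_thread) {
  Label done;
  cmpptr(rsp, Address(java_thread, JavaThread::cont_fastpath_offset()));
  jccb(Assembler::belowEqual, done);  // keep the stored sp if it is already at or above rsp
  movptr(Address(java_thread, JavaThread::cont_fastpath_offset()), rsp);
  bind(done);
}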
@@ -1468,10 +1471,106 @@
        }
      }
    }
  }
  
+ // defined in stubGenerator_x86_64.cpp
+ OopMap* continuation_enter_setup(MacroAssembler* masm, int& stack_slots);
+ void fill_continuation_entry(MacroAssembler* masm);
+ void continuation_enter_cleanup(MacroAssembler* masm);
+ 
+ // enterSpecial(Continuation c, boolean isContinue)
+ // On entry: c_rarg1 -- the continuation object
+ //           c_rarg2 -- isContinue
+ static void gen_continuation_enter(MacroAssembler* masm,
+                                  const methodHandle& method,
+                                  const BasicType* sig_bt,
+                                  const VMRegPair* regs,
+                                  int& exception_offset,
+                                  OopMapSet* oop_maps,
+                                  int& frame_complete,
+                                  int& stack_slots) {
+   //verify_oop_args(masm, method, sig_bt, regs);
+   AddressLiteral resolve(SharedRuntime::get_resolve_static_call_stub(),
+                          relocInfo::static_call_type);
+ 
+   stack_slots = 2; // will be overwritten
+   address start = __ pc();
+ 
+   Label call_thaw, exit;
+ 
+   __ push(rbp);
+ 
+   //BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+   //bs->nmethod_entry_barrier(masm);
+   OopMap* map = continuation_enter_setup(masm, stack_slots);  // kills rax
+ 
+   // Frame is now completed as far as size and linkage.
+   frame_complete = __ pc() - start;
+   // if isContinue == 0
+   //   _enterSP = sp
+   // end
+  
+   fill_continuation_entry(masm); // kills rax
+ 
+   __ cmpl(c_rarg2, 0);
+   __ jcc(Assembler::notEqual, call_thaw);
+ 
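+   // Align the 32-bit displacement of the call below on a 4-byte boundary
+   // (the call is 5 bytes: 0xE8 opcode + rel32), so the target can be patched
+   // atomically when the static call is resolved.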
+   int up = align_up((intptr_t) __ pc() + 1, 4) - (intptr_t) (__ pc() + 1);
+   if (up > 0) {
+     __ nop(up);
+   }
+ 
+   address mark = __ pc();
+   __ call(resolve);
+   oop_maps->add_gc_map(__ pc() - start, map);
+   __ post_call_nop();
+ 
+   __ jmp(exit);
+ 
+   __ bind(call_thaw);
+ 
+   __ movptr(rbx, (intptr_t) StubRoutines::cont_thaw());
+   __ call(rbx);
+   oop_maps->add_gc_map(__ pc() - start, map->deep_copy());
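+   // __ pc() is now the return address of the thaw call; record its offset,
+   // presumably so the runtime can locate this pc once the nmethod is installed
+   // (see ContinuationEntry::set_enter_nmethod further down).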
+   ContinuationEntry::return_pc_offset = __ pc() - start;
+   __ post_call_nop();
+ 
+   __ bind(exit);
+   continuation_enter_cleanup(masm);
+   __ pop(rbp);
+   __ ret(0);
+ 
+   /// exception handling
+ 
+   exception_offset = __ pc() - start;
+ 
+   continuation_enter_cleanup(masm);
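+   // Skip over the rbp slot pushed on entry; the caller's return address
+   // (used below as the throwing pc) is now at the top of the stack.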
+   __ addptr(rsp, 1*wordSize);
+ 
+   __ movptr(rbx, rax); // save the exception
+   __ movptr(c_rarg0, Address(rsp, 0));
+ 
+   __ call_VM_leaf(CAST_FROM_FN_PTR(address,
+         SharedRuntime::exception_handler_for_return_address),
+       r15_thread, c_rarg0);
+   __ mov(rdi, rax);
+   __ movptr(rax, rbx);
+   __ mov(rbx, rdi);
+   __ pop(rdx);
+ 
+   // continue at exception handler (return address removed)
+   // rax: exception
+   // rbx: exception handler
+   // rdx: throwing pc
+   __ verify_oop(rax);
+   __ jmp(rbx);
+ 
+   CodeBuffer* cbuf = masm->code_section()->outer();
+   address stub = CompiledStaticCall::emit_to_interp_stub(*cbuf, mark);
+ }
+ 
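Taken together, gen_continuation_enter above emits code that behaves roughly like the
pseudo-C++ below. The three continuation_* helpers are the ones declared before the
function (defined in stubGenerator_x86_64.cpp); the call_* names and the eventual target
of the patchable static call are placeholders for things not visible in this diff.

// Pseudo-code sketch of the generated enterSpecial stub (not real code).
static void enterSpecial_stub(oop continuation /* c_rarg1 */, bool isContinue /* c_rarg2 */) {
  // rbp has just been pushed by the stub prologue.
  continuation_enter_setup();     // reserve and link a ContinuationEntry frame
  fill_continuation_entry();      // record the continuation oop, parent entry, etc.
  if (!isContinue) {
    call_static_resolve_stub();   // first entry: patchable static call into Java code
  } else {
    call_cont_thaw_stub();        // re-entry: StubRoutines::cont_thaw() thaws the youngest frozen frames
  }
  continuation_enter_cleanup();   // unlink the ContinuationEntry, pop rbp, return
  // The exception path runs the same cleanup and then jumps to the handler returned
  // by SharedRuntime::exception_handler_for_return_address().
}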
  static void gen_special_dispatch(MacroAssembler* masm,
                                   const methodHandle& method,
                                   const BasicType* sig_bt,
                                   const VMRegPair* regs) {
    verify_oop_args(masm, method, sig_bt, regs);

@@ -1550,10 +1649,41 @@
                                                  int compile_id,
                                                  BasicType* in_sig_bt,
                                                  VMRegPair* in_regs,
                                                  BasicType ret_type,
                                                  address critical_entry) {
+   if (method->is_continuation_enter_intrinsic()) {
+     vmIntrinsics::ID iid = method->intrinsic_id();
+     intptr_t start = (intptr_t)__ pc();
+     int vep_offset = ((intptr_t)__ pc()) - start;
+     int exception_offset = 0;
+     int frame_complete = 0;
+     int stack_slots = 0;
+     OopMapSet* oop_maps = new OopMapSet();
+     gen_continuation_enter(masm,
+                          method,
+                          in_sig_bt,
+                          in_regs,
+                          exception_offset,
+                          oop_maps,
+                          frame_complete,
+                          stack_slots);
+     __ flush();
+     nmethod* nm = nmethod::new_native_nmethod(method,
+                                               compile_id,
+                                               masm->code(),
+                                               vep_offset,
+                                               frame_complete,
+                                               stack_slots,
+                                               in_ByteSize(-1),
+                                               in_ByteSize(-1),
+                                               oop_maps,
+                                               exception_offset);
+     ContinuationEntry::set_enter_nmethod(nm);
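+     // Recording the nmethod lets the continuation runtime identify the enterSpecial
+     // code (and its entry frames) later on.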
+     return nm;
+   }
+ 
    if (method->is_method_handle_intrinsic()) {
      vmIntrinsics::ID iid = method->intrinsic_id();
      intptr_t start = (intptr_t)__ pc();
      int vep_offset = ((intptr_t)__ pc()) - start;
      gen_special_dispatch(masm,

@@ -1571,10 +1701,11 @@
                                         stack_slots / VMRegImpl::slots_per_word,
                                         in_ByteSize(-1),
                                         in_ByteSize(-1),
                                         (OopMapSet*)NULL);
    }
+ 
    bool is_critical_native = true;
    address native_func = critical_entry;
    if (native_func == NULL) {
      native_func = method->native_function();
      is_critical_native = false;

@@ -2106,12 +2237,12 @@
      // Save the test result, for recursive case, the result is zero
      __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
      __ jcc(Assembler::notEqual, slow_path_lock);
  
      // Slow path will re-enter here
- 
      __ bind(lock_done);
+     // __ inc_held_monitor_count(r15_thread);
    }
  
    // Finally just about ready to make the JNI call
  
    // get JNIEnv* which is first argument to native

@@ -2251,11 +2382,11 @@
      if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
        restore_native_result(masm, ret_type, stack_slots);
      }
  
      __ bind(done);
- 
+     // __ dec_held_monitor_count(r15_thread);
    }
    {
      SkipIfEqual skip(masm, &DTraceMethodProbes, false);
      save_native_result(masm, ret_type, stack_slots);
      __ mov_metadata(c_rarg1, method());

@@ -3031,11 +3162,11 @@
  
    // The following is basically a call_VM.  However, we need the precise
    // address of the call in order to generate an oopmap. Hence, we do all the
  // work ourselves.
  
-   __ set_last_Java_frame(noreg, noreg, NULL);
+   __ set_last_Java_frame(noreg, noreg, NULL);  // JavaFrameAnchor::capture_last_Java_pc() will get the pc from the return address, which we store next:
  
    // The return address must always be correct so that frame constructor never
    // sees an invalid pc.
  
    if (!cause_return) {

@@ -3179,10 +3310,12 @@
    int start = __ offset();
  
    // No need to save vector registers since they are caller-saved anyway.
    map = RegisterSaver::save_live_registers(masm, 0, &frame_size_in_words, /*save_vectors*/ false);
  
+   // __ stop_if_in_cont(r10, "CONT 3");
+ 
    int frame_complete = __ offset();
  
    __ set_last_Java_frame(noreg, noreg, NULL);
  
    __ mov(c_rarg0, r15_thread);