< prev index next >

src/hotspot/cpu/aarch64/continuationHelper_aarch64.inline.hpp

Print this page

 35 template<typename FKind>
 36 static inline intptr_t** link_address(const frame& f) {
 37   assert(FKind::is_instance(f), "");
 38   return FKind::interpreted
 39             ? (intptr_t**)(f.fp() + frame::link_offset)
 40             : (intptr_t**)(f.unextended_sp() + f.cb()->frame_size() - frame::sender_sp_offset);
 41 }
 42 
 43 static inline void patch_return_pc_with_preempt_stub(frame& f) {
 44   if (f.is_runtime_frame()) {
 45     // Unlike x86 we don't know where in the callee frame the return pc is
 46     // saved so we can't patch the return from the VM call back to Java.
 47     // Instead, we will patch the return from the runtime stub back to the
 48     // compiled method so that the target returns to the preempt cleanup stub.
 49     intptr_t* caller_sp = f.sp() + f.cb()->frame_size();
 50     caller_sp[-1] = (intptr_t)StubRoutines::cont_preempt_stub();
 51   } else {
 52     // The target will check for preemption once it returns to the interpreter
 53     // or the native wrapper code and will manually jump to the preempt stub.
 54     JavaThread *thread = JavaThread::current();



 55     thread->set_preempt_alternate_return(StubRoutines::cont_preempt_stub());
 56   }
 57 }
 58 
 59 inline int ContinuationHelper::frame_align_words(int size) {
 60 #ifdef _LP64
 61   return size & 1;
 62 #else
 63   return 0;
 64 #endif
 65 }
 66 
 67 inline intptr_t* ContinuationHelper::frame_align_pointer(intptr_t* sp) {
 68 #ifdef _LP64
 69   sp = align_down(sp, frame::frame_alignment);
 70 #endif
 71   return sp;
 72 }
 73 
 74 template<typename FKind>

// Returns the address of the saved-link (saved FP) slot for frame f,
// whose kind is given statically by FKind.
template<typename FKind>
static inline intptr_t** link_address(const frame& f) {
  assert(FKind::is_instance(f), "");
  return FKind::interpreted
            // Interpreted frame: link is at a fixed offset from fp.
            ? (intptr_t**)(f.fp() + frame::link_offset)
            // Compiled/stub frame: derive the slot from the blob's fixed
            // frame size relative to the unextended sp.
            : (intptr_t**)(f.unextended_sp() + f.cb()->frame_size() - frame::sender_sp_offset);
}
 42 
// Arrange for the thread to enter the continuation-preempt cleanup stub
// when the current VM call returns, instead of resuming the caller directly.
static inline void patch_return_pc_with_preempt_stub(frame& f) {
  if (f.is_runtime_frame()) {
    // Unlike x86 we don't know where in the callee frame the return pc is
    // saved so we can't patch the return from the VM call back to Java.
    // Instead, we will patch the return from the runtime stub back to the
    // compiled method so that the target returns to the preempt cleanup stub.
    intptr_t* caller_sp = f.sp() + f.cb()->frame_size();
    // NOTE(review): assumes the return pc occupies the word immediately
    // below the caller's sp on aarch64 — confirm against the frame layout.
    caller_sp[-1] = (intptr_t)StubRoutines::cont_preempt_stub();
  } else {
    // The target will check for preemption once it returns to the interpreter
    // or the native wrapper code and will manually jump to the preempt stub.
    JavaThread *thread = JavaThread::current();
    // Debug-only check: the frame must belong to Object.wait0 or to a VM
    // call made through call_VM_preemptable (tracked by the per-thread
    // interp_at_preemptable_vmcall_cnt counter).
    DEBUG_ONLY(Method* m = f.is_interpreted_frame() ? f.interpreter_frame_method() : f.cb()->as_nmethod()->method();)
    assert(m->is_object_wait0() || thread->interp_at_preemptable_vmcall_cnt() > 0,
           "preemptable VM call not using call_VM_preemptable");
    thread->set_preempt_alternate_return(StubRoutines::cont_preempt_stub());
  }
}
 61 
// Number of padding words needed so a frame of `size` words stays aligned:
// one word when size is odd on 64-bit, zero otherwise.
inline int ContinuationHelper::frame_align_words(int size) {
#ifdef _LP64
  return size & 1;
#else
  return 0;
#endif
}
 69 
// Align a stack pointer down to the platform frame alignment (64-bit only;
// 32-bit returns sp unchanged).
inline intptr_t* ContinuationHelper::frame_align_pointer(intptr_t* sp) {
#ifdef _LP64
  sp = align_down(sp, frame::frame_alignment);
#endif
  return sp;
}
 76 
 77 template<typename FKind>
< prev index next >