src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp

*** 307,10 ***
--- 307,41 ---
    } else {
      tbnz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), slow_path);
    }
  }
  
+ void MacroAssembler::push_cont_fastpath(Register java_thread) {
+   Label done;
+   ldr(rscratch1, Address(java_thread, JavaThread::cont_fastpath_offset()));
+   cmp(sp, rscratch1);
+   br(Assembler::LS, done);
+   mov(rscratch1, sp); // we can't use sp as the source in str
+   str(rscratch1, Address(java_thread, JavaThread::cont_fastpath_offset()));
+   bind(done);
+ }
+ 
+ void MacroAssembler::pop_cont_fastpath(Register java_thread) {
+   Label done;
+   ldr(rscratch1, Address(java_thread, JavaThread::cont_fastpath_offset()));
+   cmp(sp, rscratch1);
+   br(Assembler::LO, done);
+   str(zr, Address(java_thread, JavaThread::cont_fastpath_offset()));
+   bind(done);
+ }
+ 
+ void MacroAssembler::inc_held_monitor_count(Register java_thread) {
+   incrementw(Address(java_thread, JavaThread::held_monitor_count_offset()));
+ }
+ 
+ void MacroAssembler::dec_held_monitor_count(Register java_thread) {
+   decrementw(Address(java_thread, JavaThread::held_monitor_count_offset()));
+ }
+ 
+ void MacroAssembler::reset_held_monitor_count(Register java_thread) {
+   strw(zr, Address(java_thread, JavaThread::held_monitor_count_offset()));
+ }
+ 
  void MacroAssembler::reset_last_Java_frame(bool clear_fp) {
    // we must set sp to zero to clear frame
    str(zr, Address(rthread, JavaThread::last_Java_sp_offset()));
  
    // must clear fp, so that compiled frames are not confused; it is
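
Note: the two cont_fastpath helpers maintain a per-thread stack watermark, apparently for the continuation (Loom) freeze fastpath: push_cont_fastpath raises the field at JavaThread::cont_fastpath_offset() to the current sp when sp is above the stored value (the LS branch skips the store), and pop_cont_fastpath clears it once sp is at or above the watermark (the LO branch skips the clear). The three held_monitor_count helpers are plain 32-bit increment/decrement/clear operations on the corresponding JavaThread field. A minimal C++ model of the watermark semantics, with a hypothetical struct standing in for JavaThread (the stack grows downward, so a numerically larger sp is a shallower frame):

    // Model only, not JVM source; field names mirror the offsets used above.
    #include <cstdint>

    struct ThreadModel {
      uintptr_t cont_fastpath;       // 0 means no fastpath watermark is set
      uint32_t  held_monitor_count;  // touched by the inc/dec/reset helpers
    };

    void push_cont_fastpath(ThreadModel* t, uintptr_t sp) {
      if (sp > t->cont_fastpath)     // br(LS, done) skips when sp <= watermark
        t->cont_fastpath = sp;       // record the shallowest fastpath frame
    }

    void pop_cont_fastpath(ThreadModel* t, uintptr_t sp) {
      if (sp >= t->cont_fastpath)    // br(LO, done) skips when sp < watermark
        t->cont_fastpath = 0;        // str(zr, ...) clears the watermark
    }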

*** 539,28 ***
    call_VM_base(oop_result, noreg, noreg, entry_point, number_of_arguments, check_exceptions);
  }
  
  // Maybe emit a call via a trampoline.  If the code cache is small
  // trampolines won't be emitted.
! 
! address MacroAssembler::trampoline_call(Address entry, CodeBuffer* cbuf) {
-   assert(JavaThread::current()->is_Compiler_thread(), "just checking");
    assert(entry.rspec().type() == relocInfo::runtime_call_type
           || entry.rspec().type() == relocInfo::opt_virtual_call_type
           || entry.rspec().type() == relocInfo::static_call_type
           || entry.rspec().type() == relocInfo::virtual_call_type, "wrong reloc type");
  
    // We need a trampoline if branches are far.
    if (far_branches()) {
      bool in_scratch_emit_size = false;
  #ifdef COMPILER2
!     // We don't want to emit a trampoline if C2 is generating dummy
!     // code during its branch shortening phase.
!     CompileTask* task = ciEnv::current()->task();
!     in_scratch_emit_size =
!       (task != NULL && is_c2_compile(task->comp_level()) &&
!        Compile::current()->output()->in_scratch_emit_size());
  #endif
      if (!in_scratch_emit_size) {
        address stub = emit_trampoline_stub(offset(), entry.target());
        if (stub == NULL) {
          postcond(pc() == badAddress);
--- 570,29 ---
    call_VM_base(oop_result, noreg, noreg, entry_point, number_of_arguments, check_exceptions);
  }
  
  // Maybe emit a call via a trampoline.  If the code cache is small
  // trampolines won't be emitted.
! address MacroAssembler::trampoline_call1(Address entry, CodeBuffer* cbuf, bool check_emit_size) {
!   //assert(JavaThread::current()->is_Compiler_thread(), "just checking");
    assert(entry.rspec().type() == relocInfo::runtime_call_type
           || entry.rspec().type() == relocInfo::opt_virtual_call_type
           || entry.rspec().type() == relocInfo::static_call_type
           || entry.rspec().type() == relocInfo::virtual_call_type, "wrong reloc type");
  
    // We need a trampoline if branches are far.
    if (far_branches()) {
      bool in_scratch_emit_size = false;
  #ifdef COMPILER2
!     if (check_emit_size) {
!       // We don't want to emit a trampoline if C2 is generating dummy
!       // code during its branch shortening phase.
!       CompileTask* task = ciEnv::current()->task();
!       in_scratch_emit_size =
!         (task != NULL && is_c2_compile(task->comp_level()) &&
+          Compile::current()->output()->in_scratch_emit_size());
+     }
  #endif
      if (!in_scratch_emit_size) {
        address stub = emit_trampoline_stub(offset(), entry.target());
        if (stub == NULL) {
          postcond(pc() == badAddress);
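
Note: renaming to trampoline_call1 with a check_emit_size parameter lets callers that are not running inside a C2 compilation skip the ciEnv::current()->task() query, which is only meaningful on a compiler thread; consistent with that, the is_Compiler_thread assert is commented out. Presumably the original trampoline_call survives as a thin wrapper that preserves the old behavior; a sketch under that assumption (the wrapper itself is not shown in this hunk):

    // Hypothetical wrapper, assuming trampoline_call keeps its old signature
    // and simply delegates with emit-size checking enabled.
    address MacroAssembler::trampoline_call(Address entry, CodeBuffer* cbuf) {
      return trampoline_call1(entry, cbuf, /*check_emit_size=*/ true);
    }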

*** 2103,10 ***
--- 2135,19 ---
      buf = code_string(ss.as_string());
    }
    stop(buf);
  }
  
+ void MacroAssembler::_assert_asm(Assembler::Condition cc, const char* msg) {
+ #ifdef ASSERT
+   Label OK;
+   br(cc, OK);
+   stop(msg);
+   bind(OK);
+ #endif
+ }
+ 
  // If a constant does not fit in an immediate field, generate some
  // number of MOV instructions and then perform the operation.
  void MacroAssembler::wrap_add_sub_imm_insn(Register Rd, Register Rn, unsigned imm,
                                             add_sub_imm_insn insn1,
                                             add_sub_reg_insn insn2) {
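
Note: _assert_asm emits a debug-build guard into generated code: it branches over a stop(msg) when condition cc holds, so the flags must already be set by a preceding comparison; since the body is wrapped in #ifdef ASSERT, product builds emit nothing. A hypothetical use site (register choice and message are illustrative):

    // Hypothetical usage; assumes flags were set by the cmp just before.
    __ cmp(rscratch1, zr);                        // is the value zero?
    __ _assert_asm(Assembler::NE, "unexpected null value");
    // In product builds _assert_asm has an empty body, so no code is emitted.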