< prev index next >

src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp

Print this page

 292   return address(((uint64_t)insn_addr + (offset << 2)));
 293 }
 294 
// Poll the thread-local polling word and branch to slow_path when a
// safepoint/handshake is pending.
// - acquire:   load the polling word with acquire semantics (ldar) so the
//              load is ordered with respect to subsequent accesses.
// - at_return: the poll sits at a method return; the polling word is then
//              compared against the frame pointer (or sp when in_nmethod)
//              as a stack watermark check.
// Clobbers rscratch1.
void MacroAssembler::safepoint_poll(Label& slow_path, bool at_return, bool acquire, bool in_nmethod) {
  if (acquire) {
    // ldar takes its address in a register, so form it with lea first.
    lea(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
    ldar(rscratch1, rscratch1);
  } else {
    ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
  }
  if (at_return) {
    // Note that when in_nmethod is set, the stack pointer is incremented before the poll. Therefore,
    // we may safely use the sp instead to perform the stack watermark check.
    cmp(in_nmethod ? sp : rfp, rscratch1);
    br(Assembler::HI, slow_path); // taken when sp/fp is above the polling word
  } else {
    // Not at a return: just test the poll bit of the polling word.
    tbnz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), slow_path);
  }
}
 311 































 312 void MacroAssembler::reset_last_Java_frame(bool clear_fp) {
 313   // we must set sp to zero to clear frame
 314   str(zr, Address(rthread, JavaThread::last_Java_sp_offset()));
 315 
 316   // must clear fp, so that compiled frames are not confused; it is
 317   // possible that we need it only for debugging
 318   if (clear_fp) {
 319     str(zr, Address(rthread, JavaThread::last_Java_fp_offset()));
 320   }
 321 
 322   // Always clear the pc because it could have been set by make_walkable()
 323   str(zr, Address(rthread, JavaThread::last_Java_pc_offset()));
 324 }
 325 
 326 // Calls to C land
 327 //
 328 // When entering C land, the rfp, & resp of the last Java frame have to be recorded
 329 // in the (thread-local) JavaThread object. When leaving C land, the last Java fp
 330 // has to be reset to 0. This is required to allow proper stack traversal.
 331 void MacroAssembler::set_last_Java_frame(Register last_java_sp,

 524     ldr(rscratch1, Address(java_thread, in_bytes(Thread::pending_exception_offset())));
 525     Label ok;
 526     cbz(rscratch1, ok);
 527     lea(rscratch1, RuntimeAddress(StubRoutines::forward_exception_entry()));
 528     br(rscratch1);
 529     bind(ok);
 530   }
 531 
 532   // get oop result if there is one and reset the value in the thread
 533   if (oop_result->is_valid()) {
 534     get_vm_result(oop_result, java_thread);
 535   }
 536 }
 537 
// Convenience wrapper around call_VM_base: passes noreg for both the
// java_thread and last_java_sp arguments — presumably call_VM_base then
// substitutes its own defaults (confirm against its definition).
void MacroAssembler::call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions) {
  call_VM_base(oop_result, noreg, noreg, entry_point, number_of_arguments, check_exceptions);
}
 541 
 542 // Maybe emit a call via a trampoline.  If the code cache is small
 543 // trampolines won't be emitted.
 544 
 545 address MacroAssembler::trampoline_call(Address entry, CodeBuffer* cbuf) {
 546   assert(JavaThread::current()->is_Compiler_thread(), "just checking");
 547   assert(entry.rspec().type() == relocInfo::runtime_call_type
 548          || entry.rspec().type() == relocInfo::opt_virtual_call_type
 549          || entry.rspec().type() == relocInfo::static_call_type
 550          || entry.rspec().type() == relocInfo::virtual_call_type, "wrong reloc type");
 551 
 552   // We need a trampoline if branches are far.
 553   if (far_branches()) {
 554     bool in_scratch_emit_size = false;
 555 #ifdef COMPILER2
 556     // We don't want to emit a trampoline if C2 is generating dummy
 557     // code during its branch shortening phase.
 558     CompileTask* task = ciEnv::current()->task();
 559     in_scratch_emit_size =
 560       (task != NULL && is_c2_compile(task->comp_level()) &&
 561        Compile::current()->output()->in_scratch_emit_size());


 562 #endif
 563     if (!in_scratch_emit_size) {
 564       address stub = emit_trampoline_stub(offset(), entry.target());
 565       if (stub == NULL) {
 566         postcond(pc() == badAddress);
 567         return NULL; // CodeCache is full
 568       }
 569     }
 570   }
 571 
 572   if (cbuf) cbuf->set_insts_mark();
 573   relocate(entry.rspec());
 574   if (!far_branches()) {
 575     bl(entry.target());
 576   } else {
 577     bl(pc());
 578   }
 579   // just need to return a non-null address
 580   postcond(pc() != badAddress);
 581   return pc();

2088   bind(done);
2089 }
2090 
// Halt generated code with a debug message.  Emits a dcps1 trap with the
// magic immediate 0xdeae, immediately followed in the instruction stream
// by the 64-bit address of the message string — presumably the trap
// handler decodes the message from there (see the matching signal/debug
// handling code).
void MacroAssembler::stop(const char* msg) {
  BLOCK_COMMENT(msg);
  dcps1(0xdeae);
  emit_int64((uintptr_t)msg);
}
2096 
// Stop with an "unimplemented: <what>" message.
void MacroAssembler::unimplemented(const char* what) {
  const char* buf = NULL;
  {
    // Format the message in a temporary resource arena; copy it out via
    // code_string() so the text stays valid after the ResourceMark
    // releases the stream's buffer.
    ResourceMark rm;
    stringStream ss;
    ss.print("unimplemented: %s", what);
    buf = code_string(ss.as_string());
  }
  stop(buf);
}
2107 









// If a constant does not fit in an immediate field, generate some
// number of MOV instructions and then perform the operation.
//
// insn1 is the immediate form of the add/sub instruction, insn2 the
// shifted-register form.  Strategy:
//   1. imm fits the instruction's immediate encoding: a single insn1.
//   2. |imm| < 2^24: split into a high chunk (bits 23:12) and a low
//      12-bit chunk, emitting two insn1.
//   3. otherwise: materialize imm into Rd with mov() and use the
//      register form — this requires Rd != Rn.
void MacroAssembler::wrap_add_sub_imm_insn(Register Rd, Register Rn, unsigned imm,
                                           add_sub_imm_insn insn1,
                                           add_sub_reg_insn insn2) {
  assert(Rd != zr, "Rd = zr and not setting flags?");
  if (operand_valid_for_add_sub_immediate((int)imm)) {
    (this->*insn1)(Rd, Rn, imm);
  } else {
    if (uabs(imm) < (1 << 24)) {
       // Two instructions: upper bits first, then the low 12 bits.
       (this->*insn1)(Rd, Rn, imm & -(1 << 12));
       (this->*insn1)(Rd, Rd, imm & ((1 << 12)-1));
    } else {
       // Rd doubles as scratch for the constant, so it must not alias Rn.
       assert_different_registers(Rd, Rn);
       mov(Rd, (uint64_t)imm);
       (this->*insn2)(Rd, Rn, Rd, LSL, 0);
    }
  }
}
2127 

 292   return address(((uint64_t)insn_addr + (offset << 2)));
 293 }
 294 
 295 void MacroAssembler::safepoint_poll(Label& slow_path, bool at_return, bool acquire, bool in_nmethod) {
 296   if (acquire) {
 297     lea(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
 298     ldar(rscratch1, rscratch1);
 299   } else {
 300     ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
 301   }
 302   if (at_return) {
 303     // Note that when in_nmethod is set, the stack pointer is incremented before the poll. Therefore,
 304     // we may safely use the sp instead to perform the stack watermark check.
 305     cmp(in_nmethod ? sp : rfp, rscratch1);
 306     br(Assembler::HI, slow_path);
 307   } else {
 308     tbnz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), slow_path);
 309   }
 310 }
 311 
// Continuation support: record the current sp in the thread's
// cont_fastpath field, but only when sp is strictly above the value
// already stored there.  Clobbers rscratch1.
void MacroAssembler::push_cont_fastpath(Register java_thread) {
  Label done;
  ldr(rscratch1, Address(java_thread, JavaThread::cont_fastpath_offset()));
  cmp(sp, rscratch1);
  br(Assembler::LS, done); // sp <= stored value: nothing to update
  mov(rscratch1, sp); // we can't use sp as the source in str
  str(rscratch1, Address(java_thread, JavaThread::cont_fastpath_offset()));
  bind(done);
}
 321 
// Continuation support: clear the thread's cont_fastpath field once the
// current sp has reached (or passed above) the recorded value.
// Clobbers rscratch1.
void MacroAssembler::pop_cont_fastpath(Register java_thread) {
  Label done;
  ldr(rscratch1, Address(java_thread, JavaThread::cont_fastpath_offset()));
  cmp(sp, rscratch1);
  br(Assembler::LO, done); // sp still below the stored value: keep it
  str(zr, Address(java_thread, JavaThread::cont_fastpath_offset()));
  bind(done);
}
 330 
// Increment the thread's held-monitor counter (32-bit field, hence the
// w-form increment).
void MacroAssembler::inc_held_monitor_count(Register java_thread) {
  incrementw(Address(java_thread, JavaThread::held_monitor_count_offset()));
}
 334 
// Decrement the thread's held-monitor counter (32-bit field, hence the
// w-form decrement).
void MacroAssembler::dec_held_monitor_count(Register java_thread) {
  decrementw(Address(java_thread, JavaThread::held_monitor_count_offset()));
}
 338 
// Reset the thread's held-monitor counter to zero (32-bit store of zr).
void MacroAssembler::reset_held_monitor_count(Register java_thread) {
  strw(zr, Address(java_thread, JavaThread::held_monitor_count_offset()));
}
 342 
// Clear the last-Java-frame anchor fields in the current thread
// (rthread).  sp is always zeroed (that marks the frame as cleared);
// pc is always zeroed because make_walkable() could have set it;
// fp is zeroed only when clear_fp is requested.
void MacroAssembler::reset_last_Java_frame(bool clear_fp) {
  // we must set sp to zero to clear frame
  str(zr, Address(rthread, JavaThread::last_Java_sp_offset()));

  // must clear fp, so that compiled frames are not confused; it is
  // possible that we need it only for debugging
  if (clear_fp) {
    str(zr, Address(rthread, JavaThread::last_Java_fp_offset()));
  }

  // Always clear the pc because it could have been set by make_walkable()
  str(zr, Address(rthread, JavaThread::last_Java_pc_offset()));
}
 356 
 357 // Calls to C land
 358 //
 359 // When entering C land, the rfp, & resp of the last Java frame have to be recorded
 360 // in the (thread-local) JavaThread object. When leaving C land, the last Java fp
 361 // has to be reset to 0. This is required to allow proper stack traversal.
 362 void MacroAssembler::set_last_Java_frame(Register last_java_sp,

 555     ldr(rscratch1, Address(java_thread, in_bytes(Thread::pending_exception_offset())));
 556     Label ok;
 557     cbz(rscratch1, ok);
 558     lea(rscratch1, RuntimeAddress(StubRoutines::forward_exception_entry()));
 559     br(rscratch1);
 560     bind(ok);
 561   }
 562 
 563   // get oop result if there is one and reset the value in the thread
 564   if (oop_result->is_valid()) {
 565     get_vm_result(oop_result, java_thread);
 566   }
 567 }
 568 
// Convenience wrapper around call_VM_base: passes noreg for both the
// java_thread and last_java_sp arguments — presumably call_VM_base then
// substitutes its own defaults (confirm against its definition).
void MacroAssembler::call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions) {
  call_VM_base(oop_result, noreg, noreg, entry_point, number_of_arguments, check_exceptions);
}
 572 
 573 // Maybe emit a call via a trampoline.  If the code cache is small
 574 // trampolines won't be emitted.
 575 address MacroAssembler::trampoline_call1(Address entry, CodeBuffer* cbuf, bool check_emit_size) {
 576   //assert(JavaThread::current()->is_Compiler_thread(), "just checking");

 577   assert(entry.rspec().type() == relocInfo::runtime_call_type
 578          || entry.rspec().type() == relocInfo::opt_virtual_call_type
 579          || entry.rspec().type() == relocInfo::static_call_type
 580          || entry.rspec().type() == relocInfo::virtual_call_type, "wrong reloc type");
 581 
 582   // We need a trampoline if branches are far.
 583   if (far_branches()) {
 584     bool in_scratch_emit_size = false;
 585 #ifdef COMPILER2
 586     if (check_emit_size) {
 587       // We don't want to emit a trampoline if C2 is generating dummy
 588       // code during its branch shortening phase.
 589       CompileTask* task = ciEnv::current()->task();
 590       in_scratch_emit_size =
 591         (task != NULL && is_c2_compile(task->comp_level()) &&
 592          Compile::current()->output()->in_scratch_emit_size());
 593     }
 594 #endif
 595     if (!in_scratch_emit_size) {
 596       address stub = emit_trampoline_stub(offset(), entry.target());
 597       if (stub == NULL) {
 598         postcond(pc() == badAddress);
 599         return NULL; // CodeCache is full
 600       }
 601     }
 602   }
 603 
 604   if (cbuf) cbuf->set_insts_mark();
 605   relocate(entry.rspec());
 606   if (!far_branches()) {
 607     bl(entry.target());
 608   } else {
 609     bl(pc());
 610   }
 611   // just need to return a non-null address
 612   postcond(pc() != badAddress);
 613   return pc();

2120   bind(done);
2121 }
2122 
// Halt generated code with a debug message.  Emits a dcps1 trap with the
// magic immediate 0xdeae, immediately followed in the instruction stream
// by the 64-bit address of the message string — presumably the trap
// handler decodes the message from there (see the matching signal/debug
// handling code).
void MacroAssembler::stop(const char* msg) {
  BLOCK_COMMENT(msg);
  dcps1(0xdeae);
  emit_int64((uintptr_t)msg);
}
2128 
// Stop with an "unimplemented: <what>" message.
void MacroAssembler::unimplemented(const char* what) {
  const char* buf = NULL;
  {
    // Format the message in a temporary resource arena; copy it out via
    // code_string() so the text stays valid after the ResourceMark
    // releases the stream's buffer.
    ResourceMark rm;
    stringStream ss;
    ss.print("unimplemented: %s", what);
    buf = code_string(ss.as_string());
  }
  stop(buf);
}
2139 
// Debug-only runtime assertion: in ASSERT builds, stop with msg unless
// condition cc (established by previously emitted flag-setting code)
// holds.  In product builds this emits nothing.
void MacroAssembler::_assert_asm(Assembler::Condition cc, const char* msg) {
#ifdef ASSERT
  Label OK;
  br(cc, OK); // condition holds: skip the stop
  stop(msg);
  bind(OK);
#endif
}
2148 
// If a constant does not fit in an immediate field, generate some
// number of MOV instructions and then perform the operation.
//
// insn1 is the immediate form of the add/sub instruction, insn2 the
// shifted-register form.  Strategy:
//   1. imm fits the instruction's immediate encoding: a single insn1.
//   2. |imm| < 2^24: split into a high chunk (bits 23:12) and a low
//      12-bit chunk, emitting two insn1.
//   3. otherwise: materialize imm into Rd with mov() and use the
//      register form — this requires Rd != Rn.
void MacroAssembler::wrap_add_sub_imm_insn(Register Rd, Register Rn, unsigned imm,
                                           add_sub_imm_insn insn1,
                                           add_sub_reg_insn insn2) {
  assert(Rd != zr, "Rd = zr and not setting flags?");
  if (operand_valid_for_add_sub_immediate((int)imm)) {
    (this->*insn1)(Rd, Rn, imm);
  } else {
    if (uabs(imm) < (1 << 24)) {
       // Two instructions: upper bits first, then the low 12 bits.
       (this->*insn1)(Rd, Rn, imm & -(1 << 12));
       (this->*insn1)(Rd, Rd, imm & ((1 << 12)-1));
    } else {
       // Rd doubles as scratch for the constant, so it must not alias Rn.
       assert_different_registers(Rd, Rn);
       mov(Rd, (uint64_t)imm);
       (this->*insn2)(Rd, Rn, Rd, LSL, 0);
    }
  }
}
2168 
< prev index next >