src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp

 577     if (UseZGC) {
 578       // Check if mask is good.
 579       // verifies that ZAddressBadMask & r0 == 0
 580       __ ldr(c_rarg3, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
 581       __ andr(c_rarg2, r0, c_rarg3);
 582       __ cbnz(c_rarg2, error);
 583     }
 584 #endif
 585 
 586     // Check if the oop is in the right area of memory
 587     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_mask());
 588     __ andr(c_rarg2, r0, c_rarg3);
 589     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_bits());
 590 
 591     // Compare c_rarg2 and c_rarg3.  We don't use a compare
 592     // instruction here because the flags register is live.
 593     __ eor(c_rarg2, c_rarg2, c_rarg3);
 594     __ cbnz(c_rarg2, error);
 595 
 596     // make sure the klass is 'reasonable', i.e. non-zero.
 597     __ load_klass(r0, r0);  // get klass
 598     __ cbz(r0, error);      // if klass is NULL it is broken
 599 
 600     // return if everything seems ok
 601     __ bind(exit);
 602 
 603     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 604     __ ret(lr);
 605 
 606     // handle errors
 607     __ bind(error);
 608     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 609 
 610     __ push(RegSet::range(r0, r29), sp);
 611     // debug(char* msg, int64_t pc, int64_t regs[])
 612     __ mov(c_rarg0, rscratch1);      // pass address of error message
 613     __ mov(c_rarg1, lr);             // pass return address
 614     __ mov(c_rarg2, sp);             // pass address of regs on stack
 615 #ifndef PRODUCT
 616     assert(frame::arg_reg_save_area_bytes == 0, "not expecting frame reg save area");
 617 #endif
 618     BLOCK_COMMENT("call MacroAssembler::debug");

6627     gen_cas_entry(MacroAssembler::xword, memory_order_relaxed);
6628 
6629     AtomicStubMark mark_cmpxchg_4_release
6630       (_masm, &aarch64_atomic_cmpxchg_4_release_impl);
6631     gen_cas_entry(MacroAssembler::word, memory_order_release);
6632     AtomicStubMark mark_cmpxchg_8_release
6633       (_masm, &aarch64_atomic_cmpxchg_8_release_impl);
6634     gen_cas_entry(MacroAssembler::xword, memory_order_release);
6635 
6636     AtomicStubMark mark_cmpxchg_4_seq_cst
6637       (_masm, &aarch64_atomic_cmpxchg_4_seq_cst_impl);
6638     gen_cas_entry(MacroAssembler::word, memory_order_seq_cst);
6639     AtomicStubMark mark_cmpxchg_8_seq_cst
6640       (_masm, &aarch64_atomic_cmpxchg_8_seq_cst_impl);
6641     gen_cas_entry(MacroAssembler::xword, memory_order_seq_cst);
6642 
6643     ICache::invalidate_range(first_entry, __ pc() - first_entry);
6644   }
6645 #endif // LINUX
6646 
6647   address generate_cont_thaw(Continuation::thaw_kind kind) {
6648     bool return_barrier = Continuation::is_thaw_return_barrier(kind);
6649     bool return_barrier_exception = Continuation::is_thaw_return_barrier_exception(kind);
6650 
6651     address start = __ pc();
6652 
6653     if (return_barrier) {
6654       __ ldr(rscratch1, Address(rthread, JavaThread::cont_entry_offset()));
6655       __ mov(sp, rscratch1);
6656     }
6657     assert_asm(_masm, (__ ldr(rscratch1, Address(rthread, JavaThread::cont_entry_offset())), __ cmp(sp, rscratch1)), Assembler::EQ, "incorrect sp");
6658 
6659     if (return_barrier) {
6660       // preserve possible return value from a method returning to the return barrier
6661       __ fmovd(rscratch1, v0);
6662       __ stp(rscratch1, r0, Address(__ pre(sp, -2 * wordSize)));
6663     }
6664 
6665     __ movw(c_rarg1, (return_barrier ? 1 : 0));
6666     __ call_VM_leaf(CAST_FROM_FN_PTR(address, Continuation::prepare_thaw), rthread, c_rarg1);

7794       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
7795       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
7796     }
7797 
7798     if (UseCRC32CIntrinsics) {
7799       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
7800     }
7801 
7802     // Disabled until JDK-8210858 is fixed
7803     // if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
7804     //   StubRoutines::_dlog = generate_dlog();
7805     // }
7806 
7807     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
7808       StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
7809     }
7810 
7811     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
7812       StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
7813     }
7814   }
7815 
7816   void generate_phase1() {
7817     // Continuation stubs:
7818     StubRoutines::_cont_thaw          = generate_cont_thaw();
7819     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
7820     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
7821 
7822     JFR_ONLY(StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();)
7823     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
7824   }
7825 
7826   void generate_all() {
7827     // support for verify_oop (must happen after universe_init)
7828     if (VerifyOops) {
7829       StubRoutines::_verify_oop_subroutine_entry   = generate_verify_oop();
7830     }
7831     StubRoutines::_throw_AbstractMethodError_entry =
7832       generate_throw_exception("AbstractMethodError throw_exception",
7833                                CAST_FROM_FN_PTR(address,

 577     if (UseZGC) {
 578       // Check if mask is good.
 579       // verifies that ZAddressBadMask & r0 == 0
 580       __ ldr(c_rarg3, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
 581       __ andr(c_rarg2, r0, c_rarg3);
 582       __ cbnz(c_rarg2, error);
 583     }
 584 #endif
 585 
 586     // Check if the oop is in the right area of memory
 587     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_mask());
 588     __ andr(c_rarg2, r0, c_rarg3);
 589     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_bits());
 590 
 591     // Compare c_rarg2 and c_rarg3.  We don't use a compare
 592     // instruction here because the flags register is live.
 593     __ eor(c_rarg2, c_rarg2, c_rarg3);
 594     __ cbnz(c_rarg2, error);
 595 
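
The eor/cbnz pair above is a plain equality test that leaves the flags untouched. A minimal standalone C++ sketch of the predicate being verified, with placeholder mask/bits parameters rather than the values HotSpot derives from the heap layout:

    #include <cstdint>

    // Sketch of the verify_oop range check: an oop is plausible iff
    // (oop & verify_oop_mask) == verify_oop_bits. XOR against the expected
    // bits is zero exactly when the two values are equal, which is why the
    // stub can use eor + cbnz instead of a flags-clobbering cmp.
    static bool oop_looks_valid(uintptr_t oop, uintptr_t mask, uintptr_t bits) {
      return ((oop & mask) ^ bits) == 0;
    }
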
 596     // make sure the klass is 'reasonable', i.e. non-zero.
 597     // NOTE: We used to load the Klass* here and compare that to zero.
 598     // However, with the current Lilliput implementation, that would require
 599     // checking the locking bits and calling into the runtime, which
 600     // clobbers the condition flags, which may be live around this call.
 601     // OTOH, this is a simple NULL-check, so we can simply load the upper
 602     // 32 bits of the header as a narrowKlass and compare that to 0. The
 603     // worst that can happen (rarely) is that the object is locked and
 604     // we have lock-pointer bits in the upper 32 bits. We can't get a false
 605     // negative.
 606     assert(oopDesc::klass_offset_in_bytes() % 4 == 0, "must be 4 byte aligned");
 607     __ ldrw(r0, Address(r0, oopDesc::klass_offset_in_bytes()));  // get klass
 608     __ cbzw(r0, error);      // if klass is NULL it is broken
 609 
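To make the NOTE above concrete, here is a minimal standalone C++ sketch of the check, under the layout the comment describes: a 64-bit header whose upper 32 bits normally hold the narrowKlass (names and the offset parameter are illustrative, not HotSpot's):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Sketch: load the 32-bit word at the klass offset and null-check it.
    // When the object is locked, those bits may be lock-pointer bits instead;
    // they are then non-zero, so the check can give a false positive but
    // never a false negative -- the trade-off the NOTE above accepts.
    static bool klass_word_nonzero(const void* obj, size_t klass_offset) {
      uint32_t nklass;
      std::memcpy(&nklass, static_cast<const char*>(obj) + klass_offset,
                  sizeof(nklass));   // the ldrw; offset asserted 4-byte aligned
      return nklass != 0;            // cbzw takes the error path when zero
    }
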
 610     // return if everything seems ok
 611     __ bind(exit);
 612 
 613     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 614     __ ret(lr);
 615 
 616     // handle errors
 617     __ bind(error);
 618     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 619 
 620     __ push(RegSet::range(r0, r29), sp);
 621     // debug(char* msg, int64_t pc, int64_t regs[])
 622     __ mov(c_rarg0, rscratch1);      // pass address of error message
 623     __ mov(c_rarg1, lr);             // pass return address
 624     __ mov(c_rarg2, sp);             // pass address of regs on stack
 625 #ifndef PRODUCT
 626     assert(frame::arg_reg_save_area_bytes == 0, "not expecting frame reg save area");
 627 #endif
 628     BLOCK_COMMENT("call MacroAssembler::debug");
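
The error path hands control to a C++ routine whose shape is given in the stub's own comment. The sketch below restates that contract; the regs[i] == ri index mapping is an assumption about the push layout of r0..r29, not taken from the source:

    #include <cstdint>

    // Contract from the stub's comment: debug(char* msg, int64_t pc,
    // int64_t regs[]). msg arrives in c_rarg0 (rscratch1 holds the message
    // address), pc in c_rarg1 (the stub's lr), regs in c_rarg2 (sp after
    // pushing r0..r29, assumed lowest register at the lowest address).
    extern "C" void debug_sketch(char* msg, int64_t pc, int64_t regs[]) {
      (void)msg; (void)pc;
      // e.g. regs[0] would be the oop that was in r0 when verification failed
      (void)regs;
    }
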

6637     gen_cas_entry(MacroAssembler::xword, memory_order_relaxed);
6638 
6639     AtomicStubMark mark_cmpxchg_4_release
6640       (_masm, &aarch64_atomic_cmpxchg_4_release_impl);
6641     gen_cas_entry(MacroAssembler::word, memory_order_release);
6642     AtomicStubMark mark_cmpxchg_8_release
6643       (_masm, &aarch64_atomic_cmpxchg_8_release_impl);
6644     gen_cas_entry(MacroAssembler::xword, memory_order_release);
6645 
6646     AtomicStubMark mark_cmpxchg_4_seq_cst
6647       (_masm, &aarch64_atomic_cmpxchg_4_seq_cst_impl);
6648     gen_cas_entry(MacroAssembler::word, memory_order_seq_cst);
6649     AtomicStubMark mark_cmpxchg_8_seq_cst
6650       (_masm, &aarch64_atomic_cmpxchg_8_seq_cst_impl);
6651     gen_cas_entry(MacroAssembler::xword, memory_order_seq_cst);
6652 
6653     ICache::invalidate_range(first_entry, __ pc() - first_entry);
6654   }
6655 #endif // LINUX
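
For readers mapping the three stub flavors above back to C++ memory-order semantics, a standalone sketch using the GCC/Clang atomic builtins; function names are illustrative, and the return-the-previous-value convention is assumed from the usual out-of-line atomic helper contract:

    #include <cstdint>

    // 4-byte CAS in the three orderings the stubs generate. On failure a
    // memory order no stronger than the success order is used, as required.
    static uint32_t cas4_relaxed(uint32_t* p, uint32_t expected, uint32_t desired) {
      __atomic_compare_exchange_n(p, &expected, desired, /*weak=*/false,
                                  __ATOMIC_RELAXED, __ATOMIC_RELAXED);
      return expected;  // holds the previous value after the call
    }
    static uint32_t cas4_release(uint32_t* p, uint32_t expected, uint32_t desired) {
      __atomic_compare_exchange_n(p, &expected, desired, false,
                                  __ATOMIC_RELEASE, __ATOMIC_RELAXED);
      return expected;
    }
    static uint32_t cas4_seq_cst(uint32_t* p, uint32_t expected, uint32_t desired) {
      __atomic_compare_exchange_n(p, &expected, desired, false,
                                  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
      return expected;
    }
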
6656 
6657   // Pass object argument in r0 (which has to be preserved outside this stub)
6658   // Pass back result in r0
6659   // Clobbers rscratch1
6660   address generate_load_nklass() {
6661     __ align(CodeEntryAlignment);
6662     StubCodeMark mark(this, "StubRoutines", "load_nklass");
6663 
6664     address start = __ pc();
6665 
6666     __ set_last_Java_frame(sp, rfp, lr, rscratch1);
6667     __ enter();
6668     __ push(RegSet::of(rscratch1, rscratch2), sp);
6669     __ push_call_clobbered_registers_except(r0);
6670     __ call_VM_leaf(CAST_FROM_FN_PTR(address, oopDesc::load_nklass_runtime), 1);
6671     __ pop_call_clobbered_registers_except(r0);
6672     __ pop(RegSet::of(rscratch1, rscratch2), sp);
6673     __ leave();
6674     __ reset_last_Java_frame(true);
6675     __ ret(lr);
6676 
6677     return start;
6678   }
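
The stub wraps a leaf call with the convention stated in the comment above it: object in r0, result back in r0, rscratch1 clobbered. A standalone sketch of what the leaf conceptually computes, assuming the upper-32-bits header layout from the verify_oop change; the name below is illustrative, the real helper being oopDesc::load_nklass_runtime:

    #include <cstdint>

    // Illustrative stand-in for the leaf call: extract the narrowKlass from
    // a 64-bit header whose upper 32 bits carry it while the object is
    // unlocked. The real helper must also handle locked objects, whose
    // header word is displaced; that indirection is omitted in this sketch.
    static uint32_t load_nklass_sketch(uint64_t header_word) {
      return static_cast<uint32_t>(header_word >> 32);
    }
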
6679 
6680   address generate_cont_thaw(Continuation::thaw_kind kind) {
6681     bool return_barrier = Continuation::is_thaw_return_barrier(kind);
6682     bool return_barrier_exception = Continuation::is_thaw_return_barrier_exception(kind);
6683 
6684     address start = __ pc();
6685 
6686     if (return_barrier) {
6687       __ ldr(rscratch1, Address(rthread, JavaThread::cont_entry_offset()));
6688       __ mov(sp, rscratch1);
6689     }
6690     assert_asm(_masm, (__ ldr(rscratch1, Address(rthread, JavaThread::cont_entry_offset())), __ cmp(sp, rscratch1)), Assembler::EQ, "incorrect sp");
6691 
6692     if (return_barrier) {
6693       // preserve possible return value from a method returning to the return barrier
6694       __ fmovd(rscratch1, v0);
6695       __ stp(rscratch1, r0, Address(__ pre(sp, -2 * wordSize)));
6696     }
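
The fmovd/stp pair above saves both possible return values in one 16-byte slot, since a method returning through the barrier may carry its result in either r0 or v0 and the stub cannot know which. A sketch of the layout stp creates, relying on stp storing its first operand at the lower address:

    #include <cstdint>

    // Layout written by: fmovd rscratch1, v0; stp rscratch1, r0, [sp, #-16]!
    struct SavedReturnValue {
      uint64_t v0_bits;  // floating-point return value, at [sp]
      uint64_t r0;       // integer/oop return value, at [sp + 8]
    };
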
6697 
6698     __ movw(c_rarg1, (return_barrier ? 1 : 0));
6699     __ call_VM_leaf(CAST_FROM_FN_PTR(address, Continuation::prepare_thaw), rthread, c_rarg1);

7827       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
7828       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
7829     }
7830 
7831     if (UseCRC32CIntrinsics) {
7832       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
7833     }
7834 
7835     // Disabled until JDK-8210858 is fixed
7836     // if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
7837     //   StubRoutines::_dlog = generate_dlog();
7838     // }
7839 
7840     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
7841       StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
7842     }
7843 
7844     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
7845       StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
7846     }
7847 
7848     StubRoutines::_load_nklass = generate_load_nklass();
7849   }
7850 
7851   void generate_phase1() {
7852     // Continuation stubs:
7853     StubRoutines::_cont_thaw          = generate_cont_thaw();
7854     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
7855     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
7856 
7857     JFR_ONLY(StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();)
7858     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
7859   }
7860 
7861   void generate_all() {
7862     // support for verify_oop (must happen after universe_init)
7863     if (VerifyOops) {
7864       StubRoutines::_verify_oop_subroutine_entry   = generate_verify_oop();
7865     }
7866     StubRoutines::_throw_AbstractMethodError_entry =
7867       generate_throw_exception("AbstractMethodError throw_exception",
7868                                CAST_FROM_FN_PTR(address,