src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp

------- original version -------

 568     if (UseZGC) {
 569       // Check if mask is good.
 570       // verifies that ZAddressBadMask & r0 == 0
 571       __ ldr(c_rarg3, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
 572       __ andr(c_rarg2, r0, c_rarg3);
 573       __ cbnz(c_rarg2, error);
 574     }
 575 #endif
 576 
 577     // Check if the oop is in the right area of memory
 578     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_mask());
 579     __ andr(c_rarg2, r0, c_rarg3);
 580     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_bits());
 581 
 582     // Compare c_rarg2 and c_rarg3.  We don't use a compare
 583     // instruction here because the flags register is live.
 584     __ eor(c_rarg2, c_rarg2, c_rarg3);
 585     __ cbnz(c_rarg2, error);
 586 
 587     // make sure the klass is 'reasonable', i.e. non-zero.
 588     __ load_klass(r0, r0);  // get klass
 589     __ cbz(r0, error);      // if klass is NULL it is broken
 590 
 591     // return if everything seems ok
 592     __ bind(exit);
 593 
 594     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 595     __ ret(lr);
 596 
 597     // handle errors
 598     __ bind(error);
 599     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 600 
 601     __ push(RegSet::range(r0, r29), sp);
 602     // debug(char* msg, int64_t pc, int64_t regs[])
 603     __ mov(c_rarg0, rscratch1);      // pass address of error message
 604     __ mov(c_rarg1, lr);             // pass return address
 605     __ mov(c_rarg2, sp);             // pass address of regs on stack
 606 #ifndef PRODUCT
 607     assert(frame::arg_reg_save_area_bytes == 0, "not expecting frame reg save area");
 608 #endif
 609     BLOCK_COMMENT("call MacroAssembler::debug");

6449     gen_cas_entry(MacroAssembler::xword, memory_order_relaxed);
6450 
6451     AtomicStubMark mark_cmpxchg_4_release
6452       (_masm, &aarch64_atomic_cmpxchg_4_release_impl);
6453     gen_cas_entry(MacroAssembler::word, memory_order_release);
6454     AtomicStubMark mark_cmpxchg_8_release
6455       (_masm, &aarch64_atomic_cmpxchg_8_release_impl);
6456     gen_cas_entry(MacroAssembler::xword, memory_order_release);
6457 
6458     AtomicStubMark mark_cmpxchg_4_seq_cst
6459       (_masm, &aarch64_atomic_cmpxchg_4_seq_cst_impl);
6460     gen_cas_entry(MacroAssembler::word, memory_order_seq_cst);
6461     AtomicStubMark mark_cmpxchg_8_seq_cst
6462       (_masm, &aarch64_atomic_cmpxchg_8_seq_cst_impl);
6463     gen_cas_entry(MacroAssembler::xword, memory_order_seq_cst);
6464 
6465     ICache::invalidate_range(first_entry, __ pc() - first_entry);
6466   }
6467 #endif // LINUX
 6468 
6469   // Continuation point for throwing of implicit exceptions that are
6470   // not handled in the current activation. Fabricates an exception
6471   // oop and initiates normal exception dispatching in this
6472   // frame. Since we need to preserve callee-saved values (currently
6473   // only for C2, but done for C1 as well) we need a callee-saved oop
6474   // map and therefore have to make these stubs into RuntimeStubs
6475   // rather than BufferBlobs.  If the compiler needs all registers to
6476   // be preserved between the fault point and the exception handler
6477   // then it must assume responsibility for that in
6478   // AbstractCompiler::continuation_for_implicit_null_exception or
6479   // continuation_for_implicit_division_by_zero_exception. All other
6480   // implicit exceptions (e.g., NullPointerException or
6481   // AbstractMethodError on entry) are either at call sites or
6482   // otherwise assume that stack unwinding will be initiated, so
6483   // caller saved registers were assumed volatile in the compiler.
6484 
6485 #undef __
6486 #define __ masm->
6487 
6488   address generate_throw_exception(const char* name,

7431       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
7432       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
7433     }
7434 
7435     if (UseCRC32CIntrinsics) {
7436       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
7437     }
7438 
7439     // Disabled until JDK-8210858 is fixed
7440     // if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
7441     //   StubRoutines::_dlog = generate_dlog();
7442     // }
7443 
7444     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
7445       StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
7446     }
7447 
7448     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
7449       StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
 7450     }
7451   }
7452 
7453   void generate_all() {
7454     // support for verify_oop (must happen after universe_init)
7455     if (VerifyOops) {
7456       StubRoutines::_verify_oop_subroutine_entry   = generate_verify_oop();
7457     }
7458     StubRoutines::_throw_AbstractMethodError_entry =
7459       generate_throw_exception("AbstractMethodError throw_exception",
7460                                CAST_FROM_FN_PTR(address,
7461                                                 SharedRuntime::
7462                                                 throw_AbstractMethodError));
7463 
7464     StubRoutines::_throw_IncompatibleClassChangeError_entry =
7465       generate_throw_exception("IncompatibleClassChangeError throw_exception",
7466                                CAST_FROM_FN_PTR(address,
7467                                                 SharedRuntime::
7468                                                 throw_IncompatibleClassChangeError));
7469 
7470     StubRoutines::_throw_NullPointerException_at_call_entry =


------- patched version -------

 568     if (UseZGC) {
 569       // Check if mask is good.
 570       // verifies that ZAddressBadMask & r0 == 0
 571       __ ldr(c_rarg3, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
 572       __ andr(c_rarg2, r0, c_rarg3);
 573       __ cbnz(c_rarg2, error);
 574     }
 575 #endif
 576 
 577     // Check if the oop is in the right area of memory
 578     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_mask());
 579     __ andr(c_rarg2, r0, c_rarg3);
 580     __ mov(c_rarg3, (intptr_t) Universe::verify_oop_bits());
 581 
 582     // Compare c_rarg2 and c_rarg3.  We don't use a compare
 583     // instruction here because the flags register is live.
 584     __ eor(c_rarg2, c_rarg2, c_rarg3);
 585     __ cbnz(c_rarg2, error);
 586 
 587     // make sure the klass is 'reasonable', i.e. non-zero.
 588     // NOTE: We used to load the Klass* here, and compare that to zero.
 589     // However, with the current Lilliput implementation, that would require
 590     // checking the locking bits and calling into the runtime, and that
 591     // clobbers the condition flags, which may be live around this call.
 592     // OTOH, this is a simple NULL-check, and we can simply load the upper
 593     // 32 bits of the header as a narrowKlass, and compare that to 0. The
 594     // worst that can happen (rarely) is that the object is locked and
 595     // we have lock pointer bits in the upper 32 bits. We can't get a false
 596     // negative.
 597     assert(oopDesc::klass_offset_in_bytes() % 4 == 0, "must be 4 byte aligned");
 598     __ ldrw(r0, Address(r0, oopDesc::klass_offset_in_bytes()));  // get klass
 599     __ cbzw(r0, error);      // if klass is NULL it is broken
 600 
 601     // return if everything seems ok
 602     __ bind(exit);
 603 
 604     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 605     __ ret(lr);
 606 
 607     // handle errors
 608     __ bind(error);
 609     __ ldp(c_rarg3, c_rarg2, Address(__ post(sp, 16)));
 610 
 611     __ push(RegSet::range(r0, r29), sp);
 612     // debug(char* msg, int64_t pc, int64_t regs[])
 613     __ mov(c_rarg0, rscratch1);      // pass address of error message
 614     __ mov(c_rarg1, lr);             // pass return address
 615     __ mov(c_rarg2, sp);             // pass address of regs on stack
 616 #ifndef PRODUCT
 617     assert(frame::arg_reg_save_area_bytes == 0, "not expecting frame reg save area");
 618 #endif
 619     BLOCK_COMMENT("call MacroAssembler::debug");
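
For reference, a minimal C++ sketch of the null-klass check emitted above, assuming the Lilliput layout the comment describes (narrowKlass in the upper 32 bits of the 64-bit header on a little-endian target). The offset constant and function name are illustrative, not HotSpot's actual interface:

    #include <cstdint>
    #include <cstring>

    // Assumption: 8-byte header with the narrowKlass in its upper 32 bits,
    // so on little-endian AArch64 it sits at byte offset 4.
    constexpr int kKlassOffsetBytes = 4;

    // Mirrors the emitted pair: ldrw r0, [r0, #klass_offset]; cbzw r0, error
    bool klass_looks_reasonable(const void* obj) {
      uint32_t nklass;
      std::memcpy(&nklass,
                  static_cast<const char*>(obj) + kKlassOffsetBytes,
                  sizeof(nklass));
      // Lock pointer bits can make this non-zero on a locked object, so a
      // broken klass may rarely go unnoticed, but a good oop is never
      // rejected, matching the comment above.
      return nklass != 0;
    }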

6459     gen_cas_entry(MacroAssembler::xword, memory_order_relaxed);
6460 
6461     AtomicStubMark mark_cmpxchg_4_release
6462       (_masm, &aarch64_atomic_cmpxchg_4_release_impl);
6463     gen_cas_entry(MacroAssembler::word, memory_order_release);
6464     AtomicStubMark mark_cmpxchg_8_release
6465       (_masm, &aarch64_atomic_cmpxchg_8_release_impl);
6466     gen_cas_entry(MacroAssembler::xword, memory_order_release);
6467 
6468     AtomicStubMark mark_cmpxchg_4_seq_cst
6469       (_masm, &aarch64_atomic_cmpxchg_4_seq_cst_impl);
6470     gen_cas_entry(MacroAssembler::word, memory_order_seq_cst);
6471     AtomicStubMark mark_cmpxchg_8_seq_cst
6472       (_masm, &aarch64_atomic_cmpxchg_8_seq_cst_impl);
6473     gen_cas_entry(MacroAssembler::xword, memory_order_seq_cst);
6474 
6475     ICache::invalidate_range(first_entry, __ pc() - first_entry);
6476   }
6477 #endif // LINUX
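
For readers less familiar with the memory orders instantiated above, a std::atomic analogue of the three CAS flavours (illustrative only; the stubs themselves are hand-written AArch64 code). 'word' corresponds to 32-bit operands, 'xword' to 64-bit:

    #include <atomic>
    #include <cstdint>

    bool cas_relaxed(std::atomic<uint64_t>& a, uint64_t& exp, uint64_t des) {
      return a.compare_exchange_strong(exp, des, std::memory_order_relaxed);
    }

    bool cas_release(std::atomic<uint64_t>& a, uint64_t& exp, uint64_t des) {
      // Release ordering applies to the successful exchange; the failure
      // path is only a load and must not be stronger, hence relaxed.
      return a.compare_exchange_strong(exp, des, std::memory_order_release,
                                       std::memory_order_relaxed);
    }

    bool cas_seq_cst(std::atomic<uint64_t>& a, uint64_t& exp, uint64_t des) {
      return a.compare_exchange_strong(exp, des, std::memory_order_seq_cst);
    }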
6478 
6479   // Pass object argument in r0 (which has to be preserved outside this stub)
6480   // Pass back result in r0
6481   // Clobbers rscratch1
6482   address generate_load_nklass() {
6483     __ align(CodeEntryAlignment);
6484     StubCodeMark mark(this, "StubRoutines", "load_nklass");
6485 
6486     address start = __ pc();
6487 
6488     __ set_last_Java_frame(sp, rfp, lr, rscratch1);
6489     __ enter();
6490     __ push(RegSet::of(rscratch1, rscratch2), sp);
6491     __ push_call_clobbered_registers_except(r0);
6492     __ call_VM_leaf(CAST_FROM_FN_PTR(address, oopDesc::load_nklass_runtime), 1);
6493     __ pop_call_clobbered_registers_except(r0);
6494     __ pop(RegSet::of(rscratch1, rscratch2), sp);
6495     __ leave();
6496     __ reset_last_Java_frame(true);
6497     __ ret(lr);
6498 
6499     return start;
6500   }
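
For orientation, a hedged C++ model of the fast/slow split this stub exists to support: callers can read the narrowKlass inline and only call out when the header may be displaced by locking. Only the stub wrapping oopDesc::load_nklass_runtime is established by the code above; the lock-bit test and helper shapes below are assumptions for illustration:

    #include <cstdint>

    using narrowKlass = uint32_t;

    // Stand-in for the runtime fall-back the stub calls; the real
    // oopDesc::load_nklass_runtime would locate the displaced header.
    narrowKlass load_nklass_runtime(const uint64_t* header) {
      return narrowKlass(*header >> 32);
    }

    narrowKlass load_nklass(const uint64_t* header) {
      uint64_t mark = *header;
      constexpr uint64_t kLockMask = 0x3;   // assumed lock-bit placement
      if ((mark & kLockMask) == 0) {
        return narrowKlass(mark >> 32);     // fast path: header in place
      }
      return load_nklass_runtime(header);   // slow path: ask the runtime
    }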
6501 
6502   // Continuation point for throwing of implicit exceptions that are
6503   // not handled in the current activation. Fabricates an exception
6504   // oop and initiates normal exception dispatching in this
6505   // frame. Since we need to preserve callee-saved values (currently
6506   // only for C2, but done for C1 as well) we need a callee-saved oop
6507   // map and therefore have to make these stubs into RuntimeStubs
6508   // rather than BufferBlobs.  If the compiler needs all registers to
6509   // be preserved between the fault point and the exception handler
6510   // then it must assume responsibility for that in
6511   // AbstractCompiler::continuation_for_implicit_null_exception or
6512   // continuation_for_implicit_division_by_zero_exception. All other
6513   // implicit exceptions (e.g., NullPointerException or
6514   // AbstractMethodError on entry) are either at call sites or
6515   // otherwise assume that stack unwinding will be initiated, so
6516   // caller saved registers were assumed volatile in the compiler.
6517 
6518 #undef __
6519 #define __ masm->
6520 
6521   address generate_throw_exception(const char* name,

7464       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
7465       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
7466     }
7467 
7468     if (UseCRC32CIntrinsics) {
7469       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
7470     }
7471 
7472     // Disabled until JDK-8210858 is fixed
7473     // if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
7474     //   StubRoutines::_dlog = generate_dlog();
7475     // }
7476 
7477     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
7478       StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
7479     }
7480 
7481     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
7482       StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
7483     }
7484 
7485     StubRoutines::_load_nklass = generate_load_nklass();
7486   }
7487 
7488   void generate_all() {
7489     // support for verify_oop (must happen after universe_init)
7490     if (VerifyOops) {
7491       StubRoutines::_verify_oop_subroutine_entry   = generate_verify_oop();
7492     }
7493     StubRoutines::_throw_AbstractMethodError_entry =
7494       generate_throw_exception("AbstractMethodError throw_exception",
7495                                CAST_FROM_FN_PTR(address,
7496                                                 SharedRuntime::
7497                                                 throw_AbstractMethodError));
7498 
7499     StubRoutines::_throw_IncompatibleClassChangeError_entry =
7500       generate_throw_exception("IncompatibleClassChangeError throw_exception",
7501                                CAST_FROM_FN_PTR(address,
7502                                                 SharedRuntime::
7503                                                 throw_IncompatibleClassChangeError));
7504 
7505     StubRoutines::_throw_NullPointerException_at_call_entry =