
src/hotspot/cpu/aarch64/aarch64.ad

--- old/src/hotspot/cpu/aarch64/aarch64.ad

 1969     st->print("bhi #slow_path");
 1970   }
 1971 }
 1972 #endif
 1973 
 1974 void MachEpilogNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
 1975   Compile* C = ra_->C;
 1976   C2_MacroAssembler _masm(&cbuf);
 1977   int framesize = C->output()->frame_slots() << LogBytesPerInt;
 1978 
 1979   __ remove_frame(framesize);
 1980 
 1981   if (StackReservedPages > 0 && C->has_reserved_stack_access()) {
 1982     __ reserved_stack_check();
 1983   }
 1984 
 1985   if (do_polling() && C->is_method_compilation()) {
 1986     Label dummy_label;
 1987     Label* code_stub = &dummy_label;
 1988     if (!C->output()->in_scratch_emit_size()) {
 1989       code_stub = &C->output()->safepoint_poll_table()->add_safepoint(__ offset());
 1990     }
 1991     __ relocate(relocInfo::poll_return_type);
 1992     __ safepoint_poll(*code_stub, true /* at_return */, false /* acquire */, true /* in_nmethod */);
 1993   }
 1994 }
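For reference, a hedged sketch of what the return poll above conceptually expands to on AArch64 (consistent with the "bhi #slow_path" string printed by the format routine at the top of this hunk; the exact field and helper names are assumptions, not taken from this diff):

      __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset())); // per-thread polling word
      __ cmp(sp, rscratch1);            // at_return: compare SP against the poll watermark
      __ br(Assembler::HI, *code_stub); // SP above watermark -> take the poll slow path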
 1995 
 1996 uint MachEpilogNode::size(PhaseRegAlloc *ra_) const {
 1997   // Variable size. Determine dynamically.
 1998   return MachNode::size(ra_);
 1999 }
 2000 
 2001 int MachEpilogNode::reloc() const {
 2002   // Return number of relocatable values contained in this instruction.
 2003   return 1; // 1 for polling page.
 2004 }
 2005 
 2006 const Pipeline * MachEpilogNode::pipeline() const {
 2007   return MachNode::pipeline_class();
 2008 }
 2009 

 3811 
 3812     assert_different_registers(oop, box, tmp, disp_hdr);
 3813 
 3814     // Load markWord from object into displaced_header.
 3815     __ ldr(disp_hdr, Address(oop, oopDesc::mark_offset_in_bytes()));
 3816 
 3817     if (DiagnoseSyncOnValueBasedClasses != 0) {
 3818       __ load_klass(tmp, oop);
 3819       __ ldrw(tmp, Address(tmp, Klass::access_flags_offset()));
 3820       __ tstw(tmp, JVM_ACC_IS_VALUE_BASED_CLASS);
 3821       __ br(Assembler::NE, cont);
 3822     }
 3823 
 3824     if (UseBiasedLocking && !UseOptoBiasInlining) {
 3825       __ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
 3826     }
 3827 
 3828     // Check for existing monitor
 3829     __ tbnz(disp_hdr, exact_log2(markWord::monitor_value), object_has_monitor);
 3830 
 3831     // Set tmp to be (markWord of object | UNLOCK_VALUE).
 3832     __ orr(tmp, disp_hdr, markWord::unlocked_value);
 3833 
 3834     // Initialize the box. (Must happen before we update the object mark!)
 3835     __ str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3836 
 3837     // Compare object markWord with an unlocked value (tmp) and if
 3838     // equal exchange the stack address of our box with object markWord.
 3839     // On failure disp_hdr contains the possibly locked markWord.
 3840     __ cmpxchg(oop, tmp, box, Assembler::xword, /*acquire*/ true,
 3841                /*release*/ true, /*weak*/ false, disp_hdr);
 3842     __ br(Assembler::EQ, cont);
 3843 
 3844     assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");
 3845 
 3846     // If the compare-and-exchange succeeded, then we found an unlocked
 3847     // object, have now locked it, and will continue at label cont.
 3848 
 3849     __ bind(cas_failed);
 3850     // We did not see an unlocked object so try the fast recursive case.
 3851 
 3852     // Check if the owner is self by comparing the value in the
 3853     // markWord of object (disp_hdr) with the stack pointer.
 3854     __ mov(rscratch1, sp);
 3855     __ sub(disp_hdr, disp_hdr, rscratch1);
 3856     __ mov(tmp, (address) (~(os::vm_page_size()-1) | markWord::lock_mask_in_place));
 3857     // If the result is zero the lock is held by our own frame, and storing that
 3858     // zero as the displaced header in the box marks it as a recursive lock.
 3859     __ ands(tmp/*==0?*/, disp_hdr, tmp);   // Sets flags for result
 3860     __ str(tmp/*==0, perhaps*/, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3861 
 3862     __ b(cont);
 3863 
 3864     // Handle existing monitor.
 3865     __ bind(object_has_monitor);
 3866 
 3867     // The object's monitor m is unlocked iff m->owner == NULL,
 3868     // otherwise m->owner may contain a thread or a stack address.
 3869     //
 3870     // Try to CAS m->owner from NULL to current thread.
 3871     __ add(tmp, disp_hdr, (ObjectMonitor::owner_offset_in_bytes()-markWord::monitor_value));
 3872     __ cmpxchg(tmp, zr, rthread, Assembler::xword, /*acquire*/ true,
 3873                /*release*/ true, /*weak*/ false, rscratch1); // Sets flags for result
 3874 
 3875     // Store a non-null value into the box to avoid looking like a re-entrant
 3876     // lock. The fast-path monitor unlock code checks for
 3877     // markWord::monitor_value so use markWord::unused_mark which has the
 3878     // relevant bit set, and also matches ObjectSynchronizer::enter.
 3879     __ mov(tmp, (address)markWord::unused_mark().value());
 3880     __ str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3881 
 3882     __ br(Assembler::EQ, cont); // CAS success means locking succeeded
 3883 
 3884     __ cmp(rscratch1, rthread);
 3885     __ br(Assembler::NE, cont); // Check for recursive locking
 3886 
 3887     // Recursive lock case
 3888     __ increment(Address(disp_hdr, ObjectMonitor::recursions_offset_in_bytes() - markWord::monitor_value), 1);
 3889     // flag == EQ still from the cmp above, checking if this is a reentrant lock
 3890 
 3891     __ bind(cont);
 3892     // flag == EQ indicates success
 3893     // flag == NE indicates failure
 3894   %}
 3895 
 3896   enc_class aarch64_enc_fast_unlock(iRegP object, iRegP box, iRegP tmp, iRegP tmp2) %{
 3897     C2_MacroAssembler _masm(&cbuf);
 3898     Register oop = as_Register($object$$reg);
 3899     Register box = as_Register($box$$reg);
 3900     Register disp_hdr = as_Register($tmp$$reg);
 3901     Register tmp = as_Register($tmp2$$reg);
 3902     Label cont;
 3903     Label object_has_monitor;
 3904 
 3905     assert_different_registers(oop, box, tmp, disp_hdr);
 3906 
 3907     if (UseBiasedLocking && !UseOptoBiasInlining) {
 3908       __ biased_locking_exit(oop, tmp, cont);
 3909     }
 3910 
 3911     // Find the lock address and load the displaced header from the stack.
 3912     __ ldr(disp_hdr, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3913 
 3914     // If the displaced header is 0, we have a recursive unlock.
 3915     __ cmp(disp_hdr, zr);
 3916     __ br(Assembler::EQ, cont);
 3917 
 3918     // Handle existing monitor.
 3919     __ ldr(tmp, Address(oop, oopDesc::mark_offset_in_bytes()));
 3920     __ tbnz(tmp, exact_log2(markWord::monitor_value), object_has_monitor);
 3921 
 3922     // Check if it is still a lightweight lock: this is true if we
 3923     // see the stack address of the basicLock in the markWord of the
 3924     // object.
 3925 
 3926     __ cmpxchg(oop, box, disp_hdr, Assembler::xword, /*acquire*/ false,
 3927                /*release*/ true, /*weak*/ false, tmp);
 3928     __ b(cont);
 3929 
 3930     assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");
 3931 
 3932     // Handle existing monitor.
 3933     __ bind(object_has_monitor);
 3934     STATIC_ASSERT(markWord::monitor_value <= INT_MAX);
 3935     __ add(tmp, tmp, -(int)markWord::monitor_value); // monitor
 3936     __ ldr(disp_hdr, Address(tmp, ObjectMonitor::recursions_offset_in_bytes()));
 3937 
 3938     Label notRecursive;
 3939     __ cbz(disp_hdr, notRecursive);
 3940 
 3941     // Recursive lock
 3942     __ sub(disp_hdr, disp_hdr, 1u);
 3943     __ str(disp_hdr, Address(tmp, ObjectMonitor::recursions_offset_in_bytes()));
 3944     __ cmp(disp_hdr, disp_hdr); // Sets flags for result (always EQ: success)
 3945     __ b(cont);
 3946 
 3947     __ bind(notRecursive);
 3948     __ ldr(rscratch1, Address(tmp, ObjectMonitor::EntryList_offset_in_bytes()));
 3949     __ ldr(disp_hdr, Address(tmp, ObjectMonitor::cxq_offset_in_bytes()));
 3950     __ orr(rscratch1, rscratch1, disp_hdr); // Will be 0 if both are 0.
 3951     __ cmp(rscratch1, zr); // Sets flags for result
 3952     __ cbnz(rscratch1, cont);
 3953     // need a release store here
 3954     __ lea(tmp, Address(tmp, ObjectMonitor::owner_offset_in_bytes()));
 3955     __ stlr(zr, tmp); // set unowned
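The stlr above is deliberate: clearing the owner must be a store-release so that every store performed inside the critical section is visible to the next locker before the monitor reads as unowned. A C++ analogy (illustrative only, not the runtime's code):

      std::atomic<void*> owner;                        // stands in for m->owner
      owner.store(nullptr, std::memory_order_release); // like stlr zr, [owner]; earlier
                                                       // stores cannot be reordered past it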

 7422 %}
 7423 
 7424 // Load Klass Pointer
 7425 instruct loadKlass(iRegPNoSp dst, memory8 mem)
 7426 %{
 7427   match(Set dst (LoadKlass mem));
 7428   predicate(!needs_acquiring_load(n));
 7429 
 7430   ins_cost(4 * INSN_COST);
 7431   format %{ "ldr  $dst, $mem\t# class" %}
 7432 
 7433   ins_encode(aarch64_enc_ldr(dst, mem));
 7434 
 7435   ins_pipe(iload_reg_mem);
 7436 %}
 7437 
 7438 // Load Narrow Klass Pointer
 7439 instruct loadNKlass(iRegNNoSp dst, memory4 mem)
 7440 %{
 7441   match(Set dst (LoadNKlass mem));
 7442   predicate(!needs_acquiring_load(n));
 7443 
 7444   ins_cost(4 * INSN_COST);
 7445   format %{ "ldrw  $dst, $mem\t# compressed class ptr" %}
 7446 
 7447   ins_encode(aarch64_enc_ldrw(dst, mem));
 7448 
 7449   ins_pipe(iload_reg_mem);
 7450 %}
 7451 
 7452 // Load Float
 7453 instruct loadF(vRegF dst, memory4 mem)
 7454 %{
 7455   match(Set dst (LoadF mem));
 7456   predicate(!needs_acquiring_load(n));
 7457 
 7458   ins_cost(4 * INSN_COST);
 7459   format %{ "ldrs  $dst, $mem\t# float" %}
 7460 
 7461   ins_encode( aarch64_enc_ldrs(dst, mem) );
 7462 
 7463   ins_pipe(pipe_class_memory);
 7464 %}
 7465 
 7466 // Load Double
 7467 instruct loadD(vRegD dst, memory8 mem)
 7468 %{
 7469   match(Set dst (LoadD mem));
 7470   predicate(!needs_acquiring_load(n));
 7471 

+++ new/src/hotspot/cpu/aarch64/aarch64.ad

 1969     st->print("bhi #slow_path");
 1970   }
 1971 }
 1972 #endif
 1973 
 1974 void MachEpilogNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
 1975   Compile* C = ra_->C;
 1976   C2_MacroAssembler _masm(&cbuf);
 1977   int framesize = C->output()->frame_slots() << LogBytesPerInt;
 1978 
 1979   __ remove_frame(framesize);
 1980 
 1981   if (StackReservedPages > 0 && C->has_reserved_stack_access()) {
 1982     __ reserved_stack_check();
 1983   }
 1984 
 1985   if (do_polling() && C->is_method_compilation()) {
 1986     Label dummy_label;
 1987     Label* code_stub = &dummy_label;
 1988     if (!C->output()->in_scratch_emit_size()) {
 1989       C2SafepointPollStub* stub = new (C->comp_arena()) C2SafepointPollStub(__ offset());
 1990       C->output()->add_stub(stub);
 1991       code_stub = &stub->entry();
 1992     }
 1993     __ relocate(relocInfo::poll_return_type);
 1994     __ safepoint_poll(*code_stub, true /* at_return */, false /* acquire */, true /* in_nmethod */);
 1995   }
 1996 }
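Relative to the old version, the poll slow path is now registered through the generic C2 stub list instead of a dedicated safepoint-poll table. The pattern, restated from the diff (that the stub body is emitted out of line after the main code is an assumption about add_stub, not shown here):

      C2SafepointPollStub* stub = new (C->comp_arena()) C2SafepointPollStub(__ offset());
      C->output()->add_stub(stub);   // stub body emitted out of line
      code_stub = &stub->entry();    // the poll branches here on a pending safepoint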
 1997 
 1998 uint MachEpilogNode::size(PhaseRegAlloc *ra_) const {
 1999   // Variable size. Determine dynamically.
 2000   return MachNode::size(ra_);
 2001 }
 2002 
 2003 int MachEpilogNode::reloc() const {
 2004   // Return number of relocatable values contained in this instruction.
 2005   return 1; // 1 for polling page.
 2006 }
 2007 
 2008 const Pipeline * MachEpilogNode::pipeline() const {
 2009   return MachNode::pipeline_class();
 2010 }
 2011 

 3813 
 3814     assert_different_registers(oop, box, tmp, disp_hdr);
 3815 
 3816     // Load markWord from object into displaced_header.
 3817     __ ldr(disp_hdr, Address(oop, oopDesc::mark_offset_in_bytes()));
 3818 
 3819     if (DiagnoseSyncOnValueBasedClasses != 0) {
 3820       __ load_klass(tmp, oop);
 3821       __ ldrw(tmp, Address(tmp, Klass::access_flags_offset()));
 3822       __ tstw(tmp, JVM_ACC_IS_VALUE_BASED_CLASS);
 3823       __ br(Assembler::NE, cont);
 3824     }
 3825 
 3826     if (UseBiasedLocking && !UseOptoBiasInlining) {
 3827       __ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
 3828     }
 3829 
 3830     // Check for existing monitor
 3831     __ tbnz(disp_hdr, exact_log2(markWord::monitor_value), object_has_monitor);
 3832 
 3833     if (LockingMode == LM_MONITOR) {
 3834       __ tst(oop, oop); // Set NE to indicate 'failure' -> take slow-path. We know that oop != 0.
 3835       __ b(cont);
 3836     } else if (LockingMode == LM_LEGACY) {
 3837       // Set tmp to be (markWord of object | UNLOCK_VALUE).
 3838       __ orr(tmp, disp_hdr, markWord::unlocked_value);
 3839 
 3840       // Initialize the box. (Must happen before we update the object mark!)
 3841       __ str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3842 
 3843       // Compare object markWord with an unlocked value (tmp) and if
 3844       // equal exchange the stack address of our box with object markWord.
 3845       // On failure disp_hdr contains the possibly locked markWord.
 3846       __ cmpxchg(oop, tmp, box, Assembler::xword, /*acquire*/ true,
 3847                  /*release*/ true, /*weak*/ false, disp_hdr);
 3848       __ br(Assembler::EQ, cont);
 3849 
 3850       assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");
 3851 
 3852       // If the compare-and-exchange succeeded, then we found an unlocked
 3853       // object, have now locked it, and will continue at label cont.
 3854 
 3855       __ bind(cas_failed);
 3856       // We did not see an unlocked object so try the fast recursive case.
 3857 
 3858       // Check if the owner is self by comparing the value in the
 3859       // markWord of object (disp_hdr) with the stack pointer.
 3860       __ mov(rscratch1, sp);
 3861       __ sub(disp_hdr, disp_hdr, rscratch1);
 3862       __ mov(tmp, (address) (~(os::vm_page_size()-1) | markWord::lock_mask_in_place));
 3863       // If the result is zero the lock is held by our own frame, and storing that
 3864       // zero as the displaced header in the box marks it as a recursive lock.
 3865       __ ands(tmp/*==0?*/, disp_hdr, tmp);   // Sets flags for result
 3866       __ str(tmp/*==0, perhaps*/, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3867       __ b(cont);
 3868     } else {
 3869       assert(LockingMode == LM_LIGHTWEIGHT, "must be");
 3870       __ fast_lock(oop, disp_hdr, tmp, rscratch1, cont);
 3871       __ b(cont);
 3872     }
 3873 
 3874     // Handle existing monitor.
 3875     __ bind(object_has_monitor);
 3876 
 3877     // The object's monitor m is unlocked iff m->owner == NULL,
 3878     // otherwise m->owner may contain a thread or a stack address.
 3879     //
 3880     // Try to CAS m->owner from NULL to current thread.
 3881     __ add(tmp, disp_hdr, (ObjectMonitor::owner_offset_in_bytes()-markWord::monitor_value));
 3882     __ cmpxchg(tmp, zr, rthread, Assembler::xword, /*acquire*/ true,
 3883                /*release*/ true, /*weak*/ false, rscratch1); // Sets flags for result
 3884 
 3885     if (LockingMode != LM_LIGHTWEIGHT) {
 3886       // Store a non-null value into the box to avoid looking like a re-entrant
 3887       // lock. The fast-path monitor unlock code checks for
 3888       // markWord::monitor_value so use markWord::unused_mark which has the
 3889       // relevant bit set, and also matches ObjectSynchronizer::enter.
 3890       __ mov(tmp, (address)markWord::unused_mark().value());
 3891       __ str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3892     }
 3893     __ br(Assembler::EQ, cont); // CAS success means locking succeeded
 3894 
 3895     __ cmp(rscratch1, rthread);
 3896     __ br(Assembler::NE, cont); // Check for recursive locking
 3897 
 3898     // Recursive lock case
 3899     __ increment(Address(disp_hdr, ObjectMonitor::recursions_offset_in_bytes() - markWord::monitor_value), 1);
 3900     // flag == EQ still from the cmp above, checking if this is a reentrant lock
 3901 
 3902     __ bind(cont);
 3903     // flag == EQ indicates success
 3904     // flag == NE indicates failure
 3905   %}
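The rewritten enc_class dispatches on LockingMode; a hedged summary of the three branches as they appear above (the comments are interpretation of this diff, not authoritative documentation):

      if (LockingMode == LM_MONITOR) {
        // No inline fast path: force NE (the oop is known non-null) so the
        // slow path inflates the monitor and locks it.
      } else if (LockingMode == LM_LEGACY) {
        // Stack locking: CAS the box address into the object's markWord and
        // keep the displaced header in the on-stack BasicLock.
      } else { // LM_LIGHTWEIGHT
        // Lock-stack scheme: MacroAssembler::fast_lock does the work, using
        // the same EQ-success / NE-failure flag convention.
      }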
 3906 
 3907   enc_class aarch64_enc_fast_unlock(iRegP object, iRegP box, iRegP tmp, iRegP tmp2) %{
 3908     C2_MacroAssembler _masm(&cbuf);
 3909     Register oop = as_Register($object$$reg);
 3910     Register box = as_Register($box$$reg);
 3911     Register disp_hdr = as_Register($tmp$$reg);
 3912     Register tmp = as_Register($tmp2$$reg);
 3913     Label cont;
 3914     Label object_has_monitor;
 3915 
 3916     assert_different_registers(oop, box, tmp, disp_hdr);
 3917 
 3918     if (UseBiasedLocking && !UseOptoBiasInlining) {
 3919       __ biased_locking_exit(oop, tmp, cont);
 3920     }
 3921 
 3922     if (LockingMode == LM_LEGACY) {
 3923       // Find the lock address and load the displaced header from the stack.
 3924       __ ldr(disp_hdr, Address(box, BasicLock::displaced_header_offset_in_bytes()));
 3925 
 3926       // If the displaced header is 0, we have a recursive unlock.
 3927       __ cmp(disp_hdr, zr);
 3928       __ br(Assembler::EQ, cont);
 3929     }
 3930 
 3931     // Handle existing monitor.
 3932     __ ldr(tmp, Address(oop, oopDesc::mark_offset_in_bytes()));
 3933     __ tbnz(tmp, exact_log2(markWord::monitor_value), object_has_monitor);
 3934 
 3935     if (LockingMode == LM_MONITOR) {
 3936       __ tst(oop, oop); // Set NE to indicate 'failure' -> take slow-path. We know that oop != 0.
 3937       __ b(cont);
 3938     } else if (LockingMode == LM_LEGACY) {
 3939       // Check if it is still a lightweight lock: this is true if we
 3940       // see the stack address of the basicLock in the markWord of the
 3941       // object.
 3942 
 3943       __ cmpxchg(oop, box, disp_hdr, Assembler::xword, /*acquire*/ false,
 3944                  /*release*/ true, /*weak*/ false, tmp);
 3945       __ b(cont);
 3946     } else {
 3947       assert(LockingMode == LM_LIGHTWEIGHT, "must be");
 3948       __ fast_unlock(oop, tmp, box, disp_hdr, cont);
 3949       __ b(cont);
 3950     }
 3951 
 3952     assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");
 3953 
 3954     // Handle existing monitor.
 3955     __ bind(object_has_monitor);
 3956     STATIC_ASSERT(markWord::monitor_value <= INT_MAX);
 3957     __ add(tmp, tmp, -(int)markWord::monitor_value); // monitor
 3958 
 3959     if (LockingMode == LM_LIGHTWEIGHT) {
 3960       // If the owner is anonymous, we need to fix it -- in an outline stub.
 3961       Register tmp2 = disp_hdr;
 3962       __ ldr(tmp2, Address(tmp, ObjectMonitor::owner_offset_in_bytes()));
 3963       // We cannot use tbnz here because the target might be too far away and
 3964       // cannot be encoded.
 3965       __ tst(tmp2, (uint64_t)ObjectMonitor::ANONYMOUS_OWNER);
 3966       C2HandleAnonOMOwnerStub* stub = new (Compile::current()->comp_arena()) C2HandleAnonOMOwnerStub(tmp, tmp2);
 3967       Compile::current()->output()->add_stub(stub);
 3968       __ br(Assembler::NE, stub->entry());
 3969       __ bind(stub->continuation());
 3970     }
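Under LM_LIGHTWEIGHT a monitor can be inflated while the object is still stack-locked, leaving ObjectMonitor::ANONYMOUS_OWNER in the owner field until some thread claims it; the out-of-line stub repairs the owner before the normal exit sequence runs. Conceptually the stub does something like the following (a guess at its job, not its actual code):

      // inside C2HandleAnonOMOwnerStub (conceptual):
      // monitor->set_owner(current_thread);     // replace ANONYMOUS_OWNER
      // lock_stack_of(current_thread).pop(obj); // object is no longer stack-locked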
 3971 
 3972     __ ldr(disp_hdr, Address(tmp, ObjectMonitor::recursions_offset_in_bytes()));
 3973 
 3974     Label notRecursive;
 3975     __ cbz(disp_hdr, notRecursive);
 3976 
 3977     // Recursive lock
 3978     __ sub(disp_hdr, disp_hdr, 1u);
 3979     __ str(disp_hdr, Address(tmp, ObjectMonitor::recursions_offset_in_bytes()));
 3980     __ cmp(disp_hdr, disp_hdr); // Sets flags for result (always EQ: success)
 3981     __ b(cont);
 3982 
 3983     __ bind(notRecursive);
 3984     __ ldr(rscratch1, Address(tmp, ObjectMonitor::EntryList_offset_in_bytes()));
 3985     __ ldr(disp_hdr, Address(tmp, ObjectMonitor::cxq_offset_in_bytes()));
 3986     __ orr(rscratch1, rscratch1, disp_hdr); // Will be 0 if both are 0.
 3987     __ cmp(rscratch1, zr); // Sets flags for result
 3988     __ cbnz(rscratch1, cont);
 3989     // need a release store here
 3990     __ lea(tmp, Address(tmp, ObjectMonitor::owner_offset_in_bytes()));
 3991     __ stlr(zr, tmp); // set unowned

 7458 %}
 7459 
 7460 // Load Klass Pointer
 7461 instruct loadKlass(iRegPNoSp dst, memory8 mem)
 7462 %{
 7463   match(Set dst (LoadKlass mem));
 7464   predicate(!needs_acquiring_load(n));
 7465 
 7466   ins_cost(4 * INSN_COST);
 7467   format %{ "ldr  $dst, $mem\t# class" %}
 7468 
 7469   ins_encode(aarch64_enc_ldr(dst, mem));
 7470 
 7471   ins_pipe(iload_reg_mem);
 7472 %}
 7473 
 7474 // Load Narrow Klass Pointer
 7475 instruct loadNKlass(iRegNNoSp dst, memory4 mem)
 7476 %{
 7477   match(Set dst (LoadNKlass mem));
 7478   predicate(!needs_acquiring_load(n) && !UseCompactObjectHeaders);
 7479 
 7480   ins_cost(4 * INSN_COST);
 7481   format %{ "ldrw  $dst, $mem\t# compressed class ptr" %}
 7482 
 7483   ins_encode(aarch64_enc_ldrw(dst, mem));
 7484 
 7485   ins_pipe(iload_reg_mem);
 7486 %}
 7487 
 7488 instruct loadNKlassLilliput(iRegNNoSp dst, memory4 mem, rFlagsReg cr)
 7489 %{
 7490   match(Set dst (LoadNKlass mem));
 7491   effect(KILL cr);
 7492   predicate(!needs_acquiring_load(n) && UseCompactObjectHeaders);
 7493 
 7494   ins_cost(4 * INSN_COST);
 7495   format %{ "ldrw  $dst, $mem\t# compressed class ptr" %}
 7496   ins_encode %{
 7497     assert($mem$$disp == oopDesc::klass_offset_in_bytes(), "expect correct offset");
 7498     assert($mem$$index$$Register == noreg, "expect no index");
 7499     Register dst = $dst$$Register;
 7500     Register obj = $mem$$base$$Register;
 7501     C2LoadNKlassStub* stub = new (Compile::current()->comp_arena()) C2LoadNKlassStub(dst);
 7502     Compile::current()->output()->add_stub(stub);
 7503     __ ldr(dst, Address(obj, oopDesc::mark_offset_in_bytes()));
 7504     // NOTE: We can't use tbnz here, because the target is sometimes too far away
 7505     // and cannot be encoded.
 7506     __ tst(dst, markWord::monitor_value);
 7507     __ br(Assembler::NE, stub->entry());
 7508     __ bind(stub->continuation());
 7509     __ lsr(dst, dst, markWord::klass_shift);
 7510   %}
 7511   ins_pipe(pipe_slow);
 7512 %}
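With compact object headers (Lilliput) the narrow class pointer lives in the upper bits of the mark word, which is why the instruct above loads the mark word, diverts to a stub when the object is monitor-locked (the original header must then be recovered from the ObjectMonitor), and finally shifts the class bits into place. As straight-line pseudocode (hedged sketch; the accessors are illustrative):

      uint64_t mark = obj->mark_word();               // ldr dst, [obj, #mark_offset]
      if (mark & markWord::monitor_value) {           // tst + br NE into the stub
        ObjectMonitor* m = (ObjectMonitor*)(mark - markWord::monitor_value);
        mark = m->header_value();                     // stub refetches the real header
      }
      narrowKlass nk = (narrowKlass)(mark >> markWord::klass_shift); // lsr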
 7513 
 7514 // Load Float
 7515 instruct loadF(vRegF dst, memory4 mem)
 7516 %{
 7517   match(Set dst (LoadF mem));
 7518   predicate(!needs_acquiring_load(n));
 7519 
 7520   ins_cost(4 * INSN_COST);
 7521   format %{ "ldrs  $dst, $mem\t# float" %}
 7522 
 7523   ins_encode( aarch64_enc_ldrs(dst, mem) );
 7524 
 7525   ins_pipe(pipe_class_memory);
 7526 %}
 7527 
 7528 // Load Double
 7529 instruct loadD(vRegD dst, memory8 mem)
 7530 %{
 7531   match(Set dst (LoadD mem));
 7532   predicate(!needs_acquiring_load(n));
 7533 