< prev index next >

src/hotspot/cpu/riscv/macroAssembler_riscv.cpp

Print this page

2158   Label retry_load, nope;
2159   bind(retry_load);
2160   // Load reserved from the memory location
2161   lr_d(tmp, addr, Assembler::aqrl);
2162   // Fail and exit if it is not what we expect
2163   bne(tmp, oldv, nope);
2164   // If the store conditional succeeds, tmp will be zero
2165   sc_d(tmp, newv, addr, Assembler::rl);
2166   beqz(tmp, succeed);
2167   // Retry only when the store conditional failed
2168   j(retry_load);
2169 
2170   bind(nope);
2171   membar(AnyAny);
2172   mv(oldv, tmp);
2173   if (fail != NULL) {
2174     j(*fail);
2175   }
2176 }
2177 
// Compare-and-exchange the mark word (object header) of obj.
// oldv holds the expected mark word, newv the replacement, tmp is a scratch
// register. Control transfers to succeed on a successful swap; otherwise
// cmpxchgptr leaves the observed value in oldv and branches to *fail when
// fail is non-null (see cmpxchgptr above).
void MacroAssembler::cmpxchg_obj_header(Register oldv, Register newv, Register obj, Register tmp,
                                        Label &succeed, Label *fail) {
  // The CAS can target obj directly only because the mark word is at offset 0.
  assert(oopDesc::mark_offset_in_bytes() == 0, "assumption");
  cmpxchgptr(oldv, newv, obj, tmp, succeed, fail);
}
2183 
2184 void MacroAssembler::load_reserved(Register addr,
2185                                    enum operand_size size,
2186                                    Assembler::Aqrl acquire) {
2187   switch (size) {
2188     case int64:
2189       lr_d(t0, addr, acquire);
2190       break;
2191     case int32:
2192       lr_w(t0, addr, acquire);
2193       break;
2194     case uint32:
2195       lr_w(t0, addr, acquire);
2196       zero_extend(t0, t0, 32);
2197       break;
2198     default:
2199       ShouldNotReachHere();
2200   }
2201 }
2202 
2203 void MacroAssembler::store_conditional(Register addr,

4133     }
4134   } else if (src.first() != dst.first()) {
4135     if (src.is_single_phys_reg() && dst.is_single_phys_reg()) {
4136       fmv_d(dst.first()->as_FloatRegister(), src.first()->as_FloatRegister());
4137     } else {
4138       ShouldNotReachHere();
4139     }
4140   }
4141 }
4142 
4143 void MacroAssembler::rt_call(address dest, Register tmp) {
4144   CodeBlob *cb = CodeCache::find_blob(dest);
4145   if (cb) {
4146     far_call(RuntimeAddress(dest));
4147   } else {
4148     int32_t offset = 0;
4149     la_patchable(tmp, RuntimeAddress(dest), offset);
4150     jalr(x1, tmp, offset);
4151   }
4152 }















































2158   Label retry_load, nope;
2159   bind(retry_load);
2160   // Load reserved from the memory location
2161   lr_d(tmp, addr, Assembler::aqrl);
2162   // Fail and exit if it is not what we expect
2163   bne(tmp, oldv, nope);
2164   // If the store conditional succeeds, tmp will be zero
2165   sc_d(tmp, newv, addr, Assembler::rl);
2166   beqz(tmp, succeed);
2167   // Retry only when the store conditional failed
2168   j(retry_load);
2169 
2170   bind(nope);
2171   membar(AnyAny);
2172   mv(oldv, tmp);
2173   if (fail != NULL) {
2174     j(*fail);
2175   }
2176 }
2177 






2178 void MacroAssembler::load_reserved(Register addr,
2179                                    enum operand_size size,
2180                                    Assembler::Aqrl acquire) {
2181   switch (size) {
2182     case int64:
2183       lr_d(t0, addr, acquire);
2184       break;
2185     case int32:
2186       lr_w(t0, addr, acquire);
2187       break;
2188     case uint32:
2189       lr_w(t0, addr, acquire);
2190       zero_extend(t0, t0, 32);
2191       break;
2192     default:
2193       ShouldNotReachHere();
2194   }
2195 }
2196 
2197 void MacroAssembler::store_conditional(Register addr,

4127     }
4128   } else if (src.first() != dst.first()) {
4129     if (src.is_single_phys_reg() && dst.is_single_phys_reg()) {
4130       fmv_d(dst.first()->as_FloatRegister(), src.first()->as_FloatRegister());
4131     } else {
4132       ShouldNotReachHere();
4133     }
4134   }
4135 }
4136 
4137 void MacroAssembler::rt_call(address dest, Register tmp) {
4138   CodeBlob *cb = CodeCache::find_blob(dest);
4139   if (cb) {
4140     far_call(RuntimeAddress(dest));
4141   } else {
4142     int32_t offset = 0;
4143     la_patchable(tmp, RuntimeAddress(dest), offset);
4144     jalr(x1, tmp, offset);
4145   }
4146 }
4147 
// Attempt to fast-lock an object. Fall-through on success, branch to slow label
// on failure.
// Registers:
//  - obj: the object to be locked
//  - hdr: the header, already loaded from obj, will be destroyed
//  - tmp1, tmp2, tmp3: temporary registers, will be destroyed
void MacroAssembler::fast_lock(Register obj, Register hdr, Register tmp1, Register tmp2, Register tmp3, Label& slow) {
  // Check if we would have space on lock-stack for the object.
  // tmp1 = current lock-stack top pointer, tmp2 = lock-stack limit.
  ld(tmp1, Address(xthread, Thread::lock_stack_current_offset()));
  ld(tmp2, Address(xthread, Thread::lock_stack_limit_offset()));
  // Lock-stack full: give up. The trailing 'true' presumably requests a
  // far-capable branch since 'slow' may be out of short-branch range —
  // NOTE(review): confirm against bge()'s is_far parameter.
  bge(tmp1, tmp2, slow, true);

  // Load (object->mark() | 1) into hdr: hdr becomes the mark word we
  // expect to find when the object is unlocked.
  ori(hdr, hdr, markWord::unlocked_value);
  // Clear lock-bits, into tmp2: tmp2 is the 'locked' mark word to install.
  xori(tmp2, hdr, markWord::unlocked_value);
  // Try to swing header from unlocked to locked; on CAS failure cmpxchgptr
  // branches to 'slow'.
  Label success;
  cmpxchgptr(hdr, tmp2, obj, tmp3, success, &slow);
  bind(success);

  // After successful lock, push object on lock-stack and bump the top
  // pointer (tmp1 still holds the pre-checked top from above).
  sd(obj, Address(tmp1, 0));
  add(tmp1, tmp1, oopSize);
  sd(tmp1, Address(xthread, Thread::lock_stack_current_offset()));
}
4174 
// Attempt to fast-unlock an object: the inverse of fast_lock. Falls through
// on success, branches to 'slow' when the CAS on the header fails.
// Registers:
//  - obj: the object to be unlocked
//  - hdr: the header, loaded from obj by the caller, will be destroyed
//  - tmp1, tmp2: temporary registers, will be destroyed
void MacroAssembler::fast_unlock(Register obj, Register hdr, Register tmp1, Register tmp2, Label& slow) {
  // Load the expected old header (lock-bits cleared to indicate 'locked') into hdr
  mv(tmp1, ~markWord::lock_mask_in_place);
  andr(hdr, hdr, tmp1);

  // Load the new header (unlocked) into tmp1
  ori(tmp1, hdr, markWord::unlocked_value);

  // Try to swing header from locked to unlocked; cmpxchgptr branches to
  // 'slow' if the observed header differs from hdr.
  Label success;
  cmpxchgptr(hdr, tmp1, obj, tmp2, success, &slow);
  bind(success);

  // After successful unlock, pop object from lock-stack by decrementing the
  // top pointer (the popped slot is not cleared).
  ld(tmp1, Address(xthread, Thread::lock_stack_current_offset()));
  sub(tmp1, tmp1, oopSize);
  sd(tmp1, Address(xthread, Thread::lock_stack_current_offset()));
}
< prev index next >