1 /*
  2  * Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "asm/macroAssembler.inline.hpp"
 28 #include "code/codeBlob.hpp"
 29 #include "code/vmreg.inline.hpp"
 30 #include "gc/z/zBarrier.inline.hpp"
 31 #include "gc/z/zBarrierSet.hpp"
 32 #include "gc/z/zBarrierSetAssembler.hpp"
 33 #include "gc/z/zBarrierSetRuntime.hpp"
 34 #include "gc/z/zThreadLocalData.hpp"
 35 #include "memory/resourceArea.hpp"
 36 #include "runtime/sharedRuntime.hpp"
 37 #include "utilities/macros.hpp"
 38 #ifdef COMPILER1
 39 #include "c1/c1_LIRAssembler.hpp"
 40 #include "c1/c1_MacroAssembler.hpp"
 41 #include "gc/z/c1/zBarrierSetC1.hpp"
 42 #endif // COMPILER1
 43 #ifdef COMPILER2
 44 #include "gc/z/c2/zBarrierSetC2.hpp"
 45 #endif // COMPILER2
 46 
 47 #ifdef PRODUCT
 48 #define BLOCK_COMMENT(str) /* nothing */
 49 #else
 50 #define BLOCK_COMMENT(str) __ block_comment(str)
 51 #endif
 52 
 53 #undef __
 54 #define __ masm->
 55 
// Emit an oop load from 'src' into 'dst' with a ZGC load barrier.
//
// Fast path: load the value and AND it against the per-thread address bad
// mask; if no bad bits are set, the loaded reference is good and we fall
// through to 'done'. Otherwise take the slow path: call the
// ZBarrierSetRuntime load-barrier leaf with the preloaded reference in
// c_rarg0 and the field address in c_rarg1, and move the healed reference
// (returned in x10) into 'dst'.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  assert_different_registers(t1, src.base());
  assert_different_registers(t0, t1, dst);

  Label done;

  // Load bad mask into temp register.
  // Materialize the field address in t0 first, so it survives for the
  // slow-path call (and for the load below).
  __ la(t0, src);
  __ ld(t1, address_bad_mask_from_thread(xthread));
  __ ld(dst, Address(t0));

  // Test reference against bad mask. If mask bad, then we need to fix it up.
  __ andr(t1, dst, t1);
  __ beqz(t1, done);

  // Slow path: set up a frame and call into the runtime.
  __ enter();

  // Preserve caller-saved registers, except 'dst' which will be overwritten
  // with the healed reference anyway.
  __ push_call_clobbered_registers_except(RegSet::of(dst));

  // c_rarg0: the (bad) reference that was just loaded.
  if (c_rarg0 != dst) {
    __ mv(c_rarg0, dst);
  }

  // c_rarg1: the address of the field, for self healing.
  __ mv(c_rarg1, t0);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  // Make sure dst has the return value.
  if (dst != x10) {
    __ mv(dst, x10);
  }

  __ pop_call_clobbered_registers_except(RegSet::of(dst));
  __ leave();

  __ bind(done);
}
105 
106 #ifdef ASSERT
107 
// Debug-only (ASSERT builds) store_at: before delegating the actual store to
// BarrierSetAssembler::store_at, verify that an oop value being stored has no
// address-bad-mask bits set, i.e. that the caller is not storing a stale
// (unhealed) reference. ZGC needs no store barrier, so in product builds the
// base class implementation is used directly.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register val,
                                    Register tmp1,
                                    Register tmp2) {
  // Verify value
  if (is_reference_type(type)) {
    // Note that src could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg.
      // Save and restore t0 ourselves rather than relying on the caller's temps.
      RegSet savedRegs = RegSet::of(t0);
      __ push_reg(savedRegs, sp);

      // A good oop has no bits in common with the bad mask.
      __ ld(t0, address_bad_mask_from_thread(xthread));
      __ andr(t0, val, t0);
      __ beqz(t0, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop_reg(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}
139 
140 #endif // ASSERT
141 
// Emit the ZGC arraycopy prologue: for oop arraycopies, apply the load
// barrier to every element of the source range up front by calling
// ZBarrierSetRuntime::load_barrier_on_oop_array(src, count), so the copy
// itself can proceed without per-element barriers. 'saved_regs' are
// preserved around the runtime call.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, t0);

  __ push_reg(saved_regs, sp);

  // Move (src, count) into (c_rarg0, c_rarg1) without clobbering either.
  if (count == c_rarg0 && src == c_rarg1) {
    // exactly backwards!!
    // Swap the two registers with the three-XOR trick (no scratch needed).
    __ xorr(c_rarg0, c_rarg0, c_rarg1);
    __ xorr(c_rarg1, c_rarg0, c_rarg1);
    __ xorr(c_rarg0, c_rarg0, c_rarg1);
  } else {
    __ mv(c_rarg0, src);
    __ mv(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ pop_reg(saved_regs, sp);

  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
176 
// Resolve a jobject while in native code, without a full transition into the
// VM. After the generic resolution, the resolved reference in 'robj' is tested
// against the thread's address bad mask; if any bad bits are set we cannot fix
// it up here, so control branches to 'slowpath'. The bad mask is loaded
// relative to 'jni_env' (rather than xthread) because only the JNIEnv pointer
// is available in this context.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // Compute the offset of address bad mask from the field of jni_environment
  long int bad_mask_relative_offset = (long int) (in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
                                                  in_bytes(JavaThread::jni_environment_offset()));

  // Load the address bad mask
  __ ld(tmp, Address(jni_env, bad_mask_relative_offset));

  // Check address bad mask
  __ andr(tmp, robj, tmp);
  __ bnez(tmp, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
202 
203 #ifdef COMPILER2
204 
205 OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) {
206   if (!OptoReg::is_reg(opto_reg)) {
207     return OptoReg::Bad;
208   }
209 
210   const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
211   if (vm_reg->is_FloatRegister()) {
212     return opto_reg & ~1;
213   }
214 
215   return opto_reg;
216 }
217 
218 #undef __
219 #define __ _masm->
220 
// RAII helper used by the C2 load-barrier stub: on construction it computes
// the set of live registers from the stub's register mask and pushes them;
// on destruction it pops them in reverse order. Registers that the slow-path
// call cannot clobber observably (C-ABI callee-saved registers, assembler
// temps, and the stub's result register) are excluded from the save set.
class ZSaveLiveRegisters {
private:
  MacroAssembler* const _masm;
  RegSet                _gp_regs;   // live general-purpose registers to save
  FloatRegSet           _fp_regs;   // live float registers to save
  VectorRegSet          _vp_regs;   // live vector registers to save

public:
  void initialize(ZLoadBarrierStubC2* stub) {
    // Record registers that needs to be saved/restored
    RegMaskIterator rmi(stub->live());
    while (rmi.has_next()) {
      const OptoReg::Name opto_reg = rmi.next();
      if (OptoReg::is_reg(opto_reg)) {
        const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
        if (vm_reg->is_Register()) {
          _gp_regs += RegSet::of(vm_reg->as_Register());
        } else if (vm_reg->is_FloatRegister()) {
          _fp_regs += FloatRegSet::of(vm_reg->as_FloatRegister());
        } else if (vm_reg->is_VectorRegister()) {
          // Vector registers span multiple slots; mask the slot index down to
          // the base slot of the containing vector register.
          const VMReg vm_reg_base = OptoReg::as_VMReg(opto_reg & ~(VectorRegisterImpl::max_slots_per_register - 1));
          _vp_regs += VectorRegSet::of(vm_reg_base->as_VectorRegister());
        } else {
          fatal("Unknown register type");
        }
      }
    }

    // Remove C-ABI SOE registers, tmp regs and _ref register that will be updated
    _gp_regs -= RegSet::range(x18, x27) + RegSet::of(x2) + RegSet::of(x8, x9) + RegSet::of(x5, stub->ref());
  }

  ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _gp_regs(),
      _fp_regs(),
      _vp_regs() {
    // Figure out what registers to save/restore
    initialize(stub);

    // Save registers
    __ push_reg(_gp_regs, sp);
    __ push_fp(_fp_regs, sp);
    __ push_vp(_vp_regs, sp);
  }

  ~ZSaveLiveRegisters() {
    // Restore registers (in the opposite order of the pushes above)
    __ pop_vp(_vp_regs, sp);
    __ pop_fp(_fp_regs, sp);
    __ pop_reg(_gp_regs, sp);
  }
};
274 
275 class ZSetupArguments {
276 private:
277   MacroAssembler* const _masm;
278   const Register        _ref;
279   const Address         _ref_addr;
280 
281 public:
282   ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
283       _masm(masm),
284       _ref(stub->ref()),
285       _ref_addr(stub->ref_addr()) {
286 
287     // Setup arguments
288     if (_ref_addr.base() == noreg) {
289       // No self healing
290       if (_ref != c_rarg0) {
291         __ mv(c_rarg0, _ref);
292       }
293       __ mv(c_rarg1, zr);
294     } else {
295       // Self healing
296       if (_ref == c_rarg0) {
297         // _ref is already at correct place
298         __ la(c_rarg1, _ref_addr);
299       } else if (_ref != c_rarg1) {
300         // _ref is in wrong place, but not in c_rarg1, so fix it first
301         __ la(c_rarg1, _ref_addr);
302         __ mv(c_rarg0, _ref);
303       } else if (_ref_addr.base() != c_rarg0) {
304         assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0");
305         __ mv(c_rarg0, _ref);
306         __ la(c_rarg1, _ref_addr);
307       } else {
308         assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0");
309         if (_ref_addr.base() == c_rarg0) {
310           __ mv(t1, c_rarg1);
311           __ la(c_rarg1, _ref_addr);
312           __ mv(c_rarg0, t1);
313         } else {
314           ShouldNotReachHere();
315         }
316       }
317     }
318   }
319 
320   ~ZSetupArguments() {
321     // Transfer result
322     if (_ref != x10) {
323       __ mv(_ref, x10);
324     }
325   }
326 };
327 
328 #undef __
329 #define __ masm->
330 
// Emit the out-of-line C2 load-barrier stub: save the live registers, set up
// the runtime-call arguments, call the stub's slow path, then (via the RAII
// destructors, which run in reverse order) move the result from x10 into the
// ref register and restore the saved registers, before jumping back to the
// continuation.
void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
  BLOCK_COMMENT("ZLoadBarrierStubC2");

  // Stub entry
  __ bind(*stub->entry());

  {
    ZSaveLiveRegisters save_live_registers(masm, stub);
    ZSetupArguments setup_arguments(masm, stub);
    int32_t offset = 0;
    // Materialize the slow-path address patchably, then call it (link in x1).
    __ la_patchable(t0, stub->slow_path(), offset);
    __ jalr(x1, t0, offset);
  }

  // Stub exit
  __ j(*stub->continuation());
}
348 
349 #undef __
350 
351 #endif // COMPILER2
352 
353 #ifdef COMPILER1
354 #undef __
355 #define __ ce->masm()->
356 
// Emit the C1 load-barrier test: AND the loaded reference with the thread's
// address bad mask, leaving the result in t1 (zero means the reference is
// good). The conditional branch on t1 is emitted by the caller.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(xthread, ref->as_register(), t1);
  __ ld(t1, address_bad_mask_from_thread(xthread));
  __ andr(t1, t1, ref->as_register());
}
363 
// Emit the out-of-line C1 load-barrier stub: compute the field address, pass
// (ref, ref_addr) as stub parameters on the stack, call the shared runtime
// stub (which returns the healed reference in x10), move the result into the
// ref register, and jump back to the continuation. x10 is saved/restored
// around the call unless it already holds the result or the tmp register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save x10 unless it is the result or tmp register
  // Set up SP to accommodate parameters and maybe x10.
  if (ref != x10 && tmp != x10) {
    __ sub(sp, sp, 32);
    __ sd(x10, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(x10, "Bad oop");


  // Move result into place
  if (ref != x10) {
    __ mv(ref, x10);
  }

  // Restore x10 unless it is the result or tmp register
  if (ref != x10 && tmp != x10) {
    __ ld(x10, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ j(*stub->continuation());
}
419 
420 #undef __
421 #define __ sasm->
422 
// Generate the shared C1 runtime stub that the per-site load-barrier stubs
// call: it loads the (ref, ref_addr) stub parameters into the C argument
// registers, calls the ZBarrierSetRuntime load-barrier leaf for the given
// decorators, and leaves the healed reference in x10. All call-clobbered
// registers except x10 (the result) are preserved.
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ prologue("zgc_load_barrier stub", false);

  __ push_call_clobbered_registers_except(RegSet::of(x10));

  // Setup arguments
  __ load_parameter(0, c_rarg0);
  __ load_parameter(1, c_rarg1);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  __ pop_call_clobbered_registers_except(RegSet::of(x10));

  __ epilogue();
}
439 
440 #undef __
441 #endif // COMPILER1