1 /*
  2  * Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved.
  3  * Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "asm/macroAssembler.inline.hpp"
 27 #include "code/codeBlob.hpp"
 28 #include "code/vmreg.inline.hpp"
 29 #include "gc/z/zBarrier.inline.hpp"
 30 #include "gc/z/zBarrierSet.hpp"
 31 #include "gc/z/zBarrierSetAssembler.hpp"
 32 #include "gc/z/zBarrierSetRuntime.hpp"
 33 #include "gc/z/zThreadLocalData.hpp"
 34 #include "memory/resourceArea.hpp"
 35 #include "runtime/sharedRuntime.hpp"
 36 #include "utilities/macros.hpp"
 37 #ifdef COMPILER1
 38 #include "c1/c1_LIRAssembler.hpp"
 39 #include "c1/c1_MacroAssembler.hpp"
 40 #include "gc/z/c1/zBarrierSetC1.hpp"
 41 #endif // COMPILER1
 42 #ifdef COMPILER2
 43 #include "gc/z/c2/zBarrierSetC2.hpp"
 44 #endif // COMPILER2
 45 
 46 #ifdef PRODUCT
 47 #define BLOCK_COMMENT(str) /* nothing */
 48 #else
 49 #define BLOCK_COMMENT(str) __ block_comment(str)
 50 #endif
 51 
 52 #undef __
 53 #define __ masm->
 54 
// ZGC load barrier: load the oop, test it against the per-thread address
// bad mask, and if any bad bit is set call into the runtime to heal the
// reference (and the field it was loaded from) before handing it to the
// caller in dst.
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  // t0 holds the field address and t1 the bad mask below; neither may
  // alias the address base or the destination.
  assert_different_registers(t1, src.base());
  assert_different_registers(t0, t1, dst);

  Label done;

  // Materialize the field address in t0, load the bad mask into t1,
  // then load the reference itself into dst.
  __ la(t0, src);
  __ ld(t1, address_bad_mask_from_thread(xthread));
  __ ld(dst, Address(t0));

  // Test reference against bad mask. If mask bad, then we need to fix it up.
  __ andr(t1, dst, t1);
  __ beqz(t1, done);

  __ enter();

  // Preserve caller-saved state across the runtime call, except dst which
  // will be overwritten with the healed reference anyway.
  __ push_call_clobbered_registers_except(RegSet::of(dst));

  // Runtime arguments: c_rarg0 = loaded reference, c_rarg1 = field address.
  if (c_rarg0 != dst) {
    __ mv(c_rarg0, dst);
  }

  __ mv(c_rarg1, t0);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  // Make sure dst has the return value (x10/a0 is the RISC-V C-ABI
  // integer return register).
  if (dst != x10) {
    __ mv(dst, x10);
  }

  __ pop_call_clobbered_registers_except(RegSet::of(dst));
  __ leave();

  __ bind(done);
}
104 
105 #ifdef ASSERT
106 
// Debug-only override (compiled under #ifdef ASSERT): before delegating the
// actual store to the generic BarrierSetAssembler, verify that an oop value
// about to be stored has no bad-mask bits set. ZGC needs no store barrier,
// so in product builds the base class implementation is used directly.
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register val,
                                    Register tmp1,
                                    Register tmp2) {
  // Verify value
  if (is_reference_type(type)) {
    // Note that src could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg, so scavenge t0 explicitly
      // instead of relying on the caller-provided temporaries.
      RegSet savedRegs = RegSet::of(t0);
      __ push_reg(savedRegs, sp);

      // A good oop ANDed with the bad mask must yield zero.
      __ ld(t0, address_bad_mask_from_thread(xthread));
      __ andr(t0, val, t0);
      __ beqz(t0, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop_reg(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}
138 
139 #endif // ASSERT
140 
// Emitted before an oop arraycopy: applies the load barrier to all source
// array elements up front via a single runtime call with (src, count),
// so the copy loop itself needs no per-element barriers.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, t0);

  // Preserve whatever the caller still needs across the runtime call.
  __ push_reg(saved_regs, sp);

  // Runtime arguments: c_rarg0 = src, c_rarg1 = count.
  if (count == c_rarg0 && src == c_rarg1) {
    // exactly backwards!! — swap the two registers in place using the
    // three-xor trick, which needs no scratch register.
    __ xorr(c_rarg0, c_rarg0, c_rarg1);
    __ xorr(c_rarg1, c_rarg0, c_rarg1);
    __ xorr(c_rarg0, c_rarg0, c_rarg1);
  } else {
    __ mv(c_rarg0, src);
    __ mv(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ pop_reg(saved_regs, sp);

  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
175 
// Fast-path resolution of a jobject from native (JNI) code: resolve the
// handle via the base class, then test the resulting oop against the bad
// mask. If the oop needs healing, branch to slowpath instead of calling the
// runtime here — the caller owns the slow-path handling.
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // Compute the offset of address bad mask from the field of jni_environment;
  // only jni_env (not xthread) is available here, so address the thread-local
  // bad mask relative to it.
  long int bad_mask_relative_offset = (long int) (in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
                                                  in_bytes(JavaThread::jni_environment_offset()));

  // Load the address bad mask
  __ ld(tmp, Address(jni_env, bad_mask_relative_offset));

  // Check address bad mask; any set bit means the reference is stale.
  __ andr(tmp, robj, tmp);
  __ bnez(tmp, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
201 
202 #ifdef COMPILER2
203 
204 OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) {
205   if (!OptoReg::is_reg(opto_reg)) {
206     return OptoReg::Bad;
207   }
208 
209   const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
210   if (vm_reg->is_FloatRegister()) {
211     return opto_reg & ~1;
212   }
213 
214   return opto_reg;
215 }
216 
217 #undef __
218 #define __ _masm->
219 
// RAII helper used around the C2 load-barrier runtime call: the constructor
// computes and pushes all registers recorded as live by the register
// allocator, and the destructor pops them in the reverse order.
class ZSaveLiveRegisters {
private:
  MacroAssembler* const _masm;
  RegSet                _gp_regs;  // live general-purpose registers to save
  RegSet                _fp_regs;  // live floating-point registers to save
  RegSet                _vp_regs;  // live vector registers to save

public:
  // Walk the stub's live mask and sort each live location into the
  // appropriate register set.
  void initialize(ZLoadBarrierStubC2* stub) {
    // Record registers that need to be saved/restored
    RegMaskIterator rmi(stub->live());
    while (rmi.has_next()) {
      const OptoReg::Name opto_reg = rmi.next();
      if (OptoReg::is_reg(opto_reg)) {
        const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
        if (vm_reg->is_Register()) {
          _gp_regs += RegSet::of(vm_reg->as_Register());
        } else if (vm_reg->is_FloatRegister()) {
          _fp_regs += RegSet::of((Register)vm_reg->as_FloatRegister());
        } else if (vm_reg->is_VectorRegister()) {
          // A vector register spans several mask slots; round down to the
          // base slot before converting, so each register is added once.
          const VMReg vm_reg_base = OptoReg::as_VMReg(opto_reg & ~(VectorRegisterImpl::max_slots_per_register - 1));
          _vp_regs += RegSet::of((Register)vm_reg_base->as_VectorRegister());
        } else {
          fatal("Unknown register type");
        }
      }
    }

    // Remove C-ABI SOE registers, tmp regs and _ref register that will be updated
    _gp_regs -= RegSet::range(x18, x27) + RegSet::of(x2) + RegSet::of(x8, x9) + RegSet::of(x5, stub->ref());
  }

  ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _gp_regs(),
      _fp_regs(),
      _vp_regs() {
    // Figure out what registers to save/restore
    initialize(stub);

    // Save registers
    __ push_reg(_gp_regs, sp);
    __ push_fp(_fp_regs, sp);
    __ push_vp(_vp_regs, sp);
  }

  ~ZSaveLiveRegisters() {
    // Restore registers — must mirror the constructor's push order exactly.
    __ pop_vp(_vp_regs, sp);
    __ pop_fp(_fp_regs, sp);
    __ pop_reg(_gp_regs, sp);
  }
};
273 
// RAII helper that marshals the C2 load-barrier stub's arguments into the
// C calling convention: c_rarg0 = the loaded reference, c_rarg1 = the
// address it was loaded from (zero when there is no address to self-heal).
// The case analysis below exists purely to avoid clobbering one argument
// while setting up the other. The destructor moves the call's return value
// back into the stub's ref register.
class ZSetupArguments {
private:
  MacroAssembler* const _masm;
  const Register        _ref;       // register holding the loaded reference
  const Address         _ref_addr;  // address the reference was loaded from

public:
  ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _ref(stub->ref()),
      _ref_addr(stub->ref_addr()) {

    // Setup arguments
    if (_ref_addr.base() == noreg) {
      // No self healing — pass a null address in c_rarg1.
      if (_ref != c_rarg0) {
        __ mv(c_rarg0, _ref);
      }
      __ mv(c_rarg1, zr);
    } else {
      // Self healing
      if (_ref == c_rarg0) {
        // _ref is already at correct place
        __ la(c_rarg1, _ref_addr);
      } else if (_ref != c_rarg1) {
        // _ref is in wrong place, but not in c_rarg1, so fix it first
        // (computing the address first is safe: _ref_addr may use c_rarg0,
        // which has not been overwritten yet).
        __ la(c_rarg1, _ref_addr);
        __ mv(c_rarg0, _ref);
      } else if (_ref_addr.base() != c_rarg0) {
        assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0");
        __ mv(c_rarg0, _ref);
        __ la(c_rarg1, _ref_addr);
      } else {
        assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0");
        if (_ref_addr.base() == c_rarg0) {
          // Both argument registers are occupied by each other's value;
          // bounce the reference through t1.
          __ mv(t1, c_rarg1);
          __ la(c_rarg1, _ref_addr);
          __ mv(c_rarg0, t1);
        } else {
          ShouldNotReachHere();
        }
      }
    }
  }

  ~ZSetupArguments() {
    // Transfer result from the C-ABI return register (x10/a0) into _ref.
    if (_ref != x10) {
      __ mv(_ref, x10);
    }
  }
};
326 
327 #undef __
328 #define __ masm->
329 
// Emit the out-of-line slow path for a C2 load barrier: save live registers,
// marshal (ref, ref_addr) into argument registers, call the stub's slow-path
// entry, then restore state and jump back to the fast-path continuation.
void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
  BLOCK_COMMENT("ZLoadBarrierStubC2");

  // Stub entry
  __ bind(*stub->entry());

  {
    // Scoped so the destructors (restore registers, move result into ref)
    // run in reverse construction order before the jump below is emitted.
    ZSaveLiveRegisters save_live_registers(masm, stub);
    ZSetupArguments setup_arguments(masm, stub);
    // Call the slow path through t0 with a patchable address sequence;
    // x1 (ra) receives the return address.
    int32_t offset = 0;
    __ la_patchable(t0, stub->slow_path(), offset);
    __ jalr(x1, t0, offset);
  }

  // Stub exit
  __ j(*stub->continuation());
}
347 
348 #undef __
349 
350 #endif // COMPILER2
351 
352 #ifdef COMPILER1
353 #undef __
354 #define __ ce->masm()->
355 
// Emit the C1 fast-path barrier test: AND the loaded reference with the
// per-thread bad mask, leaving the result in t1. The branch that consumes
// t1 is emitted separately by the LIR code that follows this test.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(xthread, ref->as_register(), t1);
  __ ld(t1, address_bad_mask_from_thread(xthread));
  __ andr(t1, t1, ref->as_register());
}
362 
// Emit the out-of-line C1 slow path: pass (ref, ref_addr) to the shared
// runtime stub via C1 parameter slots on the stack, call it, and move the
// healed reference back into ref. x10 is preserved manually when it is not
// already the result or temp register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save x10 unless it is the result or tmp register.
  // Set up SP to accommodate parameters and maybe x10: the low 16 bytes
  // hold the two store_parameter slots, with x10 saved at sp+16 when needed.
  if (ref != x10 && tmp != x10) {
    __ sub(sp, sp, 32);
    __ sd(x10, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result (the runtime stub returns the healed oop in x10)
  __ verify_oop(x10, "Bad oop");


  // Move result into place
  if (ref != x10) {
    __ mv(ref, x10);
  }

  // Restore x10 unless it is the result or tmp register
  if (ref != x10 && tmp != x10) {
    __ ld(x10, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ j(*stub->continuation());
}
418 
419 #undef __
420 #define __ sasm->
421 
// Generate the shared C1 runtime stub that the per-site slow paths far_call
// into: read the two parameters stored by the caller, invoke the ZGC load
// barrier runtime entry, and return the healed oop in x10 (which is
// deliberately excluded from the save/restore sets).
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ prologue("zgc_load_barrier stub", false);

  __ push_call_clobbered_registers_except(RegSet::of(x10));

  // Setup arguments: parameter 0 = reference, parameter 1 = field address
  // (matching the store_parameter calls at the stub call sites).
  __ load_parameter(0, c_rarg0);
  __ load_parameter(1, c_rarg1);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  __ pop_call_clobbered_registers_except(RegSet::of(x10));

  __ epilogue();
}
438 
439 #undef __
440 #endif // COMPILER1