/*
 * Copyright (c) 2019, 2022, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeBlob.hpp"
#include "code/vmreg.inline.hpp"
#include "gc/z/zBarrier.inline.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zBarrierSetRuntime.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#endif // COMPILER1
#ifdef COMPILER2
#include "gc/z/c2/zBarrierSetC2.hpp"
#endif // COMPILER2

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#else
#define BLOCK_COMMENT(str) __ block_comment(str)
#endif

#undef __
#define __ masm->

void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  assert_different_registers(rscratch1, rscratch2, src.base());
  assert_different_registers(rscratch1, rscratch2, dst);

  Label done;

  // Load bad mask into scratch register.
  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
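  // Keep the field address in rscratch2; it is passed to the slow path
  // below so the runtime can self heal, storing the good oop back into
  // the field.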
  __ lea(rscratch2, src);
  __ ldr(dst, src);

  // Test reference against bad mask. If any bad bits are set, the
  // reference needs fixing up.
  __ tst(dst, rscratch1);
  __ br(Assembler::EQ, done);

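  // Build a frame; the return address may be signed (PAC), so let
  // enter() strip the authentication code before it is saved.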
  __ enter(/*strip_ret_addr*/true);

  __ push_call_clobbered_registers_except(RegSet::of(dst));

  if (c_rarg0 != dst) {
    __ mov(c_rarg0, dst);
  }
  __ mov(c_rarg1, rscratch2);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  // Make sure dst has the return value.
  if (dst != r0) {
    __ mov(dst, r0);
  }

  __ pop_call_clobbered_registers_except(RegSet::of(dst));
  __ leave();

  __ bind(done);
}

#ifdef ASSERT

void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register val,
                                    Register tmp1,
                                    Register tmp2,
                                    Register tmp3) {
  // Verify value
  if (is_reference_type(type)) {
    // Note that val could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg.
      RegSet savedRegs = RegSet::of(rscratch1);
      __ push(savedRegs, sp);

      __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
      __ tst(val, rscratch1);
      __ br(Assembler::EQ, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);
}

#endif // ASSERT

void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, rscratch1);

  __ push(saved_regs, sp);

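  // Shuffle src/count into c_rarg0/c_rarg1, taking care not to clobber
  // either value when they already sit in the argument registers.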
  if (count == c_rarg0) {
    if (src == c_rarg1) {
      // exactly backwards!!
      __ mov(rscratch1, c_rarg0);
      __ mov(c_rarg0, c_rarg1);
      __ mov(c_rarg1, rscratch1);
    } else {
      __ mov(c_rarg1, count);
      __ mov(c_rarg0, src);
    }
  } else {
    __ mov(c_rarg0, src);
    __ mov(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ pop(saved_regs, sp);

  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}

void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // The Address offset, -784, is too large for a direct load; the
  // reachable immediate range is only +127 to -128.
  __ mov(tmp, (int64_t)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
              in_bytes(JavaThread::jni_environment_offset())));

  // Load address bad mask
  __ add(tmp, jni_env, tmp);
  __ ldr(tmp, Address(tmp));

  // Check address bad mask
  __ tst(robj, tmp);
  __ br(Assembler::NE, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}

#ifdef COMPILER1

#undef __
#define __ ce->masm()->

void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(rscratch1, rthread, ref->as_register());

  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
  __ tst(ref->as_register(), rscratch1);
}

void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save r0 unless it is the result or tmp register
  // Set up SP to accommodate parameters and maybe r0.
  if (ref != r0 && tmp != r0) {
    __ sub(sp, sp, 32);
    __ str(r0, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(r0, "Bad oop");

  // Move result into place
  if (ref != r0) {
    __ mov(ref, r0);
  }

  // Restore r0 unless it is the result or tmp register
  if (ref != r0 && tmp != r0) {
    __ ldr(r0, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ b(*stub->continuation());
}

#undef __
#define __ sasm->

void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ prologue("zgc_load_barrier stub", false);

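  // Save all call-clobbered registers except r0, which will hold the
  // healed reference on return.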
  __ push_call_clobbered_registers_except(RegSet::of(r0));

  // Setup arguments
  __ load_parameter(0, c_rarg0);
  __ load_parameter(1, c_rarg1);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  __ pop_call_clobbered_registers_except(RegSet::of(r0));

  __ epilogue();
}
#endif // COMPILER1

#ifdef COMPILER2

OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) {
  if (!OptoReg::is_reg(opto_reg)) {
    return OptoReg::Bad;
  }

  const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
  if (vm_reg->is_FloatRegister()) {
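    // An FP/SIMD register spans two adjacent OptoReg slots; clear the
    // low bit so the register is always named by its first (even) slot.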
    return opto_reg & ~1;
  }

  return opto_reg;
}

#undef __
#define __ _masm->

class ZSaveLiveRegisters {
private:
  MacroAssembler* const _masm;
  RegSet                _gp_regs;
  FloatRegSet           _fp_regs;
  PRegSet               _p_regs;

public:
  void initialize(ZLoadBarrierStubC2* stub) {
    // Record registers that need to be saved/restored
    RegMaskIterator rmi(stub->live());
    while (rmi.has_next()) {
      const OptoReg::Name opto_reg = rmi.next();
      if (OptoReg::is_reg(opto_reg)) {
        const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
        if (vm_reg->is_Register()) {
          _gp_regs += RegSet::of(vm_reg->as_Register());
        } else if (vm_reg->is_FloatRegister()) {
          _fp_regs += FloatRegSet::of(vm_reg->as_FloatRegister());
        } else if (vm_reg->is_PRegister()) {
          _p_regs += PRegSet::of(vm_reg->as_PRegister());
        } else {
          fatal("Unknown register type");
        }
      }
    }

    // Remove C-ABI SOE registers, scratch regs and _ref register that will be updated
    _gp_regs -= RegSet::range(r19, r30) + RegSet::of(r8, r9, stub->ref());
  }

  ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _gp_regs(),
      _fp_regs(),
      _p_regs() {

    // Figure out what registers to save/restore
    initialize(stub);

    // Save registers
    __ push(_gp_regs, sp);
    __ push_fp(_fp_regs, sp);
    __ push_p(_p_regs, sp);
  }

  ~ZSaveLiveRegisters() {
    // Restore registers
    __ pop_p(_p_regs, sp);
    __ pop_fp(_fp_regs, sp);

    // External runtime call may clobber ptrue reg
    __ reinitialize_ptrue();

    __ pop(_gp_regs, sp);
  }
};

#undef __
#define __ _masm->

class ZSetupArguments {
private:
  MacroAssembler* const _masm;
  const Register        _ref;
  const Address         _ref_addr;

public:
  ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _ref(stub->ref()),
      _ref_addr(stub->ref_addr()) {

    // Setup arguments: c_rarg0 = ref, c_rarg1 = ref address
    // (or 0 when there is no self healing)
    if (_ref_addr.base() == noreg) {
      // No self healing
      if (_ref != c_rarg0) {
        __ mov(c_rarg0, _ref);
      }
      __ mov(c_rarg1, 0);
    } else {
      // Self healing
      if (_ref == c_rarg0) {
        // _ref is already at correct place
        __ lea(c_rarg1, _ref_addr);
      } else if (_ref != c_rarg1) {
        // _ref is in wrong place, but not in c_rarg1, so fix it first
        __ lea(c_rarg1, _ref_addr);
        __ mov(c_rarg0, _ref);
      } else if (_ref_addr.base() != c_rarg0 && _ref_addr.index() != c_rarg0) {
        assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0");
        __ mov(c_rarg0, _ref);
        __ lea(c_rarg1, _ref_addr);
      } else {
        assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0");
        if (_ref_addr.base() == c_rarg0 || _ref_addr.index() == c_rarg0) {
          __ mov(rscratch2, c_rarg1);
          __ lea(c_rarg1, _ref_addr);
          __ mov(c_rarg0, rscratch2);
        } else {
          ShouldNotReachHere();
        }
      }
    }
  }

  ~ZSetupArguments() {
    // Transfer result
    if (_ref != r0) {
      __ mov(_ref, r0);
    }
  }
};

#undef __
#define __ masm->

void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
  BLOCK_COMMENT("ZLoadBarrierStubC2");

  // Stub entry
  __ bind(*stub->entry());

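  // ZSaveLiveRegisters and ZSetupArguments are scoped to the block
  // below: their destructors run after the call returns, first moving
  // the result from r0 into the ref register, then restoring the saved
  // registers.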
  {
    ZSaveLiveRegisters save_live_registers(masm, stub);
    ZSetupArguments setup_arguments(masm, stub);
    __ mov(rscratch1, stub->slow_path());
    __ blr(rscratch1);
  }
  // Stub exit
  __ b(*stub->continuation());
}

#undef __

#endif // COMPILER2