/*
 * Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_LIRGenerator.hpp"
#include "c1/c1_CodeStubs.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "utilities/macros.hpp"

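// Slow-path stub for the C1 load barrier. It records the access decorators,
// the resolved field address and the register holding the loaded reference,
// and branches to an out-of-line runtime stub that performs the barrier work
// and returns the healed reference in the same register.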
ZLoadBarrierStubC1::ZLoadBarrierStubC1(LIRAccess& access, LIR_Opr ref, address runtime_stub) :
    _decorators(access.decorators()),
    _ref_addr(access.resolved_addr()),
    _ref(ref),
    _tmp(LIR_OprFact::illegalOpr),
    _runtime_stub(runtime_stub) {

  assert(_ref_addr->is_address(), "Must be an address");
  assert(_ref->is_register(), "Must be a register");

  // Allocate tmp register if needed
  if (_ref_addr->as_address_ptr()->index()->is_valid() ||
      _ref_addr->as_address_ptr()->disp() != 0) {
    // Has index or displacement, need tmp register to load address into
    _tmp = access.gen()->new_pointer_register();
  }
}

DecoratorSet ZLoadBarrierStubC1::decorators() const {
  return _decorators;
}

LIR_Opr ZLoadBarrierStubC1::ref() const {
  return _ref;
}

LIR_Opr ZLoadBarrierStubC1::ref_addr() const {
  return _ref_addr;
}

LIR_Opr ZLoadBarrierStubC1::tmp() const {
  return _tmp;
}

address ZLoadBarrierStubC1::runtime_stub() const {
  return _runtime_stub;
}

void ZLoadBarrierStubC1::visit(LIR_OpVisitState* visitor) {
  visitor->do_slow_case();
  visitor->do_input(_ref_addr);
  visitor->do_output(_ref);
  if (_tmp->is_valid()) {
    visitor->do_temp(_tmp);
  }
}

void ZLoadBarrierStubC1::emit_code(LIR_Assembler* ce) {
  ZBarrierSet::assembler()->generate_c1_load_barrier_stub(ce, this);
}

#ifndef PRODUCT
void ZLoadBarrierStubC1::print_name(outputStream* out) const {
  out->print("ZLoadBarrierStubC1");
}
#endif // PRODUCT

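// LIR operation for the load barrier fast path: it tests the loaded reference
// against the thread-local bad mask. On platforms with a flags register
// (e.g. x86, AArch64) the test leaves its result in the condition flags;
// on RISC-V, which has no flags register, the result of the AND is
// materialized in _mask and compared against zero by the caller
// (see ZBarrierSetC1::load_barrier below).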
class LIR_OpZLoadBarrierTest : public LIR_Op {
private:
#ifdef RISCV
  LIR_Opr _mask;
#endif
  LIR_Opr _opr;

public:
  LIR_OpZLoadBarrierTest(
#ifdef RISCV
                         LIR_Opr mask,
#endif
                         LIR_Opr opr) :
      LIR_Op(),
#ifdef RISCV
      _mask(mask),
#endif
      _opr(opr) {}

  virtual void visit(LIR_OpVisitState* state) {
#ifdef RISCV
    // The test reads the loaded reference and writes the mask register,
    // so register them as input and output respectively.
    state->do_input(_opr);
    state->do_output(_mask);
#else
    state->do_input(_opr);
#endif
  }

  virtual void emit_code(LIR_Assembler* ce) {
    ZBarrierSet::assembler()->generate_c1_load_barrier_test(ce,
#ifdef RISCV
                                                            _mask,
#endif
                                                            _opr);
  }

  virtual void print_instr(outputStream* out) const {
#ifdef RISCV
    _mask->print(out);
    out->print(" ");
#endif
    _opr->print(out);
    out->print(" ");
  }

#ifndef PRODUCT
  virtual const char* name() const {
    return "lir_z_load_barrier_test";
  }
#endif // PRODUCT
};

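// ZGC only needs barriers for accesses that involve oops.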
static bool barrier_needed(LIRAccess& access) {
  return ZBarrierSet::barrier_needed(access.decorators(), access.type());
}

ZBarrierSetC1::ZBarrierSetC1() :
    _load_barrier_on_oop_field_preloaded_runtime_stub(NULL),
    _load_barrier_on_weak_oop_field_preloaded_runtime_stub(NULL) {}

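// Selects the runtime stub matching the reference strength of the access:
// weak oop loads use the weak variant, all other loads the strong variant.
// Phantom loads are not expected here, as asserted below.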
address ZBarrierSetC1::load_barrier_on_oop_field_preloaded_runtime_stub(DecoratorSet decorators) const {
  assert((decorators & ON_PHANTOM_OOP_REF) == 0, "Unsupported decorator");
  //assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Unsupported decorator");

  if ((decorators & ON_WEAK_OOP_REF) != 0) {
    return _load_barrier_on_weak_oop_field_preloaded_runtime_stub;
  } else {
    return _load_barrier_on_oop_field_preloaded_runtime_stub;
  }
}

#ifdef ASSERT
#define __ access.gen()->lir(__FILE__, __LINE__)->
#else
#define __ access.gen()->lir()->
#endif

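// Emits the load barrier for a loaded reference: the fast-path test of the
// reference against the bad mask, followed by a conditional branch to the
// slow-path stub. Execution continues at the stub's continuation label once
// the reference has been found good or has been healed.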
void ZBarrierSetC1::load_barrier(LIRAccess& access, LIR_Opr result) const {
  // Fast path
#ifdef RISCV
  LIR_Opr mask = access.gen()->new_pointer_register();
#endif
  __ append(new LIR_OpZLoadBarrierTest(
#ifdef RISCV
                                       mask,
#endif
                                       result));

  // Slow path
  const address runtime_stub = load_barrier_on_oop_field_preloaded_runtime_stub(access.decorators());
  CodeStub* const stub = new ZLoadBarrierStubC1(access, result, runtime_stub);
#ifdef RISCV
  __ cmp(lir_cond_notEqual, mask, LIR_OprFact::longConst(0));
#endif
  __ branch(lir_cond_notEqual, stub);
  __ branch_destination(stub->continuation());
}

LIR_Opr ZBarrierSetC1::resolve_address(LIRAccess& access, bool resolve_in_register) {
  // We must resolve in register when patching. This is to avoid
  // having a patch area in the load barrier stub, since the call
  // into the runtime to patch will not have the proper oop map.
  const bool patch_before_barrier = barrier_needed(access) && (access.decorators() & C1_NEEDS_PATCHING) != 0;
  return BarrierSetC1::resolve_address(access, resolve_in_register || patch_before_barrier);
}

#undef __

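// Performs the actual load and then applies the load barrier to the result
// register when the access requires one.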
void ZBarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
  BarrierSetC1::load_at_resolved(access, result);

  if (barrier_needed(access)) {
    load_barrier(access, result);
  }
}

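// Atomic accesses (xchg/cmpxchg) to oop fields must not operate on a field
// that still contains a bad reference. Emitting a synthetic load, including
// its load barrier, heals the field as a side effect; the loaded value itself
// is discarded.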
static void pre_load_barrier(LIRAccess& access) {
  DecoratorSet decorators = access.decorators();

  // Downgrade access to MO_UNORDERED
  decorators = (decorators & ~MO_DECORATOR_MASK) | MO_UNORDERED;

  // Remove ACCESS_WRITE
  decorators = (decorators & ~ACCESS_WRITE);

  // Generate synthetic load at
  access.gen()->access_load_at(decorators,
                               access.type(),
                               access.base().item(),
                               access.offset().opr(),
                               access.gen()->new_register(access.type()),
                               NULL /* patch_emit_info */,
                               NULL /* load_emit_info */);
}

LIR_Opr ZBarrierSetC1::atomic_xchg_at_resolved(LIRAccess& access, LIRItem& value) {
  if (barrier_needed(access)) {
    pre_load_barrier(access);
  }

  return BarrierSetC1::atomic_xchg_at_resolved(access, value);
}

LIR_Opr ZBarrierSetC1::atomic_cmpxchg_at_resolved(LIRAccess& access, LIRItem& cmp_value, LIRItem& new_value) {
  if (barrier_needed(access)) {
    pre_load_barrier(access);
  }

  return BarrierSetC1::atomic_cmpxchg_at_resolved(access, cmp_value, new_value);
}

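// Code generation closure passed to Runtime1 for producing the out-of-line
// load barrier runtime stubs.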
class ZLoadBarrierRuntimeStubCodeGenClosure : public StubAssemblerCodeGenClosure {
private:
  const DecoratorSet _decorators;

public:
  ZLoadBarrierRuntimeStubCodeGenClosure(DecoratorSet decorators) :
      _decorators(decorators) {}

  virtual OopMapSet* generate_code(StubAssembler* sasm) {
    ZBarrierSet::assembler()->generate_c1_load_barrier_runtime_stub(sasm, _decorators);
    return NULL;
  }
};

static address generate_c1_runtime_stub(BufferBlob* blob, DecoratorSet decorators, const char* name) {
  ZLoadBarrierRuntimeStubCodeGenClosure cl(decorators);
  CodeBlob* const code_blob = Runtime1::generate_blob(blob, -1 /* stub_id */, name, false /* expect_oop_map */, &cl);
  return code_blob->code_begin();
}

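// Called from Runtime1 initialization to generate the two preloaded runtime
// stubs (strong and weak) that the load barrier stubs call into.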
void ZBarrierSetC1::generate_c1_runtime_stubs(BufferBlob* blob) {
  _load_barrier_on_oop_field_preloaded_runtime_stub =
    generate_c1_runtime_stub(blob, ON_STRONG_OOP_REF, "load_barrier_on_oop_field_preloaded_runtime_stub");
  _load_barrier_on_weak_oop_field_preloaded_runtime_stub =
    generate_c1_runtime_stub(blob, ON_WEAK_OOP_REF, "load_barrier_on_weak_oop_field_preloaded_runtime_stub");
}