1 /* 2 * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 */

#include "precompiled.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_LIRGenerator.hpp"
#include "c1/c1_CodeStubs.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "utilities/macros.hpp"

// Slow-path code stub for the ZGC C1 load barrier. load_barrier() below
// branches here when the inline barrier test (LIR_OpZLoadBarrierTest) fails;
// the platform assembler then emits a call to the out-of-line runtime stub.
//
// access       - the field access this barrier guards
// ref          - register holding the just-loaded reference
// runtime_stub - entry point of the pre-generated runtime stub to call
ZLoadBarrierStubC1::ZLoadBarrierStubC1(LIRAccess& access, LIR_Opr ref, address runtime_stub) :
    _decorators(access.decorators()),
    _ref_addr(access.resolved_addr()),
    _ref(ref),
    _tmp(LIR_OprFact::illegalOpr),
    _runtime_stub(runtime_stub) {

  assert(_ref_addr->is_address(), "Must be an address");
  assert(_ref->is_register(), "Must be a register");

  // Allocate tmp register if needed
  if (_ref_addr->as_address_ptr()->index()->is_valid() ||
      _ref_addr->as_address_ptr()->disp() != 0) {
    // Has index or displacement, need tmp register to load address into
    _tmp = access.gen()->new_pointer_register();
  }
}

// Accessors used by the platform-specific barrier set assembler when
// emitting the stub's code.

DecoratorSet ZLoadBarrierStubC1::decorators() const {
  return _decorators;
}

LIR_Opr ZLoadBarrierStubC1::ref() const {
  return _ref;
}

LIR_Opr ZLoadBarrierStubC1::ref_addr() const {
  return _ref_addr;
}

LIR_Opr ZLoadBarrierStubC1::tmp() const {
  return _tmp;
}

address ZLoadBarrierStubC1::runtime_stub() const {
  return _runtime_stub;
}

// Report this stub's operands to the LIR operand visitor: the resolved
// field address is an input, the reference register an output, and the
// optional tmp register a temp.
void ZLoadBarrierStubC1::visit(LIR_OpVisitState* visitor) {
  visitor->do_slow_case();
  visitor->do_input(_ref_addr);
  visitor->do_output(_ref);
  if (_tmp->is_valid()) {
    visitor->do_temp(_tmp);
  }
}

// Emit the stub's machine code via the platform-specific assembler.
void ZLoadBarrierStubC1::emit_code(LIR_Assembler* ce) {
  ZBarrierSet::assembler()->generate_c1_load_barrier_stub(ce, this);
}

#ifndef PRODUCT
void ZLoadBarrierStubC1::print_name(outputStream* out) const {
  out->print("ZLoadBarrierStubC1");
}
#endif // PRODUCT

// LIR operation emitting the inline (fast-path) load barrier test on a
// loaded reference. The actual test instructions come from the platform
// assembler; load_barrier() pairs this op with a lir_cond_notEqual branch
// to the slow-path stub, so the test is expected to set the condition
// flags accordingly.
class LIR_OpZLoadBarrierTest : public LIR_Op {
private:
  LIR_Opr _opr;  // the loaded reference to test

public:
  LIR_OpZLoadBarrierTest(LIR_Opr opr) :
#ifdef RISCV
      // The RISC-V port constructs the base LIR_Op with an explicit opcode
      // and an illegal result operand; other platforms use the default ctor.
      LIR_Op(lir_zloadbarrier_test, LIR_OprFact::illegalOpr, NULL),
#else
      LIR_Op(),
#endif
      _opr(opr) {}

  virtual void visit(LIR_OpVisitState* state) {
    state->do_input(_opr);
  }

  virtual void emit_code(LIR_Assembler* ce) {
    ZBarrierSet::assembler()->generate_c1_load_barrier_test(ce, _opr);
  }

  virtual void print_instr(outputStream* out) const {
    _opr->print(out);
    out->print(" ");
  }

#ifndef PRODUCT
  virtual const char* name() const {
    return "lir_z_load_barrier_test";
  }
#endif // PRODUCT
};

// Returns true if the given access needs a ZGC load barrier, based on its
// decorators and basic type.
static bool barrier_needed(LIRAccess& access) {
  return ZBarrierSet::barrier_needed(access.decorators(), access.type());
}

ZBarrierSetC1::ZBarrierSetC1() :
    _load_barrier_on_oop_field_preloaded_runtime_stub(NULL),
    _load_barrier_on_weak_oop_field_preloaded_runtime_stub(NULL) {}

// Select the runtime stub matching the access strength. Phantom references
// are not supported; weak references get the weak stub, and everything
// else (including ON_UNKNOWN_OOP_REF, per the disabled assert) gets the
// strong stub.
address ZBarrierSetC1::load_barrier_on_oop_field_preloaded_runtime_stub(DecoratorSet decorators) const {
  assert((decorators & ON_PHANTOM_OOP_REF) == 0, "Unsupported decorator");
  //assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Unsupported decorator");

  if ((decorators & ON_WEAK_OOP_REF) != 0) {
    return _load_barrier_on_weak_oop_field_preloaded_runtime_stub;
  } else {
    return _load_barrier_on_oop_field_preloaded_runtime_stub;
  }
}

// Shorthand for appending LIR through the access's generator; the ASSERT
// build records file/line for each appended instruction.
#ifdef ASSERT
#define __ access.gen()->lir(__FILE__, __LINE__)->
#else
#define __ access.gen()->lir()->
#endif

// Emit the load barrier for a freshly loaded reference held in 'result':
// an inline fast-path test, then a conditional branch to the slow-path
// stub, with execution rejoining at the stub's continuation label.
void ZBarrierSetC1::load_barrier(LIRAccess& access, LIR_Opr result) const {
  // Fast path
  __ append(new LIR_OpZLoadBarrierTest(result));

  // Slow path
  const address runtime_stub = load_barrier_on_oop_field_preloaded_runtime_stub(access.decorators());
  CodeStub* const stub = new ZLoadBarrierStubC1(access, result, runtime_stub);
  __ branch(lir_cond_notEqual, stub);
  __ branch_destination(stub->continuation());
}

LIR_Opr ZBarrierSetC1::resolve_address(LIRAccess& access, bool resolve_in_register) {
  // We must resolve in register when patching. This is to avoid
  // having a patch area in the load barrier stub, since the call
  // into the runtime to patch will not have the proper oop map.
  const bool patch_before_barrier = barrier_needed(access) && (access.decorators() & C1_NEEDS_PATCHING) != 0;
  return BarrierSetC1::resolve_address(access, resolve_in_register || patch_before_barrier);
}

#undef __

// Perform the plain load, then apply the load barrier to the result when
// this access requires one.
void ZBarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
  BarrierSetC1::load_at_resolved(access, result);

  if (barrier_needed(access)) {
    load_barrier(access, result);
  }
}

// Emit a synthetic (unordered, read-only) load of the field that is about
// to be updated atomically. The value lands in a fresh register that is
// never read afterwards — the load is generated purely for the barrier
// side effect it triggers before the atomic operation.
static void pre_load_barrier(LIRAccess& access) {
  DecoratorSet decorators = access.decorators();

  // Downgrade access to MO_UNORDERED
  decorators = (decorators & ~MO_DECORATOR_MASK) | MO_UNORDERED;

  // Remove ACCESS_WRITE
  decorators = (decorators & ~ACCESS_WRITE);

  // Generate synthetic load at
  access.gen()->access_load_at(decorators,
                               access.type(),
                               access.base().item(),
                               access.offset().opr(),
                               access.gen()->new_register(access.type()),
                               NULL /* patch_emit_info */,
                               NULL /* load_emit_info */);
}

// Atomic exchange: run the pre-load barrier first when needed, then
// delegate to the generic implementation.
LIR_Opr ZBarrierSetC1::atomic_xchg_at_resolved(LIRAccess& access, LIRItem& value) {
  if (barrier_needed(access)) {
    pre_load_barrier(access);
  }

  return BarrierSetC1::atomic_xchg_at_resolved(access, value);
}

// Atomic compare-and-exchange: same pre-load-barrier-then-delegate shape
// as atomic_xchg_at_resolved above.
LIR_Opr ZBarrierSetC1::atomic_cmpxchg_at_resolved(LIRAccess& access, LIRItem& cmp_value, LIRItem& new_value) {
  if (barrier_needed(access)) {
    pre_load_barrier(access);
  }

  return BarrierSetC1::atomic_cmpxchg_at_resolved(access, cmp_value, new_value);
}

// Closure handed to Runtime1::generate_blob to emit the out-of-line load
// barrier runtime stub for a given decorator set.
class ZLoadBarrierRuntimeStubCodeGenClosure : public StubAssemblerCodeGenClosure {
private:
  const DecoratorSet _decorators;

public:
  ZLoadBarrierRuntimeStubCodeGenClosure(DecoratorSet decorators) :
      _decorators(decorators) {}

  virtual OopMapSet* generate_code(StubAssembler* sasm) {
    ZBarrierSet::assembler()->generate_c1_load_barrier_runtime_stub(sasm, _decorators);
    return NULL;  // no oop maps for these stubs (blob generated with expect_oop_map = false)
  }
};

// Generate one runtime stub blob for the given decorators and return its
// code entry point.
static address generate_c1_runtime_stub(BufferBlob* blob, DecoratorSet decorators, const char* name) {
  ZLoadBarrierRuntimeStubCodeGenClosure cl(decorators);
  CodeBlob* const code_blob = Runtime1::generate_blob(blob, -1 /* stub_id */, name, false /* expect_oop_map*/, &cl);
  return code_blob->code_begin();
}

// Generate the strong (ON_STRONG_OOP_REF) and weak (ON_WEAK_OOP_REF)
// variants of the preloaded-oop-field load barrier runtime stubs and
// cache their entry points.
void ZBarrierSetC1::generate_c1_runtime_stubs(BufferBlob* blob) {
  _load_barrier_on_oop_field_preloaded_runtime_stub =
    generate_c1_runtime_stub(blob, ON_STRONG_OOP_REF, "load_barrier_on_oop_field_preloaded_runtime_stub");
  _load_barrier_on_weak_oop_field_preloaded_runtime_stub =
    generate_c1_runtime_stub(blob, ON_WEAK_OOP_REF, "load_barrier_on_weak_oop_field_preloaded_runtime_stub");
}