//
// Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
// Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
// DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// This code is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License version 2 only, as
// published by the Free Software Foundation.
//
// This code is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// version 2 for more details (a copy is included in the LICENSE file that
// accompanied this code).
//
// You should have received a copy of the GNU General Public License version
// 2 along with this work; if not, write to the Free Software Foundation,
// Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
//
// Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
// or visit www.oracle.com if you need additional information or have any
// questions.
//

source_hpp %{

#include "gc/shared/gc_globals.hpp"
#include "gc/z/c2/zBarrierSetC2.hpp"
#include "gc/z/zThreadLocalData.hpp"

%}

source %{

// Emit the ZGC load-barrier fast path for a reference that was just loaded
// into 'ref'.
//  - If the node's barrier data says the barrier was elided, emit nothing.
//  - Otherwise AND 'ref' with the thread-local address bad mask; a non-zero
//    result means the reference must be processed out of line, so branch to
//    a ZLoadBarrierStubC2 and resume at its continuation label.
// 'tmp' is clobbered by the mask test.
static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
  if (barrier_data == ZLoadBarrierElided) {
    return;
  }
  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
  __ ld(tmp, Address(xthread, ZThreadLocalData::address_bad_mask_offset()));
  __ andr(tmp, tmp, ref);
  // 'far' branch: the stub entry may be out of range of a short conditional
  // branch.
  __ bnez(tmp, *stub->entry(), true /* far */);
  __ bind(*stub->continuation());
}

// Unconditionally enter a strong load-barrier stub (no inline mask test).
// Used by the CAS/exchange encodings below, which perform the bad-mask test
// themselves before deciding to call this.
static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
  __ j(*stub->entry());
  __ bind(*stub->continuation());
}

%}

// Load Pointer
// Plain oop load followed by the ZGC load barrier (t0 used as scratch).
instruct zLoadP(iRegPNoSp dst, memory mem)
%{
  match(Set dst (LoadP mem));
  predicate(UseZGC && (n->as_Load()->barrier_data() != 0));
  // TEMP: dst is written before the instruction is done with its inputs (the
  // barrier may still use ref_addr), so dst must not overlap the address
  // registers.
  effect(TEMP dst);

  ins_cost(4 * DEFAULT_COST);

  format %{ "ld $dst, $mem, #@zLoadP" %}

  ins_encode %{
    const Address ref_addr (as_Register($mem$$base), $mem$$disp);
    __ ld($dst$$Register, ref_addr);
    z_load_barrier(_masm, this, ref_addr, $dst$$Register, t0 /* tmp */, barrier_data());
  %}

  ins_pipe(iload_reg_mem);
%}

// CompareAndSwapP / WeakCompareAndSwapP with a boolean result, relaxed
// (non-acquiring) ordering. If the CAS fails because the reference in memory
// has bad-mask bits set, the reference is healed via the load barrier and the
// CAS is retried once.
instruct zCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  predicate(UseZGC && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(KILL cr, TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $mem, $oldval, $newval, #@zCompareAndSwapP\n\t"
            "mv $res, $res == $oldval" %}

  ins_encode %{
    Label failed;
    // 'indirect' operand: base register only, no index/displacement.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register,
               true /* result_as_bool */);
    // Arrange for t0 to hold the reference that was found in memory: on
    // success that is oldval; on failure the mv is skipped and cmpxchg is
    // relied upon to have left the loaded value in t0 --
    // NOTE(review): confirm against MacroAssembler::cmpxchg's scratch usage.
    __ beqz($res$$Register, failed);
    __ mv(t0, $oldval$$Register);
    __ bind(failed);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Inline bad-mask test on the loaded reference (t0); t1 is scratch.
      __ ld(t1, Address(xthread, ZThreadLocalData::address_bad_mask_offset()), t1 /* tmp */);
      __ andr(t1, t1, t0);
      __ beqz(t1, good);
      // Bad bits set: heal the reference, then retry the CAS once.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), t0 /* ref */, t1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register,
                 true /* result_as_bool */);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// Acquiring variant of zCompareAndSwapP, selected when the node needs an
// acquiring load-reserved. Identical apart from Assembler::aq ordering.
instruct zCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  predicate(UseZGC && needs_acquiring_load_reserved(n) && (n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong));
  effect(KILL cr, TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $mem, $oldval, $newval, #@zCompareAndSwapPAcq\n\t"
            "mv $res, $res == $oldval" %}

  ins_encode %{
    Label failed;
    // 'indirect' operand: base register only, no index/displacement.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register,
               true /* result_as_bool */);
    // See zCompareAndSwapP: put the loaded reference in t0 for the mask test.
    __ beqz($res$$Register, failed);
    __ mv(t0, $oldval$$Register);
    __ bind(failed);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Inline bad-mask test on the loaded reference (t0); t1 is scratch.
      __ ld(t1, Address(xthread, ZThreadLocalData::address_bad_mask_offset()), t1 /* tmp */);
      __ andr(t1, t1, t0);
      __ beqz(t1, good);
      // Bad bits set: heal the reference, then retry the CAS once.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), t0 /* ref */, t1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register,
                 true /* result_as_bool */);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// CompareAndExchangeP, relaxed ordering: res receives the reference that was
// in memory. If res fails the bad-mask test it is healed via the load barrier
// and the exchange is retried once.
instruct zCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval) %{
  match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $res = $mem, $oldval, $newval, #@zCompareAndExchangeP" %}

  ins_encode %{
    // 'indirect' operand: base register only, no index/displacement.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Inline bad-mask test on the loaded reference (res); t0 is scratch.
      __ ld(t0, Address(xthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(t0, t0, $res$$Register);
      __ beqz(t0, good);
      // Bad bits set: heal the reference, then retry the exchange once.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, t0 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// Acquiring variant of zCompareAndExchangeP; identical apart from
// Assembler::aq ordering.
instruct zCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval) %{
  match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $res = $mem, $oldval, $newval, #@zCompareAndExchangePAcq" %}

  ins_encode %{
    // 'indirect' operand: base register only, no index/displacement.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Inline bad-mask test on the loaded reference (res); t0 is scratch.
      __ ld(t0, Address(xthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(t0, t0, $res$$Register);
      __ beqz(t0, good);
      // Bad bits set: heal the reference, then retry the exchange once.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, t0 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// GetAndSetP (atomic exchange), relaxed ordering: prev receives the old
// reference and is then run through the load barrier. No load address is
// passed to the barrier here (dummy Address(noreg, 0)) -- presumably the stub
// heals via 'ref' alone; confirm against ZLoadBarrierStubC2.
instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
  match(Set prev (GetAndSetP mem newv));
  predicate(UseZGC && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() != 0);
  effect(TEMP_DEF prev, KILL cr);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "atomic_xchg $prev, $newv, [$mem], #@zGetAndSetP" %}

  ins_encode %{
    __ atomic_xchg($prev$$Register, $newv$$Register, as_Register($mem$$base));
    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, t0 /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_serial);
%}

// Acquiring variant of zGetAndSetP (uses atomic_xchgal).
instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
  match(Set prev (GetAndSetP mem newv));
  predicate(UseZGC && needs_acquiring_load_reserved(n) && (n->as_LoadStore()->barrier_data() != 0));
  effect(TEMP_DEF prev, KILL cr);

  ins_cost(VOLATILE_REF_COST);

  format %{ "atomic_xchg_acq $prev, $newv, [$mem], #@zGetAndSetPAcq" %}

  ins_encode %{
    __ atomic_xchgal($prev$$Register, $newv$$Register, as_Register($mem$$base));
    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, t0 /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_serial);
%}