1 /*
2 * Copyright (c) 2018, 2021, Red Hat, Inc. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_GC_SHENANDOAH_C2_SHENANDOAHBARRIERSETC2_HPP
26 #define SHARE_GC_SHENANDOAH_C2_SHENANDOAHBARRIERSETC2_HPP
27
28 #include "gc/shared/c2/barrierSetC2.hpp"
29 #include "gc/shared/gc_globals.hpp"
30 #include "gc/shenandoah/shenandoahBarrierSetAssembler.hpp"
31 #include "gc/shenandoah/shenandoahRuntime.hpp"
32 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
33 #include "utilities/growableArray.hpp"
34
// Per-access barrier data bits, stored in a C2 node's 8-bit barrier data.
// The low five bits select which actual barriers an access needs; the high
// three bits carry additional metadata about the access (see
// ShenandoahBitsReal below for the split).
static const uint8_t ShenandoahBitStrong = 1 << 0; // Barrier: LRB, strong
static const uint8_t ShenandoahBitWeak = 1 << 1; // Barrier: LRB, weak
static const uint8_t ShenandoahBitPhantom = 1 << 2; // Barrier: LRB, phantom
static const uint8_t ShenandoahBitKeepAlive = 1 << 3; // Barrier: KeepAlive (SATB for stores, KA for loads)
static const uint8_t ShenandoahBitCardMark = 1 << 4; // Barrier: CM
static const uint8_t ShenandoahBitNotNull = 1 << 5; // Metadata: src/dst is not null
static const uint8_t ShenandoahBitNative = 1 << 6; // Metadata: access is in native, not in heap
static const uint8_t ShenandoahBitElided = 1 << 7; // Metadata: barrier is elided

// Barrier data that implies real barriers, not additional metadata.
// An access whose barrier data has none of these bits set needs no stub.
static const uint8_t ShenandoahBitsReal = ShenandoahBitStrong | ShenandoahBitWeak | ShenandoahBitPhantom |
                                          ShenandoahBitKeepAlive |
                                          ShenandoahBitCardMark;

// Forward declaration; full definition below.
class ShenandoahBarrierStubC2;
50
51 class ShenandoahBarrierSetC2State : public BarrierSetC2State {
52 GrowableArray<ShenandoahBarrierStubC2*>* _stubs;
53 int _stubs_start_offset;
54
55 public:
56 explicit ShenandoahBarrierSetC2State(Arena* comp_arena);
57
58 bool needs_liveness_data(const MachNode* mach) const override;
59 bool needs_livein_data() const override;
60
61 GrowableArray<ShenandoahBarrierStubC2*>* stubs() {
62 return _stubs;
63 }
64
65 void set_stubs_start_offset(int offset) {
66 _stubs_start_offset = offset;
67 }
68
69 int stubs_start_offset() {
70 return _stubs_start_offset;
71 }
72 };
73
// C2 (server compiler) barrier set for Shenandoah GC. Implements the
// BarrierSetC2 access hooks (loads, stores, atomics, clone) and the
// late (post-matching) barrier analysis and stub emission machinery.
class ShenandoahBarrierSetC2 : public BarrierSetC2 {

  // Decides whether cloning an object of the given type needs GC barriers;
  // presumably also reports via is_oop_array whether the source is an oop
  // array -- TODO confirm against the definition.
  static bool clone_needs_barrier(const TypeOopPtr* src_type, bool& is_oop_array);

  // True if the load barrier on this node can be removed entirely.
  static bool can_remove_load_barrier(Node* node);

  // Refine the barrier data of a load/store node, e.g. after more type
  // information becomes available -- TODO confirm exact refinement rules.
  static void refine_load(Node* node);
  static void refine_store(const Node* node);

protected:
  // BarrierSetC2 access hooks, called while parsing bytecode into ideal graph.
  virtual Node* load_at_resolved(C2Access& access, const Type* val_type) const;
  virtual Node* store_at_resolved(C2Access& access, C2AccessValue& val) const;
  virtual Node* atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
                                               Node* new_val, const Type* val_type) const;
  virtual Node* atomic_cmpxchg_bool_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
                                                Node* new_val, const Type* value_type) const;
  virtual Node* atomic_xchg_at_resolved(C2AtomicParseAccess& access, Node* new_val, const Type* val_type) const;

public:
  // Returns the ShenandoahBarrierSetC2 singleton for the current VM.
  static ShenandoahBarrierSetC2* bsc2();

  // Per-compilation barrier state (see ShenandoahBarrierSetC2State).
  ShenandoahBarrierSetC2State* state() const;

  // This is the entry-point for the backend to perform accesses through the Access API.
  virtual void clone(GraphKit* kit, Node* src_base, Node* dst_base, Node* size, bool is_array) const;
  virtual void clone_at_expansion(PhaseMacroExpand* phase, ArrayCopyNode* ac) const;

  // These are general helper methods used by C2
  virtual bool array_copy_requires_gc_barriers(bool tightly_coupled_alloc, BasicType type, bool is_clone, bool is_clone_instance, ArrayCopyPhase phase) const;

  // Support for GC barriers emitted during parsing
  virtual bool expand_barriers(Compile* C, PhaseIterGVN& igvn) const;
  virtual void final_refinement(Compile* C) const;

  // Support for macro expanded GC barriers
  virtual void eliminate_gc_barrier(PhaseMacroExpand* macro, Node* node) const;
  virtual void eliminate_gc_barrier_data(Node* node) const;

  // Allow barrier sets to have shared state that is preserved across a compilation unit.
  // This could for example comprise macro nodes to be expanded during macro expansion.
  virtual void* create_barrier_state(Arena* comp_arena) const;

#ifdef ASSERT
  virtual void verify_gc_barriers(Compile* compile, CompilePhase phase) const;
  // Assertion helper used by verification; reports barrier data bd and node n
  // on failure -- TODO confirm against the definition.
  static void verify_gc_barrier_assert(bool cond, const char* msg, uint8_t bd, Node* n);
#endif

  // Stub-section sizing and emission, run during code output.
  int estimate_stub_size() const /* override */;
  void emit_stubs(CodeBuffer& cb) const /* override */;
  // Late analysis after matching: computes liveness at stub call sites and
  // elides barriers dominated by equivalent ones.
  void late_barrier_analysis() const /* override */ {
    compute_liveness_at_stubs();
    analyze_dominating_barriers();
  }

  // Marks the barrier on mach as elided because a dominating barrier covers it.
  void elide_dominated_barrier(MachNode* mach) const;
  void analyze_dominating_barriers() const;

  // Estimated code size contribution of the barrier on this node.
  virtual uint estimated_barrier_size(const Node* node) const;

  // Debug printing of the ShenandoahBit* flags encoded in data.
  static void print_barrier_data(outputStream* os, uint8_t data);
};
135
// Abstract base class for Shenandoah barrier slow-path stubs. Concrete
// subclasses (load/store/CAS, below) record the registers and addresses
// involved and emit the out-of-line slow-path code.
class ShenandoahBarrierStubC2 : public BarrierStubC2 {
protected:
  explicit ShenandoahBarrierStubC2(const MachNode* node) : BarrierStubC2(node) {
    // Stubs must not be created when barriers are globally disabled.
    assert(!ShenandoahSkipBarriers, "Do not touch stubs when disabled");
  }
  // Registers this stub with the compilation's stub list -- presumably
  // ShenandoahBarrierSetC2State::stubs(); TODO confirm.
  void register_stub();
  // True if the access targets the Java heap (ShenandoahBitNative not set).
  static bool is_heap_access(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitNative) == 0;
  }
  // Shared slow-path emitters: SATB (pre-write) and LRB (load-reference)
  // barrier code, used by the concrete stub subclasses.
  void satb(MacroAssembler* masm, ShenandoahBarrierStubC2* stub, Register scratch1, Register scratch2, Register scratch3, Label* L_done);
  void lrb(MacroAssembler* masm, ShenandoahBarrierStubC2* stub, Register obj, Register addr, Label* L_done, bool narrow);
  // Picks a scratch register that does not clash with addr, reg1 or reg2
  // -- TODO confirm selection rules against the definition.
  static Register select_temp_register(Address addr, Register reg1 = noreg, Register reg2 = noreg);

public:
  // Emits the stub's out-of-line slow-path code.
  virtual void emit_code(MacroAssembler& masm) = 0;
};
152
153 class ShenandoahLoadBarrierStubC2 : public ShenandoahBarrierStubC2 {
154 Register const _dst;
155 Register _addr_reg; // Used on x64
156 Address const _src; // Used on aarch64
157 const bool _narrow;
158 const bool _maybe_null;
159 const bool _needs_load_ref_barrier;
160 const bool _needs_keep_alive_barrier;
161
162 ShenandoahLoadBarrierStubC2(const MachNode* node, Register dst, Register addr_reg, Address src) :
163 ShenandoahBarrierStubC2(node), _dst(dst), _addr_reg(addr_reg), _src(src), _narrow(is_narrow_result(node)),
164 _maybe_null(!src_not_null(node)), _needs_load_ref_barrier(needs_load_ref_barrier(node)),
165 _needs_keep_alive_barrier(needs_keep_alive_barrier(node)) {
166 assert(!_narrow || is_heap_access(node), "Only heap accesses can be narrow");
167 }
168
169 public:
170 static bool needs_barrier(const MachNode* node) {
171 return needs_load_ref_barrier(node) || needs_keep_alive_barrier(node);
172 }
173 static bool needs_keep_alive_barrier(const MachNode* node) {
174 return (node->barrier_data() & ShenandoahBitKeepAlive) != 0;
175 }
176 static bool needs_load_ref_barrier(const MachNode* node) {
177 return (node->barrier_data() & (ShenandoahBitStrong | ShenandoahBitWeak | ShenandoahBitPhantom)) != 0;
178 }
179 static bool needs_load_ref_barrier_weak(const MachNode* node) {
180 return (node->barrier_data() & (ShenandoahBitWeak | ShenandoahBitPhantom)) != 0;
181 }
182 static bool src_not_null(const MachNode* node) {
183 return (node->barrier_data() & ShenandoahBitNotNull) != 0;
184 }
185 static bool is_narrow_result(const MachNode* node) {
186 return node->bottom_type()->isa_narrowoop();
187 }
188
189 static ShenandoahLoadBarrierStubC2* create(const MachNode* node, Register dst, Address src);
190 static ShenandoahLoadBarrierStubC2* create(const MachNode* node, Register dst, Register addr);
191
192 void emit_code(MacroAssembler& masm) override;
193 };
194
// Slow-path stub for barriers on oop stores: SATB keep-alive (pre-write)
// barrier and/or card-mark barrier, as selected by the node's barrier data.
class ShenandoahStoreBarrierStubC2 : public ShenandoahBarrierStubC2 {
  Register const _addr_reg; // Used on aarch64
  Address const _dst; // Used on x64
  Register const _src;       // Register holding the value being stored
  Register const _tmp;       // Scratch register for the slow path
  const bool _dst_narrow;    // Destination holds narrow (compressed) oops
  const bool _src_narrow;    // Stored value is a narrow (compressed) oop

  ShenandoahStoreBarrierStubC2(const MachNode* node, Register addr_reg, Address dst, bool dst_narrow, Register src, bool src_narrow, Register tmp) :
    ShenandoahBarrierStubC2(node), _addr_reg(addr_reg), _dst(dst), _src(src), _tmp(tmp), _dst_narrow(dst_narrow), _src_narrow(src_narrow) {
    // Narrow oops only exist in the heap; native accesses use full-width oops.
    assert(!_dst_narrow || is_heap_access(node), "Only heap accesses can be narrow");
  }

public:
  // True if this store needs any barrier at all (card mark or keep-alive).
  static bool needs_barrier(const MachNode* node) {
    return needs_card_barrier(node) || needs_keep_alive_barrier(node);
  }
  static bool needs_keep_alive_barrier(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitKeepAlive) != 0;
  }
  static bool needs_card_barrier(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitCardMark) != 0;
  }
  static bool src_not_null(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitNotNull) != 0;
  }

  // Factory methods; the Address/Register overloads correspond to the
  // per-platform destination forms (see _dst / _addr_reg comments above).
  static ShenandoahStoreBarrierStubC2* create(const MachNode* node, Address dst, bool dst_narrow, Register src, bool src_narrow, Register tmp);
  static ShenandoahStoreBarrierStubC2* create(const MachNode* node, Register addr, bool dst_narrow);

  void emit_code(MacroAssembler& masm) override;
};
227
// Slow-path stub for barriers on atomic compare-and-swap/exchange accesses.
// May need a strong LRB (on the witnessed value), a keep-alive barrier
// and/or a card-mark barrier, as selected by the node's barrier data.
class ShenandoahCASBarrierStubC2 : public ShenandoahBarrierStubC2 {
  Register _addr_reg; // Used on aarch64
  Address _addr; // Used on x64
  Register _expected;  // Expected (compare) value
  Register _new_val;   // Replacement value
  Register _result;    // Result register of the CAS/CAE
  Register _tmp1;      // Scratch registers for the slow path
  Register _tmp2;
  bool const _narrow;     // Operands are narrow (compressed) oops
  bool const _cae;        // Compare-and-exchange (returns old value) vs. bool CAS
  bool const _maybe_null; // Expected value may be null
  bool const _acquire;    // Memory ordering flags of the original access
  bool const _release;
  bool const _weak;       // Weak CAS (may fail spuriously)

  explicit ShenandoahCASBarrierStubC2(const MachNode* node, Register addr_reg, Address addr, Register expected, Register new_val, Register result, Register tmp1, Register tmp2, bool narrow, bool cae, bool maybe_null, bool acquire, bool release, bool weak) :
    ShenandoahBarrierStubC2(node),
    _addr_reg(addr_reg), _addr(addr), _expected(expected), _new_val(new_val), _result(result), _tmp1(tmp1), _tmp2(tmp2), _narrow(narrow), _cae(cae), _maybe_null(maybe_null), _acquire(acquire), _release(release), _weak(weak) {
    // Narrow oops only exist in the heap; native accesses use full-width oops.
    assert(!_narrow || is_heap_access(node), "Only heap accesses can be narrow");
  }

public:
  // True if this CAS needs any barrier at all (card mark, LRB or keep-alive).
  static bool needs_barrier(const MachNode* node) {
    return needs_card_barrier(node) || needs_load_ref_barrier(node) || needs_keep_alive_barrier(node);
  }
  static bool needs_keep_alive_barrier(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitKeepAlive) != 0;
  }
  static bool needs_card_barrier(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitCardMark) != 0;
  }
  // Note: unlike loads, only the strong LRB flavor applies to CAS accesses.
  static bool needs_load_ref_barrier(const MachNode* node) {
    return (node->barrier_data() & ShenandoahBitStrong) != 0;
  }

  // Factory methods; the Register/Address address forms correspond to the
  // per-platform variants (see _addr / _addr_reg comments above).
  static ShenandoahCASBarrierStubC2* create(const MachNode* node, Register addr, Register expected, Register new_val, Register result, bool narrow, bool cae, bool maybe_null, bool acquire, bool release, bool weak);
  static ShenandoahCASBarrierStubC2* create(const MachNode* node, Address addr, Register expected, Register new_val, Register result, Register tmp1, Register tmp2, bool narrow, bool cae);
  void emit_code(MacroAssembler& masm) override;
};
267 #endif // SHARE_GC_SHENANDOAH_C2_SHENANDOAHBARRIERSETC2_HPP