156
  // Remove the barrier from 'mach' because an equivalent dominating barrier
  // already covers it (NOTE(review): elision criteria live in the .cpp — confirm).
  void elide_dominated_barrier(MachNode* mach) const;
  // Scan the generated code for barriers dominated by equivalent ones and elide them.
  void analyze_dominating_barriers() const;
  // Drop barrier-analysis bookkeeping from a single node / from a list of accesses.
  void strip_extra_data(const Node* node) const;
  void strip_extra_data(Node_List& accesses) const;

  // Estimated code size contribution of the barrier attached to 'node'
  // (used for code-buffer sizing; overridable per platform).
  virtual uint estimated_barrier_size(const Node* node) const;

  // Debug aid: pretty-print the barrier_data bit set 'data' to 'os'.
  static void print_barrier_data(outputStream* os, uint8_t data);
165 };
166
// Slow-path code stub for a Shenandoah barrier emitted by C2. One stub is
// created per barrier-carrying access; the access's fast path branches into
// the stub, which performs the barrier work and jumps back.
class ShenandoahBarrierStubC2 : public BarrierStubC2 {
  Register _obj;                           // Register holding the accessed oop
  Address const _addr;                     // Memory address of the access
  const bool _do_load;                     // Stub must (re)load the value from _addr itself
  const bool _narrow;                      // Compressed (narrow) oop access; heap accesses only (see ctor assert)
  const bool _maybe_null;                  // Value may be null (decoded from node barrier data)
  const bool _needs_load_ref_barrier;      // Strong/weak/phantom load-reference barrier required
  const bool _needs_load_ref_weak_barrier; // Weak/phantom (non-strong) LRB required
  const bool _needs_keep_alive_barrier;    // SATB keep-alive barrier required
  const int _fastpath_branch_offset;       // Code offset of the fast-path branch into this stub
  bool _use_trampoline;                    // Enter stub via a trampoline (presumably for branch-range limits — TODO confirm)
  Label _trampoline_entry;                 // Entry label of the trampoline, when used
  bool _do_emit_actual;                    // Emit the actual stub body (vs. trampoline only — TODO confirm)
  int _save_slots_idx;                     // Top index of the emulated register save-slot stack

  static void register_stub(ShenandoahBarrierStubC2* stub);
  static void inc_trampoline_stubs_count();
  static int trampoline_stubs_count();
  static int stubs_start_offset();
  static int save_slots_stack_offset();

  // Manage save slots on stack. We cannot move SP freely when in statically-sized
  // C2 frame. These methods emulate the stack where a stub can save registers temporarily
  // without moving SP.
  void push_save_register(MacroAssembler& masm, Register reg);
  void pop_save_register(MacroAssembler& masm, Register reg);
  bool push_save_register_if_live(MacroAssembler& masm, Register reg);
  int push_save_slot();
  int pop_save_slot();

  // Liveness queries and temp-register selection for stub code generation.
  bool has_live_vector_registers();
  bool is_live(Register reg);
  Register select_temp_register(bool& selected_live, Address addr, Register reg1);

  // Load the value (decoding if narrow); branches to target_if_null on null
  // (NOTE(review): inferred from the name and _narrow/_do_load — confirm in .cpp).
  void load_and_decode(MacroAssembler& masm, Label& target_if_null);
  void reencode_if_needed(MacroAssembler& masm);

  // Slow-path barrier bodies: SATB keep-alive and load-reference barrier.
  void keepalive(MacroAssembler& masm, Register obj, Register tmp1);
  void lrb(MacroAssembler& masm, Register obj, Address addr, Register tmp);

  // Runtime entry points targeted by the generated slow paths.
  address keepalive_runtime_entry_addr();
  address lrb_runtime_entry_addr();

  // Emit the real stub body (as opposed to its trampoline; see _do_emit_actual).
  void emit_code_actual(MacroAssembler& masm);

  // Shared constructor tail; 'offset' is the fast-path branch offset.
  void post_init(int offset);

public:
  // Build a stub for the barrier attached to 'node'.
  //   obj     — register holding the accessed oop
  //   addr    — address of the access
  //   narrow  — compressed-oop access (only legal for heap accesses)
  //   do_load — stub must perform the load itself
  //   offset  — fast-path branch offset, recorded and forwarded to post_init()
  // The barrier-kind flags are decoded once here from node->barrier_data()
  // via the static predicates below.
  ShenandoahBarrierStubC2(const MachNode* node, Register obj, Address addr, bool narrow, bool do_load, int offset) :
    BarrierStubC2(node),
    _obj(obj),
    _addr(addr),
    _do_load(do_load),
    _narrow(narrow),
    _maybe_null(maybe_null(node)),
    _needs_load_ref_barrier(needs_load_ref_barrier(node)),
    _needs_load_ref_weak_barrier(needs_load_ref_barrier_weak(node)),
    _needs_keep_alive_barrier(needs_keep_alive_barrier(node)),
    _fastpath_branch_offset(offset),
    _use_trampoline(),    // value-initialized: false
    _trampoline_entry(),
    _do_emit_actual(),    // value-initialized: false
    _save_slots_idx(0) {
    assert(!_narrow || is_heap_access(node), "Only heap accesses can be narrow");
    post_init(offset);
  }
233
234 static bool is_heap_access(const MachNode* node) {
235 return (node->barrier_data() & ShenandoahBitNative) == 0;
236 }
237 static bool needs_slow_barrier(const MachNode* node) {
238 return needs_load_ref_barrier(node) || needs_keep_alive_barrier(node);
239 }
240 static bool needs_load_ref_barrier(const MachNode* node) {
241 return (node->barrier_data() & (ShenandoahBitStrong | ShenandoahBitWeak | ShenandoahBitPhantom)) != 0;
242 }
243 static bool needs_load_ref_barrier_weak(const MachNode* node) {
244 return (node->barrier_data() & (ShenandoahBitWeak | ShenandoahBitPhantom)) != 0;
245 }
246 static bool needs_keep_alive_barrier(const MachNode* node) {
247 return (node->barrier_data() & ShenandoahBitKeepAlive) != 0;
248 }
249 static bool needs_card_barrier(const MachNode* node) {
250 return (node->barrier_data() & ShenandoahBitCardMark) != 0;
251 }
|
156
  // Remove the barrier from 'mach' because an equivalent dominating barrier
  // already covers it (NOTE(review): elision criteria live in the .cpp — confirm).
  void elide_dominated_barrier(MachNode* mach) const;
  // Scan the generated code for barriers dominated by equivalent ones and elide them.
  void analyze_dominating_barriers() const;
  // Drop barrier-analysis bookkeeping from a single node / from a list of accesses.
  void strip_extra_data(const Node* node) const;
  void strip_extra_data(Node_List& accesses) const;

  // Estimated code size contribution of the barrier attached to 'node'
  // (used for code-buffer sizing; overridable per platform).
  virtual uint estimated_barrier_size(const Node* node) const;

  // Debug aid: pretty-print the barrier_data bit set 'data' to 'os'.
  static void print_barrier_data(outputStream* os, uint8_t data);
165 };
166
// Slow-path code stub for a Shenandoah barrier emitted by C2. One stub is
// created per barrier-carrying access; the access's fast path branches into
// the stub, which performs the barrier work and jumps back.
class ShenandoahBarrierStubC2 : public BarrierStubC2 {
  Register _obj;                           // Register holding the accessed oop
  Address const _addr;                     // Memory address of the access
  const bool _do_load;                     // Stub must (re)load the value from _addr itself
  const bool _narrow;                      // Compressed (narrow) oop access; heap accesses only (see ctor assert)
  const bool _maybe_null;                  // Value may be null (decoded from node barrier data)
  const bool _needs_load_ref_barrier;      // Strong/weak/phantom load-reference barrier required
  const bool _needs_load_ref_weak_barrier; // Weak/phantom (non-strong) LRB required
  const bool _needs_keep_alive_barrier;    // SATB keep-alive barrier required
  int _save_slots_idx;                     // Top index of the emulated register save-slot stack

  static void register_stub(ShenandoahBarrierStubC2* stub);
  static void inc_trampoline_stubs_count();
  static int trampoline_stubs_count();
  static int stubs_start_offset();
  static int save_slots_stack_offset();

  // Manage save slots on stack. We cannot move SP freely when in statically-sized
  // C2 frame. These methods emulate the stack where a stub can save registers temporarily
  // without moving SP.
  void push_save_register(MacroAssembler& masm, Register reg);
  void pop_save_register(MacroAssembler& masm, Register reg);
  bool push_save_register_if_live(MacroAssembler& masm, Register reg);
  int push_save_slot();
  int pop_save_slot();

  // Liveness queries and temp-register selection for stub code generation.
  bool has_live_vector_registers();
  bool is_live(Register reg);
  Register select_temp_register(bool& selected_live, Address addr, Register reg1);

  // Load the value (decoding if narrow); branches to target_if_null on null
  // (NOTE(review): inferred from the name and _narrow/_do_load — confirm in .cpp).
  void load_and_decode(MacroAssembler& masm, Label& target_if_null);
  void reencode_if_needed(MacroAssembler& masm);

  // Slow-path barrier bodies: SATB keep-alive and load-reference barrier.
  void keepalive(MacroAssembler& masm, Register obj, Register tmp1);
  void lrb(MacroAssembler& masm, Register obj, Address addr, Register tmp);

  // Runtime entry points targeted by the generated slow paths.
  address keepalive_runtime_entry_addr();
  address lrb_runtime_entry_addr();

public:
  // Build a stub for the barrier attached to 'node'.
  //   obj     — register holding the accessed oop
  //   addr    — address of the access
  //   narrow  — compressed-oop access (only legal for heap accesses)
  //   do_load — stub must perform the load itself
  //   offset  — fast-path branch offset (NOTE(review): currently unused in
  //             this initializer list — confirm it is consumed elsewhere)
  // The barrier-kind flags are decoded once here from node->barrier_data()
  // via the static predicates below.
  ShenandoahBarrierStubC2(const MachNode* node, Register obj, Address addr, bool narrow, bool do_load, int offset) :
    BarrierStubC2(node),
    _obj(obj),
    _addr(addr),
    _do_load(do_load),
    _narrow(narrow),
    _maybe_null(maybe_null(node)),
    _needs_load_ref_barrier(needs_load_ref_barrier(node)),
    _needs_load_ref_weak_barrier(needs_load_ref_barrier_weak(node)),
    _needs_keep_alive_barrier(needs_keep_alive_barrier(node)),
    _save_slots_idx(0) {
    assert(!_narrow || is_heap_access(node), "Only heap accesses can be narrow");
  }
220
221 static bool is_heap_access(const MachNode* node) {
222 return (node->barrier_data() & ShenandoahBitNative) == 0;
223 }
224 static bool needs_slow_barrier(const MachNode* node) {
225 return needs_load_ref_barrier(node) || needs_keep_alive_barrier(node);
226 }
227 static bool needs_load_ref_barrier(const MachNode* node) {
228 return (node->barrier_data() & (ShenandoahBitStrong | ShenandoahBitWeak | ShenandoahBitPhantom)) != 0;
229 }
230 static bool needs_load_ref_barrier_weak(const MachNode* node) {
231 return (node->barrier_data() & (ShenandoahBitWeak | ShenandoahBitPhantom)) != 0;
232 }
233 static bool needs_keep_alive_barrier(const MachNode* node) {
234 return (node->barrier_data() & ShenandoahBitKeepAlive) != 0;
235 }
236 static bool needs_card_barrier(const MachNode* node) {
237 return (node->barrier_data() & ShenandoahBitCardMark) != 0;
238 }
|