244 // Registers that are live-in/live-out of the entire memory access
245 // implementation (possibly including multiple barriers). Whether live-in or
246 // live-out registers are returned depends on
247 // BarrierSetC2State::needs_livein_data().
// NOTE(review): returns a mutable RegMask& from a const member function --
// presumably the mask is cached/owned elsewhere; confirm in the .cpp.
248 RegMask& live() const;
249
250 public:
// Build a stub associated with the given barrier Mach node.
251 BarrierStubC2(const MachNode* node);
252
253 // Entry point to the stub.
254 Label* entry();
255 // Return point from the stub (typically end of barrier).
256 Label* continuation();
257 // High-level, GC-specific barrier flags.
258 uint8_t barrier_data() const;
259
260 // Preserve the value in reg across runtime calls in this barrier.
261 void preserve(Register reg);
262 // Do not preserve the value in reg across runtime calls in this barrier.
263 void dont_preserve(Register reg);
264 // Set of registers whose value needs to be preserved across runtime calls in this barrier.
265 const RegMask& preserve_set() const;
266 };
267
268 // This is the top-level class for the backend of the Access API in C2.
269 // The top-level class is responsible for performing raw accesses. The
270 // various GC barrier sets inherit from the BarrierSetC2 class to sprinkle
271 // barriers into the accesses.
272 class BarrierSetC2: public CHeapObj<mtGC> {
273 private:
// Cached call type for the clone runtime entry; built once by
// make_clone_type() and retrieved via clone_type() (declared below).
// NOTE(review): inferred from those declarations -- confirm in the .cpp.
274 static const TypeFunc* _clone_type_Type;
275
276 protected:
// Access-resolution hooks: GC-specific subclasses override these to wrap
// barrier logic around the raw memory access.
277 virtual void resolve_address(C2Access& access) const;
278 virtual Node* store_at_resolved(C2Access& access, C2AccessValue& val) const;
279 virtual Node* load_at_resolved(C2Access& access, const Type* val_type) const;
280
// Atomic access hooks (value- and boolean-returning compare-and-exchange).
281 virtual Node* atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
282 Node* new_val, const Type* val_type) const;
283 virtual Node* atomic_cmpxchg_bool_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
// NOTE(review): original lines 284-352 are elided from this view -- the
// declaration above is cut mid-parameter-list, and the two enumerators
// below are the tail of the CompilePhase enum whose opening is not visible
// (CompilePhase is the type taken by verify_gc_barriers() below).
353 BeforeMacroExpand,
354 BeforeCodeGen
355 };
356
357 #ifdef ASSERT
// Debug-only hook: verify GC barrier invariants at the given compile phase.
// Default is a no-op; only compiled into ASSERT builds.
358 virtual void verify_gc_barriers(Compile* compile, CompilePhase phase) const {}
359 #endif
360
// Final-graph-reshaping hook; default performs no GC-specific reshaping
// and returns false (node not handled here).
361 virtual bool final_graph_reshaping(Compile* compile, Node* n, uint opcode, Unique_Node_List& dead_nodes) const { return false; }
362
// Escape-analysis (ConnectionGraph) hooks; defaults do nothing and
// return false (node not handled by the barrier set).
363 virtual bool escape_add_to_con_graph(ConnectionGraph* conn_graph, PhaseGVN* gvn, Unique_Node_List* delayed_worklist, Node* n, uint opcode) const { return false; }
364 virtual bool escape_add_final_edges(ConnectionGraph* conn_graph, PhaseGVN* gvn, Node* n, uint opcode) const { return false; }
365 virtual bool escape_has_out_with_unsafe_object(Node* n) const { return false; }
366
// Matcher hooks; defaults return false.
// NOTE(review): the trailing ';' after the inline body on the next line is
// redundant (harmless, but inconsistent with the sibling declarations).
367 virtual bool matcher_find_shared_post_visit(Matcher* matcher, Node* n, uint opcode) const { return false; };
368 virtual bool matcher_is_store_load_barrier(Node* x, uint xop) const { return false; }
369
370 // Whether the given phi node joins OOPs from fast and slow allocation paths.
371 static bool is_allocation(const Node* node);
372 // Elide GC barriers from a Mach node according to elide_dominated_barriers().
373 virtual void elide_dominated_barrier(MachNode* mach) const { }
374 // Elide GC barriers from instructions in 'accesses' if they are dominated by
375 // instructions in 'access_dominators' (according to elide_dominated_barrier()) and
376 // there is no safepoint poll in between.
377 void elide_dominated_barriers(Node_List& accesses, Node_List& access_dominators) const;
// Late (post-matching) barrier analysis and stub code generation hooks.
// Defaults: no analysis, zero stub size, no stubs emitted.
378 virtual void late_barrier_analysis() const { }
379 virtual void compute_liveness_at_stubs() const;
380 virtual int estimate_stub_size() const { return 0; }
381 virtual void emit_stubs(CodeBuffer& cb) const { }
382
// Offset of the payload within an object/array for arraycopy purposes.
383 static int arraycopy_payload_base_offset(bool is_array);
384
// Build / retrieve the cached clone call type (_clone_type_Type above).
385 static void make_clone_type();
386 static const TypeFunc* clone_type();
387
388 #ifndef PRODUCT
// Debug printing: dump the node's barrier data in hex (non-PRODUCT only).
389 virtual void dump_barrier_data(const MachNode* mach, outputStream* st) const {
390 st->print("%x", mach->barrier_data());
391 };
392 #endif
393 };
|
244 // Registers that are live-in/live-out of the entire memory access
245 // implementation (possibly including multiple barriers). Whether live-in or
246 // live-out registers are returned depends on
247 // BarrierSetC2State::needs_livein_data().
// NOTE(review): returns a mutable RegMask& from a const member function --
// presumably the mask is cached/owned elsewhere; confirm in the .cpp.
248 RegMask& live() const;
249
250 public:
// Build a stub associated with the given barrier Mach node.
251 BarrierStubC2(const MachNode* node);
252
253 // Entry point to the stub.
254 Label* entry();
255 // Return point from the stub (typically end of barrier).
256 Label* continuation();
257 // High-level, GC-specific barrier flags.
258 uint8_t barrier_data() const;
259
260 // Preserve the value in reg across runtime calls in this barrier.
261 void preserve(Register reg);
262 // Do not preserve the value in reg across runtime calls in this barrier.
263 void dont_preserve(Register reg);
264 // Whether the value in reg is preserved across runtime calls in this barrier.
// NOTE(review): this query does not modify the stub -- consider marking it
// const, like preserve_set() below.
265 bool is_preserved(Register reg);
266 // Set of registers whose value needs to be preserved across runtime calls in this barrier.
267 const RegMask& preserve_set() const;
268 };
269
270 // This is the top-level class for the backend of the Access API in C2.
271 // The top-level class is responsible for performing raw accesses. The
272 // various GC barrier sets inherit from the BarrierSetC2 class to sprinkle
273 // barriers into the accesses.
274 class BarrierSetC2: public CHeapObj<mtGC> {
275 private:
// Cached call type for the clone runtime entry; built once by
// make_clone_type() and retrieved via clone_type() (declared below).
// NOTE(review): inferred from those declarations -- confirm in the .cpp.
276 static const TypeFunc* _clone_type_Type;
277
278 protected:
// Access-resolution hooks: GC-specific subclasses override these to wrap
// barrier logic around the raw memory access.
279 virtual void resolve_address(C2Access& access) const;
280 virtual Node* store_at_resolved(C2Access& access, C2AccessValue& val) const;
281 virtual Node* load_at_resolved(C2Access& access, const Type* val_type) const;
282
// Atomic access hooks (value- and boolean-returning compare-and-exchange).
283 virtual Node* atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
284 Node* new_val, const Type* val_type) const;
285 virtual Node* atomic_cmpxchg_bool_at_resolved(C2AtomicParseAccess& access, Node* expected_val,
// NOTE(review): original lines 286-354 are elided from this view -- the
// declaration above is cut mid-parameter-list, and the two enumerators
// below are the tail of the CompilePhase enum whose opening is not visible
// (CompilePhase is the type taken by verify_gc_barriers() below).
355 BeforeMacroExpand,
356 BeforeCodeGen
357 };
358
359 #ifdef ASSERT
// Debug-only hook: verify GC barrier invariants at the given compile phase.
// Default is a no-op; only compiled into ASSERT builds.
360 virtual void verify_gc_barriers(Compile* compile, CompilePhase phase) const {}
361 #endif
362
// Final-graph-reshaping hook; default performs no GC-specific reshaping
// and returns false (node not handled here).
363 virtual bool final_graph_reshaping(Compile* compile, Node* n, uint opcode, Unique_Node_List& dead_nodes) const { return false; }
364
// Escape-analysis (ConnectionGraph) hooks; defaults do nothing and
// return false (node not handled by the barrier set).
365 virtual bool escape_add_to_con_graph(ConnectionGraph* conn_graph, PhaseGVN* gvn, Unique_Node_List* delayed_worklist, Node* n, uint opcode) const { return false; }
366 virtual bool escape_add_final_edges(ConnectionGraph* conn_graph, PhaseGVN* gvn, Node* n, uint opcode) const { return false; }
367 virtual bool escape_has_out_with_unsafe_object(Node* n) const { return false; }
368
// Matcher hooks; defaults return false.
// NOTE(review): the trailing ';' after the inline body on the next line is
// redundant (harmless, but inconsistent with the sibling declarations).
369 virtual bool matcher_find_shared_post_visit(Matcher* matcher, Node* n, uint opcode) const { return false; };
370 virtual bool matcher_is_store_load_barrier(Node* x, uint xop) const { return false; }
371
372 // Whether the given phi node joins OOPs from fast and slow allocation paths.
373 static bool is_allocation(const Node* node);
374 // Elide GC barriers from a Mach node according to elide_dominated_barriers().
// The 'dominator' is the access whose barrier makes this one redundant.
375 virtual void elide_dominated_barrier(MachNode* mach, MachNode* dominator) const { }
376 // Elide GC barriers from instructions in 'accesses' if they are dominated by
377 // instructions in 'access_dominators' (according to elide_dominated_barrier()) and
378 // there is no safepoint poll in between.
379 void elide_dominated_barriers(Node_List& accesses, Node_List& access_dominators) const;
// Late (post-matching) barrier analysis and stub code generation hooks.
// Defaults: no analysis, zero stub size, no stubs emitted.
380 virtual void late_barrier_analysis() const { }
381 virtual void compute_liveness_at_stubs() const;
382 virtual int estimate_stub_size() const { return 0; }
383 virtual void emit_stubs(CodeBuffer& cb) const { }
384
// Offset of the payload within an object/array for arraycopy purposes.
385 static int arraycopy_payload_base_offset(bool is_array);
386
// Build / retrieve the cached clone call type (_clone_type_Type above).
387 static void make_clone_type();
388 static const TypeFunc* clone_type();
389
390 #ifndef PRODUCT
// Debug printing: dump the node's barrier data in hex (non-PRODUCT only).
391 virtual void dump_barrier_data(const MachNode* mach, outputStream* st) const {
392 st->print("%x", mach->barrier_data());
393 };
394 #endif
395 };
|