115 return dom_result == DomResult::Dominate;
116 }
117
118   virtual const class TypePtr *adr_type() const; // returns bottom_type of address
119
120   // Shared code for Ideal methods:
121   Node *Ideal_common(PhaseGVN *phase, bool can_reshape);  // Return -1 for short-circuit null.
122
123   // Helper function for adr_type() implementations.
124   static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = nullptr);
125
126   // Raw access function, to allow copying of adr_type efficiently in
127   // product builds and retain the debug info for debug builds.
// NOTE: in product builds _adr_type is compiled out (NOT_DEBUG), so this
// deliberately returns nullptr there; only debug builds see the real type.
128   const TypePtr *raw_adr_type() const {
129     return DEBUG_ONLY(_adr_type) NOT_DEBUG(nullptr);
130   }
131
132   // Return the barrier data of n, if available, or 0 otherwise.
133   static uint8_t barrier_data(const Node* n);
134
135   // Map a load or store opcode to its corresponding store opcode.
136   // (Return -1 if unknown.)
137   virtual int store_Opcode() const { return -1; }
138
139   // What is the type of the value in memory? (T_VOID means "unspecified".)
140   // The returned type is a property of the value that is loaded/stored and
141   // not the memory that is accessed. For mismatched memory accesses
142   // they might differ. For instance, a value of type 'short' may be stored
143   // into an array of elements of type 'long'.
144   virtual BasicType value_basic_type() const = 0;
// Size in bytes of the accessed value, derived from value_basic_type().
// The ASSERT build passes 'true' as the second argument — presumably to
// tolerate special BasicTypes that would otherwise assert; confirm against
// type2aelembytes().
145   virtual int memory_size() const {
146 #ifdef ASSERT
147     return type2aelembytes(value_basic_type(), true);
148 #else
149     return type2aelembytes(value_basic_type());
150 #endif
151   }
152
// Accessors for this node's _barrier_data byte (see also the static
// barrier_data(const Node*) helper above). The getter only reads state,
// so it is const-qualified — this is a source-compatible tightening.
153   uint8_t barrier_data() const { return _barrier_data; }
154   void set_barrier_data(uint8_t barrier_data) { _barrier_data = barrier_data; }
256   // Should LoadNode::Ideal() attempt to remove control edges?
257   virtual bool can_remove_control() const;
258   const Type* const _type;      // What kind of value is loaded?
259
// Look for a preceding ArrayCopy that could supply the loaded value;
// may update 'mem' while walking. Semantics defined in the .cpp — not
// visible here.
260   virtual Node* find_previous_arraycopy(PhaseValues* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
261 public:
262
// Construct a load: control c, memory mem, address adr, address type at,
// result type rt, memory ordering mo, and the control-dependency policy.
263   LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
264     : MemNode(c,mem,adr,at), _control_dependency(control_dependency), _mo(mo), _type(rt) {
265     init_class_id(Class_Load);
266   }
// Loads only carry 'unordered' or 'acquire' ordering (enforced by the assert).
267   inline bool is_unordered() const { return !is_acquire(); }
268   inline bool is_acquire() const {
269     assert(_mo == unordered || _mo == acquire, "unexpected");
270     return _mo == acquire;
271   }
// True for the unsigned-load opcodes (unsigned byte / unsigned short).
272   inline bool is_unsigned() const {
273     int lop = Opcode();
274     return (lop == Op_LoadUB) || (lop == Op_LoadUS);
275   }
276
277   // Polymorphic factory method:
278   static Node* make(PhaseGVN& gvn, Node* c, Node* mem, Node* adr,
279                     const TypePtr* at, const Type* rt, BasicType bt,
280                     MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
281                     bool require_atomic_access = false, bool unaligned = false, bool mismatched = false, bool unsafe = false,
282                     uint8_t barrier_data = 0);
283
284   virtual uint hash() const;  // Check the type
285
286   // Handle algebraic identities here.  If we have an identity, return the Node
287   // we are equivalent to.  We look for Load of a Store.
288   virtual Node* Identity(PhaseGVN* phase);
289
290   // If the load is from Field memory and the pointer is non-null, it might be possible to
291   // zero out the control input.
292   // If the offset is constant and the base is an object allocation,
293   // try to hook me up to the exact initializing store.
294   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
295
627 public:
628   // We must ensure that stores of object references will be visible
629   // only after the object's initialization. So the callers of this
630   // procedure must indicate that the store requires `release'
631   // semantics, if the stored value is an object reference that might
632   // point to a new object and may become externally visible.
633   StoreNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
634     : MemNode(c, mem, adr, at, val), _mo(mo) {
635     init_class_id(Class_Store);
636   }
// Variant taking an extra oop_store input — presumably the paired
// oop store for card-mark-style stores; confirm at the call sites.
637   StoreNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, MemOrd mo)
638     : MemNode(c, mem, adr, at, val, oop_store), _mo(mo) {
639     init_class_id(Class_Store);
640   }
641
// Stores only carry 'unordered' or 'release' ordering (enforced by the assert).
642   inline bool is_unordered() const { return !is_release(); }
643   inline bool is_release() const {
644     assert((_mo == unordered || _mo == release), "unexpected");
645     return _mo == release;
646   }
647
648   // Conservatively release stores of object references in order to
649   // ensure visibility of object initialization.
650   static inline MemOrd release_if_reference(const BasicType t) {
651 #ifdef AARCH64
652     // AArch64 doesn't need a release store here because object
653     // initialization contains the necessary barriers.
654     return unordered;
655 #else
656     const MemOrd mo = (t == T_ARRAY ||
657                        t == T_ADDRESS || // Might be the address of an object reference (`boxing').
658                        t == T_OBJECT) ? release : unordered;
659     return mo;
660 #endif
661   }
662
663 // Polymorphic factory method
664 //
665 // We must ensure that stores of object references will be visible
666 // only after the object's initialization. So the callers of this
667 // procedure must indicate that the store requires `release'
683
684   // Check for identity function on memory (Load then Store at same address)
685   virtual Node* Identity(PhaseGVN* phase);
686
687   // Do not match memory edge
688   virtual uint match_edge(uint idx) const;
689
690   virtual const Type *bottom_type() const;  // returns Type::MEMORY
691
692   // Map a store opcode to its corresponding own opcode, trivially.
693   virtual int store_Opcode() const { return Opcode(); }
694
695   // have all possible loads of the value stored been optimized away?
696   bool value_never_loaded(PhaseValues* phase) const;
697
// Reinterpret-style conversion support (e.g. storing the same bits under a
// different value type); semantics live in the .cpp, not visible here.
698   bool has_reinterpret_variant(const Type* vt);
699   Node* convert_to_reinterpret_store(PhaseGVN& gvn, Node* val, const Type* vt);
700
701   MemBarNode* trailing_membar() const;
702
703 private:
// Returns false: a store must stay pinned to its control input and never
// depends only on a dominating test.
704   virtual bool depends_only_on_test_impl() const { return false; }
705 };
706
707 //------------------------------StoreBNode-------------------------------------
708 // Store byte to memory
709 class StoreBNode : public StoreNode {
710 public:
711   StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
712     : StoreNode(c, mem, adr, at, val, mo) {}
// Opcode() is defined by the opcode machinery elsewhere (not in this header).
713   virtual int Opcode() const;
714   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
// The in-memory value type is a single byte.
715   virtual BasicType value_basic_type() const { return T_BYTE; }
716 };
717
718 //------------------------------StoreCNode-------------------------------------
719 // Store char/short to memory
720 class StoreCNode : public StoreNode {
721 public:
722 StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
|
115 return dom_result == DomResult::Dominate;
116 }
117
118   virtual const class TypePtr *adr_type() const; // returns bottom_type of address
119
120   // Shared code for Ideal methods:
121   Node *Ideal_common(PhaseGVN *phase, bool can_reshape);  // Return -1 for short-circuit null.
122
123   // Helper function for adr_type() implementations.
124   static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = nullptr);
125
126   // Raw access function, to allow copying of adr_type efficiently in
127   // product builds and retain the debug info for debug builds.
// NOTE: in product builds _adr_type is compiled out (NOT_DEBUG), so this
// deliberately returns nullptr there; only debug builds see the real type.
128   const TypePtr *raw_adr_type() const {
129     return DEBUG_ONLY(_adr_type) NOT_DEBUG(nullptr);
130   }
131
132   // Return the barrier data of n, if available, or 0 otherwise.
133   static uint8_t barrier_data(const Node* n);
134
// Return the memory ordering of n — static helper counterpart to the
// per-class memory_order() accessors below.
135   static MemOrd memory_order(const Node* n);
136
137   // Map a load or store opcode to its corresponding store opcode.
138   // (Return -1 if unknown.)
139   virtual int store_Opcode() const { return -1; }
140
141   // What is the type of the value in memory? (T_VOID means "unspecified".)
142   // The returned type is a property of the value that is loaded/stored and
143   // not the memory that is accessed. For mismatched memory accesses
144   // they might differ. For instance, a value of type 'short' may be stored
145   // into an array of elements of type 'long'.
146   virtual BasicType value_basic_type() const = 0;
// Size in bytes of the accessed value, derived from value_basic_type().
// The ASSERT build passes 'true' as the second argument — presumably to
// tolerate special BasicTypes that would otherwise assert; confirm against
// type2aelembytes().
147   virtual int memory_size() const {
148 #ifdef ASSERT
149     return type2aelembytes(value_basic_type(), true);
150 #else
151     return type2aelembytes(value_basic_type());
152 #endif
153   }
154
// Accessors for this node's _barrier_data byte (see also the static
// barrier_data(const Node*) helper above). The getter only reads state,
// so it is const-qualified — this is a source-compatible tightening.
155   uint8_t barrier_data() const { return _barrier_data; }
156   void set_barrier_data(uint8_t barrier_data) { _barrier_data = barrier_data; }
258   // Should LoadNode::Ideal() attempt to remove control edges?
259   virtual bool can_remove_control() const;
260   const Type* const _type;      // What kind of value is loaded?
261
// Look for a preceding ArrayCopy that could supply the loaded value;
// may update 'mem' while walking. Semantics defined in the .cpp — not
// visible here.
262   virtual Node* find_previous_arraycopy(PhaseValues* phase, Node* ld_alloc, Node*& mem, bool can_see_stored_value) const;
263 public:
264
// Construct a load: control c, memory mem, address adr, address type at,
// result type rt, memory ordering mo, and the control-dependency policy.
265   LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo, ControlDependency control_dependency)
266     : MemNode(c,mem,adr,at), _control_dependency(control_dependency), _mo(mo), _type(rt) {
267     init_class_id(Class_Load);
268   }
// Loads only carry 'unordered' or 'acquire' ordering (enforced by the assert).
269   inline bool is_unordered() const { return !is_acquire(); }
270   inline bool is_acquire() const {
271     assert(_mo == unordered || _mo == acquire, "unexpected");
272     return _mo == acquire;
273   }
// True for the unsigned-load opcodes (unsigned byte / unsigned short).
274   inline bool is_unsigned() const {
275     int lop = Opcode();
276     return (lop == Op_LoadUB) || (lop == Op_LoadUS);
277   }
// Raw memory-ordering accessor (unordered or acquire for loads).
278   inline MemOrd memory_order() const {
279     return _mo;
280   }
281
282   // Polymorphic factory method:
283   static Node* make(PhaseGVN& gvn, Node* c, Node* mem, Node* adr,
284                     const TypePtr* at, const Type* rt, BasicType bt,
285                     MemOrd mo, ControlDependency control_dependency = DependsOnlyOnTest,
286                     bool require_atomic_access = false, bool unaligned = false, bool mismatched = false, bool unsafe = false,
287                     uint8_t barrier_data = 0);
288
289   virtual uint hash() const;  // Check the type
290
291   // Handle algebraic identities here.  If we have an identity, return the Node
292   // we are equivalent to.  We look for Load of a Store.
293   virtual Node* Identity(PhaseGVN* phase);
294
295   // If the load is from Field memory and the pointer is non-null, it might be possible to
296   // zero out the control input.
297   // If the offset is constant and the base is an object allocation,
298   // try to hook me up to the exact initializing store.
299   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
300
632 public:
633   // We must ensure that stores of object references will be visible
634   // only after the object's initialization. So the callers of this
635   // procedure must indicate that the store requires `release'
636   // semantics, if the stored value is an object reference that might
637   // point to a new object and may become externally visible.
638   StoreNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
639     : MemNode(c, mem, adr, at, val), _mo(mo) {
640     init_class_id(Class_Store);
641   }
// Variant taking an extra oop_store input — presumably the paired
// oop store for card-mark-style stores; confirm at the call sites.
642   StoreNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, MemOrd mo)
643     : MemNode(c, mem, adr, at, val, oop_store), _mo(mo) {
644     init_class_id(Class_Store);
645   }
646
// Stores only carry 'unordered' or 'release' ordering (enforced by the assert).
647   inline bool is_unordered() const { return !is_release(); }
648   inline bool is_release() const {
649     assert((_mo == unordered || _mo == release), "unexpected");
650     return _mo == release;
651   }
// Raw memory-ordering accessor (unordered or release for stores).
652   inline MemOrd memory_order() const {
653     return _mo;
654   }
655   // Conservatively release stores of object references in order to
656   // ensure visibility of object initialization.
657   static inline MemOrd release_if_reference(const BasicType t) {
658 #ifdef AARCH64
659     // AArch64 doesn't need a release store here because object
660     // initialization contains the necessary barriers.
661     return unordered;
662 #else
663     const MemOrd mo = (t == T_ARRAY ||
664                        t == T_ADDRESS || // Might be the address of an object reference (`boxing').
665                        t == T_OBJECT) ? release : unordered;
666     return mo;
667 #endif
668   }
669
670 // Polymorphic factory method
671 //
672 // We must ensure that stores of object references will be visible
673 // only after the object's initialization. So the callers of this
674 // procedure must indicate that the store requires `release'
690
691   // Check for identity function on memory (Load then Store at same address)
692   virtual Node* Identity(PhaseGVN* phase);
693
694   // Do not match memory edge
695   virtual uint match_edge(uint idx) const;
696
697   virtual const Type *bottom_type() const;  // returns Type::MEMORY
698
699   // Map a store opcode to its corresponding own opcode, trivially.
700   virtual int store_Opcode() const { return Opcode(); }
701
702   // have all possible loads of the value stored been optimized away?
703   bool value_never_loaded(PhaseValues* phase) const;
704
// Reinterpret-style conversion support (e.g. storing the same bits under a
// different value type); semantics live in the .cpp, not visible here.
705   bool has_reinterpret_variant(const Type* vt);
706   Node* convert_to_reinterpret_store(PhaseGVN& gvn, Node* val, const Type* vt);
707
708   MemBarNode* trailing_membar() const;
709
// Debug printing only (compiled out of PRODUCT builds): tag release stores.
710 #ifndef PRODUCT
711   virtual void dump_spec(outputStream *st) const {
712     MemNode::dump_spec(st);
713     if (is_release()) st->print("is_release");
714   }
715 #endif
716
717 private:
// Returns false: a store must stay pinned to its control input and never
// depends only on a dominating test.
718   virtual bool depends_only_on_test_impl() const { return false; }
719 };
720
721 //------------------------------StoreBNode-------------------------------------
722 // Store byte to memory
723 class StoreBNode : public StoreNode {
724 public:
725   StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
726     : StoreNode(c, mem, adr, at, val, mo) {}
// Opcode() is defined by the opcode machinery elsewhere (not in this header).
727   virtual int Opcode() const;
728   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
// The in-memory value type is a single byte.
729   virtual BasicType value_basic_type() const { return T_BYTE; }
730 };
731
732 //------------------------------StoreCNode-------------------------------------
733 // Store char/short to memory
734 class StoreCNode : public StoreNode {
735 public:
736 StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
|