
src/hotspot/share/opto/memnode.hpp


 109   static bool all_controls_dominate(Node* dom, Node* sub);
 110 
 111   virtual const class TypePtr *adr_type() const;  // returns bottom_type of address
 112 
 113   // Shared code for Ideal methods:
 114   Node *Ideal_common(PhaseGVN *phase, bool can_reshape);  // Return -1 for short-circuit null.
 115 
 116   // Helper function for adr_type() implementations.
 117   static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = nullptr);
 118 
 119   // Raw access function, to allow copying of adr_type efficiently in
 120   // product builds and to retain the debug info for debug builds.
 121   const TypePtr *raw_adr_type() const {
 122 #ifdef ASSERT
 123     return _adr_type;
 124 #else
 125     return 0;
 126 #endif
 127   }
 128
 129   // Return the barrier data of n, if available, or 0 otherwise.
 130   static uint8_t barrier_data(const Node* n);
 131 
 132   // Map a load or store opcode to its corresponding store opcode.
 133   // (Return -1 if unknown.)
 134   virtual int store_Opcode() const { return -1; }
 135 
 136   // What is the type of the value in memory?  (T_VOID means "unspecified".)
 137   virtual BasicType memory_type() const = 0;
 138   virtual int memory_size() const {
 139 #ifdef ASSERT
 140     return type2aelembytes(memory_type(), true);
 141 #else
 142     return type2aelembytes(memory_type());
 143 #endif
 144   }
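
A standalone sketch of the memory_type()/memory_size() contract above: a concrete node reports the BasicType it accesses, and the access width is derived from that type the way type2aelembytes() does for T_BYTE, T_INT and so on. The enum, class and function names below are local stand-ins for illustration, not HotSpot code.

    #include <cstdio>

    enum BasicType { T_BYTE, T_INT, T_LONG, T_VOID };   // local stand-in for the real enum

    // Stand-in for type2aelembytes(): bytes occupied by one element of the given type.
    static int bytes_for(BasicType t) {
      switch (t) {
        case T_BYTE: return 1;
        case T_INT:  return 4;
        case T_LONG: return 8;
        default:     return 0;   // T_VOID: width unspecified
      }
    }

    struct MemNodeSketch {
      virtual BasicType memory_type() const = 0;
      virtual int memory_size() const { return bytes_for(memory_type()); }
      virtual ~MemNodeSketch() = default;
    };

    struct LoadIntSketch : MemNodeSketch {   // plays the role of a LoadINode
      BasicType memory_type() const override { return T_INT; }
    };

    int main() {
      LoadIntSketch l;
      std::printf("memory_size = %d\n", l.memory_size());   // prints 4
      return 0;
    }
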
 145 
 146   uint8_t barrier_data() { return _barrier_data; }
 147   void set_barrier_data(uint8_t barrier_data) { _barrier_data = barrier_data; }
 148 

 538   static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
 539                     const TypeKlassPtr* tk = TypeInstKlassPtr::OBJECT);
 540 };
 541 
 542 //------------------------------LoadNKlassNode---------------------------------
 543 // Load a narrow Klass from an object.
 544 class LoadNKlassNode : public LoadNNode {
 545 public:
 546   LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
 547     : LoadNNode(c, mem, adr, at, tk, mo) {}
 548   virtual int Opcode() const;
 549   virtual uint ideal_reg() const { return Op_RegN; }
 550   virtual int store_Opcode() const { return Op_StoreNKlass; }
 551   virtual BasicType memory_type() const { return T_NARROWKLASS; }
 552 
 553   virtual const Type* Value(PhaseGVN* phase) const;
 554   virtual Node* Identity(PhaseGVN* phase);
 555   virtual bool depends_only_on_test() const { return true; }
 556 };
 557 
 558 
 559 //------------------------------StoreNode--------------------------------------
 560 // Store value; requires Store, Address and Value
 561 class StoreNode : public MemNode {
 562 private:
 563   // On platforms with weak memory ordering (e.g., PPC, IA64) we distinguish
 564   // stores that can be reordered from those that require release semantics to
 565   // adhere to the Java specification.  The required behaviour is stored in
 566   // this field.
 567   const MemOrd _mo;
 568   // Needed for proper cloning.
 569   virtual uint size_of() const { return sizeof(*this); }
 570 protected:
 571   virtual bool cmp( const Node &n ) const;
 572   virtual bool depends_only_on_test() const { return false; }
 573 
 574   Node *Ideal_masked_input       (PhaseGVN *phase, uint mask);
 575   Node *Ideal_sign_extended_input(PhaseGVN *phase, int  num_bits);
 576 
 577 public:
 578   // We must ensure that stores of object references will be visible

1077 };
1078 
1079 //------------------------------GetAndSetPNode---------------------------
1080 class GetAndSetPNode : public LoadStoreNode {
1081 public:
1082   GetAndSetPNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
1083   virtual int Opcode() const;
1084 };
1085 
1086 //------------------------------GetAndSetNNode---------------------------
1087 class GetAndSetNNode : public LoadStoreNode {
1088 public:
1089   GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
1090   virtual int Opcode() const;
1091 };
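
GetAndSetPNode and GetAndSetNNode model an atomic swap: install a new pointer (a compressed oop in the N variant) and produce the previous value, as used for Unsafe/Atomic get-and-set operations. A standalone sketch of the same semantics using std::atomic, purely for illustration and not HotSpot code:

    #include <atomic>
    #include <cassert>

    int main() {
      int a = 1, b = 2;
      std::atomic<int*> slot{&a};
      int* old = slot.exchange(&b);            // atomically install &b and return the old pointer
      assert(old == &a && slot.load() == &b);
      return 0;
    }
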
1092 
1093 //------------------------------ClearArray-------------------------------------
1094 class ClearArrayNode: public Node {
1095 private:
1096   bool _is_large;

1097 public:
1098   ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base, bool is_large)
1099     : Node(ctrl,arymem,word_cnt,base), _is_large(is_large) {

1100     init_class_id(Class_ClearArray);
1101   }
1102   virtual int         Opcode() const;
1103   virtual const Type *bottom_type() const { return Type::MEMORY; }
1104   // ClearArray modifies array elements, and so affects only the
1105   // array memory addressed by the bottom_type of its base address.
1106   virtual const class TypePtr *adr_type() const;
1107   virtual Node* Identity(PhaseGVN* phase);
1108   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1109   virtual uint match_edge(uint idx) const;
1110   bool is_large() const { return _is_large; }

1111 
1112   // Clear the given area of an object or array.
1113   // The start offset must always be aligned mod BytesPerInt.
1114   // The end offset must always be aligned mod BytesPerLong.
1115   // Return the new memory.
1116   static Node* clear_memory(Node* control, Node* mem, Node* dest,
1117                             intptr_t start_offset,
1118                             intptr_t end_offset,
1119                             PhaseGVN* phase);
1120   static Node* clear_memory(Node* control, Node* mem, Node* dest,
1121                             intptr_t start_offset,
1122                             Node* end_offset,
1123                             PhaseGVN* phase);
1124   static Node* clear_memory(Node* control, Node* mem, Node* dest,
1125                             Node* start_offset,
1126                             Node* end_offset,
1127                             PhaseGVN* phase);
1128   // Return the allocation's input memory edge if it is a different instance,
1129   // or itself if it is the one we are looking for.
1130   static bool step_through(Node** np, uint instance_id, PhaseValues* phase);
1131 };
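
A standalone sketch of the alignment contract stated in the clear_memory() comment above: the cleared range begins on an int boundary and ends on a long boundary, so the fill can proceed with whole-word stores. The helper name and the constants (mirroring BytesPerInt == 4 and BytesPerLong == 8) are illustrative assumptions, not HotSpot code.

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kBytesPerInt  = 4;   // mirrors BytesPerInt
    constexpr intptr_t kBytesPerLong = 8;   // mirrors BytesPerLong

    // True if [start_offset, end_offset) satisfies the stated clear_memory() contract.
    static bool valid_clear_range(intptr_t start_offset, intptr_t end_offset) {
      return start_offset % kBytesPerInt  == 0 &&
             end_offset   % kBytesPerLong == 0 &&
             start_offset <= end_offset;
    }

    int main() {
      assert(valid_clear_range(16, 64));    // e.g. clearing an array body past its header
      assert(!valid_clear_range(18, 64));   // a misaligned start violates the contract
      return 0;
    }
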
1132 
1133 //------------------------------MemBar-----------------------------------------
1134 // There are different flavors of Memory Barriers to match the Java Memory
1135 // Model.  Monitor-enter and volatile-load act as Acquires: no following ref
1136 // can be moved to before them.  We insert a MemBar-Acquire after a FastLock or
1137 // volatile-load.  Monitor-exit and volatile-store act as Release: no
1138 // preceding ref can be moved to after them.  We insert a MemBar-Release
1139 // before a FastUnlock or volatile-store.  All volatiles need to be
1140 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
1141   // separate them from any following volatile-load.
1142 class MemBarNode: public MultiNode {
1143   virtual uint hash() const ;                  // { return NO_HASH; }
1144   virtual bool cmp( const Node &n ) const ;    // Always fail, except on self

1156     TrailingLoadStore,
1157     LeadingLoadStore,
1158     TrailingPartialArrayCopy
1159   } _kind;
1160 
1161 #ifdef ASSERT
1162   uint _pair_idx;
1163 #endif
1164 
1165 public:
1166   enum {
1167     Precedent = TypeFunc::Parms  // optional edge to force precedence
1168   };
1169   MemBarNode(Compile* C, int alias_idx, Node* precedent);
1170   virtual int Opcode() const = 0;
1171   virtual const class TypePtr *adr_type() const { return _adr_type; }
1172   virtual const Type* Value(PhaseGVN* phase) const;
1173   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1174   virtual uint match_edge(uint idx) const { return 0; }
1175   virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
1176   virtual Node *match( const ProjNode *proj, const Matcher *m );
1177   // Factory method.  Builds a wide or narrow membar.
1178   // Optional 'precedent' becomes an extra edge if not null.
1179   static MemBarNode* make(Compile* C, int opcode,
1180                           int alias_idx = Compile::AliasIdxBot,
1181                           Node* precedent = nullptr);
1182 
1183   MemBarNode* trailing_membar() const;
1184   MemBarNode* leading_membar() const;
1185 
1186   void set_trailing_load() { _kind = TrailingLoad; }
1187   bool trailing_load() const { return _kind == TrailingLoad; }
1188   bool trailing_store() const { return _kind == TrailingStore; }
1189   bool leading_store() const { return _kind == LeadingStore; }
1190   bool trailing_load_store() const { return _kind == TrailingLoadStore; }
1191   bool leading_load_store() const { return _kind == LeadingLoadStore; }
1192   bool trailing() const { return _kind == TrailingLoad || _kind == TrailingStore || _kind == TrailingLoadStore; }
1193   bool leading() const { return _kind == LeadingStore || _kind == LeadingLoadStore; }
1194   bool standalone() const { return _kind == Standalone; }
1195   void set_trailing_partial_array_copy() { _kind = TrailingPartialArrayCopy; }
1196   bool trailing_partial_array_copy() const { return _kind == TrailingPartialArrayCopy; }
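
As a rough analogue of the barrier placement described in the MemBar comment above, the standalone sketch below shows a volatile-store fenced with a release barrier in front (the leading membar) and a full barrier behind (the trailing MemBar-Volatile), which keeps it apart from any later volatile-load. It uses portable C++ fences purely for illustration; it is not the code C2 emits.

    #include <atomic>

    int shared_plain = 0;
    std::atomic<int> shared_volatile{0};

    void volatile_store_like(int v) {
      shared_plain = v;                                     // earlier ref: must not sink below the release
      std::atomic_thread_fence(std::memory_order_release);  // ~ leading MemBar-Release
      shared_volatile.store(v, std::memory_order_relaxed);  // the volatile store itself
      std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ trailing MemBar-Volatile
    }

    int main() {
      volatile_store_like(42);
      return 0;
    }
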

 109   static bool all_controls_dominate(Node* dom, Node* sub);
 110 
 111   virtual const class TypePtr *adr_type() const;  // returns bottom_type of address
 112 
 113   // Shared code for Ideal methods:
 114   Node *Ideal_common(PhaseGVN *phase, bool can_reshape);  // Return -1 for short-circuit null.
 115 
 116   // Helper function for adr_type() implementations.
 117   static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = nullptr);
 118 
 119   // Raw access function, to allow copying of adr_type efficiently in
 120   // product builds and to retain the debug info for debug builds.
 121   const TypePtr *raw_adr_type() const {
 122 #ifdef ASSERT
 123     return _adr_type;
 124 #else
 125     return 0;
 126 #endif
 127   }
 128 
 129 #ifdef ASSERT
 130   void set_adr_type(const TypePtr* adr_type) { _adr_type = adr_type; }
 131 #endif
 132 
 133   // Return the barrier data of n, if available, or 0 otherwise.
 134   static uint8_t barrier_data(const Node* n);
 135 
 136   // Map a load or store opcode to its corresponding store opcode.
 137   // (Return -1 if unknown.)
 138   virtual int store_Opcode() const { return -1; }
 139 
 140   // What is the type of the value in memory?  (T_VOID means "unspecified".)
 141   virtual BasicType memory_type() const = 0;
 142   virtual int memory_size() const {
 143 #ifdef ASSERT
 144     return type2aelembytes(memory_type(), true);
 145 #else
 146     return type2aelembytes(memory_type());
 147 #endif
 148   }
 149 
 150   uint8_t barrier_data() { return _barrier_data; }
 151   void set_barrier_data(uint8_t barrier_data) { _barrier_data = barrier_data; }
 152 

 542   static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
 543                     const TypeKlassPtr* tk = TypeInstKlassPtr::OBJECT);
 544 };
 545 
 546 //------------------------------LoadNKlassNode---------------------------------
 547 // Load a narrow Klass from an object.
 548 class LoadNKlassNode : public LoadNNode {
 549 public:
 550   LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
 551     : LoadNNode(c, mem, adr, at, tk, mo) {}
 552   virtual int Opcode() const;
 553   virtual uint ideal_reg() const { return Op_RegN; }
 554   virtual int store_Opcode() const { return Op_StoreNKlass; }
 555   virtual BasicType memory_type() const { return T_NARROWKLASS; }
 556 
 557   virtual const Type* Value(PhaseGVN* phase) const;
 558   virtual Node* Identity(PhaseGVN* phase);
 559   virtual bool depends_only_on_test() const { return true; }
 560 };
 561 

 562 //------------------------------StoreNode--------------------------------------
 563 // Store value; requires Store, Address and Value
 564 class StoreNode : public MemNode {
 565 private:
 566   // On platforms with weak memory ordering (e.g., PPC, IA64) we distinguish
 567   // stores that can be reordered from those that require release semantics to
 568   // adhere to the Java specification.  The required behaviour is stored in
 569   // this field.
 570   const MemOrd _mo;
 571   // Needed for proper cloning.
 572   virtual uint size_of() const { return sizeof(*this); }
 573 protected:
 574   virtual bool cmp( const Node &n ) const;
 575   virtual bool depends_only_on_test() const { return false; }
 576 
 577   Node *Ideal_masked_input       (PhaseGVN *phase, uint mask);
 578   Node *Ideal_sign_extended_input(PhaseGVN *phase, int  num_bits);
 579 
 580 public:
 581   // We must ensure that stores of object references will be visible

1080 };
1081 
1082 //------------------------------GetAndSetPNode---------------------------
1083 class GetAndSetPNode : public LoadStoreNode {
1084 public:
1085   GetAndSetPNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
1086   virtual int Opcode() const;
1087 };
1088 
1089 //------------------------------GetAndSetNNode---------------------------
1090 class GetAndSetNNode : public LoadStoreNode {
1091 public:
1092   GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
1093   virtual int Opcode() const;
1094 };
1095 
1096 //------------------------------ClearArray-------------------------------------
1097 class ClearArrayNode: public Node {
1098 private:
1099   bool _is_large;
1100   bool _word_copy_only;
1101 public:
1102   ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base, Node* val, bool is_large)
1103     : Node(ctrl, arymem, word_cnt, base, val), _is_large(is_large),
1104       _word_copy_only(val->bottom_type()->isa_long() && (!val->bottom_type()->is_long()->is_con() || val->bottom_type()->is_long()->get_con() != 0)) {
1105     init_class_id(Class_ClearArray);
1106   }
1107   virtual int         Opcode() const;
1108   virtual const Type *bottom_type() const { return Type::MEMORY; }
1109   // ClearArray modifies array elements, and so affects only the
1110   // array memory addressed by the bottom_type of its base address.
1111   virtual const class TypePtr *adr_type() const;
1112   virtual Node* Identity(PhaseGVN* phase);
1113   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1114   virtual uint match_edge(uint idx) const;
1115   bool is_large() const { return _is_large; }
1116   bool word_copy_only() const { return _word_copy_only; }
1117 
1118   // Clear the given area of an object or array.
1119   // The start offset must always be aligned mod BytesPerInt.
1120   // The end offset must always be aligned mod BytesPerLong.
1121   // Return the new memory.
1122   static Node* clear_memory(Node* control, Node* mem, Node* dest,
1123                             Node* val,
1124                             Node* raw_val,
1125                             intptr_t start_offset,
1126                             intptr_t end_offset,
1127                             PhaseGVN* phase);
1128   static Node* clear_memory(Node* control, Node* mem, Node* dest,
1129                             Node* val,
1130                             Node* raw_val,
1131                             intptr_t start_offset,
1132                             Node* end_offset,
1133                             PhaseGVN* phase);
1134   static Node* clear_memory(Node* control, Node* mem, Node* dest,
1135                             Node* raw_val,
1136                             Node* start_offset,
1137                             Node* end_offset,
1138                             PhaseGVN* phase);
1139   // Return the allocation's input memory edge if it is a different instance,
1140   // or itself if it is the one we are looking for.
1141   static bool step_through(Node** np, uint instance_id, PhaseValues* phase);
1142 };
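
The _word_copy_only flag computed in the constructor above is set when the fill value is typed as a long that is not known to be the constant zero, presumably because only a zero fill can be emitted at arbitrary store widths while a nonzero 64-bit pattern must be written whole words at a time. Below is a standalone restatement of that predicate; the helper name is illustrative and std::optional stands in for the is_con()/get_con() pair, so this is a sketch rather than HotSpot code.

    #include <cassert>
    #include <cstdint>
    #include <optional>

    // is_long_typed  ~ val->bottom_type()->isa_long()
    // constant_value ~ holds get_con() when is_con() is true, otherwise empty
    static bool word_copy_only(bool is_long_typed, std::optional<int64_t> constant_value) {
      return is_long_typed && (!constant_value.has_value() || *constant_value != 0);
    }

    int main() {
      assert(!word_copy_only(true, 0));                           // constant zero fill: any store width works
      assert(word_copy_only(true, INT64_C(0x0101010101010101)));  // nonzero pattern: whole words only
      assert(word_copy_only(true, std::nullopt));                 // value not a compile-time constant
      return 0;
    }
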
1143 
1144 //------------------------------MemBar-----------------------------------------
1145 // There are different flavors of Memory Barriers to match the Java Memory
1146 // Model.  Monitor-enter and volatile-load act as Acquires: no following ref
1147 // can be moved to before them.  We insert a MemBar-Acquire after a FastLock or
1148 // volatile-load.  Monitor-exit and volatile-store act as Release: no
1149 // preceding ref can be moved to after them.  We insert a MemBar-Release
1150 // before a FastUnlock or volatile-store.  All volatiles need to be
1151 // serialized, so we follow all volatile-stores with a MemBar-Volatile to
1152   // separate them from any following volatile-load.
1153 class MemBarNode: public MultiNode {
1154   virtual uint hash() const ;                  // { return NO_HASH; }
1155   virtual bool cmp( const Node &n ) const ;    // Always fail, except on self

1167     TrailingLoadStore,
1168     LeadingLoadStore,
1169     TrailingPartialArrayCopy
1170   } _kind;
1171 
1172 #ifdef ASSERT
1173   uint _pair_idx;
1174 #endif
1175 
1176 public:
1177   enum {
1178     Precedent = TypeFunc::Parms  // optional edge to force precedence
1179   };
1180   MemBarNode(Compile* C, int alias_idx, Node* precedent);
1181   virtual int Opcode() const = 0;
1182   virtual const class TypePtr *adr_type() const { return _adr_type; }
1183   virtual const Type* Value(PhaseGVN* phase) const;
1184   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1185   virtual uint match_edge(uint idx) const { return 0; }
1186   virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
1187   virtual Node *match(const ProjNode *proj, const Matcher *m, const RegMask* mask);
1188   // Factory method.  Builds a wide or narrow membar.
1189   // Optional 'precedent' becomes an extra edge if not null.
1190   static MemBarNode* make(Compile* C, int opcode,
1191                           int alias_idx = Compile::AliasIdxBot,
1192                           Node* precedent = nullptr);
1193 
1194   MemBarNode* trailing_membar() const;
1195   MemBarNode* leading_membar() const;
1196 
1197   void set_trailing_load() { _kind = TrailingLoad; }
1198   bool trailing_load() const { return _kind == TrailingLoad; }
1199   bool trailing_store() const { return _kind == TrailingStore; }
1200   bool leading_store() const { return _kind == LeadingStore; }
1201   bool trailing_load_store() const { return _kind == TrailingLoadStore; }
1202   bool leading_load_store() const { return _kind == LeadingLoadStore; }
1203   bool trailing() const { return _kind == TrailingLoad || _kind == TrailingStore || _kind == TrailingLoadStore; }
1204   bool leading() const { return _kind == LeadingStore || _kind == LeadingLoadStore; }
1205   bool standalone() const { return _kind == Standalone; }
1206   void set_trailing_partial_array_copy() { _kind = TrailingPartialArrayCopy; }
1207   bool trailing_partial_array_copy() const { return _kind == TrailingPartialArrayCopy; }