< prev index next >

src/hotspot/share/opto/cfgnode.hpp

Print this page

 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_OPTO_CFGNODE_HPP
 26 #define SHARE_OPTO_CFGNODE_HPP
 27 
 28 #include "opto/multnode.hpp"
 29 #include "opto/node.hpp"
 30 #include "opto/opcodes.hpp"
 31 #include "opto/predicates_enums.hpp"
 32 #include "opto/type.hpp"

 33 
 34 // Portions of code courtesy of Clifford Click
 35 
 36 // Optimization - Graph Style
 37 
 38 class Matcher;
 39 class Node;
 40 class   RegionNode;
 41 class   TypeNode;
 42 class     PhiNode;
 43 class   GotoNode;
 44 class   MultiNode;
 45 class     MultiBranchNode;
 46 class       IfNode;
 47 class       PCTableNode;
 48 class         JumpNode;
 49 class         CatchNode;
 50 class       NeverBranchNode;
 51 class     BlackholeNode;
 52 class   ProjNode;

162         int _inst_mem_id; // Instance memory id (node index of the memory Phi)
163         int _inst_id;     // Instance id of the memory slice.
164   const int _inst_index;  // Alias index of the instance memory slice.
165   // Array elements references have the same alias_idx but different offset.
166   const int _inst_offset; // Offset of the instance memory slice.
167   // Size is bigger to hold the _adr_type field.
168   virtual uint hash() const;    // Check the type
169   virtual bool cmp( const Node &n ) const;
170   virtual uint size_of() const { return sizeof(*this); }
171 
172   // Determine if CMoveNode::is_cmove_id can be used at this join point.
173   Node* is_cmove_id(PhaseTransform* phase, int true_path);
174   bool wait_for_region_igvn(PhaseGVN* phase);
175   bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);
176 
177   static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
178   static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);
179 
180   bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;
181 



182   bool is_split_through_mergemem_terminating() const;
183 
184   void verify_type_stability(const PhaseGVN* phase, const Type* union_of_input_types, const Type* new_type) const NOT_DEBUG_RETURN;
185   bool wait_for_cast_input_igvn(const PhaseIterGVN* igvn) const;
186 
187 public:
188   // Node layout (parallels RegionNode):
189   enum { Region,                // Control input is the Phi's region.
190          Input                  // Input values are [1..len)
191   };
192 
193   PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
194            const int imid = -1,
195            const int iid = TypeOopPtr::InstanceTop,
196            const int iidx = Compile::AliasIdxTop,
197            const int ioffs = Type::OffsetTop )
198     : TypeNode(t,r->req()),
199       _adr_type(at),
200       _inst_mem_id(imid),
201       _inst_id(iid),

214   PhiNode* slice_memory(const TypePtr* adr_type) const;
215   PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
216   // like make(r, x), but does not initialize the in edges to x
217   static PhiNode* make_blank( Node* r, Node* x );
218 
219   // Accessors
220   RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }  // control input (slot Region) viewed as a RegionNode; may be null
221 
222   bool is_tripcount(BasicType bt) const;
223 
224   // Determine a unique non-trivial input, if any.
225   // Ignore casts if it helps.  Return null on failure.
226   Node* unique_input(PhaseValues* phase, bool uncast);
227   Node* unique_input(PhaseValues* phase) {  // convenience overload: strict pass first, then look through casts
228     Node* uin = unique_input(phase, false);  // first try without stripping casts
229     if (uin == nullptr) {
230       uin = unique_input(phase, true);       // retry, ignoring casts
231     }
232     return uin;                              // null if there is no unique non-trivial input
233   }

234 
235   // Check for a simple dead loop.
236   enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
237   LoopSafety simple_data_loop_check(Node *in) const;
238   // Is it unsafe data loop? It becomes a dead loop if this phi node removed.
239   bool is_unsafe_data_reference(Node *in) const;
240   int is_diamond_phi() const;
241   bool try_clean_memory_phi(PhaseIterGVN* igvn);
242   virtual int Opcode() const;
243   virtual bool pinned() const { return in(0) != nullptr; }
244   virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }
245 
246   void  set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
247   int inst_mem_id() const { return _inst_mem_id; }
248   int inst_id()     const { return _inst_id; }
249   int inst_index()  const { return _inst_index; }
250   int inst_offset() const { return _inst_offset; }
251   bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {  // true iff this Phi covers the instance memory slice described by (tp, mem_id, id, index, offset)
252     return type()->basic_type() == tp->basic_type() &&
253            inst_mem_id() == mem_id &&
254            inst_id()     == id     &&
255            inst_index()  == index  &&
256            inst_offset() == offset &&
257            type()->higher_equal(tp);  // and this Phi's type is at least as precise as tp
258   }
259 







260   virtual const Type* Value(PhaseGVN* phase) const;
261   virtual Node* Identity(PhaseGVN* phase);
262   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
263   virtual const RegMask &out_RegMask() const;
264   virtual const RegMask &in_RegMask(uint) const;
265 #ifndef PRODUCT
266   virtual void dump_spec(outputStream *st) const;
267 #endif
268 #ifdef ASSERT
269   void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
270   void verify_adr_type(bool recursive = false) const;
271 #else //ASSERT
272   void verify_adr_type(bool recursive = false) const {}
273 #endif //ASSERT
274 
275   const TypeTuple* collect_types(PhaseGVN* phase) const;
276   bool can_be_replaced_by(const PhiNode* other) const;
277 };
278 
279 //------------------------------GotoNode---------------------------------------

451   }
452 
453   virtual int Opcode() const;
454   virtual bool pinned() const { return true; }
455   virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
456   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
457   virtual const Type* Value(PhaseGVN* phase) const;
458   virtual uint required_outcnt() const { return 2; }
459   virtual const RegMask &out_RegMask() const;
460   Node* fold_compares(PhaseIterGVN* phase);
461   static Node* up_one_dom(Node* curr, bool linear_only = false);
462   bool is_zero_trip_guard() const;
463   Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool prev_dom_not_imply_this);
464   ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;
465 
466   // Takes the type of val and filters it through the test represented
467   // by if_proj and returns a more refined type if one is produced.
468   // Returns null if it couldn't improve the type.
469   static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);
470 


471   AssertionPredicateType assertion_predicate_type() const {  // read-only access to the stored assertion-predicate tag
472     return _assertion_predicate_type;
473   }
474 
475 #ifndef PRODUCT
476   virtual void dump_spec(outputStream *st) const;
477 #endif
478 
479   bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
480 };
481 
482 class RangeCheckNode : public IfNode {
483 private:
484   int is_range_check(Node*& range, Node*& index, jint& offset);
485 
486 public:
487   RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {  // p/fcnt forwarded to IfNode (presumably branch probability / frequency -- see IfNode)
488     init_class_id(Class_RangeCheck);  // tag the node class so checked queries/casts recognize range checks
489   }
490 

739   virtual int Opcode() const;
740   virtual bool pinned() const { return true; };
741   virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
742   virtual const Type* Value(PhaseGVN* phase) const;
743   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
744   virtual uint required_outcnt() const { return 2; }
745   virtual void emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { }
746   virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
747 #ifndef PRODUCT
748   virtual void format( PhaseRegAlloc *, outputStream *st ) const;
749 #endif
750 };
751 
752 //------------------------------BlackholeNode----------------------------
753 // Blackhole all arguments. This node would survive through the compiler
754 // in order to preserve the effects on its arguments, and would be finally matched to nothing.
755 class BlackholeNode : public MultiNode {
756 public:
757   BlackholeNode(Node* ctrl) : MultiNode(1) {  // one fixed input slot, filled with the control edge below
758     init_req(TypeFunc::Control, ctrl);

759   }
760   virtual int   Opcode() const;
761   virtual uint ideal_reg() const { return 0; } // not matched in the AD file
762   virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }  // typed like a membar tuple
763   virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
764 
765   const RegMask &in_RegMask(uint idx) const {
766     // Fake the incoming arguments mask for blackholes: accept all registers
767     // and all stack slots. This would avoid any redundant register moves
768     // for blackhole inputs.
769     return RegMask::ALL;
770   }
771 #ifndef PRODUCT
772   virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
773 #endif
774 };
775 
776 
777 #endif // SHARE_OPTO_CFGNODE_HPP

 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_OPTO_CFGNODE_HPP
 26 #define SHARE_OPTO_CFGNODE_HPP
 27 
 28 #include "opto/multnode.hpp"
 29 #include "opto/node.hpp"
 30 #include "opto/opcodes.hpp"
 31 #include "opto/predicates_enums.hpp"
 32 #include "opto/type.hpp"
 33 #include "runtime/arguments.hpp"
 34 
 35 // Portions of code courtesy of Clifford Click
 36 
 37 // Optimization - Graph Style
 38 
 39 class Matcher;
 40 class Node;
 41 class   RegionNode;
 42 class   TypeNode;
 43 class     PhiNode;
 44 class   GotoNode;
 45 class   MultiNode;
 46 class     MultiBranchNode;
 47 class       IfNode;
 48 class       PCTableNode;
 49 class         JumpNode;
 50 class         CatchNode;
 51 class       NeverBranchNode;
 52 class     BlackholeNode;
 53 class   ProjNode;

163         int _inst_mem_id; // Instance memory id (node index of the memory Phi)
164         int _inst_id;     // Instance id of the memory slice.
165   const int _inst_index;  // Alias index of the instance memory slice.
166   // Array elements references have the same alias_idx but different offset.
167   const int _inst_offset; // Offset of the instance memory slice.
168   // Size is bigger to hold the _adr_type field.
169   virtual uint hash() const;    // Check the type
170   virtual bool cmp( const Node &n ) const;
171   virtual uint size_of() const { return sizeof(*this); }
172 
173   // Determine if CMoveNode::is_cmove_id can be used at this join point.
174   Node* is_cmove_id(PhaseTransform* phase, int true_path);
175   bool wait_for_region_igvn(PhaseGVN* phase);
176   bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);
177 
178   static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
179   static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);
180 
181   bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;
182 
183   bool can_push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass*& inline_klass);
184   InlineTypeNode* push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass* inline_klass);
185 
186   bool is_split_through_mergemem_terminating() const;
187 
188   void verify_type_stability(const PhaseGVN* phase, const Type* union_of_input_types, const Type* new_type) const NOT_DEBUG_RETURN;
189   bool wait_for_cast_input_igvn(const PhaseIterGVN* igvn) const;
190 
191 public:
192   // Node layout (parallels RegionNode):
193   enum { Region,                // Control input is the Phi's region.
194          Input                  // Input values are [1..len)
195   };
196 
197   PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
198            const int imid = -1,
199            const int iid = TypeOopPtr::InstanceTop,
200            const int iidx = Compile::AliasIdxTop,
201            const int ioffs = Type::OffsetTop )
202     : TypeNode(t,r->req()),
203       _adr_type(at),
204       _inst_mem_id(imid),
205       _inst_id(iid),

218   PhiNode* slice_memory(const TypePtr* adr_type) const;
219   PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
220   // like make(r, x), but does not initialize the in edges to x
221   static PhiNode* make_blank( Node* r, Node* x );
222 
223   // Accessors
224   RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }  // control input (slot Region) viewed as a RegionNode; may be null
225 
226   bool is_tripcount(BasicType bt) const;
227 
228   // Determine a unique non-trivial input, if any.
229   // Ignore casts if it helps.  Return null on failure.
230   Node* unique_input(PhaseValues* phase, bool uncast);
231   Node* unique_input(PhaseValues* phase) {  // convenience overload: strict pass first, then look through casts
232     Node* uin = unique_input(phase, false);  // first try without stripping casts
233     if (uin == nullptr) {
234       uin = unique_input(phase, true);       // retry, ignoring casts
235     }
236     return uin;                              // null if there is no unique non-trivial input
237   }
238   Node* unique_constant_input_recursive(PhaseGVN* phase);
239 
240   // Check for a simple dead loop.
241   enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
242   LoopSafety simple_data_loop_check(Node *in) const;
243   // Is it unsafe data loop? It becomes a dead loop if this phi node removed.
244   bool is_unsafe_data_reference(Node *in) const;
245   int is_diamond_phi() const;
246   bool try_clean_memory_phi(PhaseIterGVN* igvn);
247   virtual int Opcode() const;
248   virtual bool pinned() const { return in(0) != nullptr; }
249   virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }
250 
251   void  set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
252   int inst_mem_id() const { return _inst_mem_id; }
253   int inst_id()     const { return _inst_id; }
254   int inst_index()  const { return _inst_index; }
255   int inst_offset() const { return _inst_offset; }
256   bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {  // true iff this Phi covers the instance memory slice described by (tp, mem_id, id, index, offset)
257     return type()->basic_type() == tp->basic_type() &&
258            inst_mem_id() == mem_id &&
259            inst_id()     == id     &&
260            inst_index()  == index  &&
261            inst_offset() == offset &&
262            type()->higher_equal(tp);  // and this Phi's type is at least as precise as tp
263   }
264 
265   bool can_be_inline_type() const {  // Valhalla only: is this Phi's type an instance ptr that may turn out to be an inline (value) type?
266     return Arguments::is_valhalla_enabled() && _type->isa_instptr() && _type->is_instptr()->can_be_inline_type();
267   }
268 
269   Node* try_push_inline_types_down(PhaseGVN* phase, bool can_reshape);
270   DEBUG_ONLY(bool can_push_inline_types_down(PhaseGVN* phase);)
271 
272   virtual const Type* Value(PhaseGVN* phase) const;
273   virtual Node* Identity(PhaseGVN* phase);
274   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
275   virtual const RegMask &out_RegMask() const;
276   virtual const RegMask &in_RegMask(uint) const;
277 #ifndef PRODUCT
278   virtual void dump_spec(outputStream *st) const;
279 #endif
280 #ifdef ASSERT
281   void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
282   void verify_adr_type(bool recursive = false) const;
283 #else //ASSERT
284   void verify_adr_type(bool recursive = false) const {}
285 #endif //ASSERT
286 
287   const TypeTuple* collect_types(PhaseGVN* phase) const;
288   bool can_be_replaced_by(const PhiNode* other) const;
289 };
290 
291 //------------------------------GotoNode---------------------------------------

463   }
464 
465   virtual int Opcode() const;
466   virtual bool pinned() const { return true; }
467   virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
468   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
469   virtual const Type* Value(PhaseGVN* phase) const;
470   virtual uint required_outcnt() const { return 2; }
471   virtual const RegMask &out_RegMask() const;
472   Node* fold_compares(PhaseIterGVN* phase);
473   static Node* up_one_dom(Node* curr, bool linear_only = false);
474   bool is_zero_trip_guard() const;
475   Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool prev_dom_not_imply_this);
476   ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;
477 
478   // Takes the type of val and filters it through the test represented
479   // by if_proj and returns a more refined type if one is produced.
480   // Returns null if it couldn't improve the type.
481   static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);
482 
483   bool is_flat_array_check(PhaseTransform* phase, Node** array = nullptr);
484 
485   AssertionPredicateType assertion_predicate_type() const {  // read-only access to the stored assertion-predicate tag
486     return _assertion_predicate_type;
487   }
488 
489 #ifndef PRODUCT
490   virtual void dump_spec(outputStream *st) const;
491 #endif
492 
493   bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
494 };
495 
496 class RangeCheckNode : public IfNode {
497 private:
498   int is_range_check(Node*& range, Node*& index, jint& offset);
499 
500 public:
501   RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {  // p/fcnt forwarded to IfNode (presumably branch probability / frequency -- see IfNode)
502     init_class_id(Class_RangeCheck);  // tag the node class so checked queries/casts recognize range checks
503   }
504 

753   virtual int Opcode() const;
754   virtual bool pinned() const { return true; };
755   virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
756   virtual const Type* Value(PhaseGVN* phase) const;
757   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
758   virtual uint required_outcnt() const { return 2; }
759   virtual void emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { }
760   virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
761 #ifndef PRODUCT
762   virtual void format( PhaseRegAlloc *, outputStream *st ) const;
763 #endif
764 };
765 
766 //------------------------------BlackholeNode----------------------------
767 // Blackhole all arguments. This node would survive through the compiler
768 // in order to preserve the effects on its arguments, and would be finally matched to nothing.
769 class BlackholeNode : public MultiNode {
770 public:
771   BlackholeNode(Node* ctrl) : MultiNode(1) {  // one fixed input slot, filled with the control edge below
772     init_req(TypeFunc::Control, ctrl);
773     init_class_id(Class_Blackhole);  // tag the node class so checked queries/casts recognize blackholes
774   }
775   virtual int   Opcode() const;
776   virtual uint ideal_reg() const { return 0; } // not matched in the AD file
777   virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }  // typed like a membar tuple
778   virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
779 
780   const RegMask &in_RegMask(uint idx) const {
781     // Fake the incoming arguments mask for blackholes: accept all registers
782     // and all stack slots. This would avoid any redundant register moves
783     // for blackhole inputs.
784     return RegMask::ALL;
785   }
786 #ifndef PRODUCT
787   virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
788 #endif
789 };
790 
791 
792 #endif // SHARE_OPTO_CFGNODE_HPP
< prev index next >