/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CFGNODE_HPP
#define SHARE_OPTO_CFGNODE_HPP

#include "opto/multnode.hpp"
#include "opto/node.hpp"
#include "opto/opcodes.hpp"
#include "opto/predicates_enums.hpp"
#include "opto/type.hpp"
#include "runtime/arguments.hpp"

// Portions of code courtesy of Clifford Click

// Optimization - Graph Style

class Matcher;
class Node;
class   RegionNode;
class   TypeNode;
class     PhiNode;
class   GotoNode;
class   MultiNode;
class     MultiBranchNode;
class       IfNode;
class       PCTableNode;
class         JumpNode;
class         CatchNode;
class       NeverBranchNode;
class     BlackholeNode;
class   ProjNode;
class     CProjNode;
class       IfTrueNode;
class       IfFalseNode;
class       CatchProjNode;
class     JProjNode;
class       JumpProjNode;
class     SCMemProjNode;
class PhaseIdealLoop;
enum class AssertionPredicateType;
enum class PredicateState;

//------------------------------RegionNode-------------------------------------
// The class of RegionNodes, which can be mapped to basic blocks in the
// program.  Their inputs point to Control sources.  PhiNodes (described
// below) have an input point to a RegionNode.  Merged data inputs to PhiNodes
// correspond 1-to-1 with RegionNode inputs.  The zero input of a PhiNode is
// the RegionNode, and the zero input of the RegionNode is itself.
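//
// Illustrative sketch of these invariants (not taken from any particular
// compiler phase): merging two control paths that each supply a value gives
//
//   RegionNode r:   in(0) = r (self),   in(1) = ctrl1, in(2) = ctrl2
//   PhiNode  phi:   in(0) = r (region), in(1) = v1,    in(2) = v2
//
// where v1 is the value arriving along ctrl1 and v2 the value arriving along
// ctrl2; input i of the phi always corresponds to control input i of the
// region.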
class RegionNode : public Node {
public:
  enum LoopStatus {
    // No guarantee: the region may be an irreducible loop entry, thus we have to
    // be careful when removing entry control to it.
    MaybeIrreducibleEntry,
    // Limited guarantee: this region may be (nested) inside an irreducible loop,
    // but it will never be an irreducible loop entry.
    NeverIrreducibleEntry,
    // Strong guarantee: this region is not (nested) inside an irreducible loop.
    Reducible,
  };

private:
  bool _is_unreachable_region;
  LoopStatus _loop_status;

  bool is_possible_unsafe_loop() const;
  bool is_unreachable_from_root(const PhaseGVN* phase) const;
public:
  // Node layout (parallels PhiNode):
  enum { Region,                // Generally points to self.
         Control                // Control arcs are [1..len)
  };

  RegionNode(uint required)
    : Node(required),
      _is_unreachable_region(false),
      _loop_status(LoopStatus::NeverIrreducibleEntry)
  {
    init_class_id(Class_Region);
    init_req(0, this);
  }

  Node* is_copy() const {
    const Node* r = _in[Region];
    if (r == nullptr)
      return nonnull_req();
    return nullptr;  // not a copy!
  }
  PhiNode* has_phi() const;        // returns an arbitrary phi user, or null
  PhiNode* has_unique_phi() const; // returns the unique phi user, or null
  // Is this region node unreachable from root?
  bool is_unreachable_region(const PhaseGVN* phase);
#ifdef ASSERT
  bool is_in_infinite_subgraph();
  static bool are_all_nodes_in_infinite_subgraph(Unique_Node_List& worklist);
#endif //ASSERT
  LoopStatus loop_status() const { return _loop_status; }
  void set_loop_status(LoopStatus status);
  bool can_be_irreducible_entry() const;

  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }
  virtual bool pinned() const { return (const Node*)in(0) == this; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  void remove_unreachable_subgraph(PhaseIterGVN* igvn);
  virtual const RegMask &out_RegMask() const;
  bool is_diamond() const;
  void try_clean_mem_phis(PhaseIterGVN* phase);
  bool optimize_trichotomy(PhaseIterGVN* igvn);
  NOT_PRODUCT(virtual void dump_spec(outputStream* st) const;)
};

//------------------------------JProjNode--------------------------------------
// jump projection for node that produces multiple control-flow paths
class JProjNode : public ProjNode {
 public:
  JProjNode( Node* ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool  is_CFG() const { return true; }
  virtual uint  hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint  ideal_reg() const { return 0; }
};

//------------------------------PhiNode----------------------------------------
// PhiNodes merge values from different Control paths.  Slot 0 points to the
// controlling RegionNode.  Other slots map 1-for-1 with incoming control flow
// paths to the RegionNode.
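//
// For example (illustrative only), the source-level merge
//
//   int v = cond ? a : b;
//
// is represented by a RegionNode merging the IfTrue and IfFalse projections
// of the test, plus a PhiNode whose slot 0 is that region and whose slots 1
// and 2 carry a and b in the same order as the region's control inputs.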
class PhiNode : public TypeNode {
  friend class PhaseRenumberLive;

  const TypePtr* const _adr_type; // non-null only for Type::MEMORY nodes.
  // The following fields are only used for data PhiNodes to indicate
  // that the PhiNode represents the value of a known instance field.
        int _inst_mem_id; // Instance memory id (node index of the memory Phi)
        int _inst_id;     // Instance id of the memory slice.
  const int _inst_index;  // Alias index of the instance memory slice.
  // Array elements references have the same alias_idx but different offset.
  const int _inst_offset; // Offset of the instance memory slice.
  // Size is bigger to hold the _adr_type field.
  virtual uint hash() const;    // Check the type
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

  // Determine if CMoveNode::is_cmove_id can be used at this join point.
  Node* is_cmove_id(PhaseTransform* phase, int true_path);
  bool wait_for_region_igvn(PhaseGVN* phase);
  bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);

  static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
  static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);

  bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;

  bool can_push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass*& inline_klass);
  InlineTypeNode* push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass* inline_klass);

  bool is_split_through_mergemem_terminating() const;

  void verify_type_stability(const PhaseGVN* phase, const Type* union_of_input_types, const Type* new_type) const NOT_DEBUG_RETURN;
  bool wait_for_cast_input_igvn(const PhaseIterGVN* igvn) const;

public:
  // Node layout (parallels RegionNode):
  enum { Region,                // Control input is the Phi's region.
         Input                  // Input values are [1..len)
  };

  PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
           const int imid = -1,
           const int iid = TypeOopPtr::InstanceTop,
           const int iidx = Compile::AliasIdxTop,
           const int ioffs = Type::OffsetTop )
    : TypeNode(t,r->req()),
      _adr_type(at),
      _inst_mem_id(imid),
      _inst_id(iid),
      _inst_index(iidx),
      _inst_offset(ioffs)
  {
    init_class_id(Class_Phi);
    init_req(0, r);
    verify_adr_type();
  }
  // create a new phi with in edges matching r and set (initially) to x
  static PhiNode* make( Node* r, Node* x );
  // extra type arguments override the new phi's bottom_type and adr_type
  static PhiNode* make( Node* r, Node* x, const Type *t, const TypePtr* at = nullptr );
  // create a new phi with narrowed memory type
  PhiNode* slice_memory(const TypePtr* adr_type) const;
  PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
  // like make(r, x), but does not initialize the in edges to x
  static PhiNode* make_blank( Node* r, Node* x );

  // Accessors
  RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }

  bool is_tripcount(BasicType bt) const;

  // Determine a unique non-trivial input, if any.
  // Ignore casts if it helps.  Return null on failure.
  Node* unique_input(PhaseValues* phase, bool uncast);
  Node* unique_input(PhaseValues* phase) {
    Node* uin = unique_input(phase, false);
    if (uin == nullptr) {
      uin = unique_input(phase, true);
    }
    return uin;
  }
  Node* unique_constant_input_recursive(PhaseGVN* phase);

  // Check for a simple dead loop.
  enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
  LoopSafety simple_data_loop_check(Node *in) const;
  // Is this an unsafe data loop? It becomes a dead loop if this phi node is removed.
  bool is_unsafe_data_reference(Node *in) const;
  int is_diamond_phi() const;
  bool try_clean_memory_phi(PhaseIterGVN* igvn);
  virtual int Opcode() const;
  virtual bool pinned() const { return in(0) != nullptr; }
  virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }

  void  set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
  int inst_mem_id() const { return _inst_mem_id; }
  int inst_id()     const { return _inst_id; }
  int inst_index()  const { return _inst_index; }
  int inst_offset() const { return _inst_offset; }
  bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
    return type()->basic_type() == tp->basic_type() &&
           inst_mem_id() == mem_id &&
           inst_id()     == id     &&
           inst_index()  == index  &&
           inst_offset() == offset &&
           type()->higher_equal(tp);
  }

  bool can_be_inline_type() const {
    return Arguments::is_valhalla_enabled() && _type->isa_instptr() && _type->is_instptr()->can_be_inline_type();
  }

  Node* try_push_inline_types_down(PhaseGVN* phase, bool can_reshape);
  DEBUG_ONLY(bool can_push_inline_types_down(PhaseGVN* phase);)

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const RegMask &out_RegMask() const;
  virtual const RegMask &in_RegMask(uint) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
  void verify_adr_type(bool recursive = false) const;
#else //ASSERT
  void verify_adr_type(bool recursive = false) const {}
#endif //ASSERT

  const TypeTuple* collect_types(PhaseGVN* phase) const;
  bool can_be_replaced_by(const PhiNode* other) const;
};

//------------------------------GotoNode---------------------------------------
// GotoNodes perform direct branches.
class GotoNode : public Node {
public:
  GotoNode( Node *control ) : Node(control) {}
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual bool  is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return this; }
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const RegMask &out_RegMask() const;
};

//------------------------------CProjNode--------------------------------------
// control projection for node that produces multiple control-flow paths
class CProjNode : public ProjNode {
public:
  CProjNode( Node *ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool  is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return in(0); }
  virtual const RegMask &out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//---------------------------MultiBranchNode-----------------------------------
// This class defines a MultiBranchNode, a MultiNode which yields multiple
// control values. These are distinguished from other types of MultiNodes
// which yield multiple values, but control is always and only projection #0.
class MultiBranchNode : public MultiNode {
public:
  MultiBranchNode( uint required ) : MultiNode(required) {
    init_class_id(Class_MultiBranch);
  }
  // Returns the required number of users for the node to be well formed.
  virtual uint required_outcnt() const = 0;
};

//------------------------------IfNode-----------------------------------------
// Output selected Control, based on a boolean test
class IfNode : public MultiBranchNode {
 public:
  float _prob;                           // Probability of true path being taken.
  float _fcnt;                           // Frequency counter

 private:
  AssertionPredicateType _assertion_predicate_type;

  void init_node(Node* control, Node* bol) {
    init_class_id(Class_If);
    init_req(0, control);
    init_req(1, bol);
  }

  // Size is bigger to hold the probability field.  However, _prob does not
  // change the semantics so it does not appear in the hash & cmp functions.
  virtual uint size_of() const { return sizeof(*this); }

  // Helper methods for fold_compares
  bool cmpi_folds(PhaseIterGVN* igvn, bool fold_ne = false);
  bool is_ctrl_folds(Node* ctrl, PhaseIterGVN* igvn);
  bool has_shared_region(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail) const;
  bool has_only_uncommon_traps(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail, PhaseIterGVN* igvn) const;
  Node* merge_uncommon_traps(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static void improve_address_types(Node* l, Node* r, ProjNode* fail, PhaseIterGVN* igvn);
  bool is_cmp_with_loadrange(IfProjNode* proj) const;
  bool is_null_check(IfProjNode* proj, PhaseIterGVN* igvn) const;
  bool is_side_effect_free_test(IfProjNode* proj, PhaseIterGVN* igvn) const;
  static void reroute_side_effect_free_unc(IfProjNode* proj, IfProjNode* dom_proj, PhaseIterGVN* igvn);
  bool fold_compares_helper(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static bool is_dominator_unc(CallStaticJavaNode* dom_unc, CallStaticJavaNode* unc);

protected:
  IfProjNode* range_check_trap_proj(int& flip, Node*& l, Node*& r) const;
  Node* Ideal_common(PhaseGVN *phase, bool can_reshape);
  Node* search_identical(int dist, PhaseIterGVN* igvn);

  Node* simple_subsuming(PhaseIterGVN* igvn);

public:

  // Degrees of branch prediction probability by order of magnitude:
  // PROB_UNLIKELY_1e(N) is a 1 in 1eN chance.
  // PROB_LIKELY_1e(N) is a 1 - PROB_UNLIKELY_1e(N)
#define PROB_UNLIKELY_MAG(N)    (1e- ## N ## f)
#define PROB_LIKELY_MAG(N)      (1.0f-PROB_UNLIKELY_MAG(N))

  // Maximum and minimum branch prediction probabilities
  // 1 in 1,000,000 (magnitude 6)
  //
  // Although PROB_NEVER == PROB_MIN and PROB_ALWAYS == PROB_MAX
  // they are used to distinguish different situations:
  //
  // The name PROB_MAX (PROB_MIN) is for probabilities which correspond to
  // very likely (unlikely) but with a concrete possibility of a rare
  // contrary case.  These constants would be used for pinning
  // measurements, and as measures for assertions that have high
  // confidence, but some evidence of occasional failure.
  //
  // The name PROB_ALWAYS (PROB_NEVER) is to stand for situations for which
  // there is no evidence at all that the contrary case has ever occurred.

#define PROB_NEVER              PROB_UNLIKELY_MAG(6)
#define PROB_ALWAYS             PROB_LIKELY_MAG(6)

#define PROB_MIN                PROB_UNLIKELY_MAG(6)
#define PROB_MAX                PROB_LIKELY_MAG(6)

  // Static branch prediction probabilities
  // 1 in 10 (magnitude 1)
#define PROB_STATIC_INFREQUENT  PROB_UNLIKELY_MAG(1)
#define PROB_STATIC_FREQUENT    PROB_LIKELY_MAG(1)

  // Fair probability 50/50
#define PROB_FAIR               (0.5f)

  // Unknown probability sentinel
#define PROB_UNKNOWN            (-1.0f)

  // Probability "constructors", to distinguish as a probability any manifest
  // constant without a name
#define PROB_LIKELY(x)          ((float) (x))
#define PROB_UNLIKELY(x)        (1.0f - (float)(x))

  // Other probabilities in use, but without a unique name, are documented
  // here for lack of a better place:
  //
  // 1 in 1000 probabilities (magnitude 3):
  //     threshold for converting to conditional move
  //     likelihood of null check failure if a null HAS been seen before
  //     likelihood of slow path taken in library calls
  //
  // 1 in 10,000 probabilities (magnitude 4):
  //     threshold for making an uncommon trap probability more extreme
  //     threshold for making a null check implicit
  //     likelihood of needing a gc if eden top moves during an allocation
  //     likelihood of a predicted call failure
  //
  // 1 in 100,000 probabilities (magnitude 5):
  //     threshold for ignoring counts when estimating path frequency
  //     likelihood of FP clipping failure
  //     likelihood of catching an exception from a try block
  //     likelihood of null check failure if a null has NOT been seen before
  //
  // Magic manifest probabilities such as 0.83, 0.7, ... can be found in
  // gen_subtype_check() and catch_inline_exceptions().
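  //
  // Illustrative use (a sketch only; ctrl and bol are assumed names, not code
  // lifted from the parser): a test whose true path is taken all but about
  // 1 time in 10,000, with no trusted frequency count, could be built as
  //   IfNode* iff = new IfNode(ctrl, bol, PROB_LIKELY_MAG(4), /*fcnt*/ -1.0f);
  // while a branch with no estimate at all would pass PROB_UNKNOWN, and a
  // genuinely unpredictable one PROB_FAIR.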

  IfNode(Node* control, Node* bol, float p, float fcnt);
  IfNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type);

  static IfNode* make_with_same_profile(IfNode* if_node_profile, Node* ctrl, Node* bol);

  IfTrueNode* true_proj() const {
    return proj_out(true)->as_IfTrue();
  }

  IfTrueNode* true_proj_or_null() const {
    ProjNode* true_proj = proj_out_or_null(true);
    return true_proj == nullptr ? nullptr : true_proj->as_IfTrue();
  }

  IfFalseNode* false_proj() const {
    return proj_out(false)->as_IfFalse();
  }

  IfFalseNode* false_proj_or_null() const {
    ProjNode* false_proj = proj_out_or_null(false);
    return false_proj == nullptr ? nullptr : false_proj->as_IfFalse();
  }

  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint required_outcnt() const { return 2; }
  virtual const RegMask &out_RegMask() const;
  Node* fold_compares(PhaseIterGVN* phase);
  static Node* up_one_dom(Node* curr, bool linear_only = false);
  bool is_zero_trip_guard() const;
  Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool pin_array_access_nodes);
  ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;

  // Takes the type of val and filters it through the test represented
  // by if_proj and returns a more refined type if one is produced.
  // Returns null if it couldn't improve the type.
  static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);

  bool is_flat_array_check(PhaseTransform* phase, Node** array = nullptr);

  AssertionPredicateType assertion_predicate_type() const {
    return _assertion_predicate_type;
  }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif

  bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
};

class RangeCheckNode : public IfNode {
private:
  int is_range_check(Node*& range, Node*& index, jint& offset);

public:
  RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {
    init_class_id(Class_RangeCheck);
  }

  RangeCheckNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type)
      : IfNode(control, bol, p, fcnt, assertion_predicate_type) {
    init_class_id(Class_RangeCheck);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
};

// Special node that denotes a Parse Predicate added during parsing. A Parse Predicate serves as placeholder to later
// create Regular Predicates (Runtime Predicates with possible Assertion Predicates) above it. Together they form a
// Predicate Block. The Parse Predicate and Regular Predicates share the same uncommon trap.
// There are three kinds of Parse Predicates:
// Loop Parse Predicate, Profiled Loop Parse Predicate (both used by Loop Predication), and Loop Limit Check Parse
// Predicate (used for integer overflow checks when creating a counted loop).
// More information about predicates can be found in loopPredicate.cpp.
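//
// Illustrative control shape (a sketch based on the accessors below, not on
// loopPredicate.cpp): the Parse Predicate is an If whose false projection
// (uncommon_proj()) leads to the shared uncommon trap, while its true
// projection continues towards the loop; Regular Predicates created later
// reuse that same trap.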
class ParsePredicateNode : public IfNode {
  Deoptimization::DeoptReason _deopt_reason;

  // When a Parse Predicate loses its connection to a loop head, it will be marked useless by
  // EliminateUselessPredicates and cleaned up by Value(). It can also become useless when cloning it to both loops
  // during Loop Multiversioning - we no longer use the old version.
  PredicateState _predicate_state;
 public:
  ParsePredicateNode(Node* control, Deoptimization::DeoptReason deopt_reason, PhaseGVN* gvn);
  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }

  Deoptimization::DeoptReason deopt_reason() const {
    return _deopt_reason;
  }

  bool is_useless() const {
    return _predicate_state == PredicateState::Useless;
  }

  void mark_useless(PhaseIterGVN& igvn);

  void mark_maybe_useful() {
    _predicate_state = PredicateState::MaybeUseful;
  }

  bool is_useful() const {
    return _predicate_state == PredicateState::Useful;
  }

  void mark_useful() {
    _predicate_state = PredicateState::Useful;
  }

  // Return the uncommon trap If projection of this Parse Predicate.
  ParsePredicateUncommonProj* uncommon_proj() const {
    return false_proj();
  }

  Node* uncommon_trap() const;

  Node* Ideal(PhaseGVN* phase, bool can_reshape) {
    return nullptr; // Don't optimize
  }

  const Type* Value(PhaseGVN* phase) const;
  NOT_PRODUCT(void dump_spec(outputStream* st) const;)
};

class IfProjNode : public CProjNode {
public:
  IfProjNode(IfNode *ifnode, uint idx) : CProjNode(ifnode,idx) {}
  virtual Node* Identity(PhaseGVN* phase);

  // Return the other IfProj node.
  IfProjNode* other_if_proj() const {
    return in(0)->as_If()->proj_out(1 - _con)->as_IfProj();
  }

  void pin_array_access_nodes(PhaseIterGVN* igvn);

protected:
  // Type of If input when this branch is always taken
  virtual bool always_taken(const TypeTuple* t) const = 0;
};

class IfTrueNode : public IfProjNode {
public:
  IfTrueNode( IfNode *ifnode ) : IfProjNode(ifnode,1) {
    init_class_id(Class_IfTrue);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFTRUE; }
};

class IfFalseNode : public IfProjNode {
public:
  IfFalseNode( IfNode *ifnode ) : IfProjNode(ifnode,0) {
    init_class_id(Class_IfFalse);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFFALSE; }
};


//------------------------------PCTableNode------------------------------------
// Build an indirect branch table.  Given a control and a table index,
// control is passed to the Projection matching the table index.  Used to
// implement switch statements and exception-handling capabilities.
// Undefined behavior if passed-in index is not inside the table.
class PCTableNode : public MultiBranchNode {
  virtual uint hash() const;    // Target count; table size
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

public:
  const uint _size;             // Number of targets

  PCTableNode( Node *ctrl, Node *idx, uint size ) : MultiBranchNode(2), _size(size) {
    init_class_id(Class_PCTable);
    init_req(0, ctrl);
    init_req(1, idx);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *bottom_type() const;
  virtual bool pinned() const { return true; }
  virtual uint required_outcnt() const { return _size; }
};

//------------------------------JumpNode---------------------------------------
// Indirect branch.  Uses PCTable above to implement a switch statement.
// It emits as a table load and local branch.
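//
// Illustrative sketch (assumed names, not code from the parser): a switch
// with three targets might be represented as
//
//   JumpNode* jmp = new JumpNode(ctrl, switch_val, 3, probs, cnt);
//   // one JumpProjNode per target, e.g. projection 0 branching to bytecode
//   // index dest_bci for case value 7:
//   JumpProjNode* p0 = new JumpProjNode(jmp, 0, dest_bci, 7);
//
// with the usual Region/Phi merges at each target.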
class JumpNode : public PCTableNode {
  virtual uint size_of() const { return sizeof(*this); }
public:
  float* _probs; // probability of each projection
  float _fcnt;   // total number of times this Jump was executed
  JumpNode( Node* control, Node* switch_val, uint size, float* probs, float cnt)
    : PCTableNode(control, switch_val, size),
      _probs(probs), _fcnt(cnt) {
    init_class_id(Class_Jump);
  }
  virtual int   Opcode() const;
  virtual const RegMask& out_RegMask() const;
  virtual const Node* is_block_proj() const { return this; }
};

class JumpProjNode : public JProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

 private:
  const int  _dest_bci;
  const uint _proj_no;
  const int  _switch_val;
 public:
  JumpProjNode(Node* jumpnode, uint proj_no, int dest_bci, int switch_val)
    : JProjNode(jumpnode, proj_no), _dest_bci(dest_bci), _proj_no(proj_no), _switch_val(switch_val) {
    init_class_id(Class_JumpProj);
  }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int  dest_bci()    const { return _dest_bci; }
  int  switch_val()  const { return _switch_val; }
  uint proj_no()     const { return _proj_no; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------CatchNode--------------------------------------
// Helper node to fork exceptions.  "Catch" catches any exceptions thrown by
// a just-prior call.  Looks like a PCTableNode but emits no code - just the
// table.  The table lookup and branch is implemented by RethrowNode.
class CatchNode : public PCTableNode {
public:
  CatchNode( Node *ctrl, Node *idx, uint size ) : PCTableNode(ctrl,idx,size){
    init_class_id(Class_Catch);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
};

// CatchProjNode controls which exception handler is targeted after a call.
// It is passed in the bci of the target handler, or no_handler_bci in case
// the projection doesn't lead to an exception handler.
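//
// Illustrative sketch (assumed names and sizes, not code from the parser):
// the exception fork after a potentially throwing call might look like
//
//   CatchNode* c = new CatchNode(call_ctrl, call_idx, 2);
//   CatchProjNode* fall = new CatchProjNode(c, CatchProjNode::fall_through_index,
//                                           CatchProjNode::no_handler_bci);
//   CatchProjNode* all  = new CatchProjNode(c, CatchProjNode::catch_all_index,
//                                           CatchProjNode::no_handler_bci);
//
// with further projections carrying the bci of each in-method handler.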
class CatchProjNode : public CProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _handler_bci;

public:
  enum {
    fall_through_index =  0,      // the fall through projection index
    catch_all_index    =  1,      // the projection index for catch-alls
    no_handler_bci     = -1       // the bci for fall through or catch-all projs
  };

  CatchProjNode(Node* catchnode, uint proj_no, int handler_bci)
    : CProjNode(catchnode, proj_no), _handler_bci(handler_bci) {
    init_class_id(Class_CatchProj);
    assert(proj_no != fall_through_index || handler_bci < 0, "fall through case must have bci < 0");
  }

  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  int  handler_bci() const        { return _handler_bci; }
  bool is_handler_proj() const    { return _handler_bci >= 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};


//---------------------------------CreateExNode--------------------------------
// Helper node to create the exception coming back from a call
class CreateExNode : public TypeNode {
public:
  CreateExNode(const Type* t, Node* control, Node* i_o) : TypeNode(t, 2) {
    init_req(0, control);
    init_req(1, i_o);
  }
  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool pinned() const { return true; }
  uint match_edge(uint idx) const { return 0; }
  virtual uint ideal_reg() const { return Op_RegP; }
};

//------------------------------NeverBranchNode-------------------------------
// The never-taken branch.  Used to give the appearance of exiting infinite
// loops to those algorithms that like all paths to be reachable.  Encodes
// empty.
class NeverBranchNode : public MultiBranchNode {
public:
  NeverBranchNode(Node* ctrl) : MultiBranchNode(1) {
    init_req(0, ctrl);
    init_class_id(Class_NeverBranch);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint required_outcnt() const { return 2; }
  virtual void emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { }
  virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
#ifndef PRODUCT
  virtual void format( PhaseRegAlloc *, outputStream *st ) const;
#endif
};

//------------------------------BlackholeNode----------------------------
// Blackhole all arguments. This node survives through the compiler in order
// to keep its arguments alive, and is finally matched to nothing.
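// (As a usage note rather than part of the node contract: such nodes are
// typically created for methods marked with the "blackhole" CompileCommand,
// e.g. by benchmarking harnesses, so their arguments stay alive without any
// code being emitted for them.)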
class BlackholeNode : public MultiNode {
public:
  BlackholeNode(Node* ctrl) : MultiNode(1) {
    init_req(TypeFunc::Control, ctrl);
    init_class_id(Class_Blackhole);
  }
  virtual int   Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
  virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);

  const RegMask &in_RegMask(uint idx) const {
    // Fake the incoming arguments mask for blackholes: accept all registers
    // and all stack slots. This would avoid any redundant register moves
    // for blackhole inputs.
    return RegMask::ALL;
  }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
#endif
};


#endif // SHARE_OPTO_CFGNODE_HPP