/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CFGNODE_HPP
#define SHARE_OPTO_CFGNODE_HPP

#include "opto/multnode.hpp"
#include "opto/node.hpp"
#include "opto/opcodes.hpp"
#include "opto/predicates_enums.hpp"
#include "opto/type.hpp"
#include "runtime/arguments.hpp"

// Portions of code courtesy of Clifford Click

// Optimization - Graph Style

class Matcher;
class Node;
class RegionNode;
class TypeNode;
class PhiNode;
class GotoNode;
class MultiNode;
class MultiBranchNode;
class IfNode;
class PCTableNode;
class JumpNode;
class CatchNode;
class NeverBranchNode;
class BlackholeNode;
class ProjNode;
class CProjNode;
class IfTrueNode;
class IfFalseNode;
class CatchProjNode;
class JProjNode;
class JumpProjNode;
class SCMemProjNode;
class PhaseIdealLoop;
enum class AssertionPredicateType;
enum class PredicateState;
//------------------------------RegionNode-------------------------------------
// The class of RegionNodes, which can be mapped to basic blocks in the
// program. Their inputs point to Control sources. PhiNodes (described
// below) have their control input pointing to a RegionNode. Merged data
// inputs to PhiNodes correspond 1-to-1 with RegionNode inputs. The zero
// input of a PhiNode is the RegionNode, and the zero input of the RegionNode
// is itself.
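//
// A rough, illustrative sketch (not code taken from this file) of a two-way
// merge. Control enters the Region on slots 1 and 2; a Phi merging a value
// over the same paths uses the matching slots:
//
//     true-path ctrl   false-path ctrl        val_t     val_f
//              \          /                      \       /
//            RegionNode r                      PhiNode phi
//            in(0) = r (self)                  in(0) = r
//            in(1) = true-path ctrl            in(1) = val_t
//            in(2) = false-path ctrl           in(2) = val_f
//
// Building such a merge by hand might look like the following sketch (the
// variable names are made up and GVN registration is omitted):
//
//   RegionNode* r = new RegionNode(3);        // slot 0 is self, slots 1..2 are control
//   r->init_req(1, true_path_ctrl);
//   r->init_req(2, false_path_ctrl);
//   PhiNode* phi = PhiNode::make(r, val_t);   // in-edges match r, initialized to val_t
//   phi->set_req(2, val_f);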
class RegionNode : public Node {
public:
  enum LoopStatus {
    // No guarantee: the region may be an irreducible loop entry, thus we have to
    // be careful when removing entry control to it.
    MaybeIrreducibleEntry,
    // Limited guarantee: this region may be (nested) inside an irreducible loop,
    // but it will never be an irreducible loop entry.
    NeverIrreducibleEntry,
    // Strong guarantee: this region is not (nested) inside an irreducible loop.
    Reducible,
  };

private:
  bool _is_unreachable_region;
  LoopStatus _loop_status;

  bool is_possible_unsafe_loop() const;
  bool is_unreachable_from_root(const PhaseGVN* phase) const;
public:
  // Node layout (parallels PhiNode):
  enum { Region,  // Generally points to self.
         Control  // Control arcs are [1..len)
  };

  RegionNode(uint required)
    : Node(required),
      _is_unreachable_region(false),
      _loop_status(LoopStatus::NeverIrreducibleEntry)
  {
    init_class_id(Class_Region);
    init_req(0, this);
  }

  Node* is_copy() const {
    const Node* r = _in[Region];
    if (r == nullptr)
      return nonnull_req();
    return nullptr; // not a copy!
  }
  PhiNode* has_phi() const;        // returns an arbitrary phi user, or null
  PhiNode* has_unique_phi() const; // returns the unique phi user, or null
  // Is this region node unreachable from root?
  bool is_unreachable_region(const PhaseGVN* phase);
#ifdef ASSERT
  bool is_in_infinite_subgraph();
  static bool are_all_nodes_in_infinite_subgraph(Unique_Node_List& worklist);
#endif //ASSERT
  LoopStatus loop_status() const { return _loop_status; };
  void set_loop_status(LoopStatus status);
  bool can_be_irreducible_entry() const;

  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }
  virtual bool pinned() const { return (const Node*)in(0) == this; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  void remove_unreachable_subgraph(PhaseIterGVN* igvn);
  virtual const RegMask &out_RegMask() const;
  bool is_diamond() const;
  void try_clean_mem_phis(PhaseIterGVN* phase);
  bool optimize_trichotomy(PhaseIterGVN* igvn);
  NOT_PRODUCT(virtual void dump_spec(outputStream* st) const;)
};

//------------------------------JProjNode--------------------------------------
// jump projection for node that produces multiple control-flow paths
class JProjNode : public ProjNode {
public:
  JProjNode( Node* ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//------------------------------PhiNode----------------------------------------
// PhiNodes merge values from different Control paths. Slot 0 points to the
// controlling RegionNode. Other slots map 1-for-1 with incoming control flow
// paths to the RegionNode.
class PhiNode : public TypeNode {
  friend class PhaseRenumberLive;

  const TypePtr* const _adr_type; // non-null only for Type::MEMORY nodes.
  // The following fields are only used for data PhiNodes to indicate
  // that the PhiNode represents the value of a known instance field.
  int _inst_mem_id;       // Instance memory id (node index of the memory Phi)
  int _inst_id;           // Instance id of the memory slice.
  const int _inst_index;  // Alias index of the instance memory slice.
  // Array elements references have the same alias_idx but different offset.
  const int _inst_offset; // Offset of the instance memory slice.
  // Size is bigger to hold the _adr_type field.
  virtual uint hash() const; // Check the type
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

  // Determine if CMoveNode::is_cmove_id can be used at this join point.
  Node* is_cmove_id(PhaseTransform* phase, int true_path);
  bool wait_for_region_igvn(PhaseGVN* phase);
  bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);

  static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
  static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);

  bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;

  bool can_push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass*& inline_klass);
  InlineTypeNode* push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass* inline_klass);

  bool is_split_through_mergemem_terminating() const;

  void verify_type_stability(const PhaseGVN* phase, const Type* union_of_input_types, const Type* new_type) const NOT_DEBUG_RETURN;
  bool wait_for_cast_input_igvn(const PhaseIterGVN* igvn) const;

public:
  // Node layout (parallels RegionNode):
  enum { Region,  // Control input is the Phi's region.
         Input    // Input values are [1..len)
  };

  PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
           const int imid = -1,
           const int iid = TypeOopPtr::InstanceTop,
           const int iidx = Compile::AliasIdxTop,
           const int ioffs = Type::OffsetTop )
    : TypeNode(t,r->req()),
      _adr_type(at),
      _inst_mem_id(imid),
      _inst_id(iid),
      _inst_index(iidx),
      _inst_offset(ioffs)
  {
    init_class_id(Class_Phi);
    init_req(0, r);
    verify_adr_type();
  }
  // create a new phi with in edges matching r and set (initially) to x
  static PhiNode* make( Node* r, Node* x );
  // extra type arguments override the new phi's bottom_type and adr_type
  static PhiNode* make( Node* r, Node* x, const Type *t, const TypePtr* at = nullptr );
  // create a new phi with narrowed memory type
  PhiNode* slice_memory(const TypePtr* adr_type) const;
  PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
  // like make(r, x), but does not initialize the in edges to x
  static PhiNode* make_blank( Node* r, Node* x );

  // Accessors
  RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }

  bool is_tripcount(BasicType bt) const;

  // Determine a unique non-trivial input, if any.
  // Ignore casts if it helps. Return null on failure.
  Node* unique_input(PhaseValues* phase, bool uncast);
  Node* unique_input(PhaseValues* phase) {
    Node* uin = unique_input(phase, false);
    if (uin == nullptr) {
      uin = unique_input(phase, true);
    }
    return uin;
  }
  Node* unique_constant_input_recursive(PhaseGVN* phase);

  // Check for a simple dead loop.
  enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
  LoopSafety simple_data_loop_check(Node *in) const;
  // Is it an unsafe data loop? It becomes a dead loop if this phi node is removed.
  bool is_unsafe_data_reference(Node *in) const;
  int is_diamond_phi() const;
  bool try_clean_memory_phi(PhaseIterGVN* igvn);
  virtual int Opcode() const;
  virtual bool pinned() const { return in(0) != nullptr; }
  virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }

  void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
  int inst_mem_id() const { return _inst_mem_id; }
  int inst_id() const { return _inst_id; }
  int inst_index() const { return _inst_index; }
  int inst_offset() const { return _inst_offset; }
  bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
    return type()->basic_type() == tp->basic_type() &&
           inst_mem_id() == mem_id &&
           inst_id() == id &&
           inst_index() == index &&
           inst_offset() == offset &&
           type()->higher_equal(tp);
  }

  bool can_be_inline_type() const {
    return Arguments::is_valhalla_enabled() && _type->isa_instptr() && _type->is_instptr()->can_be_inline_type();
  }

  Node* try_push_inline_types_down(PhaseGVN* phase, bool can_reshape);
  DEBUG_ONLY(bool can_push_inline_types_down(PhaseGVN* phase);)

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const RegMask &out_RegMask() const;
  virtual const RegMask &in_RegMask(uint) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
  void verify_adr_type(bool recursive = false) const;
#else //ASSERT
  void verify_adr_type(bool recursive = false) const {}
#endif //ASSERT

  const TypeTuple* collect_types(PhaseGVN* phase) const;
};

//------------------------------GotoNode---------------------------------------
// GotoNodes perform direct branches.
class GotoNode : public Node {
public:
  GotoNode( Node *control ) : Node(control) {}
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return this; }
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const RegMask &out_RegMask() const;
};

//------------------------------CProjNode--------------------------------------
// control projection for node that produces multiple control-flow paths
class CProjNode : public ProjNode {
public:
  CProjNode( Node *ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return in(0); }
  virtual const RegMask &out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//---------------------------MultiBranchNode-----------------------------------
// This class defines a MultiBranchNode, a MultiNode which yields multiple
// control values. These are distinguished from other types of MultiNodes,
// which also yield multiple values but whose control is always and only
// projection #0.
class MultiBranchNode : public MultiNode {
public:
  MultiBranchNode( uint required ) : MultiNode(required) {
    init_class_id(Class_MultiBranch);
  }
  // returns required number of users to be well formed.
  virtual uint required_outcnt() const = 0;
};

//------------------------------IfNode-----------------------------------------
// Output selected Control, based on a boolean test
class IfNode : public MultiBranchNode {
public:
  float _prob; // Probability of true path being taken.
  float _fcnt; // Frequency counter

private:
  AssertionPredicateType _assertion_predicate_type;

  void init_node(Node* control, Node* bol) {
    init_class_id(Class_If);
    init_req(0, control);
    init_req(1, bol);
  }

  // Size is bigger to hold the probability field. However, _prob does not
  // change the semantics so it does not appear in the hash & cmp functions.
  virtual uint size_of() const { return sizeof(*this); }

  // Helper methods for fold_compares
  bool cmpi_folds(PhaseIterGVN* igvn, bool fold_ne = false);
  bool is_ctrl_folds(Node* ctrl, PhaseIterGVN* igvn);
  bool has_shared_region(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail) const;
  bool has_only_uncommon_traps(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail, PhaseIterGVN* igvn) const;
  Node* merge_uncommon_traps(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static void improve_address_types(Node* l, Node* r, ProjNode* fail, PhaseIterGVN* igvn);
  bool is_cmp_with_loadrange(IfProjNode* proj) const;
  bool is_null_check(IfProjNode* proj, PhaseIterGVN* igvn) const;
  bool is_side_effect_free_test(IfProjNode* proj, PhaseIterGVN* igvn) const;
  static void reroute_side_effect_free_unc(IfProjNode* proj, IfProjNode* dom_proj, PhaseIterGVN* igvn);
  bool fold_compares_helper(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static bool is_dominator_unc(CallStaticJavaNode* dom_unc, CallStaticJavaNode* unc);

protected:
  IfProjNode* range_check_trap_proj(int& flip, Node*& l, Node*& r) const;
  Node* Ideal_common(PhaseGVN *phase, bool can_reshape);
  Node* search_identical(int dist, PhaseIterGVN* igvn);

  Node* simple_subsuming(PhaseIterGVN* igvn);

public:

  // Degrees of branch prediction probability by order of magnitude:
  // PROB_UNLIKELY_1e(N) is a 1 in 1eN chance.
  // PROB_LIKELY_1e(N) is a 1 - PROB_UNLIKELY_1e(N)
#define PROB_UNLIKELY_MAG(N) (1e- ## N ## f)
#define PROB_LIKELY_MAG(N) (1.0f-PROB_UNLIKELY_MAG(N))

  // Maximum and minimum branch prediction probabilities
  // 1 in 1,000,000 (magnitude 6)
  //
  // Although PROB_NEVER == PROB_MIN and PROB_ALWAYS == PROB_MAX
  // they are used to distinguish different situations:
  //
  // The name PROB_MAX (PROB_MIN) is for probabilities which correspond to
  // very likely (unlikely) but with a concrete possibility of a rare
  // contrary case. These constants would be used for pinning
  // measurements, and as measures for assertions that have high
  // confidence, but some evidence of occasional failure.
  //
  // The name PROB_ALWAYS (PROB_NEVER) is to stand for situations for which
  // there is no evidence at all that the contrary case has ever occurred.

#define PROB_NEVER PROB_UNLIKELY_MAG(6)
#define PROB_ALWAYS PROB_LIKELY_MAG(6)

#define PROB_MIN PROB_UNLIKELY_MAG(6)
#define PROB_MAX PROB_LIKELY_MAG(6)

  // Static branch prediction probabilities
  // 1 in 10 (magnitude 1)
#define PROB_STATIC_INFREQUENT PROB_UNLIKELY_MAG(1)
#define PROB_STATIC_FREQUENT PROB_LIKELY_MAG(1)

  // Fair probability 50/50
#define PROB_FAIR (0.5f)

  // Unknown probability sentinel
#define PROB_UNKNOWN (-1.0f)

  // Probability "constructors", to distinguish as a probability any manifest
  // constant without a name
#define PROB_LIKELY(x) ((float) (x))
#define PROB_UNLIKELY(x) (1.0f - (float)(x))

  // Other probabilities in use, but without a unique name, are documented
  // here for lack of a better place:
  //
  // 1 in 1000 probabilities (magnitude 3):
  //     threshold for converting to conditional move
  //     likelihood of null check failure if a null HAS been seen before
  //     likelihood of slow path taken in library calls
  //
  // 1 in 10,000 probabilities (magnitude 4):
  //     threshold for making an uncommon trap probability more extreme
  //     threshold for making a null check implicit
  //     likelihood of needing a gc if eden top moves during an allocation
  //     likelihood of a predicted call failure
  //
  // 1 in 100,000 probabilities (magnitude 5):
  //     threshold for ignoring counts when estimating path frequency
  //     likelihood of FP clipping failure
  //     likelihood of catching an exception from a try block
  //     likelihood of null check failure if a null has NOT been seen before
  //
  // Magic manifest probabilities such as 0.83, 0.7, ... can be found in
  // gen_subtype_check() and catch_inline_exceptions().
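  //
  // A hedged usage sketch (illustrative, not taken from this file): creating an
  // If with a static "infrequently taken" prediction and an unknown frequency
  // count (GVN registration omitted):
  //
  //   IfNode* iff = new IfNode(ctrl, bol, PROB_STATIC_INFREQUENT, -1.0f /* unknown count */);
  //
  // Here PROB_STATIC_INFREQUENT expands to PROB_UNLIKELY_MAG(1) == 1e-1f, i.e. a
  // 1-in-10 chance of the true path being taken; PROB_STATIC_FREQUENT is the
  // complementary 1.0f - 1e-1f.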

  IfNode(Node* control, Node* bol, float p, float fcnt);
  IfNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type);

  static IfNode* make_with_same_profile(IfNode* if_node_profile, Node* ctrl, Node* bol);

  IfTrueNode* true_proj() const {
    return proj_out(true)->as_IfTrue();
  }

  IfTrueNode* true_proj_or_null() const {
    ProjNode* true_proj = proj_out_or_null(true);
    return true_proj == nullptr ? nullptr : true_proj->as_IfTrue();
  }

  IfFalseNode* false_proj() const {
    return proj_out(false)->as_IfFalse();
  }

  IfFalseNode* false_proj_or_null() const {
    ProjNode* false_proj = proj_out_or_null(false);
    return false_proj == nullptr ? nullptr : false_proj->as_IfFalse();
  }

  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint required_outcnt() const { return 2; }
  virtual const RegMask &out_RegMask() const;
  Node* fold_compares(PhaseIterGVN* phase);
  static Node* up_one_dom(Node* curr, bool linear_only = false);
  bool is_zero_trip_guard() const;
  Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool pin_array_access_nodes);
  ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;

  // Takes the type of val and filters it through the test represented
  // by if_proj and returns a more refined type if one is produced.
  // Returns null if it couldn't improve the type.
  static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);

  bool is_flat_array_check(PhaseTransform* phase, Node** array = nullptr);

  AssertionPredicateType assertion_predicate_type() const {
    return _assertion_predicate_type;
  }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif

  bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
};

class RangeCheckNode : public IfNode {
private:
  int is_range_check(Node*& range, Node*& index, jint& offset);

public:
  RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {
    init_class_id(Class_RangeCheck);
  }

  RangeCheckNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type)
    : IfNode(control, bol, p, fcnt, assertion_predicate_type) {
    init_class_id(Class_RangeCheck);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
};

// Special node that denotes a Parse Predicate added during parsing. A Parse Predicate serves as a placeholder to later
// create Regular Predicates (Runtime Predicates with possible Assertion Predicates) above it. Together they form a
// Predicate Block. The Parse Predicate and Regular Predicates share the same uncommon trap.
// There are three kinds of Parse Predicates:
// Loop Parse Predicate, Profiled Loop Parse Predicate (both used by Loop Predication), and Loop Limit Check Parse
// Predicate (used for integer overflow checks when creating a counted loop).
// More information about predicates can be found in loopPredicate.cpp.
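//
// A rough sketch of the resulting shape above a loop (illustrative only; the
// uncommon trap is reached through the false projection, see uncommon_proj()):
//
//           entry control
//                |
//        Regular Predicate(s)        <- Runtime/Assertion Predicates, created later
//                |
//        ParsePredicateNode --false--> shared uncommon trap
//                | true
//            loop head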
class ParsePredicateNode : public IfNode {
  Deoptimization::DeoptReason _deopt_reason;

  // When a Parse Predicate loses its connection to a loop head, it will be marked useless by
  // EliminateUselessPredicates and cleaned up by Value(). It can also become useless when cloning it to both loops
  // during Loop Multiversioning - we no longer use the old version.
  PredicateState _predicate_state;
public:
  ParsePredicateNode(Node* control, Deoptimization::DeoptReason deopt_reason, PhaseGVN* gvn);
  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }

  Deoptimization::DeoptReason deopt_reason() const {
    return _deopt_reason;
  }

  bool is_useless() const {
    return _predicate_state == PredicateState::Useless;
  }

  void mark_useless(PhaseIterGVN& igvn);

  void mark_maybe_useful() {
    _predicate_state = PredicateState::MaybeUseful;
  }

  bool is_useful() const {
    return _predicate_state == PredicateState::Useful;
  }

  void mark_useful() {
    _predicate_state = PredicateState::Useful;
  }

  // Return the uncommon trap If projection of this Parse Predicate.
  ParsePredicateUncommonProj* uncommon_proj() const {
    return false_proj();
  }

  Node* uncommon_trap() const;

  Node* Ideal(PhaseGVN* phase, bool can_reshape) {
    return nullptr; // Don't optimize
  }

  const Type* Value(PhaseGVN* phase) const;
  NOT_PRODUCT(void dump_spec(outputStream* st) const;)
};

class IfProjNode : public CProjNode {
public:
  IfProjNode(IfNode *ifnode, uint idx) : CProjNode(ifnode,idx) {}
  virtual Node* Identity(PhaseGVN* phase);

  // Return the other IfProj node.
  IfProjNode* other_if_proj() const {
    return in(0)->as_If()->proj_out(1 - _con)->as_IfProj();
  }

  void pin_array_access_nodes(PhaseIterGVN* igvn);

protected:
  // Type of If input when this branch is always taken
  virtual bool always_taken(const TypeTuple* t) const = 0;
};

class IfTrueNode : public IfProjNode {
public:
  IfTrueNode( IfNode *ifnode ) : IfProjNode(ifnode,1) {
    init_class_id(Class_IfTrue);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFTRUE; }
};

class IfFalseNode : public IfProjNode {
public:
  IfFalseNode( IfNode *ifnode ) : IfProjNode(ifnode,0) {
    init_class_id(Class_IfFalse);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFFALSE; }
};


//------------------------------PCTableNode------------------------------------
// Build an indirect branch table. Given a control and a table index,
// control is passed to the Projection matching the table index. Used to
// implement switch statements and exception-handling capabilities.
// Undefined behavior if passed-in index is not inside the table.
class PCTableNode : public MultiBranchNode {
  virtual uint hash() const; // Target count; table size
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

public:
  const uint _size; // Number of targets

  PCTableNode( Node *ctrl, Node *idx, uint size ) : MultiBranchNode(2), _size(size) {
    init_class_id(Class_PCTable);
    init_req(0, ctrl);
    init_req(1, idx);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *bottom_type() const;
  virtual bool pinned() const { return true; }
  virtual uint required_outcnt() const { return _size; }
};

//------------------------------JumpNode---------------------------------------
// Indirect branch. Uses PCTable above to implement a switch statement.
// It emits as a table load and local branch.
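//
// An illustrative construction sketch (not code from this file) lowering a
// three-way switch; 'probs' and 'total_cnt' stand for profile data, and each
// JumpProjNode (declared below) records its projection number, destination bci
// and matched switch value:
//
//   JumpNode* jmp = new JumpNode(ctrl, switch_idx, 3, probs, total_cnt);
//   JumpProjNode* case0 = new JumpProjNode(jmp, 0, dest_bci_0, match_val_0);
//   // ... one JumpProjNode per remaining target.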
class JumpNode : public PCTableNode {
  virtual uint size_of() const { return sizeof(*this); }
public:
  float* _probs; // probability of each projection
  float _fcnt;   // total number of times this Jump was executed
  JumpNode( Node* control, Node* switch_val, uint size, float* probs, float cnt)
    : PCTableNode(control, switch_val, size),
      _probs(probs), _fcnt(cnt) {
    init_class_id(Class_Jump);
  }
  virtual int Opcode() const;
  virtual const RegMask& out_RegMask() const;
  virtual const Node* is_block_proj() const { return this; }
};

class JumpProjNode : public JProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _dest_bci;
  const uint _proj_no;
  const int _switch_val;
public:
  JumpProjNode(Node* jumpnode, uint proj_no, int dest_bci, int switch_val)
    : JProjNode(jumpnode, proj_no), _dest_bci(dest_bci), _proj_no(proj_no), _switch_val(switch_val) {
    init_class_id(Class_JumpProj);
  }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int dest_bci() const { return _dest_bci; }
  int switch_val() const { return _switch_val; }
  uint proj_no() const { return _proj_no; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------CatchNode--------------------------------------
// Helper node to fork exceptions. "Catch" catches any exceptions thrown by
// a just-prior call. Looks like a PCTableNode but emits no code - just the
// table. The table lookup and branch is implemented by RethrowNode.
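//
// Rough sketch (illustrative only) of the projections hanging off a CatchNode
// that follows a call; the indices and bci values refer to the CatchProjNode
// constants declared further below:
//
//                      CatchNode
//                     /    |    \
//          CatchProj#0  CatchProj#1  CatchProj (one per handler)
//          fall through  catch-all    _handler_bci >= 0
//          (no_handler_bci) (no_handler_bci)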
class CatchNode : public PCTableNode {
public:
  CatchNode( Node *ctrl, Node *idx, uint size ) : PCTableNode(ctrl,idx,size){
    init_class_id(Class_Catch);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
};

// CatchProjNode controls which exception handler is targeted after a call.
// It is passed in the bci of the target handler, or no_handler_bci in case
// the projection doesn't lead to an exception handler.
class CatchProjNode : public CProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _handler_bci;

public:
  enum {
    fall_through_index = 0, // the fall through projection index
    catch_all_index = 1,    // the projection index for catch-alls
    no_handler_bci = -1     // the bci for fall through or catch-all projs
  };

  CatchProjNode(Node* catchnode, uint proj_no, int handler_bci)
    : CProjNode(catchnode, proj_no), _handler_bci(handler_bci) {
    init_class_id(Class_CatchProj);
    assert(proj_no != fall_through_index || handler_bci < 0, "fall through case must have bci < 0");
  }

  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  int handler_bci() const { return _handler_bci; }
  bool is_handler_proj() const { return _handler_bci >= 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};


//---------------------------------CreateExNode--------------------------------
// Helper node to create the exception coming back from a call
class CreateExNode : public TypeNode {
public:
  CreateExNode(const Type* t, Node* control, Node* i_o) : TypeNode(t, 2) {
    init_req(0, control);
    init_req(1, i_o);
  }
  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool pinned() const { return true; }
  uint match_edge(uint idx) const { return 0; }
  virtual uint ideal_reg() const { return Op_RegP; }
};

//------------------------------NeverBranchNode-------------------------------
// The never-taken branch. Used to give the appearance of exiting infinite
// loops to those algorithms that like all paths to be reachable. Encodes
// empty.
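//
// Typical shape (an illustrative sketch, not taken from this file): the second
// projection is never taken at runtime, but it gives the rest of the compiler
// a path out of the otherwise infinite loop:
//
//        loop head <------+
//             |           |
//      NeverBranchNode    |
//        /         \      |
//    CProj#0 -------+   CProj#1 --> never-taken exit path (e.g. a Halt)
//    (stays in loop)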
class NeverBranchNode : public MultiBranchNode {
public:
  NeverBranchNode(Node* ctrl) : MultiBranchNode(1) {
    init_req(0, ctrl);
    init_class_id(Class_NeverBranch);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; };
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint required_outcnt() const { return 2; }
  virtual void emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { }
  virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
#ifndef PRODUCT
  virtual void format( PhaseRegAlloc *, outputStream *st ) const;
#endif
};

//------------------------------BlackholeNode----------------------------
// Blackhole all arguments. This node would survive through the compiler,
// keeping its arguments (and the effects that produce them) alive, and would
// be finally matched to nothing.
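//
// An illustrative usage sketch (not code from this file): an intrinsic could
// keep call arguments alive by feeding them into a blackhole (variable names
// are made up, GVN registration omitted):
//
//   BlackholeNode* bh = new BlackholeNode(ctrl);
//   bh->add_req(arg0);   // each added input is treated as used, so it cannot
//   bh->add_req(arg1);   // be optimized away; the node itself emits no code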
class BlackholeNode : public MultiNode {
public:
  BlackholeNode(Node* ctrl) : MultiNode(1) {
    init_req(TypeFunc::Control, ctrl);
    init_class_id(Class_Blackhole);
  }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
  virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);

  const RegMask &in_RegMask(uint idx) const {
    // Fake the incoming arguments mask for blackholes: accept all registers
    // and all stack slots. This would avoid any redundant register moves
    // for blackhole inputs.
    return RegMask::ALL;
  }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
#endif
};


#endif // SHARE_OPTO_CFGNODE_HPP