1 /*
2 * Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_OPTO_CFGNODE_HPP
26 #define SHARE_OPTO_CFGNODE_HPP
27
28 #include "opto/multnode.hpp"
29 #include "opto/node.hpp"
30 #include "opto/opcodes.hpp"
31 #include "opto/predicates_enums.hpp"
32 #include "opto/type.hpp"
33
34 // Portions of code courtesy of Clifford Click
35
36 // Optimization - Graph Style
37
38 class Matcher;
39 class Node;
40 class RegionNode;
41 class TypeNode;
42 class PhiNode;
43 class GotoNode;
44 class MultiNode;
45 class MultiBranchNode;
46 class IfNode;
47 class PCTableNode;
48 class JumpNode;
49 class CatchNode;
50 class NeverBranchNode;
51 class BlackholeNode;
52 class ProjNode;
53 class CProjNode;
54 class IfTrueNode;
55 class IfFalseNode;
56 class CatchProjNode;
57 class JProjNode;
58 class JumpProjNode;
59 class SCMemProjNode;
60 class PhaseIdealLoop;
61 enum class AssertionPredicateType;
62 enum class PredicateState;
63
64 //------------------------------RegionNode-------------------------------------
65 // The class of RegionNodes, which can be mapped to basic blocks in the
66 // program. Their inputs point to Control sources. PhiNodes (described
67 // below) have an input point to a RegionNode. Merged data inputs to PhiNodes
68 // correspond 1-to-1 with RegionNode inputs. The zero input of a PhiNode is
69 // the RegionNode, and the zero input of the RegionNode is itself.
70 class RegionNode : public Node {
71 public:
72 enum LoopStatus {
73 // No guarantee: the region may be an irreducible loop entry, thus we have to
74 // be careful when removing entry control to it.
75 MaybeIrreducibleEntry,
76 // Limited guarantee: this region may be (nested) inside an irreducible loop,
77 // but it will never be an irreducible loop entry.
78 NeverIrreducibleEntry,
79 // Strong guarantee: this region is not (nested) inside an irreducible loop.
80 Reducible,
81 };
82
83 private:
84 bool _is_unreachable_region;
85 LoopStatus _loop_status;
86
87 bool is_possible_unsafe_loop() const;
88 bool is_unreachable_from_root(const PhaseGVN* phase) const;
89 public:
90 // Node layout (parallels PhiNode):
91 enum { Region, // Generally points to self.
92 Control // Control arcs are [1..len)
93 };
94
95 RegionNode(uint required)
96 : Node(required),
97 _is_unreachable_region(false),
98 _loop_status(LoopStatus::NeverIrreducibleEntry)
99 {
100 init_class_id(Class_Region);
101 init_req(0, this);
102 }
103
104 Node* is_copy() const {
105 const Node* r = _in[Region];
106 if (r == nullptr)
107 return nonnull_req();
108 return nullptr; // not a copy!
109 }
110 PhiNode* has_phi() const; // returns an arbitrary phi user, or null
111 PhiNode* has_unique_phi() const; // returns the unique phi user, or null
112 // Is this region node unreachable from root?
113 bool is_unreachable_region(const PhaseGVN* phase);
114 #ifdef ASSERT
115 bool is_in_infinite_subgraph();
116 static bool are_all_nodes_in_infinite_subgraph(Unique_Node_List& worklist);
117 #endif //ASSERT
118 LoopStatus loop_status() const { return _loop_status; };
119 void set_loop_status(LoopStatus status);
120 bool can_be_irreducible_entry() const;
121
122 virtual int Opcode() const;
123 virtual uint size_of() const { return sizeof(*this); }
124 virtual bool pinned() const { return (const Node*)in(0) == this; }
125 virtual bool is_CFG() const { return true; }
126 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
127 virtual const Type* bottom_type() const { return Type::CONTROL; }
128 virtual const Type* Value(PhaseGVN* phase) const;
129 virtual Node* Identity(PhaseGVN* phase);
130 virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
131 void remove_unreachable_subgraph(PhaseIterGVN* igvn);
132 virtual const RegMask &out_RegMask() const;
133 bool is_diamond() const;
134 void try_clean_mem_phis(PhaseIterGVN* phase);
135 bool optimize_trichotomy(PhaseIterGVN* igvn);
136 NOT_PRODUCT(virtual void dump_spec(outputStream* st) const;)
137 };
138
139 //------------------------------JProjNode--------------------------------------
140 // jump projection for node that produces multiple control-flow paths
class JProjNode : public ProjNode {
public:
  JProjNode( Node* ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  // This projection ends the basic block of its input node.
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint ideal_reg() const { return 0; } // carries control, not a value
};
151
152 //------------------------------PhiNode----------------------------------------
153 // PhiNodes merge values from different Control paths. Slot 0 points to the
154 // controlling RegionNode. Other slots map 1-for-1 with incoming control flow
155 // paths to the RegionNode.
class PhiNode : public TypeNode {
  friend class PhaseRenumberLive;

  const TypePtr* const _adr_type; // non-null only for Type::MEMORY nodes.
  // The following fields are only used for data PhiNodes to indicate
  // that the PhiNode represents the value of a known instance field.
  int _inst_mem_id;       // Instance memory id (node index of the memory Phi)
  int _inst_id;           // Instance id of the memory slice.
  const int _inst_index;  // Alias index of the instance memory slice.
  // Array elements references have the same alias_idx but different offset.
  const int _inst_offset; // Offset of the instance memory slice.
  // Size is bigger to hold the _adr_type field.
  virtual uint hash() const;    // Check the type
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

  // Determine if CMoveNode::is_cmove_id can be used at this join point.
  Node* is_cmove_id(PhaseTransform* phase, int true_path);
  bool wait_for_region_igvn(PhaseGVN* phase);
  bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);

  static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
  static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);

  bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;

  bool is_split_through_mergemem_terminating() const;

  void verify_type_stability(const PhaseGVN* phase, const Type* union_of_input_types, const Type* new_type) const NOT_DEBUG_RETURN;
  bool wait_for_cast_input_igvn(const PhaseIterGVN* igvn) const;

public:
  // Node layout (parallels RegionNode):
  enum { Region, // Control input is the Phi's region.
         Input   // Input values are [1..len)
  };

  PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
           const int imid = -1,
           const int iid = TypeOopPtr::InstanceTop,
           const int iidx = Compile::AliasIdxTop,
           const int ioffs = Type::OffsetTop )
    : TypeNode(t,r->req()),   // one input slot per control input of the region
      _adr_type(at),
      _inst_mem_id(imid),
      _inst_id(iid),
      _inst_index(iidx),
      _inst_offset(ioffs)
  {
    init_class_id(Class_Phi);
    init_req(0, r);
    verify_adr_type();
  }
  // create a new phi with in edges matching r and set (initially) to x
  static PhiNode* make( Node* r, Node* x );
  // extra type arguments override the new phi's bottom_type and adr_type
  static PhiNode* make( Node* r, Node* x, const Type *t, const TypePtr* at = nullptr );
  // create a new phi with narrowed memory type
  PhiNode* slice_memory(const TypePtr* adr_type) const;
  PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
  // like make(r, x), but does not initialize the in edges to x
  static PhiNode* make_blank( Node* r, Node* x );

  // Accessors
  RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }

  bool is_tripcount(BasicType bt) const;

  // Determine a unique non-trivial input, if any.
  // Ignore casts if it helps. Return null on failure.
  Node* unique_input(PhaseValues* phase, bool uncast);
  // Convenience overload: first try without uncasting; only if that fails,
  // retry while looking through casts.
  Node* unique_input(PhaseValues* phase) {
    Node* uin = unique_input(phase, false);
    if (uin == nullptr) {
      uin = unique_input(phase, true);
    }
    return uin;
  }

  // Check for a simple dead loop.
  enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
  LoopSafety simple_data_loop_check(Node *in) const;
  // Is it unsafe data loop? It becomes a dead loop if this phi node removed.
  bool is_unsafe_data_reference(Node *in) const;
  int is_diamond_phi() const;
  bool try_clean_memory_phi(PhaseIterGVN* igvn);
  virtual int Opcode() const;
  virtual bool pinned() const { return in(0) != nullptr; }
  virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }

  void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
  int inst_mem_id() const { return _inst_mem_id; }
  int inst_id()     const { return _inst_id; }
  int inst_index()  const { return _inst_index; }
  int inst_offset() const { return _inst_offset; }
  // True if this phi describes the same instance field slice as
  // (tp, mem_id, id, index, offset) and its type is at least as precise as tp.
  bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
    return type()->basic_type() == tp->basic_type() &&
           inst_mem_id() == mem_id &&
           inst_id()     == id     &&
           inst_index()  == index  &&
           inst_offset() == offset &&
           type()->higher_equal(tp);
  }

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const RegMask &out_RegMask() const;
  virtual const RegMask &in_RegMask(uint) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
  void verify_adr_type(bool recursive = false) const;
#else //ASSERT
  void verify_adr_type(bool recursive = false) const {}
#endif //ASSERT

  const TypeTuple* collect_types(PhaseGVN* phase) const;
  bool can_be_replaced_by(const PhiNode* other) const;
};
278
279 //------------------------------GotoNode---------------------------------------
280 // GotoNodes perform direct branches.
class GotoNode : public Node {
public:
  GotoNode( Node *control ) : Node(control) {}
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }  // fixed to its control input
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  // A Goto ends its own basic block.
  virtual const Node *is_block_proj() const { return this; }
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const RegMask &out_RegMask() const;
};
294
295 //------------------------------CProjNode--------------------------------------
296 // control projection for node that produces multiple control-flow paths
class CProjNode : public ProjNode {
public:
  CProjNode( Node *ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  // This projection ends the basic block of its input node.
  virtual const Node *is_block_proj() const { return in(0); }
  virtual const RegMask &out_RegMask() const;
  virtual uint ideal_reg() const { return 0; } // carries control, not a value
};
307
308 //---------------------------MultiBranchNode-----------------------------------
309 // This class defines a MultiBranchNode, a MultiNode which yields multiple
310 // control values. These are distinguished from other types of MultiNodes
311 // which yield multiple values, but control is always and only projection #0.
// Common base of the branching nodes below (IfNode, PCTableNode, NeverBranchNode).
class MultiBranchNode : public MultiNode {
public:
  MultiBranchNode( uint required ) : MultiNode(required) {
    init_class_id(Class_MultiBranch);
  }
  // returns required number of users to be well formed.
  virtual uint required_outcnt() const = 0;
};
320
321 //------------------------------IfNode-----------------------------------------
322 // Output selected Control, based on a boolean test
class IfNode : public MultiBranchNode {
public:
  float _prob; // Probability of true path being taken.
  float _fcnt; // Frequency counter

private:
  AssertionPredicateType _assertion_predicate_type;

  // Shared constructor helper: wire up the control input (req 0) and the
  // boolean condition (req 1).
  void init_node(Node* control, Node* bol) {
    init_class_id(Class_If);
    init_req(0, control);
    init_req(1, bol);
  }

  // Size is bigger to hold the probability field. However, _prob does not
  // change the semantics so it does not appear in the hash & cmp functions.
  virtual uint size_of() const { return sizeof(*this); }

  // Helper methods for fold_compares
  bool cmpi_folds(PhaseIterGVN* igvn, bool fold_ne = false);
  bool is_ctrl_folds(Node* ctrl, PhaseIterGVN* igvn);
  bool has_shared_region(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail) const;
  bool has_only_uncommon_traps(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail, PhaseIterGVN* igvn) const;
  Node* merge_uncommon_traps(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static void improve_address_types(Node* l, Node* r, ProjNode* fail, PhaseIterGVN* igvn);
  bool is_cmp_with_loadrange(IfProjNode* proj) const;
  bool is_null_check(IfProjNode* proj, PhaseIterGVN* igvn) const;
  bool is_side_effect_free_test(IfProjNode* proj, PhaseIterGVN* igvn) const;
  static void reroute_side_effect_free_unc(IfProjNode* proj, IfProjNode* dom_proj, PhaseIterGVN* igvn);
  bool fold_compares_helper(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static bool is_dominator_unc(CallStaticJavaNode* dom_unc, CallStaticJavaNode* unc);

protected:
  IfProjNode* range_check_trap_proj(int& flip, Node*& l, Node*& r) const;
  Node* Ideal_common(PhaseGVN *phase, bool can_reshape);
  Node* search_identical(int dist, PhaseIterGVN* igvn);

  Node* simple_subsuming(PhaseIterGVN* igvn);

public:

  // Degrees of branch prediction probability by order of magnitude:
  // PROB_UNLIKELY_1e(N) is a 1 in 1eN chance.
  // PROB_LIKELY_1e(N) is a 1 - PROB_UNLIKELY_1e(N)
#define PROB_UNLIKELY_MAG(N) (1e- ## N ## f)
#define PROB_LIKELY_MAG(N) (1.0f-PROB_UNLIKELY_MAG(N))

  // Maximum and minimum branch prediction probabilities
  // 1 in 1,000,000 (magnitude 6)
  //
  // Although PROB_NEVER == PROB_MIN and PROB_ALWAYS == PROB_MAX
  // they are used to distinguish different situations:
  //
  // The name PROB_MAX (PROB_MIN) is for probabilities which correspond to
  // very likely (unlikely) but with a concrete possibility of a rare
  // contrary case. These constants would be used for pinning
  // measurements, and as measures for assertions that have high
  // confidence, but some evidence of occasional failure.
  //
  // The name PROB_ALWAYS (PROB_NEVER) is to stand for situations for which
  // there is no evidence at all that the contrary case has ever occurred.

#define PROB_NEVER PROB_UNLIKELY_MAG(6)
#define PROB_ALWAYS PROB_LIKELY_MAG(6)

#define PROB_MIN PROB_UNLIKELY_MAG(6)
#define PROB_MAX PROB_LIKELY_MAG(6)

  // Static branch prediction probabilities
  // 1 in 10 (magnitude 1)
#define PROB_STATIC_INFREQUENT PROB_UNLIKELY_MAG(1)
#define PROB_STATIC_FREQUENT PROB_LIKELY_MAG(1)

  // Fair probability 50/50
#define PROB_FAIR (0.5f)

  // Unknown probability sentinel
#define PROB_UNKNOWN (-1.0f)

  // Probability "constructors", to distinguish as a probability any manifest
  // constant without a name
#define PROB_LIKELY(x) ((float) (x))
#define PROB_UNLIKELY(x) (1.0f - (float)(x))

  // Other probabilities in use, but without a unique name, are documented
  // here for lack of a better place:
  //
  // 1 in 1000 probabilities (magnitude 3):
  //     threshold for converting to conditional move
  //     likelihood of null check failure if a null HAS been seen before
  //     likelihood of slow path taken in library calls
  //
  // 1 in 10,000 probabilities (magnitude 4):
  //     threshold for making an uncommon trap probability more extreme
  //     threshold for making a null check implicit
  //     likelihood of needing a gc if eden top moves during an allocation
  //     likelihood of a predicted call failure
  //
  // 1 in 100,000 probabilities (magnitude 5):
  //     threshold for ignoring counts when estimating path frequency
  //     likelihood of FP clipping failure
  //     likelihood of catching an exception from a try block
  //     likelihood of null check failure if a null has NOT been seen before
  //
  // Magic manifest probabilities such as 0.83, 0.7, ... can be found in
  // gen_subtype_check() and catch_inline_exceptions().

  IfNode(Node* control, Node* bol, float p, float fcnt);
  IfNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type);

  // Create a new If carrying the same probability/frequency profile as if_node_profile.
  static IfNode* make_with_same_profile(IfNode* if_node_profile, Node* ctrl, Node* bol);

  IfTrueNode* true_proj() const {
    return proj_out(true)->as_IfTrue();
  }

  IfTrueNode* true_proj_or_null() const {
    ProjNode* true_proj = proj_out_or_null(true);
    return true_proj == nullptr ? nullptr : true_proj->as_IfTrue();
  }

  IfFalseNode* false_proj() const {
    return proj_out(false)->as_IfFalse();
  }

  IfFalseNode* false_proj_or_null() const {
    ProjNode* false_proj = proj_out_or_null(false);
    return false_proj == nullptr ? nullptr : false_proj->as_IfFalse();
  }

  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint required_outcnt() const { return 2; } // one true and one false projection
  virtual const RegMask &out_RegMask() const;
  Node* fold_compares(PhaseIterGVN* phase);
  static Node* up_one_dom(Node* curr, bool linear_only = false);
  bool is_zero_trip_guard() const;
  Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool prev_dom_not_imply_this);
  ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;

  // Takes the type of val and filters it through the test represented
  // by if_proj and returns a more refined type if one is produced.
  // Returns null if it couldn't improve the type.
  static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);

  AssertionPredicateType assertion_predicate_type() const {
    return _assertion_predicate_type;
  }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif

  bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
};
481
// An IfNode that implements an array range check.
class RangeCheckNode : public IfNode {
private:
  // Recognize this If as a range check; on success fills in the range
  // (array length), the index, and a constant offset via the out-params.
  int is_range_check(Node*& range, Node*& index, jint& offset);

public:
  RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {
    init_class_id(Class_RangeCheck);
  }

  RangeCheckNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type)
    : IfNode(control, bol, p, fcnt, assertion_predicate_type) {
    init_class_id(Class_RangeCheck);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
};
499
500 // Special node that denotes a Parse Predicate added during parsing. A Parse Predicate serves as placeholder to later
501 // create Regular Predicates (Runtime Predicates with possible Assertion Predicates) above it. Together they form a
502 // Predicate Block. The Parse Predicate and Regular Predicates share the same uncommon trap.
503 // There are three kinds of Parse Predicates:
504 // Loop Parse Predicate, Profiled Loop Parse Predicate (both used by Loop Predication), and Loop Limit Check Parse
505 // Predicate (used for integer overflow checks when creating a counted loop).
506 // More information about predicates can be found in loopPredicate.cpp.
class ParsePredicateNode : public IfNode {
  Deoptimization::DeoptReason _deopt_reason; // deopt reason of the shared uncommon trap

  // When a Parse Predicate loses its connection to a loop head, it will be marked useless by
  // EliminateUselessPredicates and cleaned up by Value(). It can also become useless when cloning it to both loops
  // during Loop Multiversioning - we no longer use the old version.
  PredicateState _predicate_state;
public:
  ParsePredicateNode(Node* control, Deoptimization::DeoptReason deopt_reason, PhaseGVN* gvn);
  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }

  Deoptimization::DeoptReason deopt_reason() const {
    return _deopt_reason;
  }

  bool is_useless() const {
    return _predicate_state == PredicateState::Useless;
  }

  void mark_useless(PhaseIterGVN& igvn);

  void mark_maybe_useful() {
    _predicate_state = PredicateState::MaybeUseful;
  }

  bool is_useful() const {
    return _predicate_state == PredicateState::Useful;
  }

  void mark_useful() {
    _predicate_state = PredicateState::Useful;
  }

  // Return the uncommon trap If projection of this Parse Predicate.
  ParsePredicateUncommonProj* uncommon_proj() const {
    return false_proj();
  }

  Node* uncommon_trap() const;

  // Parse Predicates are deliberately left untouched by Ideal().
  Node* Ideal(PhaseGVN* phase, bool can_reshape) {
    return nullptr; // Don't optimize
  }

  const Type* Value(PhaseGVN* phase) const;
  NOT_PRODUCT(void dump_spec(outputStream* st) const;)
};
555
// Common base of IfTrueNode and IfFalseNode: a control projection of an IfNode.
class IfProjNode : public CProjNode {
public:
  IfProjNode(IfNode *ifnode, uint idx) : CProjNode(ifnode,idx) {}
  virtual Node* Identity(PhaseGVN* phase);

  // Return the other IfProj node.
  IfProjNode* other_if_proj() const {
    return in(0)->as_If()->proj_out(1 - _con)->as_IfProj();
  }

  void pin_dependent_nodes(PhaseIterGVN* igvn);

protected:
  // Type of If input when this branch is always taken
  virtual bool always_taken(const TypeTuple* t) const = 0;
};
572
// Projection taken when the If condition is true (projection index 1).
class IfTrueNode : public IfProjNode {
public:
  IfTrueNode( IfNode *ifnode ) : IfProjNode(ifnode,1) {
    init_class_id(Class_IfTrue);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFTRUE; }
};
583
// Projection taken when the If condition is false (projection index 0).
class IfFalseNode : public IfProjNode {
public:
  IfFalseNode( IfNode *ifnode ) : IfProjNode(ifnode,0) {
    init_class_id(Class_IfFalse);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFFALSE; }
};
594
595
596 //------------------------------PCTableNode------------------------------------
597 // Build an indirect branch table. Given a control and a table index,
598 // control is passed to the Projection matching the table index. Used to
599 // implement switch statements and exception-handling capabilities.
600 // Undefined behavior if passed-in index is not inside the table.
class PCTableNode : public MultiBranchNode {
  virtual uint hash() const; // Target count; table size
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

public:
  const uint _size; // Number of targets

  // Inputs: control (req 0) and the table index (req 1).
  PCTableNode( Node *ctrl, Node *idx, uint size ) : MultiBranchNode(2), _size(size) {
    init_class_id(Class_PCTable);
    init_req(0, ctrl);
    init_req(1, idx);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *bottom_type() const;
  virtual bool pinned() const { return true; }
  // Well formed only when every one of the _size targets has a projection.
  virtual uint required_outcnt() const { return _size; }
};
621
622 //------------------------------JumpNode---------------------------------------
623 // Indirect branch. Uses PCTable above to implement a switch statement.
624 // It emits as a table load and local branch.
class JumpNode : public PCTableNode {
  virtual uint size_of() const { return sizeof(*this); }
public:
  float* _probs; // probability of each projection
  float _fcnt;   // total number of times this Jump was executed
  JumpNode( Node* control, Node* switch_val, uint size, float* probs, float cnt)
    : PCTableNode(control, switch_val, size),
      _probs(probs), _fcnt(cnt) {
    init_class_id(Class_Jump);
  }
  virtual int Opcode() const;
  virtual const RegMask& out_RegMask() const;
  // A Jump ends its own basic block.
  virtual const Node* is_block_proj() const { return this; }
};
639
// Projection of a JumpNode: one switch target.
class JumpProjNode : public JProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int  _dest_bci;   // bytecode index of the jump target
  const uint _proj_no;    // projection number within the Jump
  const int  _switch_val; // switch value selecting this projection
public:
  JumpProjNode(Node* jumpnode, uint proj_no, int dest_bci, int switch_val)
    : JProjNode(jumpnode, proj_no), _dest_bci(dest_bci), _proj_no(proj_no), _switch_val(switch_val) {
    init_class_id(Class_JumpProj);
  }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int dest_bci()    const { return _dest_bci; }
  int switch_val()  const { return _switch_val; }
  uint proj_no()    const { return _proj_no; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};
665
666 //------------------------------CatchNode--------------------------------------
667 // Helper node to fork exceptions. "Catch" catches any exceptions thrown by
668 // a just-prior call. Looks like a PCTableNode but emits no code - just the
669 // table. The table lookup and branch is implemented by RethrowNode.
class CatchNode : public PCTableNode {
public:
  // Inputs: control and the exception-selecting index; size exception targets.
  CatchNode( Node *ctrl, Node *idx, uint size ) : PCTableNode(ctrl,idx,size){
    init_class_id(Class_Catch);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
};
678
679 // CatchProjNode controls which exception handler is targeted after a call.
680 // It is passed in the bci of the target handler, or no_handler_bci in case
681 // the projection doesn't lead to an exception handler.
class CatchProjNode : public CProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _handler_bci; // bci of the exception handler, or no_handler_bci

public:
  enum {
    fall_through_index =  0, // the fall through projection index
    catch_all_index    =  1, // the projection index for catch-alls
    no_handler_bci     = -1  // the bci for fall through or catch-all projs
  };

  CatchProjNode(Node* catchnode, uint proj_no, int handler_bci)
    : CProjNode(catchnode, proj_no), _handler_bci(handler_bci) {
    init_class_id(Class_CatchProj);
    assert(proj_no != fall_through_index || handler_bci < 0, "fall through case must have bci < 0");
  }

  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  int  handler_bci() const        { return _handler_bci; }
  // True only for projections leading to a real exception handler (bci >= 0).
  bool is_handler_proj() const    { return _handler_bci >= 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
712
713
714 //---------------------------------CreateExNode--------------------------------
715 // Helper node to create the exception coming back from a call
class CreateExNode : public TypeNode {
public:
  // t is the type of the exception oop; control and i_o come from the call site.
  CreateExNode(const Type* t, Node* control, Node* i_o) : TypeNode(t, 2) {
    init_req(0, control);
    init_req(1, i_o);
  }
  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool pinned() const { return true; }
  uint match_edge(uint idx) const { return 0; } // no inputs are matched
  virtual uint ideal_reg() const { return Op_RegP; } // produces a pointer value
};
728
729 //------------------------------NeverBranchNode-------------------------------
730 // The never-taken branch. Used to give the appearance of exiting infinite
731 // loops to those algorithms that like all paths to be reachable. Encodes
732 // empty.
733 class NeverBranchNode : public MultiBranchNode {
734 public:
735 NeverBranchNode(Node* ctrl) : MultiBranchNode(1) {
736 init_req(0, ctrl);
737 init_class_id(Class_NeverBranch);
738 }
739 virtual int Opcode() const;
740 virtual bool pinned() const { return true; };
741 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
742 virtual const Type* Value(PhaseGVN* phase) const;
743 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
744 virtual uint required_outcnt() const { return 2; }
745 virtual void emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { }
746 virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
747 #ifndef PRODUCT
748 virtual void format( PhaseRegAlloc *, outputStream *st ) const;
749 #endif
750 };
751
//------------------------------BlackholeNode----------------------------
// Blackhole all arguments. This node carries its arguments (and the effects
// on them) through the compiler, and is finally matched to nothing.
class BlackholeNode : public MultiNode {
public:
  // Takes only a control input; data arguments are attached as additional edges.
  BlackholeNode(Node* ctrl) : MultiNode(1) {
    init_req(TypeFunc::Control, ctrl);
  }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
  virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);

  const RegMask &in_RegMask(uint idx) const {
    // Fake the incoming arguments mask for blackholes: accept all registers
    // and all stack slots. This would avoid any redundant register moves
    // for blackhole inputs.
    return RegMask::ALL;
  }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
#endif
};
775
776
777 #endif // SHARE_OPTO_CFGNODE_HPP