/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CFGNODE_HPP
#define SHARE_OPTO_CFGNODE_HPP

#include "opto/multnode.hpp"
#include "opto/node.hpp"
#include "opto/opcodes.hpp"
#include "opto/predicates_enums.hpp"
#include "opto/type.hpp"

// Portions of code courtesy of Clifford Click

// Optimization - Graph Style

class Matcher;
class Node;
class RegionNode;
class TypeNode;
class PhiNode;
class GotoNode;
class MultiNode;
class MultiBranchNode;
class IfNode;
class PCTableNode;
class JumpNode;
class CatchNode;
class NeverBranchNode;
class BlackholeNode;
class ProjNode;
class CProjNode;
class IfTrueNode;
class IfFalseNode;
class CatchProjNode;
class JProjNode;
class JumpProjNode;
class SCMemProjNode;
class PhaseIdealLoop;
enum class AssertionPredicateType;
enum class PredicateState;

//------------------------------RegionNode-------------------------------------
// The class of RegionNodes, which can be mapped to basic blocks in the
// program. Their inputs point to Control sources. PhiNodes (described
// below) have an input that points to a RegionNode. Merged data inputs to PhiNodes
// correspond 1-to-1 with RegionNode inputs. The zero input of a PhiNode is
// the RegionNode, and the zero input of the RegionNode is itself.
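//
// For illustration (a sketch, not code from this file), a two-way merge of
// values x and y along control paths c1 and c2 has this shape:
//
//        c1    c2
//         \    /
//   Region(self, c1, c2)
//          |
//   Phi(region, x, y)
//
// where Phi->in(i) is the value flowing in along Region->in(i) for i >= 1,
// Region->in(0) is the Region itself, and Phi->in(0) is the Region.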
class RegionNode : public Node {
public:
  enum LoopStatus {
    // No guarantee: the region may be an irreducible loop entry, thus we have to
    // be careful when removing entry control to it.
    MaybeIrreducibleEntry,
    // Limited guarantee: this region may be (nested) inside an irreducible loop,
    // but it will never be an irreducible loop entry.
    NeverIrreducibleEntry,
    // Strong guarantee: this region is not (nested) inside an irreducible loop.
    Reducible,
  };

private:
  bool _is_unreachable_region;
  LoopStatus _loop_status;

  bool is_possible_unsafe_loop() const;
  bool is_unreachable_from_root(const PhaseGVN* phase) const;
public:
  // Node layout (parallels PhiNode):
  enum { Region,  // Generally points to self.
         Control  // Control arcs are [1..len)
  };

  RegionNode(uint required)
    : Node(required),
      _is_unreachable_region(false),
      _loop_status(LoopStatus::NeverIrreducibleEntry)
  {
    init_class_id(Class_Region);
    init_req(0, this);
  }

  Node* is_copy() const {
    const Node* r = _in[Region];
    if (r == nullptr)
      return nonnull_req();
    return nullptr; // not a copy!
  }
  PhiNode* has_phi() const;        // returns an arbitrary phi user, or null
  PhiNode* has_unique_phi() const; // returns the unique phi user, or null
  // Is this region node unreachable from root?
  bool is_unreachable_region(const PhaseGVN* phase);
#ifdef ASSERT
  bool is_in_infinite_subgraph();
  static bool are_all_nodes_in_infinite_subgraph(Unique_Node_List& worklist);
#endif //ASSERT
  LoopStatus loop_status() const { return _loop_status; }
  void set_loop_status(LoopStatus status);
  bool can_be_irreducible_entry() const;

  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }
  virtual bool pinned() const { return (const Node*)in(0) == this; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  void remove_unreachable_subgraph(PhaseIterGVN* igvn);
  virtual const RegMask &out_RegMask() const;
  bool is_diamond() const;
  void try_clean_mem_phis(PhaseIterGVN* phase);
  bool optimize_trichotomy(PhaseIterGVN* igvn);
  NOT_PRODUCT(virtual void dump_spec(outputStream* st) const;)
};

//------------------------------JProjNode--------------------------------------
// jump projection for node that produces multiple control-flow paths
class JProjNode : public ProjNode {
public:
  JProjNode( Node* ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//------------------------------PhiNode----------------------------------------
// PhiNodes merge values from different Control paths. Slot 0 points to the
// controlling RegionNode. Other slots map 1-for-1 with incoming control flow
// paths to the RegionNode.
class PhiNode : public TypeNode {
  friend class PhaseRenumberLive;

  const TypePtr* const _adr_type; // non-null only for Type::MEMORY nodes.
  // The following fields are only used for data PhiNodes to indicate
  // that the PhiNode represents the value of a known instance field.
  int _inst_mem_id;       // Instance memory id (node index of the memory Phi)
  int _inst_id;           // Instance id of the memory slice.
  const int _inst_index;  // Alias index of the instance memory slice.
  // Array elements references have the same alias_idx but different offset.
  const int _inst_offset; // Offset of the instance memory slice.
  // Size is bigger to hold the _adr_type field.
  virtual uint hash() const; // Check the type
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

  // Determine if CMoveNode::is_cmove_id can be used at this join point.
  Node* is_cmove_id(PhaseTransform* phase, int true_path);
  bool wait_for_region_igvn(PhaseGVN* phase);
  bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);

  static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
  static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);

  bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;

  bool is_split_through_mergemem_terminating() const;

  void verify_type_stability(const PhaseGVN* phase, const Type* union_of_input_types, const Type* new_type) const NOT_DEBUG_RETURN;
  bool wait_for_cast_input_igvn(const PhaseIterGVN* igvn) const;

public:
  // Node layout (parallels RegionNode):
  enum { Region,  // Control input is the Phi's region.
         Input    // Input values are [1..len)
  };

  PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
           const int imid = -1,
           const int iid = TypeOopPtr::InstanceTop,
           const int iidx = Compile::AliasIdxTop,
           const int ioffs = Type::OffsetTop )
    : TypeNode(t,r->req()),
      _adr_type(at),
      _inst_mem_id(imid),
      _inst_id(iid),
      _inst_index(iidx),
      _inst_offset(ioffs)
  {
    init_class_id(Class_Phi);
    init_req(0, r);
    verify_adr_type();
  }
  // create a new phi with in edges matching r and set (initially) to x
  static PhiNode* make( Node* r, Node* x );
  // extra type arguments override the new phi's bottom_type and adr_type
  static PhiNode* make( Node* r, Node* x, const Type *t, const TypePtr* at = nullptr );
  // create a new phi with narrowed memory type
  PhiNode* slice_memory(const TypePtr* adr_type) const;
  PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
  // like make(r, x), but does not initialize the in edges to x
  static PhiNode* make_blank( Node* r, Node* x );
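  // Illustrative use of the factories above (a sketch; assumes r is a
  // RegionNode and x, y are existing value nodes):
  //   PhiNode* phi = PhiNode::make(r, x); // all value inputs start as x
  //   phi->set_req(2, y);                 // value merged along r->in(2)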

  // Accessors
  RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }

  bool is_tripcount(BasicType bt) const;

  // Determine a unique non-trivial input, if any.
  // Ignore casts if it helps. Return null on failure.
  Node* unique_input(PhaseValues* phase, bool uncast);
  Node* unique_input(PhaseValues* phase) {
    Node* uin = unique_input(phase, false);
    if (uin == nullptr) {
      uin = unique_input(phase, true);
    }
    return uin;
  }

  // Check for a simple dead loop.
  enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
  LoopSafety simple_data_loop_check(Node *in) const;
  // Is this an unsafe data loop? It becomes a dead loop if this phi node is removed.
  bool is_unsafe_data_reference(Node *in) const;
  int is_diamond_phi() const;
  bool try_clean_memory_phi(PhaseIterGVN* igvn);
  virtual int Opcode() const;
  virtual bool pinned() const { return in(0) != nullptr; }
  virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }

  void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
  int inst_mem_id() const { return _inst_mem_id; }
  int inst_id() const { return _inst_id; }
  int inst_index() const { return _inst_index; }
  int inst_offset() const { return _inst_offset; }
  bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
    return type()->basic_type() == tp->basic_type() &&
           inst_mem_id() == mem_id &&
           inst_id() == id &&
           inst_index() == index &&
           inst_offset() == offset &&
           type()->higher_equal(tp);
  }

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const RegMask &out_RegMask() const;
  virtual const RegMask &in_RegMask(uint) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
  void verify_adr_type(bool recursive = false) const;
#else //ASSERT
  void verify_adr_type(bool recursive = false) const {}
#endif //ASSERT

  const TypeTuple* collect_types(PhaseGVN* phase) const;
  bool can_be_replaced_by(const PhiNode* other) const;
};

//------------------------------GotoNode---------------------------------------
// GotoNodes perform direct branches.
class GotoNode : public Node {
public:
  GotoNode( Node *control ) : Node(control) {}
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return this; }
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const RegMask &out_RegMask() const;
};

//------------------------------CProjNode--------------------------------------
// control projection for node that produces multiple control-flow paths
class CProjNode : public ProjNode {
public:
  CProjNode( Node *ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return in(0); }
  virtual const RegMask &out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//---------------------------MultiBranchNode-----------------------------------
// This class defines a MultiBranchNode, a MultiNode which yields multiple
// control values. This distinguishes it from other types of MultiNodes,
// which yield multiple values but whose control is always and only projection #0.
class MultiBranchNode : public MultiNode {
public:
  MultiBranchNode( uint required ) : MultiNode(required) {
    init_class_id(Class_MultiBranch);
  }
  // Returns the required number of users (control projections) for this node to be well formed.
  virtual uint required_outcnt() const = 0;
};

//------------------------------IfNode-----------------------------------------
// Output selected Control, based on a boolean test
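// An illustrative shape in the ideal graph (a sketch, not code from this file):
//
//      ctrl    Bool (wrapping a Cmp)
//         \    /
//         IfNode
//         /     \
//    IfTrue     IfFalse
//
// Exactly one of the two control projections is taken at runtime, which is
// why required_outcnt() below is 2.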
class IfNode : public MultiBranchNode {
public:
  float _prob; // Probability of true path being taken.
  float _fcnt; // Frequency counter

private:
  AssertionPredicateType _assertion_predicate_type;

  void init_node(Node* control, Node* bol) {
    init_class_id(Class_If);
    init_req(0, control);
    init_req(1, bol);
  }

  // Size is bigger to hold the probability field. However, _prob does not
  // change the semantics so it does not appear in the hash & cmp functions.
  virtual uint size_of() const { return sizeof(*this); }

  // Helper methods for fold_compares
  bool cmpi_folds(PhaseIterGVN* igvn, bool fold_ne = false);
  bool is_ctrl_folds(Node* ctrl, PhaseIterGVN* igvn);
  bool has_shared_region(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail) const;
  bool has_only_uncommon_traps(IfProjNode* proj, IfProjNode*& success, IfProjNode*& fail, PhaseIterGVN* igvn) const;
  Node* merge_uncommon_traps(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static void improve_address_types(Node* l, Node* r, ProjNode* fail, PhaseIterGVN* igvn);
  bool is_cmp_with_loadrange(IfProjNode* proj) const;
  bool is_null_check(IfProjNode* proj, PhaseIterGVN* igvn) const;
  bool is_side_effect_free_test(IfProjNode* proj, PhaseIterGVN* igvn) const;
  static void reroute_side_effect_free_unc(IfProjNode* proj, IfProjNode* dom_proj, PhaseIterGVN* igvn);
  bool fold_compares_helper(IfProjNode* proj, IfProjNode* success, IfProjNode* fail, PhaseIterGVN* igvn);
  static bool is_dominator_unc(CallStaticJavaNode* dom_unc, CallStaticJavaNode* unc);

protected:
  IfProjNode* range_check_trap_proj(int& flip, Node*& l, Node*& r) const;
  Node* Ideal_common(PhaseGVN *phase, bool can_reshape);
  Node* search_identical(int dist, PhaseIterGVN* igvn);

  Node* simple_subsuming(PhaseIterGVN* igvn);

public:

  // Degrees of branch prediction probability by order of magnitude:
  // PROB_UNLIKELY_MAG(N) is a 1 in 1eN chance.
  // PROB_LIKELY_MAG(N)   is a 1 - PROB_UNLIKELY_MAG(N)
#define PROB_UNLIKELY_MAG(N) (1e- ## N ## f)
#define PROB_LIKELY_MAG(N) (1.0f-PROB_UNLIKELY_MAG(N))
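  // For example, PROB_UNLIKELY_MAG(6) token-pastes to (1e-6f), a 1 in
  // 1,000,000 chance, and PROB_LIKELY_MAG(6) expands to (1.0f - (1e-6f)).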

  // Maximum and minimum branch prediction probabilities
  // 1 in 1,000,000 (magnitude 6)
  //
  // Although PROB_NEVER == PROB_MIN and PROB_ALWAYS == PROB_MAX
  // they are used to distinguish different situations:
  //
  // The name PROB_MAX (PROB_MIN) is for probabilities which correspond to
  // very likely (unlikely) but with a concrete possibility of a rare
  // contrary case. These constants would be used for pinning
  // measurements, and as measures for assertions that have high
  // confidence, but some evidence of occasional failure.
  //
  // The name PROB_ALWAYS (PROB_NEVER) is to stand for situations for which
  // there is no evidence at all that the contrary case has ever occurred.

#define PROB_NEVER PROB_UNLIKELY_MAG(6)
#define PROB_ALWAYS PROB_LIKELY_MAG(6)

#define PROB_MIN PROB_UNLIKELY_MAG(6)
#define PROB_MAX PROB_LIKELY_MAG(6)

  // Static branch prediction probabilities
  // 1 in 10 (magnitude 1)
#define PROB_STATIC_INFREQUENT PROB_UNLIKELY_MAG(1)
#define PROB_STATIC_FREQUENT PROB_LIKELY_MAG(1)

  // Fair probability 50/50
#define PROB_FAIR (0.5f)

  // Unknown probability sentinel
#define PROB_UNKNOWN (-1.0f)

  // Probability "constructors", to distinguish as a probability any manifest
  // constant without a name
#define PROB_LIKELY(x) ((float) (x))
#define PROB_UNLIKELY(x) (1.0f - (float)(x))
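  // For example, PROB_LIKELY(0.99) is just the probability 0.99f, while
  // PROB_UNLIKELY(0.99) is its complement, 1.0f - 0.99f = 0.01f.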

  // Other probabilities in use, but without a unique name, are documented
  // here for lack of a better place:
  //
  // 1 in 1000 probabilities (magnitude 3):
  //     threshold for converting to conditional move
  //     likelihood of null check failure if a null HAS been seen before
  //     likelihood of slow path taken in library calls
  //
  // 1 in 10,000 probabilities (magnitude 4):
  //     threshold for making an uncommon trap probability more extreme
  //     threshold for making a null check implicit
  //     likelihood of needing a gc if eden top moves during an allocation
  //     likelihood of a predicted call failure
  //
  // 1 in 100,000 probabilities (magnitude 5):
  //     threshold for ignoring counts when estimating path frequency
  //     likelihood of FP clipping failure
  //     likelihood of catching an exception from a try block
  //     likelihood of null check failure if a null has NOT been seen before
  //
  // Magic manifest probabilities such as 0.83, 0.7, ... can be found in
  // gen_subtype_check() and catch_inline_exceptions().

  IfNode(Node* control, Node* bol, float p, float fcnt);
  IfNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type);

  static IfNode* make_with_same_profile(IfNode* if_node_profile, Node* ctrl, Node* bol);

  IfTrueNode* true_proj() const {
    return proj_out(true)->as_IfTrue();
  }

  IfTrueNode* true_proj_or_null() const {
    ProjNode* true_proj = proj_out_or_null(true);
    return true_proj == nullptr ? nullptr : true_proj->as_IfTrue();
  }

  IfFalseNode* false_proj() const {
    return proj_out(false)->as_IfFalse();
  }

  IfFalseNode* false_proj_or_null() const {
    ProjNode* false_proj = proj_out_or_null(false);
    return false_proj == nullptr ? nullptr : false_proj->as_IfFalse();
  }

  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint required_outcnt() const { return 2; }
  virtual const RegMask &out_RegMask() const;
  Node* fold_compares(PhaseIterGVN* phase);
  static Node* up_one_dom(Node* curr, bool linear_only = false);
  bool is_zero_trip_guard() const;
  Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool pin_array_access_nodes);
  ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;

  // Takes the type of val and filters it through the test represented
  // by if_proj and returns a more refined type if one is produced.
  // Returns null if it couldn't improve the type.
  static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);

  AssertionPredicateType assertion_predicate_type() const {
    return _assertion_predicate_type;
  }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif

  bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
};

class RangeCheckNode : public IfNode {
private:
  int is_range_check(Node*& range, Node*& index, jint& offset);

public:
  RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {
    init_class_id(Class_RangeCheck);
  }

  RangeCheckNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type)
    : IfNode(control, bol, p, fcnt, assertion_predicate_type) {
    init_class_id(Class_RangeCheck);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
};

// Special node that denotes a Parse Predicate added during parsing. A Parse Predicate serves as a placeholder to later
// create Regular Predicates (Runtime Predicates with possible Assertion Predicates) above it. Together they form a
// Predicate Block. The Parse Predicate and Regular Predicates share the same uncommon trap.
// There are three kinds of Parse Predicates:
// Loop Parse Predicate, Profiled Loop Parse Predicate (both used by Loop Predication), and Loop Limit Check Parse
// Predicate (used for integer overflow checks when creating a counted loop).
// More information about predicates can be found in loopPredicate.cpp.
class ParsePredicateNode : public IfNode {
  Deoptimization::DeoptReason _deopt_reason;

  // When a Parse Predicate loses its connection to a loop head, it will be marked useless by
  // EliminateUselessPredicates and cleaned up by Value(). It can also become useless when cloning it to both loops
  // during Loop Multiversioning - we no longer use the old version.
  PredicateState _predicate_state;
public:
  ParsePredicateNode(Node* control, Deoptimization::DeoptReason deopt_reason, PhaseGVN* gvn);
  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }

  Deoptimization::DeoptReason deopt_reason() const {
    return _deopt_reason;
  }

  bool is_useless() const {
    return _predicate_state == PredicateState::Useless;
  }

  void mark_useless(PhaseIterGVN& igvn);

  void mark_maybe_useful() {
    _predicate_state = PredicateState::MaybeUseful;
  }

  bool is_useful() const {
    return _predicate_state == PredicateState::Useful;
  }

  void mark_useful() {
    _predicate_state = PredicateState::Useful;
  }

  // Return the uncommon trap If projection of this Parse Predicate.
  ParsePredicateUncommonProj* uncommon_proj() const {
    return false_proj();
  }

  Node* uncommon_trap() const;

  Node* Ideal(PhaseGVN* phase, bool can_reshape) {
    return nullptr; // Don't optimize
  }

  const Type* Value(PhaseGVN* phase) const;
  NOT_PRODUCT(void dump_spec(outputStream* st) const;)
};

class IfProjNode : public CProjNode {
public:
  IfProjNode(IfNode *ifnode, uint idx) : CProjNode(ifnode,idx) {}
  virtual Node* Identity(PhaseGVN* phase);

  // Return the other IfProj node.
  IfProjNode* other_if_proj() const {
    return in(0)->as_If()->proj_out(1 - _con)->as_IfProj();
  }

  void pin_array_access_nodes(PhaseIterGVN* igvn);

protected:
  // Type of If input when this branch is always taken
  virtual bool always_taken(const TypeTuple* t) const = 0;
};

class IfTrueNode : public IfProjNode {
public:
  IfTrueNode( IfNode *ifnode ) : IfProjNode(ifnode,1) {
    init_class_id(Class_IfTrue);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFTRUE; }
};

class IfFalseNode : public IfProjNode {
public:
  IfFalseNode( IfNode *ifnode ) : IfProjNode(ifnode,0) {
    init_class_id(Class_IfFalse);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFFALSE; }
};


//------------------------------PCTableNode------------------------------------
// Build an indirect branch table. Given a control and a table index,
// control is passed to the Projection matching the table index. Used to
// implement switch statements and exception-handling capabilities.
// Undefined behavior if passed-in index is not inside the table.
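// For example (illustrative), the JumpNode below uses this table shape to
// implement a switch, with one JumpProjNode user per case target, while
// CatchNode uses it to fan exception control flow out to CatchProjNodes.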
class PCTableNode : public MultiBranchNode {
  virtual uint hash() const; // Target count; table size
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

public:
  const uint _size; // Number of targets

  PCTableNode( Node *ctrl, Node *idx, uint size ) : MultiBranchNode(2), _size(size) {
    init_class_id(Class_PCTable);
    init_req(0, ctrl);
    init_req(1, idx);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *bottom_type() const;
  virtual bool pinned() const { return true; }
  virtual uint required_outcnt() const { return _size; }
};

//------------------------------JumpNode---------------------------------------
// Indirect branch. Uses PCTable above to implement a switch statement.
// It emits as a table load and local branch.
class JumpNode : public PCTableNode {
  virtual uint size_of() const { return sizeof(*this); }
public:
  float* _probs; // probability of each projection
  float _fcnt;   // total number of times this Jump was executed
  JumpNode( Node* control, Node* switch_val, uint size, float* probs, float cnt)
    : PCTableNode(control, switch_val, size),
      _probs(probs), _fcnt(cnt) {
    init_class_id(Class_Jump);
  }
  virtual int Opcode() const;
  virtual const RegMask& out_RegMask() const;
  virtual const Node* is_block_proj() const { return this; }
};

class JumpProjNode : public JProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _dest_bci;
  const uint _proj_no;
  const int _switch_val;
public:
  JumpProjNode(Node* jumpnode, uint proj_no, int dest_bci, int switch_val)
    : JProjNode(jumpnode, proj_no), _dest_bci(dest_bci), _proj_no(proj_no), _switch_val(switch_val) {
    init_class_id(Class_JumpProj);
  }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int dest_bci() const { return _dest_bci; }
  int switch_val() const { return _switch_val; }
  uint proj_no() const { return _proj_no; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------CatchNode--------------------------------------
// Helper node to fork exceptions. "Catch" catches any exceptions thrown by
// a just-prior call. Looks like a PCTableNode but emits no code - just the
// table. The table lookup and branch are implemented by RethrowNode.
class CatchNode : public PCTableNode {
public:
  CatchNode( Node *ctrl, Node *idx, uint size ) : PCTableNode(ctrl,idx,size){
    init_class_id(Class_Catch);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
};

// CatchProjNode controls which exception handler is targeted after a call.
// It is passed in the bci of the target handler, or no_handler_bci in case
// the projection doesn't lead to an exception handler.
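// For example (an illustrative sketch), a CatchNode after a call that may
// throw typically has a fall-through CatchProjNode (fall_through_index, bci
// no_handler_bci) for the normal return path, plus exception-path projections:
// handler projections with handler_bci >= 0 for bytecode handlers covering
// the call site, and/or a catch-all projection (catch_all_index).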
class CatchProjNode : public CProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _handler_bci;

public:
  enum {
    fall_through_index = 0, // the fall through projection index
    catch_all_index = 1,    // the projection index for catch-alls
    no_handler_bci = -1     // the bci for fall through or catch-all projs
  };

  CatchProjNode(Node* catchnode, uint proj_no, int handler_bci)
    : CProjNode(catchnode, proj_no), _handler_bci(handler_bci) {
    init_class_id(Class_CatchProj);
    assert(proj_no != fall_through_index || handler_bci < 0, "fall through case must have bci < 0");
  }

  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  int handler_bci() const { return _handler_bci; }
  bool is_handler_proj() const { return _handler_bci >= 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};


//---------------------------------CreateExNode--------------------------------
// Helper node to create the exception coming back from a call
class CreateExNode : public TypeNode {
public:
  CreateExNode(const Type* t, Node* control, Node* i_o) : TypeNode(t, 2) {
    init_req(0, control);
    init_req(1, i_o);
  }
  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool pinned() const { return true; }
  uint match_edge(uint idx) const { return 0; }
  virtual uint ideal_reg() const { return Op_RegP; }
};

//------------------------------NeverBranchNode-------------------------------
// The never-taken branch. Used to give the appearance of exiting infinite
// loops to those algorithms that like all paths to be reachable. Encodes
// empty.
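//
// For example (an illustrative sketch), the head of an otherwise exit-less
// loop such as "while (true) { ... }" gets a NeverBranchNode whose never-taken
// projection supplies a nominal exit, so passes that expect every path to
// reach an exit still work.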
class NeverBranchNode : public MultiBranchNode {
public:
  NeverBranchNode(Node* ctrl) : MultiBranchNode(1) {
    init_req(0, ctrl);
    init_class_id(Class_NeverBranch);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint required_outcnt() const { return 2; }
  virtual void emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { }
  virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
#ifndef PRODUCT
  virtual void format( PhaseRegAlloc *, outputStream *st ) const;
#endif
};

//------------------------------BlackholeNode----------------------------
// Blackhole all arguments. This node survives through the compiler, keeping
// its arguments and their effects alive, and is finally matched to nothing.
class BlackholeNode : public MultiNode {
public:
  BlackholeNode(Node* ctrl) : MultiNode(1) {
    init_req(TypeFunc::Control, ctrl);
  }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
  virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);

  const RegMask &in_RegMask(uint idx) const {
    // Fake the incoming arguments mask for blackholes: accept all registers
    // and all stack slots. This would avoid any redundant register moves
    // for blackhole inputs.
    return RegMask::ALL;
  }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
#endif
};


#endif // SHARE_OPTO_CFGNODE_HPP