/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CFGNODE_HPP
#define SHARE_OPTO_CFGNODE_HPP

#include "opto/multnode.hpp"
#include "opto/node.hpp"
#include "opto/opcodes.hpp"
#include "opto/predicates_enums.hpp"
#include "opto/type.hpp"

// Portions of code courtesy of Clifford Click

// Optimization - Graph Style

class Matcher;
class Node;
class RegionNode;
class TypeNode;
class PhiNode;
class GotoNode;
class MultiNode;
class MultiBranchNode;
class IfNode;
class PCTableNode;
class JumpNode;
class CatchNode;
class NeverBranchNode;
class BlackholeNode;
class ProjNode;
class CProjNode;
class IfTrueNode;
class IfFalseNode;
class CatchProjNode;
class JProjNode;
class JumpProjNode;
class SCMemProjNode;
class PhaseIdealLoop;
enum class AssertionPredicateType;
enum class PredicateState;

//------------------------------RegionNode-------------------------------------
// The class of RegionNodes, which can be mapped to basic blocks in the
// program. Their inputs point to Control sources. PhiNodes (described
// below) have an input that points to a RegionNode. Merged data inputs to
// PhiNodes correspond 1-to-1 with RegionNode inputs. The zero input of a
// PhiNode is the RegionNode, and the zero input of the RegionNode is itself.
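//
// As an illustrative sketch only (the projection and value names below are
// made up for the example), an if/then/else merge looks roughly like this:
// control input i of the Region and data input i of the Phi describe the same
// incoming path, the Phi's slot 0 points at the Region, and the Region's
// slot 0 points at itself.
//
//      IfTrueProj    IfFalseProj
//            \          /
//       Region(self, tproj, fproj)
//              |
//       Phi(region, val_then, val_else)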
class RegionNode : public Node {
public:
  enum LoopStatus {
    // No guarantee: the region may be an irreducible loop entry, thus we have to
    // be careful when removing entry control to it.
    MaybeIrreducibleEntry,
    // Limited guarantee: this region may be (nested) inside an irreducible loop,
    // but it will never be an irreducible loop entry.
    NeverIrreducibleEntry,
    // Strong guarantee: this region is not (nested) inside an irreducible loop.
    Reducible,
  };

private:
  bool _is_unreachable_region;
  LoopStatus _loop_status;

  bool is_possible_unsafe_loop(const PhaseGVN* phase) const;
  bool is_unreachable_from_root(const PhaseGVN* phase) const;
public:
  // Node layout (parallels PhiNode):
  enum { Region,   // Generally points to self.
         Control   // Control arcs are [1..len)
  };

  RegionNode(uint required)
    : Node(required),
      _is_unreachable_region(false),
      _loop_status(LoopStatus::NeverIrreducibleEntry)
  {
    init_class_id(Class_Region);
    init_req(0, this);
  }

  Node* is_copy() const {
    const Node* r = _in[Region];
    if (r == nullptr)
      return nonnull_req();
    return nullptr;  // not a copy!
  }
  PhiNode* has_phi() const;        // returns an arbitrary phi user, or null
  PhiNode* has_unique_phi() const; // returns the unique phi user, or null
  // Is this region node unreachable from root?
  bool is_unreachable_region(const PhaseGVN* phase);
#ifdef ASSERT
  bool is_in_infinite_subgraph();
  static bool are_all_nodes_in_infinite_subgraph(Unique_Node_List& worklist);
#endif //ASSERT
  LoopStatus loop_status() const { return _loop_status; }
  void set_loop_status(LoopStatus status);
  bool can_be_irreducible_entry() const;

  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }
  virtual bool pinned() const { return (const Node*)in(0) == this; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  void remove_unreachable_subgraph(PhaseIterGVN* igvn);
  virtual const RegMask& out_RegMask() const;
  bool is_diamond() const;
  void try_clean_mem_phis(PhaseIterGVN* phase);
  bool optimize_trichotomy(PhaseIterGVN* igvn);
  NOT_PRODUCT(virtual void dump_spec(outputStream* st) const;)
};

//------------------------------JProjNode--------------------------------------
// jump projection for node that produces multiple control-flow paths
class JProjNode : public ProjNode {
public:
  JProjNode(Node* ctrl, uint idx) : ProjNode(ctrl, idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//------------------------------PhiNode----------------------------------------
// PhiNodes merge values from different Control paths. Slot 0 points to the
// controlling RegionNode. Other slots map 1-for-1 with incoming control flow
// paths to the RegionNode.
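//
// A minimal construction sketch (illustrative only; the variables iftrue,
// iffalse, val_then and val_else are assumptions for the example, not part of
// this interface):
//
//   RegionNode* r = new RegionNode(3);          // in(1) and in(2) are the two paths
//   r->init_req(1, iftrue);
//   r->init_req(2, iffalse);
//   PhiNode* phi = PhiNode::make(r, val_then);  // all data inputs start as val_then
//   phi->set_req(2, val_else);                  // value arriving along r->in(2)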
class PhiNode : public TypeNode {
  friend class PhaseRenumberLive;

  const TypePtr* const _adr_type; // non-null only for Type::MEMORY nodes.
  // The following fields are only used for data PhiNodes to indicate
  // that the PhiNode represents the value of a known instance field.
  int _inst_mem_id;       // Instance memory id (node index of the memory Phi)
  int _inst_id;           // Instance id of the memory slice.
  const int _inst_index;  // Alias index of the instance memory slice.
  // Array elements references have the same alias_idx but different offset.
  const int _inst_offset; // Offset of the instance memory slice.
  // Size is bigger to hold the _adr_type field.
  virtual uint hash() const;    // Check the type
  virtual bool cmp(const Node& n) const;
  virtual uint size_of() const { return sizeof(*this); }

  // Determine if CMoveNode::is_cmove_id can be used at this join point.
  Node* is_cmove_id(PhaseTransform* phase, int true_path);
  bool wait_for_region_igvn(PhaseGVN* phase);
  bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);

  static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
  static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);

  bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;

  bool can_push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass*& inline_klass);
  InlineTypeNode* push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass* inline_klass);

  bool is_split_through_mergemem_terminating() const;

public:
  // Node layout (parallels RegionNode):
  enum { Region,   // Control input is the Phi's region.
         Input     // Input values are [1..len)
  };

  PhiNode(Node* r, const Type* t, const TypePtr* at = nullptr,
          const int imid = -1,
          const int iid = TypeOopPtr::InstanceTop,
          const int iidx = Compile::AliasIdxTop,
          const int ioffs = Type::OffsetTop)
    : TypeNode(t, r->req()),
      _adr_type(at),
      _inst_mem_id(imid),
      _inst_id(iid),
      _inst_index(iidx),
      _inst_offset(ioffs)
  {
    init_class_id(Class_Phi);
    init_req(0, r);
    verify_adr_type();
  }
  // create a new phi with in edges matching r and set (initially) to x
  static PhiNode* make(Node* r, Node* x);
  // extra type arguments override the new phi's bottom_type and adr_type
  static PhiNode* make(Node* r, Node* x, const Type* t, const TypePtr* at = nullptr);
  // create a new phi with narrowed memory type
  PhiNode* slice_memory(const TypePtr* adr_type) const;
  PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN* igvn) const;
  // like make(r, x), but does not initialize the in edges to x
  static PhiNode* make_blank(Node* r, Node* x);

  // Accessors
  RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }

  bool is_tripcount(BasicType bt) const;

  // Determine a unique non-trivial input, if any.
  // Ignore casts if it helps. Return null on failure.
  Node* unique_input(PhaseValues* phase, bool uncast);
  Node* unique_input(PhaseValues* phase) {
    Node* uin = unique_input(phase, false);
    if (uin == nullptr) {
      uin = unique_input(phase, true);
    }
    return uin;
  }
  Node* unique_input_recursive(PhaseGVN* phase);
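  // For example (illustrative only), a phi whose live data inputs all turn
  // out to be the same node -- possibly hidden behind casts -- has that node
  // as its unique input and can simply be replaced by it:
  //
  //   if (Node* uin = phi->unique_input(phase)) {
  //     // all live inputs agree (up to casts); phi collapses to uin
  //   }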

  // Check for a simple dead loop.
  enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
  LoopSafety simple_data_loop_check(Node* in) const;
  // Is it an unsafe data loop? It becomes a dead loop if this phi node is removed.
  bool is_unsafe_data_reference(Node* in) const;
  int is_diamond_phi() const;
  bool try_clean_memory_phi(PhaseIterGVN* igvn);
  virtual int Opcode() const;
  virtual bool pinned() const { return in(0) != nullptr; }
  virtual const TypePtr* adr_type() const { verify_adr_type(true); return _adr_type; }

  void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
  int inst_mem_id() const { return _inst_mem_id; }
  int inst_id()     const { return _inst_id; }
  int inst_index()  const { return _inst_index; }
  int inst_offset() const { return _inst_offset; }
  bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
    return type()->basic_type() == tp->basic_type() &&
           inst_mem_id() == mem_id &&
           inst_id()     == id     &&
           inst_index()  == index  &&
           inst_offset() == offset &&
           type()->higher_equal(tp);
  }

  bool can_be_inline_type() const {
    return EnableValhalla && _type->isa_instptr() && _type->is_instptr()->can_be_inline_type();
  }

  Node* try_push_inline_types_down(PhaseGVN* phase, bool can_reshape);
  DEBUG_ONLY(bool can_push_inline_types_down(PhaseGVN* phase);)

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  virtual const RegMask& out_RegMask() const;
  virtual const RegMask& in_RegMask(uint) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream* st) const;
#endif
#ifdef ASSERT
  void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
  void verify_adr_type(bool recursive = false) const;
#else  //ASSERT
  void verify_adr_type(bool recursive = false) const {}
#endif //ASSERT

  const TypeTuple* collect_types(PhaseGVN* phase) const;
};

//------------------------------GotoNode---------------------------------------
// GotoNodes perform direct branches.
class GotoNode : public Node {
public:
  GotoNode(Node* control) : Node(control) {}
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return this; }
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const RegMask& out_RegMask() const;
};

//------------------------------CProjNode--------------------------------------
// control projection for node that produces multiple control-flow paths
class CProjNode : public ProjNode {
public:
  CProjNode(Node* ctrl, uint idx) : ProjNode(ctrl, idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//---------------------------MultiBranchNode-----------------------------------
// This class defines a MultiBranchNode, a MultiNode which yields multiple
// control values. These are distinguished from other types of MultiNodes
// which yield multiple values, but control is always and only projection #0.
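// For example, IfNode below is a MultiBranchNode with required_outcnt() == 2,
// its two control users being the IfTrue and IfFalse projections.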
class MultiBranchNode : public MultiNode {
public:
  MultiBranchNode(uint required) : MultiNode(required) {
    init_class_id(Class_MultiBranch);
  }
  // returns the required number of users to be well formed.
  virtual int required_outcnt() const = 0;
};

//------------------------------IfNode-----------------------------------------
// Output selected Control, based on a boolean test
class IfNode : public MultiBranchNode {
public:
  float _prob;  // Probability of true path being taken.
  float _fcnt;  // Frequency counter

private:
  AssertionPredicateType _assertion_predicate_type;

  void init_node(Node* control, Node* bol) {
    init_class_id(Class_If);
    init_req(0, control);
    init_req(1, bol);
  }

  // Size is bigger to hold the probability field. However, _prob does not
  // change the semantics so it does not appear in the hash & cmp functions.
  virtual uint size_of() const { return sizeof(*this); }

  // Helper methods for fold_compares
  bool cmpi_folds(PhaseIterGVN* igvn, bool fold_ne = false);
  bool is_ctrl_folds(Node* ctrl, PhaseIterGVN* igvn);
  bool has_shared_region(ProjNode* proj, ProjNode*& success, ProjNode*& fail);
  bool has_only_uncommon_traps(ProjNode* proj, ProjNode*& success, ProjNode*& fail, PhaseIterGVN* igvn);
  Node* merge_uncommon_traps(ProjNode* proj, ProjNode* success, ProjNode* fail, PhaseIterGVN* igvn);
  static void improve_address_types(Node* l, Node* r, ProjNode* fail, PhaseIterGVN* igvn);
  bool is_cmp_with_loadrange(ProjNode* proj);
  bool is_null_check(ProjNode* proj, PhaseIterGVN* igvn);
  bool is_side_effect_free_test(ProjNode* proj, PhaseIterGVN* igvn);
  void reroute_side_effect_free_unc(ProjNode* proj, ProjNode* dom_proj, PhaseIterGVN* igvn);
  bool fold_compares_helper(ProjNode* proj, ProjNode* success, ProjNode* fail, PhaseIterGVN* igvn);
  static bool is_dominator_unc(CallStaticJavaNode* dom_unc, CallStaticJavaNode* unc);

protected:
  ProjNode* range_check_trap_proj(int& flip, Node*& l, Node*& r);
  Node* Ideal_common(PhaseGVN* phase, bool can_reshape);
  Node* search_identical(int dist, PhaseIterGVN* igvn);

  Node* simple_subsuming(PhaseIterGVN* igvn);

public:

  // Degrees of branch prediction probability by order of magnitude:
  // PROB_UNLIKELY_MAG(N) is a 1 in 1eN chance.
  // PROB_LIKELY_MAG(N) is 1 - PROB_UNLIKELY_MAG(N).
#define PROB_UNLIKELY_MAG(N)    (1e- ## N ## f)
#define PROB_LIKELY_MAG(N)      (1.0f-PROB_UNLIKELY_MAG(N))

  // Maximum and minimum branch prediction probabilities
  // 1 in 1,000,000 (magnitude 6)
  //
  // Although PROB_NEVER == PROB_MIN and PROB_ALWAYS == PROB_MAX
  // they are used to distinguish different situations:
  //
  // The name PROB_MAX (PROB_MIN) is for probabilities which correspond to
  // very likely (unlikely) but with a concrete possibility of a rare
  // contrary case. These constants would be used for pinning
  // measurements, and as measures for assertions that have high
  // confidence, but some evidence of occasional failure.
  //
  // The name PROB_ALWAYS (PROB_NEVER) is to stand for situations for which
  // there is no evidence at all that the contrary case has ever occurred.

#define PROB_NEVER              PROB_UNLIKELY_MAG(6)
#define PROB_ALWAYS             PROB_LIKELY_MAG(6)

#define PROB_MIN                PROB_UNLIKELY_MAG(6)
#define PROB_MAX                PROB_LIKELY_MAG(6)

  // Static branch prediction probabilities
  // 1 in 10 (magnitude 1)
#define PROB_STATIC_INFREQUENT  PROB_UNLIKELY_MAG(1)
#define PROB_STATIC_FREQUENT    PROB_LIKELY_MAG(1)

  // Fair probability 50/50
#define PROB_FAIR               (0.5f)

  // Unknown probability sentinel
#define PROB_UNKNOWN            (-1.0f)

  // Probability "constructors", to distinguish as a probability any manifest
  // constant without a name
#define PROB_LIKELY(x)          ((float) (x))
#define PROB_UNLIKELY(x)        (1.0f - (float)(x))

  // Other probabilities in use, but without a unique name, are documented
  // here for lack of a better place:
  //
  // 1 in 1000 probabilities (magnitude 3):
  //     threshold for converting to conditional move
  //     likelihood of null check failure if a null HAS been seen before
  //     likelihood of slow path taken in library calls
  //
  // 1 in 10,000 probabilities (magnitude 4):
  //     threshold for making an uncommon trap probability more extreme
  //     threshold for making a null check implicit
  //     likelihood of needing a gc if eden top moves during an allocation
  //     likelihood of a predicted call failure
  //
  // 1 in 100,000 probabilities (magnitude 5):
  //     threshold for ignoring counts when estimating path frequency
  //     likelihood of FP clipping failure
  //     likelihood of catching an exception from a try block
  //     likelihood of null check failure if a null has NOT been seen before
  //
  // Magic manifest probabilities such as 0.83, 0.7, ... can be found in
  // gen_subtype_check() and catch_inline_exceptions().
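
  // For example (illustrative only):
  //   PROB_UNLIKELY_MAG(6) expands to 1e-6f, so PROB_NEVER == PROB_MIN == 1e-6f
  //   and PROB_ALWAYS == PROB_MAX == 1.0f - 1e-6f;
  //   PROB_STATIC_FREQUENT is 0.9f, PROB_FAIR is 0.5f, and PROB_UNKNOWN (-1.0f)
  //   marks a probability that has not been computed.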

  IfNode(Node* control, Node* bol, float p, float fcnt);
  IfNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type);

  static IfNode* make_with_same_profile(IfNode* if_node_profile, Node* ctrl, Node* bol);

  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type* bottom_type() const { return TypeTuple::IFBOTH; }
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual int required_outcnt() const { return 2; }
  virtual const RegMask& out_RegMask() const;
  Node* fold_compares(PhaseIterGVN* phase);
  static Node* up_one_dom(Node* curr, bool linear_only = false);
  bool is_zero_trip_guard() const;
  Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn, bool pin_array_access_nodes);
  ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call, Deoptimization::DeoptReason reason = Deoptimization::Reason_none) const;

  // Takes the type of val and filters it through the test represented
  // by if_proj and returns a more refined type if one is produced.
  // Returns null if it couldn't improve the type.
  static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);

  bool is_flat_array_check(PhaseTransform* phase, Node** array = nullptr);

  AssertionPredicateType assertion_predicate_type() const {
    return _assertion_predicate_type;
  }

#ifndef PRODUCT
  virtual void dump_spec(outputStream* st) const;
#endif

  bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
};

class RangeCheckNode : public IfNode {
private:
  int is_range_check(Node*& range, Node*& index, jint& offset);

public:
  RangeCheckNode(Node* control, Node* bol, float p, float fcnt) : IfNode(control, bol, p, fcnt) {
    init_class_id(Class_RangeCheck);
  }

  RangeCheckNode(Node* control, Node* bol, float p, float fcnt, AssertionPredicateType assertion_predicate_type)
    : IfNode(control, bol, p, fcnt, assertion_predicate_type) {
    init_class_id(Class_RangeCheck);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
};

// Special node that denotes a Parse Predicate added during parsing. A Parse Predicate serves as a placeholder to later
// create Regular Predicates (Runtime Predicates with possible Assertion Predicates) above it. Together they form a
// Predicate Block. The Parse Predicate and Regular Predicates share the same uncommon trap.
// There are three kinds of Parse Predicates:
// Loop Parse Predicate, Profiled Loop Parse Predicate (both used by Loop Predication), and Loop Limit Check Parse
// Predicate (used for integer overflow checks when creating a counted loop).
// More information about predicates can be found in loopPredicate.cpp.
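//
// Schematically (an illustrative sketch only; Regular Predicates are created
// above the Parse Predicate over time and all share its uncommon trap):
//
//      entry control
//            |
//      Regular Predicate 1 ... Regular Predicate n
//            |
//      ParsePredicateNode ----false----> uncommon trap
//            |
//        loop head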
class ParsePredicateNode : public IfNode {
  Deoptimization::DeoptReason _deopt_reason;

  // When a Parse Predicate loses its connection to a loop head, it will be marked useless by
  // EliminateUselessPredicates and cleaned up by Value(). It can also become useless when cloning it to both loops
  // during Loop Multiversioning - we no longer use the old version.
  PredicateState _predicate_state;
public:
  ParsePredicateNode(Node* control, Deoptimization::DeoptReason deopt_reason, PhaseGVN* gvn);
  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }

  Deoptimization::DeoptReason deopt_reason() const {
    return _deopt_reason;
  }

  bool is_useless() const {
    return _predicate_state == PredicateState::Useless;
  }

  void mark_useless(PhaseIterGVN& igvn);

  void mark_maybe_useful() {
    _predicate_state = PredicateState::MaybeUseful;
  }

  bool is_useful() const {
    return _predicate_state == PredicateState::Useful;
  }

  void mark_useful() {
    _predicate_state = PredicateState::Useful;
  }

  // Return the uncommon trap If projection of this Parse Predicate.
  ParsePredicateUncommonProj* uncommon_proj() const {
    return proj_out(0)->as_IfFalse();
  }

  Node* uncommon_trap() const;

  Node* Ideal(PhaseGVN* phase, bool can_reshape) {
    return nullptr; // Don't optimize
  }

  const Type* Value(PhaseGVN* phase) const;
  NOT_PRODUCT(void dump_spec(outputStream* st) const;)
};

class IfProjNode : public CProjNode {
public:
  IfProjNode(IfNode* ifnode, uint idx) : CProjNode(ifnode, idx) {}
  virtual Node* Identity(PhaseGVN* phase);

  void pin_array_access_nodes(PhaseIterGVN* igvn);

protected:
  // Type of If input when this branch is always taken
  virtual bool always_taken(const TypeTuple* t) const = 0;
};

class IfTrueNode : public IfProjNode {
public:
  IfTrueNode(IfNode* ifnode) : IfProjNode(ifnode, 1) {
    init_class_id(Class_IfTrue);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFTRUE; }
};

class IfFalseNode : public IfProjNode {
public:
  IfFalseNode(IfNode* ifnode) : IfProjNode(ifnode, 0) {
    init_class_id(Class_IfFalse);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFFALSE; }
};


//------------------------------PCTableNode------------------------------------
// Build an indirect branch table. Given a control and a table index,
// control is passed to the Projection matching the table index. Used to
// implement switch statements and exception-handling capabilities.
// Undefined behavior if the passed-in index is not inside the table.
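// For example (illustrative only), a bytecode tableswitch with N targets is
// implemented with a JumpNode (a PCTableNode subclass, see below) that has N
// JumpProjNodes as control users, one per case, each carrying the dest_bci of
// its target.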
class PCTableNode : public MultiBranchNode {
  virtual uint hash() const;  // Target count; table size
  virtual bool cmp(const Node& n) const;
  virtual uint size_of() const { return sizeof(*this); }

public:
  const uint _size;  // Number of targets

  PCTableNode(Node* ctrl, Node* idx, uint size) : MultiBranchNode(2), _size(size) {
    init_class_id(Class_PCTable);
    init_req(0, ctrl);
    init_req(1, idx);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  virtual const Type* bottom_type() const;
  virtual bool pinned() const { return true; }
  virtual int required_outcnt() const { return _size; }
};

//------------------------------JumpNode---------------------------------------
// Indirect branch. Uses PCTable above to implement a switch statement.
// It emits as a table load and local branch.
class JumpNode : public PCTableNode {
  virtual uint size_of() const { return sizeof(*this); }
public:
  float* _probs;  // probability of each projection
  float  _fcnt;   // total number of times this Jump was executed
  JumpNode(Node* control, Node* switch_val, uint size, float* probs, float cnt)
    : PCTableNode(control, switch_val, size),
      _probs(probs), _fcnt(cnt) {
    init_class_id(Class_Jump);
  }
  virtual int Opcode() const;
  virtual const RegMask& out_RegMask() const;
  virtual const Node* is_block_proj() const { return this; }
};

class JumpProjNode : public JProjNode {
  virtual uint hash() const;
  virtual bool cmp(const Node& n) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int  _dest_bci;
  const uint _proj_no;
  const int  _switch_val;
public:
  JumpProjNode(Node* jumpnode, uint proj_no, int dest_bci, int switch_val)
    : JProjNode(jumpnode, proj_no), _dest_bci(dest_bci), _proj_no(proj_no), _switch_val(switch_val) {
    init_class_id(Class_JumpProj);
  }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int  dest_bci()   const { return _dest_bci; }
  int  switch_val() const { return _switch_val; }
  uint proj_no()    const { return _proj_no; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream* st) const;
  virtual void dump_compact_spec(outputStream* st) const;
#endif
};

//------------------------------CatchNode--------------------------------------
// Helper node to fork exceptions. "Catch" catches any exceptions thrown by
// a just-prior call. Looks like a PCTableNode but emits no code - just the
// table. The table lookup and branch is implemented by RethrowNode.
class CatchNode : public PCTableNode {
public:
  CatchNode(Node* ctrl, Node* idx, uint size) : PCTableNode(ctrl, idx, size) {
    init_class_id(Class_Catch);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
};

// CatchProjNode controls which exception handler is targeted after a call.
// It carries the bci of the target handler, or no_handler_bci in case
// the projection doesn't lead to an exception handler.
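//
// For example (illustrative only), for a call inside a try block the Catch
// fans out into: the fall-through projection (fall_through_index, normal
// return), the catch-all projection (catch_all_index, rethrow to the caller),
// and one handler projection per matching handler, each carrying that
// handler's bci (is_handler_proj() is true only for the latter).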
class CatchProjNode : public CProjNode {
  virtual uint hash() const;
  virtual bool cmp(const Node& n) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _handler_bci;

public:
  enum {
    fall_through_index =  0,  // the fall through projection index
    catch_all_index    =  1,  // the projection index for catch-alls
    no_handler_bci     = -1   // the bci for fall through or catch-all projs
  };

  CatchProjNode(Node* catchnode, uint proj_no, int handler_bci)
    : CProjNode(catchnode, proj_no), _handler_bci(handler_bci) {
    init_class_id(Class_CatchProj);
    assert(proj_no != fall_through_index || handler_bci < 0, "fall through case must have bci < 0");
  }

  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int  handler_bci() const     { return _handler_bci; }
  bool is_handler_proj() const { return _handler_bci >= 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream* st) const;
#endif
};


//---------------------------------CreateExNode--------------------------------
// Helper node to create the exception coming back from a call
class CreateExNode : public TypeNode {
public:
  CreateExNode(const Type* t, Node* control, Node* i_o) : TypeNode(t, 2) {
    init_req(0, control);
    init_req(1, i_o);
  }
  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool pinned() const { return true; }
  uint match_edge(uint idx) const { return 0; }
  virtual uint ideal_reg() const { return Op_RegP; }
};

//------------------------------NeverBranchNode-------------------------------
// The never-taken branch. Used to give the appearance of exiting infinite
// loops to those algorithms that like all paths to be reachable. Encodes
// empty.
class NeverBranchNode : public MultiBranchNode {
public:
  NeverBranchNode(Node* ctrl) : MultiBranchNode(1) {
    init_req(0, ctrl);
    init_class_id(Class_NeverBranch);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type* bottom_type() const { return TypeTuple::IFBOTH; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  virtual int required_outcnt() const { return 2; }
  virtual void emit(C2_MacroAssembler* masm, PhaseRegAlloc* ra_) const {}
  virtual uint size(PhaseRegAlloc* ra_) const { return 0; }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc*, outputStream* st) const;
#endif
};

//------------------------------BlackholeNode----------------------------
// Blackhole all arguments. This node survives through the compiler in order
// to preserve the effects on its arguments, and is finally matched to nothing.
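// For example (an assumption about typical use rather than a guarantee of
// this interface), a method blackholed via the CompileCommand 'blackhole'
// directive (as used for JMH-style Blackhole::consume) gets its arguments
// attached to a BlackholeNode so their values stay live up to that point.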
class BlackholeNode : public MultiNode {
public:
  BlackholeNode(Node* ctrl) : MultiNode(1) {
    init_req(TypeFunc::Control, ctrl);
    init_class_id(Class_Blackhole);
  }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; }  // not matched in the AD file
  virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);

  const RegMask& in_RegMask(uint idx) const {
    // Fake the incoming arguments mask for blackholes: accept all registers
    // and all stack slots. This would avoid any redundant register moves
    // for blackhole inputs.
    return RegMask::All;
  }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
#endif
};


#endif // SHARE_OPTO_CFGNODE_HPP