/*
 * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CFGNODE_HPP
#define SHARE_OPTO_CFGNODE_HPP

#include "opto/multnode.hpp"
#include "opto/node.hpp"
#include "opto/opcodes.hpp"
#include "opto/type.hpp"

// Portions of code courtesy of Clifford Click

// Optimization - Graph Style

class Matcher;
class Node;
class RegionNode;
class TypeNode;
class PhiNode;
class GotoNode;
class MultiNode;
class MultiBranchNode;
class IfNode;
class PCTableNode;
class JumpNode;
class CatchNode;
class NeverBranchNode;
class BlackholeNode;
class ProjNode;
class CProjNode;
class IfTrueNode;
class IfFalseNode;
class CatchProjNode;
class JProjNode;
class JumpProjNode;
class SCMemProjNode;
class PhaseIdealLoop;

// The success projection of a Parse Predicate is always an IfTrueNode and the uncommon projection an IfFalseNode
typedef IfTrueNode ParsePredicateSuccessProj;
typedef IfFalseNode ParsePredicateUncommonProj;

//------------------------------RegionNode-------------------------------------
// The class of RegionNodes, which can be mapped to basic blocks in the
// program.  Their inputs point to Control sources.  PhiNodes (described
// below) have an input point to a RegionNode.  Merged data inputs to PhiNodes
// correspond 1-to-1 with RegionNode inputs.  The zero input of a PhiNode is
// the RegionNode, and the zero input of the RegionNode is itself.
class RegionNode : public Node {
public:
  enum LoopStatus {
    // No guarantee: the region may be an irreducible loop entry, thus we have to
    // be careful when removing entry control to it.
    MaybeIrreducibleEntry,
    // Limited guarantee: this region may be (nested) inside an irreducible loop,
    // but it will never be an irreducible loop entry.
    NeverIrreducibleEntry,
    // Strong guarantee: this region is not (nested) inside an irreducible loop.
    Reducible,
  };

private:
  bool _is_unreachable_region;
  LoopStatus _loop_status;

  bool is_possible_unsafe_loop(const PhaseGVN* phase) const;
  bool is_unreachable_from_root(const PhaseGVN* phase) const;
public:
  // Node layout (parallels PhiNode):
  enum { Region,                // Generally points to self.
         Control                // Control arcs are [1..len)
  };

  RegionNode(uint required)
    : Node(required),
      _is_unreachable_region(false),
      _loop_status(LoopStatus::NeverIrreducibleEntry)
  {
    init_class_id(Class_Region);
    init_req(0, this);
  }

  Node* is_copy() const {
    const Node* r = _in[Region];
    if (r == nullptr)
      return nonnull_req();
    return nullptr;  // not a copy!
  }
  PhiNode* has_phi() const;        // returns an arbitrary phi user, or null
  PhiNode* has_unique_phi() const; // returns the unique phi user, or null
  // Is this region node unreachable from root?
  bool is_unreachable_region(const PhaseGVN* phase);
#ifdef ASSERT
  bool is_in_infinite_subgraph();
  static bool are_all_nodes_in_infinite_subgraph(Unique_Node_List& worklist);
#endif //ASSERT
  LoopStatus loop_status() const { return _loop_status; };
  void set_loop_status(LoopStatus status);
  DEBUG_ONLY(void verify_can_be_irreducible_entry() const;)

  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }
  virtual bool pinned() const { return (const Node*)in(0) == this; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  void remove_unreachable_subgraph(PhaseIterGVN* igvn);
  virtual const RegMask &out_RegMask() const;
  bool optimize_trichotomy(PhaseIterGVN* igvn);
  NOT_PRODUCT(virtual void dump_spec(outputStream* st) const;)
};

//------------------------------JProjNode--------------------------------------
// jump projection for node that produces multiple control-flow paths
class JProjNode : public ProjNode {
public:
  JProjNode( Node* ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node* is_block_proj() const { return in(0); }
  virtual const RegMask& out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//------------------------------PhiNode----------------------------------------
// PhiNodes merge values from different Control paths.  Slot 0 points to the
// controlling RegionNode.  Other slots map 1-for-1 with incoming control flow
// paths to the RegionNode.
class PhiNode : public TypeNode {
  friend class PhaseRenumberLive;

  const TypePtr* const _adr_type; // non-null only for Type::MEMORY nodes.
  // The following fields are only used for data PhiNodes to indicate
  // that the PhiNode represents the value of a known instance field.
  int _inst_mem_id;       // Instance memory id (node index of the memory Phi)
  int _inst_id;           // Instance id of the memory slice.
  const int _inst_index;  // Alias index of the instance memory slice.
  // Array elements references have the same alias_idx but different offset.
  const int _inst_offset; // Offset of the instance memory slice.
  // Size is bigger to hold the _adr_type field.
  virtual uint hash() const;    // Check the type
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

  // Determine if CMoveNode::is_cmove_id can be used at this join point.
  Node* is_cmove_id(PhaseTransform* phase, int true_path);
  bool wait_for_region_igvn(PhaseGVN* phase);
  bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);

  static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
  static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);

  bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;

public:
  // Node layout (parallels RegionNode):
  enum { Region,                // Control input is the Phi's region.
         Input                  // Input values are [1..len)
  };

  PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
           const int imid = -1,
           const int iid = TypeOopPtr::InstanceTop,
           const int iidx = Compile::AliasIdxTop,
           const int ioffs = Type::OffsetTop )
    : TypeNode(t,r->req()),
      _adr_type(at),
      _inst_mem_id(imid),
      _inst_id(iid),
      _inst_index(iidx),
      _inst_offset(ioffs)
  {
    init_class_id(Class_Phi);
    init_req(0, r);
    verify_adr_type();
  }
  // create a new phi with in edges matching r and set (initially) to x
  static PhiNode* make( Node* r, Node* x );
  // extra type arguments override the new phi's bottom_type and adr_type
  static PhiNode* make( Node* r, Node* x, const Type *t, const TypePtr* at = nullptr );
  // create a new phi with narrowed memory type
  PhiNode* slice_memory(const TypePtr* adr_type) const;
  PhiNode* split_out_instance(const TypePtr* at, PhaseIterGVN *igvn) const;
  // like make(r, x), but does not initialize the in edges to x
  static PhiNode* make_blank( Node* r, Node* x );

  // Accessors
  RegionNode* region() const { Node* r = in(Region); assert(!r || r->is_Region(), ""); return (RegionNode*)r; }

  bool is_tripcount(BasicType bt) const;

  // Determine a unique non-trivial input, if any.
  // Ignore casts if it helps.  Return null on failure.
  Node* unique_input(PhaseTransform *phase, bool uncast);
  Node* unique_input(PhaseTransform *phase) {
    Node* uin = unique_input(phase, false);
    if (uin == nullptr) {
      uin = unique_input(phase, true);
    }
    return uin;
  }

  // Check for a simple dead loop.
  enum LoopSafety { Safe = 0, Unsafe, UnsafeLoop };
  LoopSafety simple_data_loop_check(Node *in) const;
  // Is it an unsafe data loop? It becomes a dead loop if this phi node is removed.
  bool is_unsafe_data_reference(Node *in) const;
  int  is_diamond_phi(bool check_control_only = false) const;
  virtual int Opcode() const;
  virtual bool pinned() const { return in(0) != 0; }
  virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }

  void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
  const int inst_mem_id() const { return _inst_mem_id; }
  const int inst_id()     const { return _inst_id; }
  const int inst_index()  const { return _inst_index; }
  const int inst_offset() const { return _inst_offset; }
  bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
    return type()->basic_type() == tp->basic_type() &&
           inst_mem_id() == mem_id &&
           inst_id()     == id     &&
           inst_index()  == index  &&
           inst_offset() == offset &&
           type()->higher_equal(tp);
  }
  Node* try_clean_mem_phi(PhaseGVN *phase);

  InlineTypeNode* push_inline_types_through(PhaseGVN* phase, bool can_reshape, ciInlineKlass* vk, bool is_init);

  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const RegMask &out_RegMask() const;
  virtual const RegMask &in_RegMask(uint) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
  void verify_adr_type(bool recursive = false) const;
#else //ASSERT
  void verify_adr_type(bool recursive = false) const {}
#endif //ASSERT
};

//------------------------------GotoNode---------------------------------------
// GotoNodes perform direct branches.
class GotoNode : public Node {
public:
  GotoNode( Node *control ) : Node(control) {}
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return this; }
  virtual bool depends_only_on_test() const { return false; }
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const RegMask &out_RegMask() const;
};

//------------------------------CProjNode--------------------------------------
// control projection for node that produces multiple control-flow paths
class CProjNode : public ProjNode {
public:
  CProjNode( Node *ctrl, uint idx ) : ProjNode(ctrl,idx) {}
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
  virtual const Node *is_block_proj() const { return in(0); }
  virtual const RegMask &out_RegMask() const;
  virtual uint ideal_reg() const { return 0; }
};

//---------------------------MultiBranchNode-----------------------------------
// This class defines a MultiBranchNode, a MultiNode which yields multiple
// control values. This distinguishes it from other MultiNodes, which also
// yield multiple values but whose control is always and only projection #0.
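// For example, an IfNode (below) is a MultiBranchNode whose two control
// outputs are materialized as separate IfTrueNode and IfFalseNode projections.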
class MultiBranchNode : public MultiNode {
public:
  MultiBranchNode( uint required ) : MultiNode(required) {
    init_class_id(Class_MultiBranch);
  }
  // Returns the required number of users for this node to be well formed.
  virtual int required_outcnt() const = 0;
};

//------------------------------IfNode-----------------------------------------
// Output selected Control, based on a boolean test
class IfNode : public MultiBranchNode {
  // Size is bigger to hold the probability field.  However, _prob does not
  // change the semantics so it does not appear in the hash & cmp functions.
  virtual uint size_of() const { return sizeof(*this); }

private:
  // Helper methods for fold_compares
  bool cmpi_folds(PhaseIterGVN* igvn, bool fold_ne = false);
  bool is_ctrl_folds(Node* ctrl, PhaseIterGVN* igvn);
  bool has_shared_region(ProjNode* proj, ProjNode*& success, ProjNode*& fail);
  bool has_only_uncommon_traps(ProjNode* proj, ProjNode*& success, ProjNode*& fail, PhaseIterGVN* igvn);
  Node* merge_uncommon_traps(ProjNode* proj, ProjNode* success, ProjNode* fail, PhaseIterGVN* igvn);
  static void improve_address_types(Node* l, Node* r, ProjNode* fail, PhaseIterGVN* igvn);
  bool is_cmp_with_loadrange(ProjNode* proj);
  bool is_null_check(ProjNode* proj, PhaseIterGVN* igvn);
  bool is_side_effect_free_test(ProjNode* proj, PhaseIterGVN* igvn);
  void reroute_side_effect_free_unc(ProjNode* proj, ProjNode* dom_proj, PhaseIterGVN* igvn);
  ProjNode* uncommon_trap_proj(CallStaticJavaNode*& call) const;
  bool fold_compares_helper(ProjNode* proj, ProjNode* success, ProjNode* fail, PhaseIterGVN* igvn);
  static bool is_dominator_unc(CallStaticJavaNode* dom_unc, CallStaticJavaNode* unc);

protected:
  ProjNode* range_check_trap_proj(int& flip, Node*& l, Node*& r);
  Node* Ideal_common(PhaseGVN *phase, bool can_reshape);
  Node* search_identical(int dist);

  Node* simple_subsuming(PhaseIterGVN* igvn);

public:

  // Degrees of branch prediction probability by order of magnitude:
  // PROB_UNLIKELY_1e(N) is a 1 in 1eN chance.
  // PROB_LIKELY_1e(N)   is a 1 - PROB_UNLIKELY_1e(N)
#define PROB_UNLIKELY_MAG(N)    (1e- ## N ## f)
#define PROB_LIKELY_MAG(N)      (1.0f-PROB_UNLIKELY_MAG(N))

  // Maximum and minimum branch prediction probabilities
  // 1 in 1,000,000 (magnitude 6)
  //
  // Although PROB_NEVER == PROB_MIN and PROB_ALWAYS == PROB_MAX
  // they are used to distinguish different situations:
  //
  // The name PROB_MAX (PROB_MIN) is for probabilities which correspond to
  // very likely (unlikely) but with a concrete possibility of a rare
  // contrary case.  These constants would be used for pinning
  // measurements, and as measures for assertions that have high
  // confidence, but some evidence of occasional failure.
  //
  // The name PROB_ALWAYS (PROB_NEVER) is to stand for situations for which
  // there is no evidence at all that the contrary case has ever occurred.
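  //
  // For example, PROB_UNLIKELY_MAG(6) expands to (1e-6f), a 1 in 1,000,000
  // chance, and PROB_LIKELY_MAG(6) expands to (1.0f - 1e-6f); the
  // PROB_NEVER/PROB_MIN and PROB_ALWAYS/PROB_MAX constants below are
  // defined in terms of these.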

#define PROB_NEVER              PROB_UNLIKELY_MAG(6)
#define PROB_ALWAYS             PROB_LIKELY_MAG(6)

#define PROB_MIN                PROB_UNLIKELY_MAG(6)
#define PROB_MAX                PROB_LIKELY_MAG(6)

  // Static branch prediction probabilities
  // 1 in 10 (magnitude 1)
#define PROB_STATIC_INFREQUENT  PROB_UNLIKELY_MAG(1)
#define PROB_STATIC_FREQUENT    PROB_LIKELY_MAG(1)

  // Fair probability 50/50
#define PROB_FAIR               (0.5f)

  // Unknown probability sentinel
#define PROB_UNKNOWN            (-1.0f)

  // Probability "constructors", to distinguish as a probability any manifest
  // constant without a name
#define PROB_LIKELY(x)          ((float) (x))
#define PROB_UNLIKELY(x)        (1.0f - (float)(x))

  // Other probabilities in use, but without a unique name, are documented
  // here for lack of a better place:
  //
  // 1 in 1000 probabilities (magnitude 3):
  //     threshold for converting to conditional move
  //     likelihood of null check failure if a null HAS been seen before
  //     likelihood of slow path taken in library calls
  //
  // 1 in 10,000 probabilities (magnitude 4):
  //     threshold for making an uncommon trap probability more extreme
  //     threshold for making a null check implicit
  //     likelihood of needing a gc if eden top moves during an allocation
  //     likelihood of a predicted call failure
  //
  // 1 in 100,000 probabilities (magnitude 5):
  //     threshold for ignoring counts when estimating path frequency
  //     likelihood of FP clipping failure
  //     likelihood of catching an exception from a try block
  //     likelihood of null check failure if a null has NOT been seen before
  //
  // Magic manifest probabilities such as 0.83, 0.7, ... can be found in
  // gen_subtype_check() and catch_inline_exceptions().

  float _prob;                  // Probability of true path being taken.
  float _fcnt;                  // Frequency counter
  IfNode( Node *control, Node *b, float p, float fcnt )
    : MultiBranchNode(2), _prob(p), _fcnt(fcnt) {
    init_class_id(Class_If);
    init_req(0,control);
    init_req(1,b);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; }
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual int required_outcnt() const { return 2; }
  virtual const RegMask &out_RegMask() const;
  Node* fold_compares(PhaseIterGVN* phase);
  static Node* up_one_dom(Node* curr, bool linear_only = false);
  Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn);

  // Takes the type of val and filters it through the test represented
  // by if_proj and returns a more refined type if one is produced.
  // Returns null if it couldn't improve the type.
  static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);

  bool is_flat_array_check(PhaseTransform* phase, Node** array = NULL);

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};

class RangeCheckNode : public IfNode {
private:
  int is_range_check(Node* &range, Node* &index, jint &offset);

public:
  RangeCheckNode(Node* control, Node *b, float p, float fcnt)
    : IfNode(control, b, p, fcnt) {
    init_class_id(Class_RangeCheck);
  }

  virtual int Opcode() const;
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
};

class IfProjNode : public CProjNode {
public:
  IfProjNode(IfNode *ifnode, uint idx) : CProjNode(ifnode,idx) {}
  virtual Node* Identity(PhaseGVN* phase);

protected:
  // Returns true if the given type of the If input means this branch is always taken
  virtual bool always_taken(const TypeTuple* t) const = 0;
};

class IfTrueNode : public IfProjNode {
public:
  IfTrueNode( IfNode *ifnode ) : IfProjNode(ifnode,1) {
    init_class_id(Class_IfTrue);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFTRUE; }
};

class IfFalseNode : public IfProjNode {
public:
  IfFalseNode( IfNode *ifnode ) : IfProjNode(ifnode,0) {
    init_class_id(Class_IfFalse);
  }
  virtual int Opcode() const;

protected:
  virtual bool always_taken(const TypeTuple* t) const { return t == TypeTuple::IFFALSE; }
};


//------------------------------PCTableNode------------------------------------
// Build an indirect branch table.  Given a control and a table index,
// control is passed to the Projection matching the table index.  Used to
// implement switch statements and exception-handling capabilities.
// Undefined behavior if passed-in index is not inside the table.
class PCTableNode : public MultiBranchNode {
  virtual uint hash() const;    // Target count; table size
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

public:
  const uint _size;             // Number of targets

  PCTableNode( Node *ctrl, Node *idx, uint size ) : MultiBranchNode(2), _size(size) {
    init_class_id(Class_PCTable);
    init_req(0, ctrl);
    init_req(1, idx);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *bottom_type() const;
  virtual bool pinned() const { return true; }
  virtual int required_outcnt() const { return _size; }
};

//------------------------------JumpNode---------------------------------------
// Indirect branch.  Uses PCTable above to implement a switch statement.
// It emits as a table load and local branch.
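// Each switch target is represented by a JumpProjNode user (see JumpProjNode
// below), which records the destination bci and the matched switch value.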
class JumpNode : public PCTableNode {
  virtual uint size_of() const { return sizeof(*this); }
public:
  float* _probs; // probability of each projection
  float _fcnt;   // total number of times this Jump was executed
  JumpNode( Node* control, Node* switch_val, uint size, float* probs, float cnt)
    : PCTableNode(control, switch_val, size),
      _probs(probs), _fcnt(cnt) {
    init_class_id(Class_Jump);
  }
  virtual int Opcode() const;
  virtual const RegMask& out_RegMask() const;
  virtual const Node* is_block_proj() const { return this; }
};

class JumpProjNode : public JProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int  _dest_bci;
  const uint _proj_no;
  const int  _switch_val;
public:
  JumpProjNode(Node* jumpnode, uint proj_no, int dest_bci, int switch_val)
    : JProjNode(jumpnode, proj_no), _dest_bci(dest_bci), _proj_no(proj_no), _switch_val(switch_val) {
    init_class_id(Class_JumpProj);
  }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return Type::CONTROL; }
  int  dest_bci()   const { return _dest_bci; }
  int  switch_val() const { return _switch_val; }
  uint proj_no()    const { return _proj_no; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};

//------------------------------CatchNode--------------------------------------
// Helper node to fork exceptions.  "Catch" catches any exceptions thrown by
// a just-prior call.  Looks like a PCTableNode but emits no code - just the
// table.  The table lookup and branch are implemented by RethrowNode.
class CatchNode : public PCTableNode {
public:
  CatchNode( Node *ctrl, Node *idx, uint size ) : PCTableNode(ctrl,idx,size){
    init_class_id(Class_Catch);
  }
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const;
};

// CatchProjNode controls which exception handler is targeted after a call.
// It is passed the bci of the target handler, or no_handler_bci in case
// the projection doesn't lead to an exception handler.
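// For example, both the fall-through projection (fall_through_index) and the
// catch-all projection (catch_all_index) carry no_handler_bci, so
// is_handler_proj() returns false for them.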
class CatchProjNode : public CProjNode {
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual uint size_of() const { return sizeof(*this); }

private:
  const int _handler_bci;

public:
  enum {
    fall_through_index =  0,      // the fall through projection index
    catch_all_index    =  1,      // the projection index for catch-alls
    no_handler_bci     = -1       // the bci for fall through or catch-all projs
  };

  CatchProjNode(Node* catchnode, uint proj_no, int handler_bci)
    : CProjNode(catchnode, proj_no), _handler_bci(handler_bci) {
    init_class_id(Class_CatchProj);
    assert(proj_no != fall_through_index || handler_bci < 0, "fall through case must have bci < 0");
  }

  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual const Type *bottom_type() const { return Type::CONTROL; }
  int  handler_bci() const     { return _handler_bci; }
  bool is_handler_proj() const { return _handler_bci >= 0; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};


//---------------------------------CreateExNode--------------------------------
// Helper node to create the exception coming back from a call
class CreateExNode : public TypeNode {
public:
  CreateExNode(const Type* t, Node* control, Node* i_o) : TypeNode(t, 2) {
    init_req(0, control);
    init_req(1, i_o);
  }
  virtual int Opcode() const;
  virtual Node* Identity(PhaseGVN* phase);
  virtual bool pinned() const { return true; }
  uint match_edge(uint idx) const { return 0; }
  virtual uint ideal_reg() const { return Op_RegP; }
};

//------------------------------NeverBranchNode-------------------------------
// The never-taken branch.  Used to give the appearance of exiting infinite
// loops to those algorithms that like all paths to be reachable.  Encodes
// empty.
class NeverBranchNode : public MultiBranchNode {
public:
  NeverBranchNode(Node* ctrl) : MultiBranchNode(1) {
    init_req(0, ctrl);
    init_class_id(Class_NeverBranch);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; };
  virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual int required_outcnt() const { return 2; }
  virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const { }
  virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
#ifndef PRODUCT
  virtual void format( PhaseRegAlloc *, outputStream *st ) const;
#endif
};

//------------------------------BlackholeNode----------------------------
// Blackhole all arguments. This node would survive through the compiler
// in order to keep the effects on its arguments alive, and would be
// finally matched to nothing.
class BlackholeNode : public MultiNode {
public:
  BlackholeNode(Node* ctrl) : MultiNode(1) {
    init_req(TypeFunc::Control, ctrl);
    init_class_id(Class_Blackhole);
  }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
  virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }

  const RegMask &in_RegMask(uint idx) const {
    // Fake the incoming arguments mask for blackholes: accept all registers
    // and all stack slots. This would avoid any redundant register moves
    // for blackhole inputs.
    return RegMask::All;
  }
#ifndef PRODUCT
  virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
#endif
};


#endif // SHARE_OPTO_CFGNODE_HPP