237 int is_diamond_phi() const;
238 bool try_clean_memory_phi(PhaseIterGVN* igvn);
239 virtual int Opcode() const;
// A Phi is pinned exactly when it has a control input.
240 virtual bool pinned() const { return in(0) != 0; }
// Returns the cached address type; debug builds first run the (recursive)
// consistency check on it.
241 virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }
242
// Accessors for the instance-field identity fields (_inst_mem_id, _inst_id,
// _inst_index, _inst_offset) carried by this node.
// NOTE(review): these presumably identify the instance field a memory Phi
// was split for during escape analysis — confirm against the callers of
// set_inst_mem_id().
243 void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
244 int inst_mem_id() const { return _inst_mem_id; }
245 int inst_id() const { return _inst_id; }
246 int inst_index() const { return _inst_index; }
247 int inst_offset() const { return _inst_offset; }
// True when (mem_id, id, index, offset) matches this node's recorded
// instance-field identity and 'tp' is compatible with this node's type:
// same basic_type() and type()->higher_equal(tp) in the type lattice.
248 bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
249 return type()->basic_type() == tp->basic_type() &&
250 inst_mem_id() == mem_id &&
251 inst_id() == id &&
252 inst_index() == index &&
253 inst_offset() == offset &&
254 type()->higher_equal(tp);
255 }
256
257 virtual const Type* Value(PhaseGVN* phase) const;
258 virtual Node* Identity(PhaseGVN* phase);
259 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
260 virtual const RegMask &out_RegMask() const;
261 virtual const RegMask &in_RegMask(uint) const;
262 #ifndef PRODUCT
263 virtual void dump_spec(outputStream *st) const;
264 #endif
// Debug-only consistency checks of _adr_type; a no-op in release builds.
265 #ifdef ASSERT
266 void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
267 void verify_adr_type(bool recursive = false) const;
268 #else //ASSERT
269 void verify_adr_type(bool recursive = false) const {}
270 #endif //ASSERT
271 };
272
273 //------------------------------GotoNode---------------------------------------
274 // GotoNodes perform direct branches.
275 class GotoNode : public Node {
276 public:
// (Tail of a constructor whose start lies outside this chunk: it registers
// the node under the If class id and wires the control input and the
// boolean test as inputs 0 and 1.)
418 init_class_id(Class_If);
419 init_req(0,control);
420 init_req(1,b);
421 }
422 virtual int Opcode() const;
// An If is always pinned to its control input.
423 virtual bool pinned() const { return true; }
424 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
425 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
426 virtual const Type* Value(PhaseGVN* phase) const;
// Both projections (true/false paths) must remain in the graph.
427 virtual int required_outcnt() const { return 2; }
428 virtual const RegMask &out_RegMask() const;
429 Node* fold_compares(PhaseIterGVN* phase);
430 static Node* up_one_dom(Node* curr, bool linear_only = false);
431 Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn);
432
433 // Takes the type of val and filters it through the test represented
434 // by if_proj and returns a more refined type if one is produced.
435 // Returns null if it couldn't improve the type.
436 static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);
437
438 #ifndef PRODUCT
439 virtual void dump_spec(outputStream *st) const;
440 #endif
441
// True when this If tests the same condition as the dominating test 'dom'.
442 bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
443 };
444
445 class RangeCheckNode : public IfNode {
446 private:
447 int is_range_check(Node* &range, Node* &index, jint &offset);
448
449 public:
450 RangeCheckNode(Node* control, Node *b, float p, float fcnt)
451 : IfNode(control, b, p, fcnt) {
452 init_class_id(Class_RangeCheck);
453 }
454
455 virtual int Opcode() const;
456 virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
457 };
// (Tail of a class declaration whose header lies outside this chunk. The
// IFBOTH tuple type, two required projections, and the empty emit() / zero
// size() suggest an If-shaped control node that generates no machine code —
// presumably NeverBranchNode; confirm against the full header.)
676 virtual int Opcode() const;
// Always pinned to control. (Note the stray ';' after the body — harmless,
// but inconsistent with the surrounding declarations.)
677 virtual bool pinned() const { return true; };
678 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
679 virtual const Type* Value(PhaseGVN* phase) const;
680 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
// Both projections must remain in the graph.
681 virtual int required_outcnt() const { return 2; }
// Emits no machine code and occupies zero bytes in the code buffer.
682 virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const { }
683 virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
684 #ifndef PRODUCT
685 virtual void format( PhaseRegAlloc *, outputStream *st ) const;
686 #endif
687 };
688
689 //------------------------------BlackholeNode----------------------------
690 // Blackhole all arguments. This node would survive through the compiler
691 // the effects on its arguments, and would be finally matched to nothing.
692 class BlackholeNode : public MultiNode {
693 public:
694 BlackholeNode(Node* ctrl) : MultiNode(1) {
695 init_req(TypeFunc::Control, ctrl);
696 }
697 virtual int Opcode() const;
698 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
699 virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
700
701 const RegMask &in_RegMask(uint idx) const {
702 // Fake the incoming arguments mask for blackholes: accept all registers
703 // and all stack slots. This would avoid any redundant register moves
704 // for blackhole inputs.
705 return RegMask::All;
706 }
707 #ifndef PRODUCT
708 virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
709 #endif
710 };
711
712
713 #endif // SHARE_OPTO_CFGNODE_HPP
|
237 int is_diamond_phi() const;
238 bool try_clean_memory_phi(PhaseIterGVN* igvn);
239 virtual int Opcode() const;
// A Phi is pinned exactly when it has a control input.
240 virtual bool pinned() const { return in(0) != 0; }
// Returns the cached address type; debug builds first run the (recursive)
// consistency check on it.
241 virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }
242
// Accessors for the instance-field identity fields (_inst_mem_id, _inst_id,
// _inst_index, _inst_offset) carried by this node.
// NOTE(review): these presumably identify the instance field a memory Phi
// was split for during escape analysis — confirm against the callers of
// set_inst_mem_id().
243 void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
244 int inst_mem_id() const { return _inst_mem_id; }
245 int inst_id() const { return _inst_id; }
246 int inst_index() const { return _inst_index; }
247 int inst_offset() const { return _inst_offset; }
// True when (mem_id, id, index, offset) matches this node's recorded
// instance-field identity and 'tp' is compatible with this node's type:
// same basic_type() and type()->higher_equal(tp) in the type lattice.
248 bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
249 return type()->basic_type() == tp->basic_type() &&
250 inst_mem_id() == mem_id &&
251 inst_id() == id &&
252 inst_index() == index &&
253 inst_offset() == offset &&
254 type()->higher_equal(tp);
255 }
256
// NOTE(review): Valhalla-specific hook — presumably pushes inline-type
// (value class) inputs of klass 'vk' through this Phi, yielding an
// InlineTypeNode; confirm in the corresponding .cpp.
257 InlineTypeNode* push_inline_types_through(PhaseGVN* phase, bool can_reshape, ciInlineKlass* vk);
258
259 virtual const Type* Value(PhaseGVN* phase) const;
260 virtual Node* Identity(PhaseGVN* phase);
261 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
262 virtual const RegMask &out_RegMask() const;
263 virtual const RegMask &in_RegMask(uint) const;
264 #ifndef PRODUCT
265 virtual void dump_spec(outputStream *st) const;
266 #endif
// Debug-only consistency checks of _adr_type; a no-op in release builds.
267 #ifdef ASSERT
268 void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
269 void verify_adr_type(bool recursive = false) const;
270 #else //ASSERT
271 void verify_adr_type(bool recursive = false) const {}
272 #endif //ASSERT
273 };
274
275 //------------------------------GotoNode---------------------------------------
276 // GotoNodes perform direct branches.
277 class GotoNode : public Node {
278 public:
// (Tail of a constructor whose start lies outside this chunk: it registers
// the node under the If class id and wires the control input and the
// boolean test as inputs 0 and 1.)
420 init_class_id(Class_If);
421 init_req(0,control);
422 init_req(1,b);
423 }
424 virtual int Opcode() const;
// An If is always pinned to its control input.
425 virtual bool pinned() const { return true; }
426 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
427 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
428 virtual const Type* Value(PhaseGVN* phase) const;
// Both projections (true/false paths) must remain in the graph.
429 virtual int required_outcnt() const { return 2; }
430 virtual const RegMask &out_RegMask() const;
431 Node* fold_compares(PhaseIterGVN* phase);
432 static Node* up_one_dom(Node* curr, bool linear_only = false);
433 Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn);
434
435 // Takes the type of val and filters it through the test represented
436 // by if_proj and returns a more refined type if one is produced.
437 // Returns null if it couldn't improve the type.
438 static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);
439
// NOTE(review): Valhalla-specific — presumably recognizes this If as a
// flat-array check and optionally reports the array node through 'array';
// confirm in the corresponding .cpp.
440 bool is_flat_array_check(PhaseTransform* phase, Node** array = nullptr);
441
442 #ifndef PRODUCT
443 virtual void dump_spec(outputStream *st) const;
444 #endif
445
// True when this If tests the same condition as the dominating test 'dom'.
446 bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
447 };
448
449 class RangeCheckNode : public IfNode {
450 private:
451 int is_range_check(Node* &range, Node* &index, jint &offset);
452
453 public:
454 RangeCheckNode(Node* control, Node *b, float p, float fcnt)
455 : IfNode(control, b, p, fcnt) {
456 init_class_id(Class_RangeCheck);
457 }
458
459 virtual int Opcode() const;
460 virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
461 };
// (Tail of a class declaration whose header lies outside this chunk. The
// IFBOTH tuple type, two required projections, and the empty emit() / zero
// size() suggest an If-shaped control node that generates no machine code —
// presumably NeverBranchNode; confirm against the full header.)
680 virtual int Opcode() const;
// Always pinned to control. (Note the stray ';' after the body — harmless,
// but inconsistent with the surrounding declarations.)
681 virtual bool pinned() const { return true; };
682 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
683 virtual const Type* Value(PhaseGVN* phase) const;
684 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
// Both projections must remain in the graph.
685 virtual int required_outcnt() const { return 2; }
// Emits no machine code and occupies zero bytes in the code buffer.
686 virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const { }
687 virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
688 #ifndef PRODUCT
689 virtual void format( PhaseRegAlloc *, outputStream *st ) const;
690 #endif
691 };
692
693 //------------------------------BlackholeNode----------------------------
694 // Blackhole all arguments. This node would survive through the compiler
695 // the effects on its arguments, and would be finally matched to nothing.
696 class BlackholeNode : public MultiNode {
697 public:
698 BlackholeNode(Node* ctrl) : MultiNode(1) {
699 init_req(TypeFunc::Control, ctrl);
700 init_class_id(Class_Blackhole);
701 }
702 virtual int Opcode() const;
703 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
704 virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
705
706 const RegMask &in_RegMask(uint idx) const {
707 // Fake the incoming arguments mask for blackholes: accept all registers
708 // and all stack slots. This would avoid any redundant register moves
709 // for blackhole inputs.
710 return RegMask::All;
711 }
712 #ifndef PRODUCT
713 virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
714 #endif
715 };
716
717
718 #endif // SHARE_OPTO_CFGNODE_HPP
|