164 int _inst_mem_id; // Instance memory id (node index of the memory Phi)
165 int _inst_id; // Instance id of the memory slice.
166 const int _inst_index; // Alias index of the instance memory slice.
167 // Array elements references have the same alias_idx but different offset.
168 const int _inst_offset; // Offset of the instance memory slice.
169 // Size is bigger to hold the _adr_type field.
170 virtual uint hash() const; // Check the type
171 virtual bool cmp( const Node &n ) const;
172 virtual uint size_of() const { return sizeof(*this); }
173
174 // Determine if CMoveNode::is_cmove_id can be used at this join point.
175 Node* is_cmove_id(PhaseTransform* phase, int true_path);
// NOTE(review): presumably defers Phi optimizations until the controlling
// region has settled under IGVN -- confirm against the .cpp definition.
176 bool wait_for_region_igvn(PhaseGVN* phase);
// Detects a data-only cycle through this Phi via input 'uin'.
// NOTE(review): inferred from name/signature -- confirm in the .cpp file.
177 bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);
178
// Static helpers that clone/merge a structure of Phis rooted at 'root_phi'.
179 static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
180 static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);
181
// Guard for transforms that are unsafe while an enclosing irreducible
// loop's region is still being reshaped.
182 bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;
183
184 public:
185 // Node layout (parallels RegionNode):
186 enum { Region, // Control input is the Phi's region.
187 Input // Input values are [1..len)
188 };
189
190 PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
191 const int imid = -1,
192 const int iid = TypeOopPtr::InstanceTop,
193 const int iidx = Compile::AliasIdxTop,
194 const int ioffs = Type::OffsetTop )
195 : TypeNode(t,r->req()),
196 _adr_type(at),
197 _inst_mem_id(imid),
198 _inst_id(iid),
199 _inst_index(iidx),
200 _inst_offset(ioffs)
201 {
202 init_class_id(Class_Phi);
203 init_req(0, r);
237 int is_diamond_phi() const;
238 bool try_clean_memory_phi(PhaseIterGVN* igvn);
239 virtual int Opcode() const;
// A Phi is pinned whenever it has a control (region) input.
240 virtual bool pinned() const { return in(0) != 0; }
// Memory Phis carry their own slice type; checked in debug builds only.
241 virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }
242
// Accessors for the instance-memory-slice metadata fields.
243 void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
244 int inst_mem_id() const { return _inst_mem_id; }
245 int inst_id() const { return _inst_id; }
246 int inst_index() const { return _inst_index; }
247 int inst_offset() const { return _inst_offset; }
248 bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
249 return type()->basic_type() == tp->basic_type() &&
250 inst_mem_id() == mem_id &&
251 inst_id() == id &&
252 inst_index() == index &&
253 inst_offset() == offset &&
254 type()->higher_equal(tp);
255 }
256
// Standard GVN/IGVN hooks and register-mask queries.
257 virtual const Type* Value(PhaseGVN* phase) const;
258 virtual Node* Identity(PhaseGVN* phase);
259 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
260 virtual const RegMask &out_RegMask() const;
261 virtual const RegMask &in_RegMask(uint) const;
262 #ifndef PRODUCT
263 virtual void dump_spec(outputStream *st) const;
264 #endif
// Debug builds verify _adr_type consistency; the product build keeps a
// no-op stub so callers need no #ifdefs.
265 #ifdef ASSERT
266 void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
267 void verify_adr_type(bool recursive = false) const;
268 #else //ASSERT
269 void verify_adr_type(bool recursive = false) const {}
270 #endif //ASSERT
271
272 const TypeTuple* collect_types(PhaseGVN* phase) const;
273 };
274
275 //------------------------------GotoNode---------------------------------------
276 // GotoNodes perform direct branches.
421 init_req(0,control);
422 init_req(1,b);
423 }
424 virtual int Opcode() const;
// An If is always pinned to its control input.
425 virtual bool pinned() const { return true; }
// Produces the (true, false) projection tuple.
426 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
427 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
428 virtual const Type* Value(PhaseGVN* phase) const;
// Both projections must be present before matching.
429 virtual int required_outcnt() const { return 2; }
430 virtual const RegMask &out_RegMask() const;
431 Node* fold_compares(PhaseIterGVN* phase);
// Walk one step up the dominator chain from 'curr'.
// NOTE(review): semantics of 'linear_only' are defined in the .cpp -- confirm.
432 static Node* up_one_dom(Node* curr, bool linear_only = false);
433 Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn);
434 bool is_zero_trip_guard() const;
435
436 // Takes the type of val and filters it through the test represented
437 // by if_proj and returns a more refined type if one is produced.
438 // Returns null if it couldn't improve the type.
439 static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);
440
441 #ifndef PRODUCT
442 virtual void dump_spec(outputStream *st) const;
443 #endif
444
445 bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
446 };
447
//------------------------------RangeCheckNode---------------------------------
// An IfNode specialized for array range checks; it gets its own opcode and
// Ideal() so range-check-specific transforms can target it.
448 class RangeCheckNode : public IfNode {
449 private:
// Classifies this node as a range check and extracts its pieces.
// NOTE(review): the out-params and the int return encoding are defined in
// the .cpp file -- confirm before relying on them.
450 int is_range_check(Node* &range, Node* &index, jint &offset);
451
452 public:
453 RangeCheckNode(Node* control, Node *b, float p, float fcnt)
454 : IfNode(control, b, p, fcnt) {
455 init_class_id(Class_RangeCheck);
456 }
457
458 virtual int Opcode() const;
459 virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
460 };
// NOTE(review): the enclosing class declaration is above this chunk; from the
// members (IFBOTH tuple type, two required outputs, empty emit/size) this is
// a control node that generates no machine code -- confirm its identity in
// the full header.
679 virtual int Opcode() const;
// Fix: dropped the redundant trailing ';' after the inline body (empty member
// declaration; triggers -Wextra-semi and is inconsistent with the rest of the file).
680 virtual bool pinned() const { return true; }
681 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
682 virtual const Type* Value(PhaseGVN* phase) const;
683 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
684 virtual int required_outcnt() const { return 2; }
// This node emits no machine code and occupies zero bytes.
685 virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const { }
686 virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
687 #ifndef PRODUCT
688 virtual void format( PhaseRegAlloc *, outputStream *st ) const;
689 #endif
690 };
691
692 //------------------------------BlackholeNode----------------------------
693 // Blackhole all arguments. This node survives all compiler optimizations,
694 // keeping the effects on its arguments alive, and is finally matched to nothing.
695 class BlackholeNode : public MultiNode {
696 public:
// MultiNode(1) reserves only the control slot; data arguments are added later.
697 BlackholeNode(Node* ctrl) : MultiNode(1) {
698 init_req(TypeFunc::Control, ctrl);
// NOTE(review): no init_class_id() call here -- if a Class_Blackhole node
// class id exists in this version of the class-id enum, it should probably
// be registered (cf. RangeCheckNode's constructor); confirm.
699 }
700 virtual int Opcode() const;
701 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
702 virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
703
704 const RegMask &in_RegMask(uint idx) const {
705 // Fake the incoming arguments mask for blackholes: accept all registers
706 // and all stack slots. This would avoid any redundant register moves
707 // for blackhole inputs.
708 return RegMask::All;
709 }
710 #ifndef PRODUCT
711 virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
712 #endif
713 };
714
715
716 #endif // SHARE_OPTO_CFGNODE_HPP
|
164 int _inst_mem_id; // Instance memory id (node index of the memory Phi)
165 int _inst_id; // Instance id of the memory slice.
166 const int _inst_index; // Alias index of the instance memory slice.
167 // Array elements references have the same alias_idx but different offset.
168 const int _inst_offset; // Offset of the instance memory slice.
169 // Size is bigger to hold the _adr_type field.
170 virtual uint hash() const; // Check the type
171 virtual bool cmp( const Node &n ) const;
172 virtual uint size_of() const { return sizeof(*this); }
173
174 // Determine if CMoveNode::is_cmove_id can be used at this join point.
175 Node* is_cmove_id(PhaseTransform* phase, int true_path);
// NOTE(review): presumably defers Phi optimizations until the controlling
// region has settled under IGVN -- confirm against the .cpp definition.
176 bool wait_for_region_igvn(PhaseGVN* phase);
// Detects a data-only cycle through this Phi via input 'uin'.
177 bool is_data_loop(RegionNode* r, Node* uin, const PhaseGVN* phase);
178
// Static helpers that clone/merge a structure of Phis rooted at 'root_phi'.
179 static Node* clone_through_phi(Node* root_phi, const Type* t, uint c, PhaseIterGVN* igvn);
180 static Node* merge_through_phi(Node* root_phi, PhaseIterGVN* igvn);
181
// Guard for transforms that are unsafe while an enclosing irreducible
// loop's region is still being reshaped.
182 bool must_wait_for_region_in_irreducible_loop(PhaseGVN* phase) const;
183
// Valhalla support: decide whether inline-type values flowing into this Phi
// can be pushed below it, and perform that push (see try_push_inline_types_down).
184 bool can_push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass*& inline_klass);
185 InlineTypeNode* push_inline_types_down(PhaseGVN* phase, bool can_reshape, ciInlineKlass* inline_klass);
186
187 public:
188 // Node layout (parallels RegionNode):
189 enum { Region, // Control input is the Phi's region.
190 Input // Input values are [1..len)
191 };
192
193 PhiNode( Node *r, const Type *t, const TypePtr* at = nullptr,
194 const int imid = -1,
195 const int iid = TypeOopPtr::InstanceTop,
196 const int iidx = Compile::AliasIdxTop,
197 const int ioffs = Type::OffsetTop )
198 : TypeNode(t,r->req()),
199 _adr_type(at),
200 _inst_mem_id(imid),
201 _inst_id(iid),
202 _inst_index(iidx),
203 _inst_offset(ioffs)
204 {
205 init_class_id(Class_Phi);
206 init_req(0, r);
240 int is_diamond_phi() const;
241 bool try_clean_memory_phi(PhaseIterGVN* igvn);
242 virtual int Opcode() const;
// A Phi is pinned whenever it has a control (region) input.
243 virtual bool pinned() const { return in(0) != 0; }
// Memory Phis carry their own slice type; checked in debug builds only.
244 virtual const TypePtr *adr_type() const { verify_adr_type(true); return _adr_type; }
245
// Accessors for the instance-memory-slice metadata fields.
246 void set_inst_mem_id(int inst_mem_id) { _inst_mem_id = inst_mem_id; }
247 int inst_mem_id() const { return _inst_mem_id; }
248 int inst_id() const { return _inst_id; }
249 int inst_index() const { return _inst_index; }
250 int inst_offset() const { return _inst_offset; }
251 bool is_same_inst_field(const Type* tp, int mem_id, int id, int index, int offset) {
252 return type()->basic_type() == tp->basic_type() &&
253 inst_mem_id() == mem_id &&
254 inst_id() == id &&
255 inst_index() == index &&
256 inst_offset() == offset &&
257 type()->higher_equal(tp);
258 }
259
260 bool can_be_inline_type() const {
261 return EnableValhalla && _type->isa_instptr() && _type->is_instptr()->can_be_inline_type();
262 }
263
// Public entry point for the inline-type push-down transform.
// NOTE(review): the null-vs-replacement return contract is defined in the
// .cpp file -- confirm before relying on it.
264 Node* try_push_inline_types_down(PhaseGVN* phase, bool can_reshape);
265
// Standard GVN/IGVN hooks and register-mask queries.
266 virtual const Type* Value(PhaseGVN* phase) const;
267 virtual Node* Identity(PhaseGVN* phase);
268 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
269 virtual const RegMask &out_RegMask() const;
270 virtual const RegMask &in_RegMask(uint) const;
271 #ifndef PRODUCT
272 virtual void dump_spec(outputStream *st) const;
273 #endif
// Debug builds verify _adr_type consistency; the product build keeps a
// no-op stub so callers need no #ifdefs.
274 #ifdef ASSERT
275 void verify_adr_type(VectorSet& visited, const TypePtr* at) const;
276 void verify_adr_type(bool recursive = false) const;
277 #else //ASSERT
278 void verify_adr_type(bool recursive = false) const {}
279 #endif //ASSERT
280
281 const TypeTuple* collect_types(PhaseGVN* phase) const;
282 };
283
284 //------------------------------GotoNode---------------------------------------
285 // GotoNodes perform direct branches.
430 init_req(0,control);
431 init_req(1,b);
432 }
433 virtual int Opcode() const;
// An If is always pinned to its control input.
434 virtual bool pinned() const { return true; }
// Produces the (true, false) projection tuple.
435 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
436 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
437 virtual const Type* Value(PhaseGVN* phase) const;
// Both projections must be present before matching.
438 virtual int required_outcnt() const { return 2; }
439 virtual const RegMask &out_RegMask() const;
440 Node* fold_compares(PhaseIterGVN* phase);
// Walk one step up the dominator chain from 'curr'.
441 static Node* up_one_dom(Node* curr, bool linear_only = false);
442 Node* dominated_by(Node* prev_dom, PhaseIterGVN* igvn);
443 bool is_zero_trip_guard() const;
444
445 // Takes the type of val and filters it through the test represented
446 // by if_proj and returns a more refined type if one is produced.
447 // Returns null if it couldn't improve the type.
448 static const TypeInt* filtered_int_type(PhaseGVN* phase, Node* val, Node* if_proj);
449
// Valhalla: presumably recognizes an If that tests whether an array is flat,
// optionally returning the array node through 'array' -- confirm in the .cpp.
450 bool is_flat_array_check(PhaseTransform* phase, Node** array = nullptr);
451
452 #ifndef PRODUCT
453 virtual void dump_spec(outputStream *st) const;
454 #endif
455
456 bool same_condition(const Node* dom, PhaseIterGVN* igvn) const;
457 };
458
//------------------------------RangeCheckNode---------------------------------
// An IfNode specialized for array range checks; it gets its own opcode and
// Ideal() so range-check-specific transforms can target it.
459 class RangeCheckNode : public IfNode {
460 private:
// Classifies this node as a range check and extracts its pieces.
// NOTE(review): the out-params and the int return encoding are defined in
// the .cpp file -- confirm before relying on them.
461 int is_range_check(Node* &range, Node* &index, jint &offset);
462
463 public:
464 RangeCheckNode(Node* control, Node *b, float p, float fcnt)
465 : IfNode(control, b, p, fcnt) {
466 init_class_id(Class_RangeCheck);
467 }
468
469 virtual int Opcode() const;
470 virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
471 };
// NOTE(review): the enclosing class declaration is above this chunk; from the
// members (IFBOTH tuple type, two required outputs, empty emit/size) this is
// a control node that generates no machine code -- confirm its identity in
// the full header.
690 virtual int Opcode() const;
// Fix: dropped the redundant trailing ';' after the inline body (empty member
// declaration; triggers -Wextra-semi and is inconsistent with the rest of the file).
691 virtual bool pinned() const { return true; }
692 virtual const Type *bottom_type() const { return TypeTuple::IFBOTH; }
693 virtual const Type* Value(PhaseGVN* phase) const;
694 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
695 virtual int required_outcnt() const { return 2; }
// This node emits no machine code and occupies zero bytes.
696 virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const { }
697 virtual uint size(PhaseRegAlloc *ra_) const { return 0; }
698 #ifndef PRODUCT
699 virtual void format( PhaseRegAlloc *, outputStream *st ) const;
700 #endif
701 };
702
703 //------------------------------BlackholeNode----------------------------
704 // Blackhole all arguments. This node survives all compiler optimizations,
705 // keeping the effects on its arguments alive, and is finally matched to nothing.
706 class BlackholeNode : public MultiNode {
707 public:
// MultiNode(1) reserves only the control slot; data arguments are added later.
708 BlackholeNode(Node* ctrl) : MultiNode(1) {
709 init_req(TypeFunc::Control, ctrl);
// Tag the node with its class id (cf. RangeCheckNode's constructor).
710 init_class_id(Class_Blackhole);
711 }
712 virtual int Opcode() const;
713 virtual uint ideal_reg() const { return 0; } // not matched in the AD file
714 virtual const Type* bottom_type() const { return TypeTuple::MEMBAR; }
715
716 const RegMask &in_RegMask(uint idx) const {
717 // Fake the incoming arguments mask for blackholes: accept all registers
718 // and all stack slots. This would avoid any redundant register moves
719 // for blackhole inputs.
720 return RegMask::All;
721 }
722 #ifndef PRODUCT
723 virtual void format(PhaseRegAlloc* ra, outputStream* st) const;
724 #endif
725 };
726
727
728 #endif // SHARE_OPTO_CFGNODE_HPP
|