  const Type* super_t = phase->type(superklass);

  // Bail out unless the superklass input is a klass pointer and the other
  // input is a klass or an oop pointer.
  if (!super_t->isa_klassptr() ||
      (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
    return nullptr;
  }

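  // If the object input is itself a klass loaded from some object's header
  // (LoadKlass directly, or LoadNKlass + DecodeNKlass when compressed class
  // pointers are in use), recover the address the klass is loaded from.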
  Node* addr = nullptr;
  if (obj_or_subklass->is_DecodeNKlass()) {
    if (obj_or_subklass->in(1) != nullptr &&
        obj_or_subklass->in(1)->Opcode() == Op_LoadNKlass) {
      addr = obj_or_subklass->in(1)->in(MemNode::Address);
    }
  } else if (obj_or_subklass->Opcode() == Op_LoadKlass) {
    addr = obj_or_subklass->in(MemNode::Address);
  }

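  // If that address is exactly the klass field of some oop, use the oop
  // itself as the input: the check is then in its canonical oop form, and
  // the klass can be re-loaded when the check is expanded.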
  if (addr != nullptr) {
    intptr_t con = 0;
    Node* obj = AddPNode::Ideal_base_and_offset(addr, phase, con);
    if (con == oopDesc::klass_offset_in_bytes() && obj != nullptr) {
      assert(is_oop(phase, obj), "only for oop input");
      set_req_X(ObjOrSubKlass, obj, phase);
      return this;
    }
  }

  // AllocateNode might have more accurate klass input
  Node* allocated_klass = AllocateNode::Ideal_klass(obj_or_subklass, phase);
  if (allocated_klass != nullptr) {
    assert(is_oop(phase, obj_or_subklass), "only for oop input");
    set_req_X(ObjOrSubKlass, allocated_klass, phase);
    return this;
  }

  // Verify that optimizing the subtype check to a simple code pattern
  // when possible would not constant fold better
  assert(verify(phase), "missing Value() optimization");

  return nullptr;
}
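
// ... (intervening code elided) ...

// Below is the tail end of the debug-only verification code: the closing
// portion of a switch over Compile::static_subtype_check(), presumably inside
// SubTypeCheckNode::verify(), which checks that expanding the subtype check
// would not constant fold better than Value() already does.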
          return verify_helper(phase, nkls, cached_t);
        }
        break;
      }
      case Compile::SSC_always_false:
      case Compile::SSC_always_true:
      default: {
        break; // nothing to do
      }
    }
  }

  return true;
}

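// Debug-only helper: returns the klass input for verification. If the input
// is an oop, a klass load from its header is created speculatively (and
// recorded for cleanup, since it is only needed while verifying); if it is
// already a klass, it is returned unchanged.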
Node* SubTypeCheckNode::load_klass(PhaseGVN* phase) const {
  Node* obj_or_subklass = in(ObjOrSubKlass);
  const Type* sub_t = phase->type(obj_or_subklass);
  Node* subklass = nullptr;
  if (sub_t->isa_oopptr()) {
    Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
    subklass = phase->transform(LoadKlassNode::make(*phase, nullptr, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
    record_for_cleanup(subklass, phase);
  } else {
    subklass = obj_or_subklass;
  }
  return subklass;
}
#endif

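// Report the true size of this subclass so that Node::clone() copies the
// extra fields (such as _method and _bci) as well.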
uint SubTypeCheckNode::size_of() const {
  return sizeof(*this);
}

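// Returning NO_HASH keeps this node out of GVN's hash table, so distinct
// subtype checks are never commoned (each may carry its own profile data).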
uint SubTypeCheckNode::hash() const {
  return NO_HASH;
}

#ifndef PRODUCT
void SubTypeCheckNode::dump_spec(outputStream* st) const {
  if (_method != nullptr) {
    st->print(" profiled at:");
    _method->print_short_name(st);
    st->print(":%d", _bci);
  }
}
#endif