< prev index next >

src/hotspot/share/opto/subtypenode.cpp

Print this page

 77   const Type* super_t = phase->type(superklass);
 78 
 79   if (!super_t->isa_klassptr() ||
 80       (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
 81     return nullptr;
 82   }
 83 
 84   Node* addr = nullptr;
 85   if (obj_or_subklass->is_DecodeNKlass()) {
 86     if (obj_or_subklass->in(1) != nullptr &&
 87         obj_or_subklass->in(1)->Opcode() == Op_LoadNKlass) {
 88       addr = obj_or_subklass->in(1)->in(MemNode::Address);
 89     }
 90   } else if (obj_or_subklass->Opcode() == Op_LoadKlass) {
 91     addr = obj_or_subklass->in(MemNode::Address);
 92   }
 93 
 94   if (addr != nullptr) {
 95     intptr_t con = 0;
 96     Node* obj = AddPNode::Ideal_base_and_offset(addr, phase, con);
 97     if (con == oopDesc::klass_offset_in_bytes() && obj != nullptr) {
 98       assert(is_oop(phase, obj), "only for oop input");
 99       set_req_X(ObjOrSubKlass, obj, phase);
100       return this;
101     }
102   }
103 
104   // AllocateNode might have more accurate klass input
105   Node* allocated_klass = AllocateNode::Ideal_klass(obj_or_subklass, phase);
106   if (allocated_klass != nullptr) {
107     assert(is_oop(phase, obj_or_subklass), "only for oop input");
108     set_req_X(ObjOrSubKlass, allocated_klass, phase);
109     return this;
110   }
111 
112   // Verify that optimizing the subtype check to a simple code pattern
113   // when possible would not constant fold better
114   assert(verify(phase), "missing Value() optimization");
115 
116   return nullptr;
117 }

201           return verify_helper(phase, nkls, cached_t);
202         }
203         break;
204       }
205       case Compile::SSC_always_false:
206       case Compile::SSC_always_true:
207       default: {
208         break; // nothing to do
209       }
210     }
211   }
212 
213   return true;
214 }
215 
216 Node* SubTypeCheckNode::load_klass(PhaseGVN* phase) const {
217   Node* obj_or_subklass = in(ObjOrSubKlass);
218   const Type* sub_t = phase->type(obj_or_subklass);
219   Node* subklass = nullptr;
220   if (sub_t->isa_oopptr()) {
221     Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
222     subklass  = phase->transform(LoadKlassNode::make(*phase, nullptr, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
223     record_for_cleanup(subklass, phase);
224   } else {
225     subklass = obj_or_subklass;
226   }
227   return subklass;
228 }
229 #endif
230 
231 uint SubTypeCheckNode::size_of() const {
232   return sizeof(*this);
233 }
234 
uint SubTypeCheckNode::hash() const {
  // NO_HASH opts this node out of GVN hashing, so distinct SubTypeCheck
  // nodes are never value-numbered/commoned with one another.
  return NO_HASH;
}
238 
239 #ifndef PRODUCT
240 void SubTypeCheckNode::dump_spec(outputStream* st) const {
241   if (_method != nullptr) {
242     st->print(" profiled at:");
243     _method->print_short_name(st);
244     st->print(":%d", _bci);
245   }
246 }
247 #endif

 77   const Type* super_t = phase->type(superklass);
 78 
 79   if (!super_t->isa_klassptr() ||
 80       (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
 81     return nullptr;
 82   }
 83 
 84   Node* addr = nullptr;
 85   if (obj_or_subklass->is_DecodeNKlass()) {
 86     if (obj_or_subklass->in(1) != nullptr &&
 87         obj_or_subklass->in(1)->Opcode() == Op_LoadNKlass) {
 88       addr = obj_or_subklass->in(1)->in(MemNode::Address);
 89     }
 90   } else if (obj_or_subklass->Opcode() == Op_LoadKlass) {
 91     addr = obj_or_subklass->in(MemNode::Address);
 92   }
 93 
 94   if (addr != nullptr) {
 95     intptr_t con = 0;
 96     Node* obj = AddPNode::Ideal_base_and_offset(addr, phase, con);
 97     if (con == Type::klass_offset() && obj != nullptr) {
 98       assert(is_oop(phase, obj), "only for oop input");
 99       set_req_X(ObjOrSubKlass, obj, phase);
100       return this;
101     }
102   }
103 
104   // AllocateNode might have more accurate klass input
105   Node* allocated_klass = AllocateNode::Ideal_klass(obj_or_subklass, phase);
106   if (allocated_klass != nullptr) {
107     assert(is_oop(phase, obj_or_subklass), "only for oop input");
108     set_req_X(ObjOrSubKlass, allocated_klass, phase);
109     return this;
110   }
111 
112   // Verify that optimizing the subtype check to a simple code pattern
113   // when possible would not constant fold better
114   assert(verify(phase), "missing Value() optimization");
115 
116   return nullptr;
117 }

201           return verify_helper(phase, nkls, cached_t);
202         }
203         break;
204       }
205       case Compile::SSC_always_false:
206       case Compile::SSC_always_true:
207       default: {
208         break; // nothing to do
209       }
210     }
211   }
212 
213   return true;
214 }
215 
216 Node* SubTypeCheckNode::load_klass(PhaseGVN* phase) const {
217   Node* obj_or_subklass = in(ObjOrSubKlass);
218   const Type* sub_t = phase->type(obj_or_subklass);
219   Node* subklass = nullptr;
220   if (sub_t->isa_oopptr()) {
221     Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(Type::klass_offset())));
222     subklass  = phase->transform(LoadKlassNode::make(*phase, nullptr, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
223     record_for_cleanup(subklass, phase);
224   } else {
225     subklass = obj_or_subklass;
226   }
227   return subklass;
228 }
229 #endif
230 
231 uint SubTypeCheckNode::size_of() const {
232   return sizeof(*this);
233 }
234 
uint SubTypeCheckNode::hash() const {
  // NO_HASH opts this node out of GVN hashing, so distinct SubTypeCheck
  // nodes are never value-numbered/commoned with one another.
  return NO_HASH;
}
238 
239 #ifndef PRODUCT
240 void SubTypeCheckNode::dump_spec(outputStream* st) const {
241   if (_method != nullptr) {
242     st->print(" profiled at:");
243     _method->print_short_name(st);
244     st->print(":%d", _bci);
245   }
246 }
247 #endif
< prev index next >