1 /*
  2  * Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "opto/addnode.hpp"
 26 #include "opto/callnode.hpp"
 27 #include "opto/connode.hpp"
 28 #include "opto/convertnode.hpp"
 29 #include "opto/phaseX.hpp"
 30 #include "opto/rootnode.hpp"
 31 #include "opto/subnode.hpp"
 32 #include "opto/subtypenode.hpp"
 33 
// Value computation for a subtype check. Folds the check to a constant
// condition code when the type system can decide it statically:
//  - TypeInt::CC_GT: sub_t is provably NOT a subtype of super_t,
//  - TypeInt::CC_EQ: sub_t is provably a subtype of super_t,
//  - bottom_type():  undecidable at compile time, a runtime check remains.
const Type* SubTypeCheckNode::sub(const Type* sub_t, const Type* super_t) const {
  const TypeKlassPtr* super_klass_type = super_t->isa_klassptr();
  // The object input has been null-checked before this node sees it.
  assert(sub_t != Type::TOP && !TypePtr::NULL_PTR->higher_equal(sub_t), "should be not null");
  // The sub input may be either an oop or already a klass pointer; normalize
  // both cases to a klass pointer type for the checks below.
  const TypeKlassPtr* sub_klass_type = sub_t->isa_klassptr() ? sub_t->is_klassptr() : sub_t->is_oopptr()->as_klass_type();

  // Oop can't be a subtype of abstract type that has no subclass.
  if (sub_t->isa_oopptr() && super_klass_type->isa_instklassptr() && super_klass_type->klass_is_exact()) {
    ciKlass* superklass = super_klass_type->exact_klass();
    if (!superklass->is_interface() && superklass->is_abstract() &&
        !superklass->as_instance_klass()->has_subklass()) {
      // Record a leaf-type dependency so this code is invalidated if a
      // subclass of the abstract class is loaded later.
      Compile::current()->dependencies()->assert_leaf_type(superklass);
      return TypeInt::CC_GT;
    }
  }

  // FIXME: shouldn't this be encoded in helper methods of the type system (maybe_java_subtype_of() etc.?)
  // Similar to logic in CmpPNode::sub()
  bool unrelated_classes = false;

  // Handle inline type arrays
  //
  // The super klass can be an exact non-array klass constant which is known to be not flat in array (e.g. Object)
  // while the sub klass could very well be flat in array:
  //
  //           MyValue       <:       Object:exact
  //        flat in array          not flat in array
  //
  // We therefore first cast the super klass to inexact (if the class is not final itself) and recompute the flat in
  // array property for the super klass (all done in cast_to_exactness()) in order to check whether the sub klass is
  // flat in array and the super klass is not flat in array. If that's the case, the classes must be unrelated.
  const TypeKlassPtr* super_klass_type_for_flat_in_array = super_klass_type;
  if (super_klass_type->isa_instklassptr()) {
    // Only relevant for TypeInstKlassPtr. TypeAryKlassPtr will always be not flat in array.
    super_klass_type_for_flat_in_array = super_klass_type->cast_to_exactness(false);
  }

  if ((sub_klass_type->is_flat_in_array() && super_klass_type_for_flat_in_array->is_not_flat_in_array()) ||
      (super_klass_type_for_flat_in_array->is_flat_in_array() && sub_klass_type->is_not_flat_in_array())) {
    // The subtype is flat in array and the supertype is not in flat array or vice versa. Cannot subtype and thus unrelated.
    unrelated_classes = true;
  } else if (sub_klass_type->is_not_flat() && super_klass_type->is_flat()) {
    // The subtype is a non-flat array and the supertype is a flat array. Must be unrelated.
    unrelated_classes = true;
  } else if (sub_klass_type->is_not_null_free() && super_klass_type->is_null_free()) {
    // The subtype is a nullable array and the supertype is null-free array. Must be unrelated.
    unrelated_classes = true;
  }
  if (unrelated_classes) {
    // Only fold to "always false" when the join of the pointer kinds also
    // rules out the null/bottom cases (mirrors the logic in CmpPNode::sub()).
    TypePtr::PTR jp = sub_t->is_ptr()->join_ptr(super_t->is_ptr()->_ptr);
    if (jp != TypePtr::Null && jp != TypePtr::BotPTR) {
      return TypeInt::CC_GT;
    }
  }

  // Let the shared static subtype analysis decide the remaining cases.
  switch (Compile::current()->static_subtype_check(super_klass_type, sub_klass_type, false)) {
    case Compile::SSC_always_false:
      return TypeInt::CC_GT;
    case Compile::SSC_always_true:
      return TypeInt::CC_EQ;
    case Compile::SSC_easy_test:
    case Compile::SSC_full_test:
      break; // not statically decidable; a runtime test is required
    default:
      ShouldNotReachHere();
  }

  return bottom_type();
}
102 
// Idealize the subtype check by strength-reducing its object/subklass input:
// - If the input is a klass loaded from an object's header (LoadKlass, or
//   LoadNKlass behind a DecodeNKlass, at the klass offset), replace it with
//   the object itself; the object's type may sharpen later and the klass can
//   be re-derived.
// - If the object comes from an AllocateNode, use the allocation's klass
//   input directly, which may be more precise.
// Returns this (modified in place) on change, nullptr otherwise.
Node *SubTypeCheckNode::Ideal(PhaseGVN* phase, bool can_reshape) {
  Node* obj_or_subklass = in(ObjOrSubKlass);
  Node* superklass = in(SuperKlass);

  if (obj_or_subklass == nullptr ||
      superklass == nullptr) {
    return nullptr;
  }

  const Type* sub_t = phase->type(obj_or_subklass);
  const Type* super_t = phase->type(superklass);

  // Bail out unless the inputs have the expected klass/oop types (e.g. during
  // transient states while the graph is being transformed).
  if (!super_t->isa_klassptr() ||
      (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
    return nullptr;
  }

  // Recognize a (possibly compressed) klass load and recover the address it
  // loads from.
  Node* addr = nullptr;
  if (obj_or_subklass->is_DecodeNKlass()) {
    if (obj_or_subklass->in(1) != nullptr &&
        obj_or_subklass->in(1)->Opcode() == Op_LoadNKlass) {
      addr = obj_or_subklass->in(1)->in(MemNode::Address);
    }
  } else if (obj_or_subklass->Opcode() == Op_LoadKlass) {
    addr = obj_or_subklass->in(MemNode::Address);
  }

  if (addr != nullptr) {
    intptr_t con = 0;
    Node* obj = AddPNode::Ideal_base_and_offset(addr, phase, con);
    // Only a load from the object's klass field qualifies.
    if (con == oopDesc::klass_offset_in_bytes() && obj != nullptr) {
      assert(is_oop(phase, obj), "only for oop input");
      set_req_X(ObjOrSubKlass, obj, phase);
      return this;
    }
  }

  // AllocateNode might have more accurate klass input
  Node* allocated_klass = AllocateNode::Ideal_klass(obj_or_subklass, phase);
  if (allocated_klass != nullptr) {
    assert(is_oop(phase, obj_or_subklass), "only for oop input");
    set_req_X(ObjOrSubKlass, allocated_klass, phase);
    return this;
  }

  // Verify that optimizing the subtype check to a simple code pattern
  // when possible would not constant fold better
  assert(verify(phase), "missing Value() optimization");

  return nullptr;
}
154 
155 #ifdef ASSERT
156 bool SubTypeCheckNode::is_oop(PhaseGVN* phase, Node* n) {
157     const Type* t = phase->type(n);
158     if (!t->isa_oopptr() && t != Type::TOP) {
159       n->dump();
160       t->dump(); tty->cr();
161       return false;
162     }
163     return true;
164 }
165 
166 static Node* record_for_cleanup(Node* n, PhaseGVN* phase) {
167   if (phase->is_IterGVN()) {
168     phase->is_IterGVN()->_worklist.push(n); // record for cleanup
169   }
170   return n;
171 }
// Cross-check the constant folding of this SubTypeCheck (via Value()) against
// an equivalent CmpP(subklass, superklass). Returns false — after dumping
// diagnostics — when the CmpP folds to a constant (CC_GT/CC_EQ) but Value()
// did not, which indicates a missing Value() optimization. cached_t is the
// Value() result captured earlier by the caller; a mismatch means the graph
// changed underneath us and the comparison is no longer meaningful.
bool SubTypeCheckNode::verify_helper(PhaseGVN* phase, Node* subklass, const Type* cached_t) {
  Node* cmp_orig = new CmpPNode(subklass, in(SuperKlass));
  Node* cmp = phase->transform(cmp_orig);
  record_for_cleanup(cmp, phase);

  const Type* cmp_t = phase->type(cmp);
  const Type* t = Value(phase);

  // OK if: both fold the same, or the observations went stale, or the CmpP
  // does not fold to a constant either.
  if (t == cmp_t ||
      t != cached_t || // previous observations don't hold anymore
      (cmp_t != TypeInt::CC_GT && cmp_t != TypeInt::CC_EQ)) {
    return true;
  } else {
    // CmpP folded but Value() did not: dump both sides and the whole graph.
    t->dump(); tty->cr();
    this->dump(2); tty->cr();
    tty->print_cr("VS.\n");
    cmp_t->dump(); tty->cr();
    cmp_orig->dump(2); tty->cr();
    tty->print_cr("==============================\n");
    phase->C->root()->dump(9999);
    return false;
  }
}
195 
196 // Verify that optimizing the subtype check to a simple code pattern when possible would not constant fold better.
// Rebuild the simple code pattern the subtype check would be expanded to
// (SSC_easy_test: compare against the loaded klass; SSC_full_test: compare
// against the klass loaded at the super_check_offset, when that offset is
// known not to be the secondary-super cache) and verify via verify_helper()
// that this node's Value() constant folds at least as well as that pattern.
// Returns true when verification passes or does not apply.
bool SubTypeCheckNode::verify(PhaseGVN* phase) {
  Compile* C = phase->C;
  Node* obj_or_subklass = in(ObjOrSubKlass);
  Node* superklass = in(SuperKlass);

  const Type* sub_t = phase->type(obj_or_subklass);
  const Type* super_t = phase->type(superklass);

  const TypeKlassPtr* superk = super_t->isa_klassptr();
  const TypeKlassPtr* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr() : sub_t->is_oopptr()->as_klass_type();

  // Only constant superklasses can be checked with a simple pattern.
  if (super_t->singleton() && subk != nullptr) {
    if (obj_or_subklass->bottom_type() == Type::TOP) {
      // The bottom type of obj_or_subklass is TOP, despite its recorded type
      // being an OOP or a klass pointer. This can happen for example in
      // transient scenarios where obj_or_subklass is a projection of the TOP
      // node. In such cases, skip verification to avoid violating the contract
      // of LoadKlassNode::make(). This does not weaken the effect of verify(),
      // as SubTypeCheck nodes with TOP obj_or_subklass inputs are dead anyway.
      return true;
    }
    const Type* cached_t = Value(phase); // cache the type to validate consistency
    switch (C->static_subtype_check(superk, subk)) {
      case Compile::SSC_easy_test: {
        // Easy test: directly compare the (loaded) subklass to the superklass.
        return verify_helper(phase, load_klass(phase), cached_t);
      }
      case Compile::SSC_full_test: {
        // Load the super_check_offset from the superklass.
        Node* p1 = phase->transform(new AddPNode(C->top(), superklass, phase->MakeConX(in_bytes(Klass::super_check_offset_offset()))));
        Node* chk_off = phase->transform(new LoadINode(nullptr, C->immutable_memory(), p1, phase->type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
        record_for_cleanup(chk_off, phase);

        // If the offset might designate the secondary-super cache, the check
        // cannot be reduced to a single klass compare; skip verification.
        int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset());
        bool might_be_cache = phase->find_int_con(chk_off, cacheoff_con) == cacheoff_con;
        if (!might_be_cache) {
          Node* subklass = load_klass(phase);
          Node* chk_off_X = chk_off;
#ifdef _LP64
          chk_off_X = phase->transform(new ConvI2LNode(chk_off_X));
#endif
          // Load the klass found at subklass + super_check_offset and compare
          // it against the superklass.
          Node* p2 = phase->transform(new AddPNode(C->top(), subklass, chk_off_X));
          Node* nkls = phase->transform(LoadKlassNode::make(*phase, C->immutable_memory(), p2, phase->type(p2)->is_ptr(), TypeInstKlassPtr::OBJECT_OR_NULL));

          return verify_helper(phase, nkls, cached_t);
        }
        break;
      }
      case Compile::SSC_always_false:
      case Compile::SSC_always_true:
      default: {
        break; // nothing to do
      }
    }
  }

  return true;
}
253 
254 Node* SubTypeCheckNode::load_klass(PhaseGVN* phase) const {
255   Node* obj_or_subklass = in(ObjOrSubKlass);
256   const Type* sub_t = phase->type(obj_or_subklass);
257   Node* subklass = nullptr;
258   if (sub_t->isa_oopptr()) {
259     Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
260     subklass  = phase->transform(LoadKlassNode::make(*phase, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
261     record_for_cleanup(subklass, phase);
262   } else {
263     subklass = obj_or_subklass;
264   }
265   return subklass;
266 }
267 #endif
268 
// Report the concrete size of this node; required because SubTypeCheckNode
// carries extra state beyond the base class (see _method/_bci in dump_spec).
uint SubTypeCheckNode::size_of() const {
  return sizeof(*this);
}
272 
// NO_HASH opts this node out of the GVN hash table, so subtype checks are
// never commoned by value numbering.
uint SubTypeCheckNode::hash() const {
  return NO_HASH;
}
276 
277 #ifndef PRODUCT
// Print the profiling location (method and bci) this check was created for,
// if one was recorded.
void SubTypeCheckNode::dump_spec(outputStream* st) const {
  if (_method != nullptr) {
    st->print(" profiled at:");
    _method->print_short_name(st);
    st->print(":%d", _bci);
  }
}
285 #endif