1 /*
  2  * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "opto/addnode.hpp"
 27 #include "opto/callnode.hpp"
 28 #include "opto/connode.hpp"
 29 #include "opto/convertnode.hpp"
 30 #include "opto/phaseX.hpp"
 31 #include "opto/rootnode.hpp"
 32 #include "opto/subnode.hpp"
 33 #include "opto/subtypenode.hpp"
 34 
const Type* SubTypeCheckNode::sub(const Type* sub_t, const Type* super_t) const {
  // Value computation for the subtype check, expressed as a condition-code
  // type (like CmpNode): CC_GT means the check statically always fails,
  // CC_EQ means it statically always succeeds, and bottom_type() means the
  // outcome cannot be determined at compile time.
  const TypeKlassPtr* superk = super_t->isa_klassptr();
  // The subtype input is either already a klass pointer or an oop whose
  // klass type is derived from its oop type.
  const TypeKlassPtr* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr() : sub_t->is_oopptr()->as_klass_type();

  // Oop can't be a subtype of abstract type that has no subclass.
  if (sub_t->isa_oopptr() && superk->isa_instklassptr() && superk->klass_is_exact()) {
    ciKlass* superklass = superk->exact_klass();
    if (!superklass->is_interface() && superklass->is_abstract() &&
        !superklass->as_instance_klass()->has_subklass()) {
      // Record a dependency so the compiled code is invalidated if a
      // subclass of the abstract type gets loaded later.
      Compile::current()->dependencies()->assert_leaf_type(superklass);
      return TypeInt::CC_GT;
    }
  }

  // Similar to logic in CmpPNode::sub()
  // NOTE(review): subk is dereferenced here but only NULL-checked further
  // down (before static_subtype_check) -- presumably as_klass_type()/
  // is_klassptr() never yield NULL on this path; confirm.
  bool unrelated_classes = false;
  // Handle inline type arrays
  if (subk->flatten_array() && superk->not_flatten_array()) {
    // The subtype is flattened in arrays and the supertype is not flattened in arrays. Must be unrelated.
    unrelated_classes = true;
  } else if (subk->is_not_flat() && superk->is_flat()) {
    // The subtype is a non-flattened array and the supertype is a flattened array. Must be unrelated.
    unrelated_classes = true;
  } else if (subk->is_not_null_free() && superk->is_null_free()) {
    // The subtype is a nullable array and the supertype is null-free array. Must be unrelated.
    unrelated_classes = true;
  }
  if (unrelated_classes) {
    // Even for unrelated classes we can only fold the check if the subtype
    // input cannot be NULL: join the pointer kinds to rule that out.
    TypePtr::PTR jp = sub_t->is_ptr()->join_ptr(super_t->is_ptr()->_ptr);
    if (jp != TypePtr::Null && jp != TypePtr::BotPTR) {
      return TypeInt::CC_GT;
    }
  }

  if (subk != NULL) {
    switch (Compile::current()->static_subtype_check(superk, subk)) {
      case Compile::SSC_always_false:
        return TypeInt::CC_GT;
      case Compile::SSC_always_true:
        return TypeInt::CC_EQ;
      case Compile::SSC_easy_test:
      case Compile::SSC_full_test:
        // Outcome not statically known; a runtime test is required.
        break;
      default:
        ShouldNotReachHere();
    }
  }

  return bottom_type();
}
 85 
Node *SubTypeCheckNode::Ideal(PhaseGVN* phase, bool can_reshape) {
  // Idealization: replace the ObjOrSubKlass input with a simpler or more
  // precise node when possible so the check can fold or expand to better
  // code. Returns 'this' when an input was changed, NULL otherwise.
  Node* obj_or_subklass = in(ObjOrSubKlass);
  Node* superklass = in(SuperKlass);

  if (obj_or_subklass == NULL ||
      superklass == NULL) {
    return NULL;
  }

  const Type* sub_t = phase->type(obj_or_subklass);
  const Type* super_t = phase->type(superklass);

  // Bail out unless the inputs have the pointer types this node expects.
  if (!super_t->isa_klassptr() ||
      (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
    return NULL;
  }

  // If the subklass input is a klass load from an object header, find the
  // address it loads from (looking through a DecodeNKlass when compressed
  // class pointers are in use).
  Node* addr = NULL;
  if (obj_or_subklass->is_DecodeNKlass()) {
    if (obj_or_subklass->in(1) != NULL &&
        obj_or_subklass->in(1)->Opcode() == Op_LoadNKlass) {
      addr = obj_or_subklass->in(1)->in(MemNode::Address);
    }
  } else if (obj_or_subklass->Opcode() == Op_LoadKlass) {
    addr = obj_or_subklass->in(MemNode::Address);
  }

  if (addr != NULL) {
    intptr_t con = 0;
    Node* obj = AddPNode::Ideal_base_and_offset(addr, phase, con);
    // The load reads obj's klass field: feed the object itself into the
    // check instead of the loaded klass.
    if (con == oopDesc::klass_offset_in_bytes() && obj != NULL) {
      assert(is_oop(phase, obj), "only for oop input");
      set_req_X(ObjOrSubKlass, obj, phase);
      return this;
    }
  }

  // AllocateNode might have more accurate klass input
  Node* allocated_klass = AllocateNode::Ideal_klass(obj_or_subklass, phase);
  if (allocated_klass != NULL) {
    assert(is_oop(phase, obj_or_subklass), "only for oop input");
    set_req_X(ObjOrSubKlass, allocated_klass, phase);
    return this;
  }

  // Verify that optimizing the subtype check to a simple code pattern
  // when possible would not constant fold better
  assert(verify(phase), "missing Value() optimization");

  return NULL;
}
137 
138 #ifdef ASSERT
139 bool SubTypeCheckNode::is_oop(PhaseGVN* phase, Node* n) {
140     const Type* t = phase->type(n);
141     if (!t->isa_oopptr() && t != Type::TOP) {
142       n->dump();
143       t->dump(); tty->cr();
144       return false;
145     }
146     return true;
147 }
148 
149 static Node* record_for_cleanup(Node* n, PhaseGVN* phase) {
150   if (phase->is_IterGVN()) {
151     phase->is_IterGVN()->_worklist.push(n); // record for cleanup
152   }
153   return n;
154 }
bool SubTypeCheckNode::verify_helper(PhaseGVN* phase, Node* subklass, const Type* cached_t) {
  // Compare the precision of this node's Value() against the type of the
  // CmpP the expanded (optimized) code shape would use. Returns false --
  // after dumping diagnostic state -- if the CmpP constant-folds while
  // Value() does not, which would indicate a missed optimization in sub().
  Node* cmp = phase->transform(new CmpPNode(subklass, in(SuperKlass)));
  record_for_cleanup(cmp, phase);

  const Type* cmp_t = phase->type(cmp);
  const Type* t = Value(phase);

  if (t == cmp_t ||
      t != cached_t || // previous observations don't hold anymore
      (cmp_t != TypeInt::CC_GT && cmp_t != TypeInt::CC_EQ)) {
    // OK: either we're as precise as the compare, the graph changed under
    // us, or the compare didn't fold to a constant outcome either.
    return true;
  } else {
    // Missed optimization: dump this node, the compare, and the whole
    // graph for diagnosis.
    t->dump(); tty->cr();
    this->dump(2); tty->cr();
    cmp_t->dump(); tty->cr();
    subklass->dump(2); tty->cr();
    tty->print_cr("==============================");
    phase->C->root()->dump(9999);
    return false;
  }
}
176 
// Verify that optimizing the subtype check to a simple code pattern when possible would not constant fold better.
// Mirrors the node patterns that expansion of the check would emit and uses
// verify_helper() to compare their folding power against this node's Value().
bool SubTypeCheckNode::verify(PhaseGVN* phase) {
  Compile* C = phase->C;
  Node* obj_or_subklass = in(ObjOrSubKlass);
  Node* superklass = in(SuperKlass);

  const Type* sub_t = phase->type(obj_or_subklass);
  const Type* super_t = phase->type(superklass);

  const TypeKlassPtr* superk = super_t->isa_klassptr();
  const TypeKlassPtr* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr() : sub_t->is_oopptr()->as_klass_type();

  // Only worth verifying when the superklass is a constant and the subtype
  // side has a usable klass type.
  if (super_t->singleton() && subk != NULL) {
    Node* subklass = NULL;
    if (sub_t->isa_oopptr()) {
      // Input is an object: materialize the klass load from its header,
      // as the expanded check would.
      Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
      subklass  = phase->transform(LoadKlassNode::make(*phase, NULL, C->immutable_memory(), adr, TypeInstPtr::KLASS));
      record_for_cleanup(subklass, phase);
    } else {
      subklass = obj_or_subklass;
    }

    const Type* cached_t = Value(phase); // cache the type to validate consistency
    switch (C->static_subtype_check(superk, subk)) {
      case Compile::SSC_easy_test: {
        // Easy test expands to a single pointer compare against the
        // superklass.
        return verify_helper(phase, subklass, cached_t);
      }
      case Compile::SSC_full_test: {
        // Full test first loads super_check_offset from the superklass...
        Node* p1 = phase->transform(new AddPNode(superklass, superklass, phase->MakeConX(in_bytes(Klass::super_check_offset_offset()))));
        Node* chk_off = phase->transform(new LoadINode(NULL, C->immutable_memory(), p1, phase->type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
        record_for_cleanup(chk_off, phase);

        // ...then, unless that offset might designate the (mutable)
        // secondary super cache, compares the klass found at the offset
        // inside the subklass against the superklass.
        int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset());
        bool might_be_cache = (phase->find_int_con(chk_off, cacheoff_con) == cacheoff_con);
        if (!might_be_cache) {
          Node* chk_off_X = chk_off;
#ifdef _LP64
          // Offsets are used as 64-bit pointer arithmetic on LP64.
          chk_off_X = phase->transform(new ConvI2LNode(chk_off_X));
#endif
          Node* p2 = phase->transform(new AddPNode(subklass, subklass, chk_off_X));
          Node* nkls = phase->transform(LoadKlassNode::make(*phase, NULL, C->immutable_memory(), p2, phase->type(p2)->is_ptr(), TypeInstKlassPtr::OBJECT_OR_NULL));

          return verify_helper(phase, nkls, cached_t);
        }
        break;
      }
      case Compile::SSC_always_false:
      case Compile::SSC_always_true:
      default: {
        break; // nothing to do
      }
    }
  }

  return true;
}
233 #endif