/*
 * Copyright (c) 1998, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciInlineKlass.hpp"
#include "ci/ciSymbols.hpp"
#include "compiler/compileLog.hpp"
#include "oops/flatArrayKlass.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/addnode.hpp"
#include "opto/castnode.hpp"
#include "opto/inlinetypenode.hpp"
#include "opto/memnode.hpp"
#include "opto/mulnode.hpp"
#include "opto/parse.hpp"
#include "opto/rootnode.hpp"
#include "opto/runtime.hpp"
#include "runtime/sharedRuntime.hpp"

//------------------------------make_dtrace_method_entry_exit ----------------
// Dtrace -- record entry or exit of a method if compiled with dtrace support
void GraphKit::make_dtrace_method_entry_exit(ciMethod* method, bool is_entry) {
  const TypeFunc *call_type = OptoRuntime::dtrace_method_entry_exit_Type();
  address call_address = is_entry ? CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry) :
                                    CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit);
  const char *call_name = is_entry ? "dtrace_method_entry" : "dtrace_method_exit";

  // Get base of thread-local storage area
  Node* thread = _gvn.transform(new ThreadLocalNode());

  // Get method
  const TypePtr* method_type = TypeMetadataPtr::make(method);
  Node *method_node = _gvn.transform(ConNode::make(method_type));

  kill_dead_locals();

  // For some reason, this call reads only raw memory.
  const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
  make_runtime_call(RC_LEAF | RC_NARROW_MEM,
                    call_type, call_address,
                    call_name, raw_adr_type,
                    thread, method_node);
}


//=============================================================================
//------------------------------do_checkcast-----------------------------------
void Parse::do_checkcast() {
  bool will_link;
  ciKlass* klass = iter().get_klass(will_link);
  bool null_free = iter().has_Q_signature();
  Node *obj = peek();

  // Throw uncommon trap if class is not loaded or the value we are casting
  // _from_ is not loaded, and value is not null.  If the value _is_ null,
  // then the checkcast does nothing.
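  // (Per the JVMS, checkcast on a null reference always succeeds for an
  // ordinary nullable class type, so null needs no type check here. A
  // null-free cast cannot take the not-loaded path, since inline classes
  // are always loaded; the assert below verifies this.)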
  const TypeOopPtr *tp = _gvn.type(obj)->isa_oopptr();
  if (!will_link || (tp && !tp->is_loaded())) {
    assert(!null_free, "Inline type should be loaded");
    if (C->log() != nullptr) {
      if (!will_link) {
        C->log()->elem("assert_null reason='checkcast' klass='%d'",
                       C->log()->identify(klass));
      }
      if (tp && !tp->is_loaded()) {
        // %%% Cannot happen?
        ciKlass* klass = tp->unloaded_klass();
        C->log()->elem("assert_null reason='checkcast source' klass='%d'",
                       C->log()->identify(klass));
      }
    }
    null_assert(obj);
    assert(stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null");
    return;
  }

  Node* res = gen_checkcast(obj, makecon(TypeKlassPtr::make(klass, Type::trust_interfaces)), nullptr, null_free);
  if (stopped()) {
    return;
  }

  // Pop from stack AFTER gen_checkcast because it can uncommon trap and
  // the debug info has to be correct.
  pop();
  push(res);
}


//------------------------------do_instanceof----------------------------------
void Parse::do_instanceof() {
  if (stopped()) return;
  // We would like to return false if class is not loaded, emitting a
  // dependency, but Java requires instanceof to load its operand.

  // Throw uncommon trap if class is not loaded
  bool will_link;
  ciKlass* klass = iter().get_klass(will_link);

  if (!will_link) {
    if (C->log() != nullptr) {
      C->log()->elem("assert_null reason='instanceof' klass='%d'",
                     C->log()->identify(klass));
    }
    null_assert(peek());
    assert(stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null");
    if (!stopped()) {
      // The object is now known to be null.
      // Shortcut the effect of gen_instanceof and return "false" directly.
      pop();                   // pop the null
      push(_gvn.intcon(0));    // push false answer
    }
    return;
  }

  // Push the bool result back on stack
  Node* res = gen_instanceof(peek(), makecon(TypeKlassPtr::make(klass, Type::trust_interfaces)), true);

  // Pop from stack AFTER gen_instanceof because it can uncommon trap.
  pop();
  push(res);
}

//------------------------------array_store_check------------------------------
// pull array from stack and check that the store is valid
Node* Parse::array_store_check(Node*& adr, const Type*& elemtype) {
  // Shorthand access to array store elements without popping them.
  Node *obj = peek(0);
  Node *idx = peek(1);
  Node *ary = peek(2);

  if (_gvn.type(obj) == TypePtr::NULL_PTR) {
    // There's never a type check on null values.
    // This cutout lets us avoid the uncommon_trap(Reason_array_check)
    // below, which turns into a performance liability if the
    // gen_checkcast folds up completely.
    if (_gvn.type(ary)->is_aryptr()->is_null_free()) {
      null_check(obj);
    }
    return obj;
  }

  // Extract the array klass type
  Node* array_klass = load_object_klass(ary);
  // Get the array klass
  const TypeKlassPtr* tak = _gvn.type(array_klass)->is_klassptr();

  // The type of array_klass is usually INexact array-of-oop.  Heroically
  // cast array_klass to EXACT array and uncommon-trap if the cast fails.
  // Make constant out of the inexact array klass, but use it only if the cast
  // succeeds.
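  // Three sources for an exact array type are tried below, in order: the
  // speculative type attached to 'ary', the array access profile at this
  // bci (UseArrayLoadStoreProfile), and finally the heroic assumption that
  // the statically known array klass is already exact (Reason_array_check).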
  bool always_see_exact_class = false;
  if (MonomorphicArrayCheck && !tak->klass_is_exact()) {
    // Make a constant out of the inexact array klass
    const TypeKlassPtr* extak = nullptr;
    const TypeOopPtr* ary_t = _gvn.type(ary)->is_oopptr();
    ciKlass* ary_spec = ary_t->speculative_type();
    Deoptimization::DeoptReason reason = Deoptimization::Reason_none;
    // Try to cast the array to an exact type from profile data. First
    // check the speculative type.
    if (ary_spec != nullptr && !too_many_traps(Deoptimization::Reason_speculate_class_check)) {
      extak = TypeKlassPtr::make(ary_spec);
      reason = Deoptimization::Reason_speculate_class_check;
    } else if (UseArrayLoadStoreProfile) {
      // No speculative type: check profile data at this bci.
      reason = Deoptimization::Reason_class_check;
      if (!too_many_traps(reason)) {
        ciKlass* array_type = nullptr;
        ciKlass* element_type = nullptr;
        ProfilePtrKind element_ptr = ProfileMaybeNull;
        bool flat_array = true;
        bool null_free_array = true;
        method()->array_access_profiled_type(bci(), array_type, element_type, element_ptr, flat_array, null_free_array);
        if (array_type != nullptr) {
          extak = TypeKlassPtr::make(array_type);
        }
      }
    } else if (!too_many_traps(Deoptimization::Reason_array_check) && tak != TypeInstKlassPtr::OBJECT) {
      // If the compiler has determined that the type of array 'ary' (represented
      // by 'array_klass') is java/lang/Object, the compiler must not assume that
      // the array 'ary' is monomorphic.
      //
      // If 'ary' were of type java/lang/Object, this arraystore would have to fail,
      // because it is not possible to perform an arraystore into an object that is not
      // a "proper" array.
      //
      // Therefore, let's obtain at runtime the type of 'ary' and check if we can still
      // successfully perform the store.
      //
      // The implementation reasons for the condition are the following:
      //
      // java/lang/Object is the superclass of all arrays, but it is represented by the VM
      // as an InstanceKlass. The checks generated by gen_checkcast() (see below) expect
      // 'array_klass' to be ObjArrayKlass, which can result in invalid memory accesses.
      //
      // See issue JDK-8057622 for details.
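      // Having excluded java/lang/Object above, it is safe to assume the
      // statically known array klass is the exact runtime klass and let the
      // uncommon trap below recover if that assumption turns out wrong.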
      extak = tak->cast_to_exactness(true);
      reason = Deoptimization::Reason_array_check;
    }
    if (extak != nullptr && extak->exact_klass(true) != nullptr) {
      Node* con = makecon(extak);
      Node* cmp = _gvn.transform(new CmpPNode(array_klass, con));
      Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
      // Only do it if the check does not always pass/fail
      if (!bol->is_Con()) {
        always_see_exact_class = true;
        { BuildCutout unless(this, bol, PROB_MAX);
          uncommon_trap(reason,
                        Deoptimization::Action_maybe_recompile,
                        extak->exact_klass());
        }
        // Cast array klass to exactness
        replace_in_map(array_klass, con);
        array_klass = con;
        Node* cast = _gvn.transform(new CheckCastPPNode(control(), ary, extak->as_instance_type()));
        replace_in_map(ary, cast);
        ary = cast;

        // Recompute element type and address
        const TypeAryPtr* arytype = _gvn.type(ary)->is_aryptr();
        elemtype = arytype->elem();
        adr = array_element_address(ary, idx, T_OBJECT, arytype->size(), control());

        CompileLog* log = C->log();
        if (log != nullptr) {
          log->elem("cast_up reason='monomorphic_array' from='%d' to='(exact)'",
                    log->identify(extak->exact_klass()));
        }
      }
    }
  }

  // Come here for polymorphic array klasses

  // Extract the array element class
  int element_klass_offset = in_bytes(ArrayKlass::element_klass_offset());

  Node *p2 = basic_plus_adr(array_klass, array_klass, element_klass_offset);
  // We are allowed to use the constant type only if cast succeeded. If always_see_exact_class is true,
  // we must set a control edge from the IfTrue node created by the uncommon_trap above to the
  // LoadKlassNode.
  Node* a_e_klass = _gvn.transform(LoadKlassNode::make(_gvn, always_see_exact_class ? control() : nullptr,
                                                       immutable_memory(), p2, tak));

  // If we statically know that this is an inline type array, use precise element klass for checkcast
  const TypeAryPtr* arytype = _gvn.type(ary)->is_aryptr();
  bool null_free = false;
  if (elemtype->make_ptr()->is_inlinetypeptr()) {
    // We statically know that this is an inline type array, use precise klass ptr
    null_free = arytype->is_flat() || !elemtype->make_ptr()->maybe_null();
    a_e_klass = makecon(TypeKlassPtr::make(elemtype->inline_klass()));
  }

  // Check (the hard way) and throw if not a subklass.
  return gen_checkcast(obj, a_e_klass, nullptr, null_free);
}


//------------------------------do_new-----------------------------------------
void Parse::do_new() {
  kill_dead_locals();

  bool will_link;
  ciInstanceKlass* klass = iter().get_klass(will_link)->as_instance_klass();
  assert(will_link, "_new: typeflow responsibility");
  assert(!klass->is_inlinetype(), "unexpected inline type");

  // Should throw an InstantiationError?
  if (klass->is_abstract() || klass->is_interface() ||
      klass->name() == ciSymbols::java_lang_Class() ||
      iter().is_unresolved_klass()) {
    uncommon_trap(Deoptimization::Reason_unhandled,
                  Deoptimization::Action_none,
                  klass);
    return;
  }

  if (C->needs_clinit_barrier(klass, method())) {
    clinit_barrier(klass, method());
    if (stopped()) return;
  }

  Node* kls = makecon(TypeKlassPtr::make(klass));
  Node* obj = new_instance(kls);

  // Push resultant oop onto stack
  push(obj);

  // Keep track of whether opportunities exist for StringBuilder
  // optimizations.
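  // (OptimizeStringConcat later tries to collapse StringBuilder/StringBuffer
  // append chains into a single String allocation; recording here that such
  // an object is allocated enables that pass.)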
  if (OptimizeStringConcat &&
      (klass == C->env()->StringBuilder_klass() ||
       klass == C->env()->StringBuffer_klass())) {
    C->set_has_stringbuilder(true);
  }

  // Keep track of boxed values for EliminateAutoBox optimizations.
  if (C->eliminate_boxing() && klass->is_box_klass()) {
    C->set_has_boxed_value(true);
  }
}

//------------------------------do_aconst_init---------------------------------
void Parse::do_aconst_init() {
  bool will_link;
  ciInlineKlass* vk = iter().get_klass(will_link)->as_inline_klass();
  assert(will_link && !iter().is_unresolved_klass(), "aconst_init: typeflow responsibility");

  if (C->needs_clinit_barrier(vk, method())) {
    clinit_barrier(vk, method());
    if (stopped()) return;
  }

  // Push the default value of the inline klass onto the stack
  push(InlineTypeNode::make_default(_gvn, vk));
}

//------------------------------do_withfield------------------------------------
void Parse::do_withfield() {
  bool will_link;
  ciField* field = iter().get_field(will_link);
  assert(will_link, "withfield: typeflow responsibility");
  int holder_depth = field->type()->size();
  null_check(peek(holder_depth));
  if (stopped()) {
    return;
  }
  Node* val = pop_node(field->layout_type());
  Node* holder = pop();

  if (!val->is_InlineType() && field->type()->is_inlinetype()) {
    // Scalarize inline type field value
    assert(!field->is_null_free() || !gvn().type(val)->maybe_null(), "Null store to null-free field");
    val = InlineTypeNode::make_from_oop(this, val, field->type()->as_inline_klass(), field->is_null_free());
  } else if (val->is_InlineType() && !field->is_flat()) {
    // Field value needs to be allocated because it can be merged with an oop.
    // Re-execute withfield if buffering triggers deoptimization.
    PreserveReexecuteState preexecs(this);
    jvms()->set_should_reexecute(true);
    int nargs = 1 + field->type()->size();
    inc_sp(nargs);
    val = val->as_InlineType()->buffer(this);
  }

  // Clone the inline type node and set the new field value
  InlineTypeNode* new_vt = holder->clone()->as_InlineType();
  new_vt->set_oop(gvn().zerocon(T_PRIMITIVE_OBJECT));
  new_vt->set_is_buffered(gvn(), false);
  new_vt->set_field_value_by_offset(field->offset_in_bytes(), val);
  {
    PreserveReexecuteState preexecs(this);
    jvms()->set_should_reexecute(true);
    int nargs = 1 + field->type()->size();
    inc_sp(nargs);
    new_vt = new_vt->adjust_scalarization_depth(this);
  }
  push(_gvn.transform(new_vt));
}

#ifndef PRODUCT
//------------------------------dump_map_adr_mem-------------------------------
// Debug dump of the mapping from address types to MergeMemNode indices.
void Parse::dump_map_adr_mem() const {
  tty->print_cr("--- Mapping from address types to memory Nodes ---");
  MergeMemNode *mem = map() == nullptr ? nullptr : (map()->memory()->is_MergeMem() ?
                                                    map()->memory()->as_MergeMem() : nullptr);
  for (uint i = 0; i < (uint)C->num_alias_types(); i++) {
    C->alias_type(i)->print_on(tty);
    tty->print("\t");
    // Node mapping, if any
    if (mem && i < mem->req() && mem->in(i) && mem->in(i) != mem->empty_memory()) {
      mem->in(i)->dump();
    } else {
      tty->cr();
    }
  }
}

#endif