/*
 * Copyright (c) 1998, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciInlineKlass.hpp"
#include "ci/ciSymbols.hpp"
#include "compiler/compileLog.hpp"
#include "oops/flatArrayKlass.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/addnode.hpp"
#include "opto/castnode.hpp"
#include "opto/inlinetypenode.hpp"
#include "opto/memnode.hpp"
#include "opto/mulnode.hpp"
#include "opto/parse.hpp"
#include "opto/rootnode.hpp"
#include "opto/runtime.hpp"
#include "runtime/sharedRuntime.hpp"

//------------------------------make_dtrace_method_entry_exit ----------------
// Dtrace -- record entry or exit of a method if compiled with dtrace support
void GraphKit::make_dtrace_method_entry_exit(ciMethod* method, bool is_entry) {
  const TypeFunc *call_type    = OptoRuntime::dtrace_method_entry_exit_Type();
  address         call_address = is_entry ? CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry) :
                                            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit);
  const char     *call_name    = is_entry ? "dtrace_method_entry" : "dtrace_method_exit";

  // Get base of thread-local storage area
  Node* thread = _gvn.transform( new ThreadLocalNode() );

  // Get method
  const TypePtr* method_type = TypeMetadataPtr::make(method);
  Node *method_node = _gvn.transform(ConNode::make(method_type));

  kill_dead_locals();

  // For some reason, this call reads only raw memory.
  const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
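  // Leaf runtime call: pass the current JavaThread* and the Method* constant
  // to the dtrace hook. RC_NARROW_MEM restricts the call's memory effect to
  // the raw slice selected above, so other memory slices are untouched.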
  make_runtime_call(RC_LEAF | RC_NARROW_MEM,
                    call_type, call_address,
                    call_name, raw_adr_type,
                    thread, method_node);
}


//=============================================================================
//------------------------------do_checkcast-----------------------------------
void Parse::do_checkcast() {
  bool will_link;
  ciKlass* klass = iter().get_klass(will_link);
  Node *obj = peek();
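  // A checkcast bytecode corresponds to a source-level cast such as "(Foo) obj";
  // per the JLS, a null operand always passes the check.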

  // Throw uncommon trap if class is not loaded or the value we are casting
  // _from_ is not loaded, and value is not null.  If the value _is_ null,
  // then the checkcast does nothing.
  const TypeOopPtr *tp = _gvn.type(obj)->isa_oopptr();
  if (!will_link || (tp && !tp->is_loaded())) {
    if (C->log() != nullptr) {
      if (!will_link) {
        C->log()->elem("assert_null reason='checkcast' klass='%d'",
                       C->log()->identify(klass));
      }
      if (tp && !tp->is_loaded()) {
        // %%% Cannot happen?
        ciKlass* klass = tp->unloaded_klass();
        C->log()->elem("assert_null reason='checkcast source' klass='%d'",
                       C->log()->identify(klass));
      }
    }
    null_assert(obj);
    assert( stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null" );
    return;
  }

  Node* res = gen_checkcast(obj, makecon(TypeKlassPtr::make(klass, Type::trust_interfaces)));
  if (stopped()) {
    return;
  }

  // Pop from stack AFTER gen_checkcast because it can uncommon trap and
  // the debug info has to be correct.
  pop();
  push(res);
}


//------------------------------do_instanceof----------------------------------
void Parse::do_instanceof() {
  if (stopped())  return;
  // We would like to return false if class is not loaded, emitting a
  // dependency, but Java requires instanceof to load its operand.
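  // Note that "x instanceof T" is false when x is null, which is why the
  // not-loaded path below can push a constant false once the operand is
  // known to be null.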

  // Throw uncommon trap if class is not loaded
  bool will_link;
  ciKlass* klass = iter().get_klass(will_link);

  if (!will_link) {
    if (C->log() != nullptr) {
      C->log()->elem("assert_null reason='instanceof' klass='%d'",
                     C->log()->identify(klass));
    }
    null_assert(peek());
    assert( stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null" );
    if (!stopped()) {
      // The object is now known to be null.
      // Shortcut the effect of gen_instanceof and return "false" directly.
      pop();                   // pop the null
      push(_gvn.intcon(0));    // push false answer
    }
    return;
  }

  // Push the bool result back on stack
  Node* res = gen_instanceof(peek(), makecon(TypeKlassPtr::make(klass, Type::trust_interfaces)), true);

  // Pop from stack AFTER gen_instanceof because it can uncommon trap.
  pop();
  push(res);
}

//------------------------------array_store_check------------------------------
// pull array from stack and check that the store is valid
Node* Parse::array_store_check(Node*& adr, const Type*& elemtype) {
  // Shorthand access to array store elements without popping them.
  Node *obj = peek(0);
  Node *idx = peek(1);
  Node *ary = peek(2);
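  // The check below guards covariant array stores, e.g.:
  //   Object[] a = new String[1];
  //   a[0] = Integer.valueOf(42);   // must throw ArrayStoreException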

  if (_gvn.type(obj) == TypePtr::NULL_PTR) {
    // There's never a type check on null values.
    // This cutout lets us avoid the uncommon_trap(Reason_array_check)
    // below, which turns into a performance liability if the
    // gen_checkcast folds up completely.
    if (_gvn.type(ary)->is_aryptr()->is_null_free()) {
      null_check(obj);
    }
    return obj;
  }

  // Extract the array klass type
  Node* array_klass = load_object_klass(ary);
  // Get the array klass
  const TypeKlassPtr* tak = _gvn.type(array_klass)->is_klassptr();

  // The type of array_klass is usually INexact array-of-oop.  Heroically
  // cast array_klass to EXACT array and uncommon-trap if the cast fails.
  // Make constant out of the inexact array klass, but use it only if the cast
  // succeeds.
  bool always_see_exact_class = false;
  if (MonomorphicArrayCheck && !tak->klass_is_exact()) {
    // Make a constant out of the inexact array klass
    const TypeKlassPtr* extak = nullptr;
    const TypeOopPtr* ary_t = _gvn.type(ary)->is_oopptr();
    ciKlass* ary_spec = ary_t->speculative_type();
    Deoptimization::DeoptReason reason = Deoptimization::Reason_none;
    // Try to cast the array to an exact type from profile data. First
    // check the speculative type.
    if (ary_spec != nullptr && !too_many_traps(Deoptimization::Reason_speculate_class_check)) {
      extak = TypeKlassPtr::make(ary_spec);
      reason = Deoptimization::Reason_speculate_class_check;
    } else if (UseArrayLoadStoreProfile) {
      // No speculative type: check profile data at this bci.
      reason = Deoptimization::Reason_class_check;
      if (!too_many_traps(reason)) {
        ciKlass* array_type = nullptr;
        ciKlass* element_type = nullptr;
        ProfilePtrKind element_ptr = ProfileMaybeNull;
        bool flat_array = true;
        bool null_free_array = true;
        method()->array_access_profiled_type(bci(), array_type, element_type, element_ptr, flat_array, null_free_array);
        if (array_type != nullptr) {
          extak = TypeKlassPtr::make(array_type);
        }
      }
    } else if (!too_many_traps(Deoptimization::Reason_array_check) && tak != TypeInstKlassPtr::OBJECT) {
      // If the compiler has determined that the type of array 'ary' (represented
      // by 'array_klass') is java/lang/Object, the compiler must not assume that
      // the array 'ary' is monomorphic.
      //
      // If 'ary' were of type java/lang/Object, this arraystore would have to fail,
      // because it is not possible to perform an arraystore into an object that is not
      // a "proper" array.
      //
      // Therefore, let's obtain at runtime the type of 'ary' and check if we can still
      // successfully perform the store.
      //
      // The implementation reasons for the condition are the following:
      //
      // java/lang/Object is the superclass of all arrays, but it is represented by the VM
      // as an InstanceKlass. The checks generated by gen_checkcast() (see below) expect
      // 'array_klass' to be ObjArrayKlass, which can result in invalid memory accesses.
      //
      // See issue JDK-8057622 for details.
      extak = tak->cast_to_exactness(true);
      reason = Deoptimization::Reason_array_check;
    }
    if (extak != nullptr && extak->exact_klass(true) != nullptr) {
      Node* con = makecon(extak);
      Node* cmp = _gvn.transform(new CmpPNode(array_klass, con));
      Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
      // Only do it if the check does not always pass/fail
      if (!bol->is_Con()) {
        always_see_exact_class = true;
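        // Guard the assumption: if the runtime array klass differs from the
        // expected constant, take an uncommon trap and possibly recompile
        // without it.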
        { BuildCutout unless(this, bol, PROB_MAX);
          uncommon_trap(reason,
                        Deoptimization::Action_maybe_recompile,
                        extak->exact_klass());
        }
        // Cast array klass to exactness
        replace_in_map(array_klass, con);
        array_klass = con;
        Node* cast = _gvn.transform(new CheckCastPPNode(control(), ary, extak->as_instance_type()));
        replace_in_map(ary, cast);
        ary = cast;

        // Recompute element type and address
        const TypeAryPtr* arytype = _gvn.type(ary)->is_aryptr();
        elemtype = arytype->elem();
        adr = array_element_address(ary, idx, T_OBJECT, arytype->size(), control());

        CompileLog* log = C->log();
        if (log != nullptr) {
          log->elem("cast_up reason='monomorphic_array' from='%d' to='(exact)'",
                    log->identify(extak->exact_klass()));
        }
      }
    }
  }

  // Come here for polymorphic array klasses

  // Extract the array element class
  int element_klass_offset = in_bytes(ArrayKlass::element_klass_offset());

  Node *p2 = basic_plus_adr(array_klass, array_klass, element_klass_offset);
  // We are allowed to use the constant type only if cast succeeded. If always_see_exact_class is true,
  // we must set a control edge from the IfTrue node created by the uncommon_trap above to the
  // LoadKlassNode.
  Node* a_e_klass = _gvn.transform(LoadKlassNode::make(_gvn, always_see_exact_class ? control() : nullptr,
                                                       immutable_memory(), p2, tak));
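  // The element klass of an array klass never changes once the klass is
  // created, so the load above can go through immutable memory.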

  // If we statically know that this is an inline type array, use precise element klass for checkcast
  const TypeAryPtr* arytype = _gvn.type(ary)->is_aryptr();
  bool null_free = false;
  if (elemtype->make_ptr()->is_inlinetypeptr()) {
    // We statically know that this is an inline type array, use precise klass ptr
    null_free = arytype->is_flat() || !elemtype->make_ptr()->maybe_null();
    a_e_klass = makecon(TypeKlassPtr::make(elemtype->inline_klass()));
  }
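  // A flat or null-free array cannot hold null, so in that case gen_checkcast
  // below is also asked to reject null values (null_free).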

  // Check (the hard way) and throw if not a subklass.
  return gen_checkcast(obj, a_e_klass, nullptr, null_free);
}


//------------------------------do_new-----------------------------------------
void Parse::do_new() {
  kill_dead_locals();

  bool will_link;
  ciInstanceKlass* klass = iter().get_klass(will_link)->as_instance_klass();
  assert(will_link, "_new: typeflow responsibility");

  // Should throw an InstantiationError?
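  // This can happen if the resolved class is abstract, an interface, or
  // java.lang.Class, or is still unresolved; compiled code cannot allocate
  // these, so deoptimize and let the interpreter raise the error.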
  if (klass->is_abstract() || klass->is_interface() ||
      klass->name() == ciSymbols::java_lang_Class() ||
      iter().is_unresolved_klass()) {
    uncommon_trap(Deoptimization::Reason_unhandled,
                  Deoptimization::Action_none,
                  klass);
    return;
  }

  if (C->needs_clinit_barrier(klass, method())) {
    clinit_barrier(klass, method());
    if (stopped())  return;
  }

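  // For an inline type there is nothing to allocate here: push the default
  // value in larval (under construction) state; its fields are set later by
  // the constructor.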
  if (klass->is_inlinetype()) {
    push(InlineTypeNode::make_default(_gvn, klass->as_inline_klass(), /* is_larval */ true));
    return;
  }

  Node* kls = makecon(TypeKlassPtr::make(klass));
  Node* obj = new_instance(kls);

  // Push resultant oop onto stack
  push(obj);

  // Keep track of whether opportunities exist for StringBuilder
  // optimizations.
  if (OptimizeStringConcat &&
      (klass == C->env()->StringBuilder_klass() ||
       klass == C->env()->StringBuffer_klass())) {
    C->set_has_stringbuilder(true);
  }

  // Keep track of boxed values for EliminateAutoBox optimizations.
  if (C->eliminate_boxing() && klass->is_box_klass()) {
    C->set_has_boxed_value(true);
  }
}

#ifndef PRODUCT
//------------------------------dump_map_adr_mem-------------------------------
// Debug dump of the mapping from address types to MergeMemNode indices.
void Parse::dump_map_adr_mem() const {
  tty->print_cr("--- Mapping from address types to memory Nodes ---");
  MergeMemNode *mem = map() == nullptr ? nullptr : (map()->memory()->is_MergeMem() ?
                                      map()->memory()->as_MergeMem() : nullptr);
  for (uint i = 0; i < (uint)C->num_alias_types(); i++) {
    C->alias_type(i)->print_on(tty);
    tty->print("\t");
    // Node mapping, if any
    if (mem && i < mem->req() && mem->in(i) && mem->in(i) != mem->empty_memory()) {
      mem->in(i)->dump();
    } else {
      tty->cr();
    }
  }
}

#endif