1 /*
  2  * Copyright (c) 1998, 2021, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "ci/ciInlineKlass.hpp"
 27 #include "ci/ciSymbols.hpp"
 28 #include "compiler/compileLog.hpp"
 29 #include "oops/flatArrayKlass.hpp"
 30 #include "oops/objArrayKlass.hpp"
 31 #include "opto/addnode.hpp"
 32 #include "opto/castnode.hpp"
 33 #include "opto/inlinetypenode.hpp"
 34 #include "opto/memnode.hpp"
 35 #include "opto/mulnode.hpp"
 36 #include "opto/parse.hpp"
 37 #include "opto/rootnode.hpp"
 38 #include "opto/runtime.hpp"
 39 #include "runtime/sharedRuntime.hpp"
 40 
 41 //------------------------------make_dtrace_method_entry_exit ----------------
 42 // Dtrace -- record entry or exit of a method if compiled with dtrace support
 43 void GraphKit::make_dtrace_method_entry_exit(ciMethod* method, bool is_entry) {
 44   const TypeFunc *call_type    = OptoRuntime::dtrace_method_entry_exit_Type();
 45   address         call_address = is_entry ? CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry) :
 46                                             CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit);
 47   const char     *call_name    = is_entry ? "dtrace_method_entry" : "dtrace_method_exit";
 48 
 49   // Get base of thread-local storage area
 50   Node* thread = _gvn.transform( new ThreadLocalNode() );
 51 
 52   // Get method
 53   const TypePtr* method_type = TypeMetadataPtr::make(method);
 54   Node *method_node = _gvn.transform(ConNode::make(method_type));
 55 
 56   kill_dead_locals();
 57 
 58   // For some reason, this call reads only raw memory.
 59   const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
 60   make_runtime_call(RC_LEAF | RC_NARROW_MEM,
 61                     call_type, call_address,
 62                     call_name, raw_adr_type,
 63                     thread, method_node);
 64 }
 65 
 66 
 67 //=============================================================================
 68 //------------------------------do_checkcast-----------------------------------
void Parse::do_checkcast() {
  bool will_link;
  // Klass referenced by the checkcast bytecode's constant-pool entry.
  ciKlass* klass = iter().get_klass(will_link);
  // A Q-signature marks a null-free inline type (Valhalla): the cast must
  // then also reject null.
  bool null_free = iter().has_Q_signature();
  Node *obj = peek();

  // Throw uncommon trap if class is not loaded or the value we are casting
  // _from_ is not loaded, and value is not null.  If the value _is_ NULL,
  // then the checkcast does nothing.
  const TypeOopPtr *tp = _gvn.type(obj)->isa_oopptr();
  if (!will_link || (tp && tp->klass() && !tp->klass()->is_loaded())) {
    assert(!null_free, "Inline type should be loaded");
    if (C->log() != NULL) {
      if (!will_link) {
        C->log()->elem("assert_null reason='checkcast' klass='%d'",
                       C->log()->identify(klass));
      }
      if (tp && tp->klass() && !tp->klass()->is_loaded()) {
        // %%% Cannot happen?
        C->log()->elem("assert_null reason='checkcast source' klass='%d'",
                       C->log()->identify(tp->klass()));
      }
    }
    // Speculate that the value is null: trap and recompile if it turns
    // out not to be, leaving a provably-null value on the stack.
    null_assert(obj);
    assert( stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null" );
    return;
  }

  // Emit the actual subtype check (may constant-fold or uncommon-trap).
  Node* res = gen_checkcast(obj, makecon(TypeKlassPtr::make(klass)), NULL, null_free);
  if (stopped()) {
    return;
  }

  // Pop from stack AFTER gen_checkcast because it can uncommon trap and
  // the debug info has to be correct.
  pop();
  push(res);
}
107 
108 
109 //------------------------------do_instanceof----------------------------------
110 void Parse::do_instanceof() {
111   if (stopped())  return;
112   // We would like to return false if class is not loaded, emitting a
113   // dependency, but Java requires instanceof to load its operand.
114 
115   // Throw uncommon trap if class is not loaded
116   bool will_link;
117   ciKlass* klass = iter().get_klass(will_link);
118 
119   if (!will_link) {
120     if (C->log() != NULL) {
121       C->log()->elem("assert_null reason='instanceof' klass='%d'",
122                      C->log()->identify(klass));
123     }
124     null_assert(peek());
125     assert( stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null" );
126     if (!stopped()) {
127       // The object is now known to be null.
128       // Shortcut the effect of gen_instanceof and return "false" directly.
129       pop();                   // pop the null
130       push(_gvn.intcon(0));    // push false answer
131     }
132     return;
133   }
134 
135   // Push the bool result back on stack
136   Node* res = gen_instanceof(peek(), makecon(TypeKlassPtr::make(klass)), true);
137 
138   // Pop from stack AFTER gen_instanceof because it can uncommon trap.
139   pop();
140   push(res);
141 }
142 
//------------------------------array_store_check------------------------------
// Pull array from stack and check that the store is valid.
// The operands (value, index, array) are peeked, not popped. 'adr' and
// 'elemtype' are in/out parameters: they are recomputed below if the array
// is heroically cast to an exact type. Returns the (possibly casted) value
// to be stored.
Node* Parse::array_store_check(Node*& adr, const Type*& elemtype) {

  // Shorthand access to array store elements without popping them.
  Node *obj = peek(0);
  Node *idx = peek(1);
  Node *ary = peek(2);

  if (_gvn.type(obj) == TypePtr::NULL_PTR) {
    // There's never a type check on null values.
    // This cutout lets us avoid the uncommon_trap(Reason_array_check)
    // below, which turns into a performance liability if the
    // gen_checkcast folds up completely.
    // Storing null into a null-free inline type array must still throw,
    // so emit the null check in that case.
    if (_gvn.type(ary)->is_aryptr()->is_null_free()) {
      null_check(obj);
    }
    return obj;
  }

  // Extract the array klass type
  Node* array_klass = load_object_klass(ary);

  // Get the array klass
  const TypeKlassPtr* tak = _gvn.type(array_klass)->is_klassptr();

  // The type of array_klass is usually INexact array-of-oop.  Heroically
  // cast array_klass to EXACT array and uncommon-trap if the cast fails.
  // Make constant out of the inexact array klass, but use it only if the cast
  // succeeds.
  bool always_see_exact_class = false;
  if (MonomorphicArrayCheck && !tak->klass_is_exact()) {
    // Make a constant out of the inexact array klass
    const TypeKlassPtr* extak = NULL;
    const TypeOopPtr* ary_t = _gvn.type(ary)->is_oopptr();
    ciKlass* ary_spec = ary_t->speculative_type();
    Deoptimization::DeoptReason reason = Deoptimization::Reason_none;
    // Try to cast the array to an exact type from profile data. First
    // check the speculative type.
    if (ary_spec != NULL && !too_many_traps(Deoptimization::Reason_speculate_class_check)) {
      extak = TypeKlassPtr::make(ary_spec);
      reason = Deoptimization::Reason_speculate_class_check;
    } else if (UseArrayLoadStoreProfile) {
      // No speculative type: check profile data at this bci.
      reason = Deoptimization::Reason_class_check;
      if (!too_many_traps(reason)) {
        ciKlass* array_type = NULL;
        ciKlass* element_type = NULL;
        ProfilePtrKind element_ptr = ProfileMaybeNull;
        bool flat_array = true;
        bool null_free_array = true;
        // Only the profiled array type is consumed here; the remaining
        // out-parameters are ignored for the store check.
        method()->array_access_profiled_type(bci(), array_type, element_type, element_ptr, flat_array, null_free_array);
        if (array_type != NULL) {
          extak = TypeKlassPtr::make(array_type);
        }
      }
    } else if (!too_many_traps(Deoptimization::Reason_array_check) && tak != TypeInstKlassPtr::OBJECT) {
      // If the compiler has determined that the type of array 'ary' (represented
      // by 'array_klass') is java/lang/Object, the compiler must not assume that
      // the array 'ary' is monomorphic.
      //
      // If 'ary' were of type java/lang/Object, this arraystore would have to fail,
      // because it is not possible to perform a arraystore into an object that is not
      // a "proper" array.
      //
      // Therefore, let's obtain at runtime the type of 'ary' and check if we can still
      // successfully perform the store.
      //
      // The implementation reasons for the condition are the following:
      //
      // java/lang/Object is the superclass of all arrays, but it is represented by the VM
      // as an InstanceKlass. The checks generated by gen_checkcast() (see below) expect
      // 'array_klass' to be ObjArrayKlass, which can result in invalid memory accesses.
      //
      // See issue JDK-8057622 for details.
      extak = tak->cast_to_exactness(true)->is_klassptr();
      reason = Deoptimization::Reason_array_check;
    }
    if (extak != NULL) {
      // Compare the runtime array klass against the speculated exact klass
      // and deoptimize on mismatch.
      Node* con = makecon(extak);
      Node* cmp = _gvn.transform(new CmpPNode(array_klass, con));
      Node* bol = _gvn.transform(new BoolNode(cmp, BoolTest::eq));
      // Only do it if the check does not always pass/fail
      if (!bol->is_Con()) {
        always_see_exact_class = true;
        { BuildCutout unless(this, bol, PROB_MAX);
          uncommon_trap(reason,
                        Deoptimization::Action_maybe_recompile,
                        tak->klass());
        }
        // Cast array klass to exactness
        replace_in_map(array_klass, con);
        array_klass = con;
        Node* cast = _gvn.transform(new CheckCastPPNode(control(), ary, extak->as_instance_type()));
        replace_in_map(ary, cast);
        ary = cast;

        // Recompute element type and address
        const TypeAryPtr* arytype = _gvn.type(ary)->is_aryptr();
        elemtype = arytype->elem();
        adr = array_element_address(ary, idx, T_OBJECT, arytype->size(), control());

        CompileLog* log = C->log();
        if (log != NULL) {
          log->elem("cast_up reason='monomorphic_array' from='%d' to='(exact)'",
                    log->identify(tak->klass()));
        }
      }
    }
  }

  // Come here for polymorphic array klasses

  // Extract the array element class
  int element_klass_offset = in_bytes(ArrayKlass::element_klass_offset());

  Node *p2 = basic_plus_adr(array_klass, array_klass, element_klass_offset);
  // We are allowed to use the constant type only if cast succeeded. If always_see_exact_class is true,
  // we must set a control edge from the IfTrue node created by the uncommon_trap above to the
  // LoadKlassNode.
  Node* a_e_klass = _gvn.transform(LoadKlassNode::make(_gvn, always_see_exact_class ? control() : NULL,
                                                       immutable_memory(), p2, tak));

  // If we statically know that this is an inline type array, use precise element klass for checkcast
  if (!elemtype->isa_inlinetype()) {
    elemtype = elemtype->make_oopptr();
  }
  bool null_free = false;
  if (elemtype->isa_inlinetype() != NULL || elemtype->is_inlinetypeptr()) {
    // We statically know that this is an inline type array, use precise klass ptr
    null_free = elemtype->isa_inlinetype() || !elemtype->maybe_null();
    a_e_klass = makecon(TypeKlassPtr::make(elemtype->inline_klass()));
  }

  // Check (the hard way) and throw if not a subklass.
  return gen_checkcast(obj, a_e_klass, NULL, null_free);
}
278 
279 
280 //------------------------------do_new-----------------------------------------
281 void Parse::do_new() {
282   kill_dead_locals();
283 
284   bool will_link;
285   ciInstanceKlass* klass = iter().get_klass(will_link)->as_instance_klass();
286   assert(will_link, "_new: typeflow responsibility");
287   assert(!klass->is_inlinetype(), "unexpected inline type");
288 
289   // Should throw an InstantiationError?
290   if (klass->is_abstract() || klass->is_interface() ||
291       klass->name() == ciSymbols::java_lang_Class() ||
292       iter().is_unresolved_klass()) {
293     uncommon_trap(Deoptimization::Reason_unhandled,
294                   Deoptimization::Action_none,
295                   klass);
296     return;
297   }
298 
299   if (C->needs_clinit_barrier(klass, method())) {
300     clinit_barrier(klass, method());
301     if (stopped())  return;
302   }
303 
304   Node* kls = makecon(TypeKlassPtr::make(klass));
305   Node* obj = new_instance(kls);
306 
307   // Push resultant oop onto stack
308   push(obj);
309 
310   // Keep track of whether opportunities exist for StringBuilder
311   // optimizations.
312   if (OptimizeStringConcat &&
313       (klass == C->env()->StringBuilder_klass() ||
314        klass == C->env()->StringBuffer_klass())) {
315     C->set_has_stringbuilder(true);
316   }
317 
318   // Keep track of boxed values for EliminateAutoBox optimizations.
319   if (C->eliminate_boxing() && klass->is_box_klass()) {
320     C->set_has_boxed_value(true);
321   }
322 }
323 
324 //------------------------------do_defaultvalue---------------------------------
325 void Parse::do_defaultvalue() {
326   bool will_link;
327   ciInlineKlass* vk = iter().get_klass(will_link)->as_inline_klass();
328   assert(will_link && !iter().is_unresolved_klass(), "defaultvalue: typeflow responsibility");
329 
330   if (C->needs_clinit_barrier(vk, method())) {
331     clinit_barrier(vk, method());
332     if (stopped())  return;
333   }
334 
335   InlineTypeNode* vt = InlineTypeNode::make_default(_gvn, vk);
336   push(vt);
337 }
338 
339 //------------------------------do_withfield------------------------------------
void Parse::do_withfield() {
  bool will_link;
  ciField* field = iter().get_field(will_link);
  assert(will_link, "withfield: typeflow responsibility");
  // Operand stack on entry: ..., holder value, new field value (topmost).
  Node* val = pop_node(field->layout_type());
  ciInlineKlass* holder_klass = field->holder()->as_inline_klass();
  Node* holder = pop();
  // Stack slots consumed by the bytecode (holder + field value); used to
  // rewind the stack if buffering below triggers deoptimization.
  int nargs = 1 + field->type()->size();

  if (!holder->is_InlineType()) {
    // Scalarize inline type holder
    assert(!gvn().type(holder)->maybe_null(), "Inline types are null-free");
    holder = InlineTypeNode::make_from_oop(this, holder, holder_klass);
  }
  if (!val->is_InlineTypeBase() && field->type()->is_inlinetype()) {
    // Scalarize inline type field value
    assert(!field->is_null_free() || !gvn().type(val)->maybe_null(), "Null store to null-free field");
    val = InlineTypeNode::make_from_oop(this, val, field->type()->as_inline_klass(), field->is_null_free());
  } else if (val->is_InlineType() && !field->is_null_free()) {
    // Field value needs to be allocated because it can be merged with an oop.
    // Re-execute withfield if buffering triggers deoptimization.
    PreserveReexecuteState preexecs(this);
    jvms()->set_should_reexecute(true);
    inc_sp(nargs);
    val = val->as_InlineType()->buffer(this);
  }

  // Clone the inline type node and set the new field value
  InlineTypeNode* new_vt = holder->clone()->as_InlineType();
  // Clear the buffered oop: the clone no longer matches the holder's buffer.
  new_vt->set_oop(_gvn.zerocon(T_INLINE_TYPE));
  gvn().set_type(new_vt, new_vt->bottom_type());
  new_vt->set_field_value_by_offset(field->offset(), val);

  push(_gvn.transform(new_vt));
}
375 
376 #ifndef PRODUCT
377 //------------------------------dump_map_adr_mem-------------------------------
378 // Debug dump of the mapping from address types to MergeMemNode indices.
379 void Parse::dump_map_adr_mem() const {
380   tty->print_cr("--- Mapping from address types to memory Nodes ---");
381   MergeMemNode *mem = map() == NULL ? NULL : (map()->memory()->is_MergeMem() ?
382                                       map()->memory()->as_MergeMem() : NULL);
383   for (uint i = 0; i < (uint)C->num_alias_types(); i++) {
384     C->alias_type(i)->print_on(tty);
385     tty->print("\t");
386     // Node mapping, if any
387     if (mem && i < mem->req() && mem->in(i) && mem->in(i) != mem->empty_memory()) {
388       mem->in(i)->dump();
389     } else {
390       tty->cr();
391     }
392   }
393 }
394 
395 #endif
396 
--- EOF ---