1 /*
  2  * Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "ci/ciSymbols.hpp"
 27 #include "gc/shared/barrierSet.hpp"
 28 #include "opto/castnode.hpp"
 29 #include "opto/graphKit.hpp"
 30 #include "opto/phaseX.hpp"
 31 #include "opto/rootnode.hpp"
 32 #include "opto/vector.hpp"
 33 #include "utilities/macros.hpp"
 34 
 35 static bool is_vector_mask(ciKlass* klass) {
 36   return klass->is_subclass_of(ciEnv::current()->vector_VectorMask_klass());
 37 }
 38 
 39 static bool is_vector_shuffle(ciKlass* klass) {
 40   return klass->is_subclass_of(ciEnv::current()->vector_VectorShuffle_klass());
 41 }
 42 
 43 
// Top-level driver for Vector API box elimination. The pass ordering matters:
// unbox nodes are expanded first, then boxes feeding only debug info are
// scalarized, reboxing calls are inlined, remaining boxes are expanded into
// real allocations, and finally dead VectorBoxAllocate nodes are removed.
void PhaseVector::optimize_vector_boxes() {
  Compile::TracePhase tp("vector_elimination", &timers[_t_vector_elimination]);

  // Signal GraphKit it's post-parse phase.
  assert(C->inlining_incrementally() == false, "sanity");
  C->set_inlining_incrementally(true);

  C->for_igvn()->clear();
  C->initial_gvn()->replace_with(&_igvn);

  expand_vunbox_nodes();
  scalarize_vbox_nodes();

  C->inline_vector_reboxing_calls();

  expand_vbox_nodes();
  eliminate_vbox_alloc_nodes();

  // Leave the post-parse signal as we found it (asserted false on entry).
  C->set_inlining_incrementally(false);

  // Prune dead nodes and run IGVN over the reshaped graph.
  do_cleanup();
}
 66 
// Clean up after box elimination: remove nodes that became unreachable and
// run a round of iterative GVN over the modified graph. Bails out early at
// each step if the compilation has already failed.
void PhaseVector::do_cleanup() {
  if (C->failing())  return;
  {
    Compile::TracePhase tp("vector_pru", &timers[_t_vector_pru]);
    ResourceMark rm;
    // Prune nodes no longer reachable after box/unbox expansion.
    PhaseRemoveUseless pru(C->initial_gvn(), C->for_igvn());
    if (C->failing())  return;
  }
  {
    Compile::TracePhase tp("incrementalInline_igvn", &timers[_t_vector_igvn]);
    _igvn = PhaseIterGVN(C->initial_gvn());
    _igvn.optimize();
    if (C->failing())  return;
  }
  C->print_method(PHASE_ITER_GVN_BEFORE_EA, 3);
}
 83 
 84 void PhaseVector::scalarize_vbox_nodes() {
 85   if (C->failing())  return;
 86 
 87   if (!EnableVectorReboxing) {
 88     return; // don't scalarize vector boxes
 89   }
 90 
 91   int macro_idx = C->macro_count() - 1;
 92   while (macro_idx >= 0) {
 93     Node * n = C->macro_node(macro_idx);
 94     assert(n->is_macro(), "only macro nodes expected here");
 95     if (n->Opcode() == Op_VectorBox) {
 96       VectorBoxNode* vbox = static_cast<VectorBoxNode*>(n);
 97       scalarize_vbox_node(vbox);
 98       if (C->failing())  return;
 99       C->print_method(PHASE_SCALARIZE_VBOX, 3, vbox);
100     }
101     if (C->failing())  return;
102     macro_idx = MIN2(macro_idx - 1, C->macro_count() - 1);
103   }
104 }
105 
106 void PhaseVector::expand_vbox_nodes() {
107   if (C->failing())  return;
108 
109   int macro_idx = C->macro_count() - 1;
110   while (macro_idx >= 0) {
111     Node * n = C->macro_node(macro_idx);
112     assert(n->is_macro(), "only macro nodes expected here");
113     if (n->Opcode() == Op_VectorBox) {
114       VectorBoxNode* vbox = static_cast<VectorBoxNode*>(n);
115       expand_vbox_node(vbox);
116       if (C->failing())  return;
117     }
118     if (C->failing())  return;
119     macro_idx = MIN2(macro_idx - 1, C->macro_count() - 1);
120   }
121 }
122 
123 void PhaseVector::expand_vunbox_nodes() {
124   if (C->failing())  return;
125 
126   int macro_idx = C->macro_count() - 1;
127   while (macro_idx >= 0) {
128     Node * n = C->macro_node(macro_idx);
129     assert(n->is_macro(), "only macro nodes expected here");
130     if (n->Opcode() == Op_VectorUnbox) {
131       VectorUnboxNode* vec_unbox = static_cast<VectorUnboxNode*>(n);
132       expand_vunbox_node(vec_unbox);
133       if (C->failing())  return;
134       C->print_method(PHASE_EXPAND_VUNBOX, 3, vec_unbox);
135     }
136     if (C->failing())  return;
137     macro_idx = MIN2(macro_idx - 1, C->macro_count() - 1);
138   }
139 }
140 
141 void PhaseVector::eliminate_vbox_alloc_nodes() {
142   if (C->failing())  return;
143 
144   int macro_idx = C->macro_count() - 1;
145   while (macro_idx >= 0) {
146     Node * n = C->macro_node(macro_idx);
147     assert(n->is_macro(), "only macro nodes expected here");
148     if (n->Opcode() == Op_VectorBoxAllocate) {
149       VectorBoxAllocateNode* vbox_alloc = static_cast<VectorBoxAllocateNode*>(n);
150       eliminate_vbox_alloc_node(vbox_alloc);
151       if (C->failing())  return;
152       C->print_method(PHASE_ELIMINATE_VBOX_ALLOC, 3, vbox_alloc);
153     }
154     if (C->failing())  return;
155     macro_idx = MIN2(macro_idx - 1, C->macro_count() - 1);
156   }
157 }
158 
159 static JVMState* clone_jvms(Compile* C, SafePointNode* sfpt) {
160   JVMState* new_jvms = sfpt->jvms()->clone_shallow(C);
161   uint size = sfpt->req();
162   SafePointNode* map = new SafePointNode(size, new_jvms);
163   for (uint i = 0; i < size; i++) {
164     map->init_req(i, sfpt->in(i));
165   }
166   Node* mem = map->memory();
167   if (!mem->is_MergeMem()) {
168     // Since we are not in parsing, the SafePointNode does not guarantee that the memory
169     // input is necessarily a MergeMemNode. But we need to ensure that there is that
170     // MergeMemNode, since the GraphKit assumes the memory input of the map to be a
171     // MergeMemNode, so that it can directly access the memory slices.
172     PhaseGVN& gvn = *C->initial_gvn();
173     Node* mergemem = MergeMemNode::make(mem);
174     gvn.set_type_bottom(mergemem);
175     map->set_memory(mergemem);
176   }
177   new_jvms->set_map(map);
178   return new_jvms;
179 }
180 
// Scalarize a VectorBox at its safepoint (debug-only) uses: references to the
// boxed object in debug info are replaced by a SafePointScalarObjectNode plus
// the raw vector value appended to the safepoint's inputs, so the object can
// be re-materialized on deoptimization and the allocation later removed.
// With EnableVectorAggressiveReboxing, calls consuming a Phi-merged box as an
// actual (non-debug) argument first get a fresh box attached right before the
// call so the Phi of allocations is no longer needed there.
void PhaseVector::scalarize_vbox_node(VectorBoxNode* vec_box) {
  Node* vec_value = vec_box->in(VectorBoxNode::Value);
  PhaseGVN& gvn = *C->initial_gvn();

  // Process merged VBAs

  if (EnableVectorAggressiveReboxing) {
    // Collect Java calls that use the box as a real argument while its
    // allocation input is a Phi (i.e. the box merges several allocations).
    Unique_Node_List calls(C->comp_arena());
    for (DUIterator_Fast imax, i = vec_box->fast_outs(imax); i < imax; i++) {
      Node* use = vec_box->fast_out(i);
      if (use->is_CallJava()) {
        CallJavaNode* call = use->as_CallJava();
        if (call->has_non_debug_use(vec_box) && vec_box->in(VectorBoxNode::Box)->is_Phi()) {
          calls.push(call);
        }
      }
    }

    while (calls.size() > 0) {
      CallJavaNode* call = calls.pop()->as_CallJava();
      // Attach new VBA to the call and use it instead of Phi (VBA ... VBA).

      JVMState* jvms = clone_jvms(C, call);
      GraphKit kit(jvms);
      PhaseGVN& gvn = kit.gvn();  // shadows the outer gvn for this call's kit

      // Adjust JVMS from post-call to pre-call state: put args on stack
      uint nargs = call->method()->arg_size();
      kit.ensure_stack(kit.sp() + nargs);
      for (uint i = TypeFunc::Parms; i < call->tf()->domain_sig()->cnt(); i++) {
        kit.push(call->in(i));
      }
      jvms = kit.sync_jvms();

      Node* new_vbox = NULL;
      {
        // Box the raw vector value right before the call; deoptimize=true so
        // the boxing can bail out to the interpreter if allocation fails.
        Node* vect = vec_box->in(VectorBoxNode::Value);
        const TypeInstPtr* vbox_type = vec_box->box_type();
        const TypeVect* vt = vec_box->vec_type();
        BasicType elem_bt = vt->element_basic_type();
        int num_elem = vt->length();

        new_vbox = kit.box_vector(vect, vbox_type, elem_bt, num_elem, /*deoptimize=*/true);

        kit.replace_in_map(vec_box, new_vbox);
      }

      // Pop the args again: the call itself still consumes them directly.
      kit.dec_sp(nargs);
      jvms = kit.sync_jvms();

      // Rewire the call to follow the side effects of the new boxing code.
      call->set_req(TypeFunc::Control , kit.control());
      call->set_req(TypeFunc::I_O     , kit.i_o());
      call->set_req(TypeFunc::Memory  , kit.reset_memory());
      call->set_req(TypeFunc::FramePtr, kit.frameptr());
      call->replace_edge(vec_box, new_vbox);

      C->record_for_igvn(call);
    }
  }

  // Process debug uses at safepoints
  Unique_Node_List safepoints(C->comp_arena());

  // Collect safepoints that see the box only through debug info, looking
  // through ConstraintCast chains on the way down.
  Unique_Node_List worklist(C->comp_arena());
  worklist.push(vec_box);
  while (worklist.size() > 0) {
    Node* n = worklist.pop();
    for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
      Node* use = n->fast_out(i);
      if (use->is_SafePoint()) {
        SafePointNode* sfpt = use->as_SafePoint();
        if (!sfpt->is_Call() || !sfpt->as_Call()->has_non_debug_use(n)) {
          safepoints.push(sfpt);
        }
      } else if (use->is_ConstraintCast()) {
        worklist.push(use); // reversed version of Node::uncast()
      }
    }
  }

  ciInstanceKlass* iklass = vec_box->box_type()->instance_klass();
  int n_fields = iklass->nof_nonstatic_fields();
  assert(n_fields == 1, "sanity");

  // If a mask is feeding into safepoint[s], then its value should be
  // packed into a boolean/byte vector first, this will simplify the
  // re-materialization logic for both predicated and non-predicated
  // targets.
  bool is_mask = is_vector_mask(iklass);
  if (is_mask && vec_value->Opcode() != Op_VectorStoreMask) {
    const TypeVect* vt = vec_value->bottom_type()->is_vect();
    BasicType bt = vt->element_basic_type();
    vec_value = gvn.transform(VectorStoreMaskNode::make(gvn, vec_value, bt, vt->length()));
  }

  while (safepoints.size() > 0) {
    SafePointNode* sfpt = safepoints.pop()->as_SafePoint();

    // Describe the box as a scalar-replaced object; its single field value is
    // appended at the end of the safepoint's inputs (index relative to scloff).
    uint first_ind = (sfpt->req() - sfpt->jvms()->scloff());
    Node* sobj = new SafePointScalarObjectNode(vec_box->box_type(),
#ifdef ASSERT
                                               vec_box,
#endif // ASSERT
                                               first_ind, n_fields);
    sobj->init_req(0, C->root());
    sfpt->add_req(vec_value);

    sobj = gvn.transform(sobj);

    JVMState *jvms = sfpt->jvms();

    jvms->set_endoff(sfpt->req());
    // Now make a pass over the debug information replacing any references
    // to the allocated object with vector value.
    for (uint i = jvms->debug_start(); i < jvms->debug_end(); i++) {
      Node* debug = sfpt->in(i);
      if (debug != NULL && debug->uncast(/*keep_deps*/false) == vec_box) {
        sfpt->set_req(i, sobj);
      }
    }
    C->record_for_igvn(sfpt);
  }
}
304 
305 void PhaseVector::expand_vbox_node(VectorBoxNode* vec_box) {
306   if (vec_box->outcnt() > 0) {
307     Node* vbox = vec_box->in(VectorBoxNode::Box);
308     Node* vect = vec_box->in(VectorBoxNode::Value);
309     Node* result = expand_vbox_node_helper(vbox, vect, vec_box->box_type(), vec_box->vec_type());
310     C->gvn_replace_by(vec_box, result);
311     C->print_method(PHASE_EXPAND_VBOX, 3, vec_box);
312   }
313   C->remove_macro_node(vec_box);
314 }
315 
// Recursively expand a (box, vector) pair into a concrete boxed object.
// Phi structure on the box input is mirrored by building a new Phi over the
// per-path expansions; the base case expands a VectorBoxAllocate projection
// into a real allocation + initialization. Returns the resulting object node.
Node* PhaseVector::expand_vbox_node_helper(Node* vbox,
                                           Node* vect,
                                           const TypeInstPtr* box_type,
                                           const TypeVect* vect_type) {
  if (vbox->is_Phi() && vect->is_Phi()) {
    // Both inputs merge over the same region: expand each path pairwise.
    assert(vbox->as_Phi()->region() == vect->as_Phi()->region(), "");
    Node* new_phi = new PhiNode(vbox->as_Phi()->region(), box_type);
    for (uint i = 1; i < vbox->req(); i++) {
      Node* new_box = expand_vbox_node_helper(vbox->in(i), vect->in(i), box_type, vect_type);
      new_phi->set_req(i, new_box);
    }
    new_phi = C->initial_gvn()->transform(new_phi);
    return new_phi;
  } else if (vbox->is_Phi() && (vect->is_Vector() || vect->is_LoadVector())) {
    // Handle the case when the allocation input to VectorBoxNode is a phi
    // but the vector input is not, which can definitely be the case if the
    // vector input has been value-numbered. It seems to be safe to do by
    // construction because VectorBoxNode and VectorBoxAllocate come in a
    // specific order as a result of expanding an intrinsic call. After that, if
    // any of the inputs to VectorBoxNode are value-numbered they can only
    // move up and are guaranteed to dominate.
    Node* new_phi = new PhiNode(vbox->as_Phi()->region(), box_type);
    for (uint i = 1; i < vbox->req(); i++) {
      // Same vector value feeds every path of the box Phi.
      Node* new_box = expand_vbox_node_helper(vbox->in(i), vect, box_type, vect_type);
      new_phi->set_req(i, new_box);
    }
    new_phi = C->initial_gvn()->transform(new_phi);
    return new_phi;
  } else if (vbox->is_Proj() && vbox->in(0)->Opcode() == Op_VectorBoxAllocate) {
    // Base case: materialize the allocation and store the vector into it.
    VectorBoxAllocateNode* vbox_alloc = static_cast<VectorBoxAllocateNode*>(vbox->in(0));
    return expand_vbox_alloc_node(vbox_alloc, vect, box_type, vect_type);
  } else {
    assert(!vbox->is_Phi(), "");
    // TODO: assert that expanded vbox is initialized with the same value (vect).
    return vbox; // already expanded
  }
}
353 
// Expand a VectorBoxAllocate into real IR: allocate the payload array, store
// the vector value into it, allocate the Vector object, and store the array
// into its payload field. Both stores are expected to be captured by the
// respective InitializeNodes. Replaces the VBA call and returns the new
// Vector object node.
Node* PhaseVector::expand_vbox_alloc_node(VectorBoxAllocateNode* vbox_alloc,
                                          Node* value,
                                          const TypeInstPtr* box_type,
                                          const TypeVect* vect_type) {
  JVMState* jvms = clone_jvms(C, vbox_alloc);
  GraphKit kit(jvms);
  PhaseGVN& gvn = kit.gvn();

  ciInstanceKlass* box_klass = box_type->instance_klass();
  BasicType bt = vect_type->element_basic_type();
  int num_elem = vect_type->length();

  bool is_mask = is_vector_mask(box_klass);
  // If boxed mask value is present in a predicate register, it must be
  // spilled to a vector though a VectorStoreMaskOperation before actual StoreVector
  // operation to vector payload field.
  if (is_mask && (value->bottom_type()->isa_vectmask() || bt != T_BOOLEAN)) {
    value = gvn.transform(VectorStoreMaskNode::make(gvn, value, bt, num_elem));
    // Although type of mask depends on its definition, in terms of storage everything is stored in boolean array.
    bt = T_BOOLEAN;
    assert(value->bottom_type()->is_vect()->element_basic_type() == bt,
           "must be consistent with mask representation");
  }

  // Generate array allocation for the field which holds the values.
  const TypeKlassPtr* array_klass = TypeKlassPtr::make(ciTypeArrayKlass::make(bt));
  Node* arr = kit.new_array(kit.makecon(array_klass), kit.intcon(num_elem), 1);

  // Store the vector value into the array.
  // (The store should be captured by InitializeNode and turned into initialized store later.)
  Node* arr_adr = kit.array_element_address(arr, kit.intcon(0), bt);
  const TypePtr* arr_adr_type = arr_adr->bottom_type()->is_ptr();
  Node* arr_mem = kit.memory(arr_adr);
  Node* vstore = gvn.transform(StoreVectorNode::make(0,
                                                     kit.control(),
                                                     arr_mem,
                                                     arr_adr,
                                                     arr_adr_type,
                                                     value,
                                                     num_elem));
  kit.set_memory(vstore, arr_adr_type);

  // Record the widest vector size seen, for later code-generation decisions.
  C->set_max_vector_size(MAX2(C->max_vector_size(), vect_type->length_in_bytes()));

  // Generate the allocate for the Vector object.
  const TypeKlassPtr* klass_type = box_type->as_klass_type();
  Node* klass_node = kit.makecon(klass_type);
  Node* vec_obj = kit.new_instance(klass_node);

  // Store the allocated array into object.
  ciField* field = ciEnv::current()->vector_VectorPayload_klass()->get_field_by_name(ciSymbols::payload_name(),
                                                                                     ciSymbols::object_signature(),
                                                                                     false);
  assert(field != NULL, "");
  Node* vec_field = kit.basic_plus_adr(vec_obj, field->offset_in_bytes());
  const TypePtr* vec_adr_type = vec_field->bottom_type()->is_ptr();

  // The store should be captured by InitializeNode and turned into initialized store later.
  Node* field_store = gvn.transform(kit.access_store_at(vec_obj,
                                                        vec_field,
                                                        vec_adr_type,
                                                        arr,
                                                        TypeOopPtr::make_from_klass(field->type()->as_klass()),
                                                        T_OBJECT,
                                                        IN_HEAP));
  kit.set_memory(field_store, vec_adr_type);

  // Splice the expansion in place of the VBA call and retire the macro node.
  kit.replace_call(vbox_alloc, vec_obj, true);
  C->remove_macro_node(vbox_alloc);

  return vec_obj;
}
426 
427 void PhaseVector::expand_vunbox_node(VectorUnboxNode* vec_unbox) {
428   if (vec_unbox->outcnt() > 0) {
429     GraphKit kit;
430     PhaseGVN& gvn = kit.gvn();
431 
432     Node* obj = vec_unbox->obj();
433     const TypeInstPtr* tinst = gvn.type(obj)->isa_instptr();
434     ciInstanceKlass* from_kls = tinst->instance_klass();
435     const TypeVect* vt = vec_unbox->bottom_type()->is_vect();
436     BasicType bt = vt->element_basic_type();
437     BasicType masktype = bt;
438 
439     if (is_vector_mask(from_kls)) {
440       bt = T_BOOLEAN;
441     } else if (is_vector_shuffle(from_kls)) {
442       bt = T_BYTE;
443     }
444 
445     ciField* field = ciEnv::current()->vector_VectorPayload_klass()->get_field_by_name(ciSymbols::payload_name(),
446                                                                                        ciSymbols::object_signature(),
447                                                                                        false);
448     assert(field != NULL, "");
449     int offset = field->offset_in_bytes();
450     Node* vec_adr = kit.basic_plus_adr(obj, offset);
451 
452     Node* mem = vec_unbox->mem();
453     Node* ctrl = vec_unbox->in(0);
454     Node* vec_field_ld;
455     {
456       DecoratorSet decorators = MO_UNORDERED | IN_HEAP;
457       C2AccessValuePtr addr(vec_adr, vec_adr->bottom_type()->is_ptr());
458       MergeMemNode* local_mem = MergeMemNode::make(mem);
459       gvn.record_for_igvn(local_mem);
460       BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
461       C2OptAccess access(gvn, ctrl, local_mem, decorators, T_OBJECT, obj, addr);
462       const Type* type = TypeOopPtr::make_from_klass(field->type()->as_klass());
463       vec_field_ld = bs->load_at(access, type);
464     }
465 
466     // For proper aliasing, attach concrete payload type.
467     ciKlass* payload_klass = ciTypeArrayKlass::make(bt);
468     const Type* payload_type = TypeAryPtr::make_from_klass(payload_klass)->cast_to_ptr_type(TypePtr::NotNull);
469     vec_field_ld = gvn.transform(new CastPPNode(vec_field_ld, payload_type));
470 
471     Node* adr = kit.array_element_address(vec_field_ld, gvn.intcon(0), bt);
472     const TypePtr* adr_type = adr->bottom_type()->is_ptr();
473     int num_elem = vt->length();
474     Node* vec_val_load = LoadVectorNode::make(0,
475                                               ctrl,
476                                               mem,
477                                               adr,
478                                               adr_type,
479                                               num_elem,
480                                               bt);
481     vec_val_load = gvn.transform(vec_val_load);
482 
483     C->set_max_vector_size(MAX2(C->max_vector_size(), vt->length_in_bytes()));
484 
485     if (is_vector_mask(from_kls)) {
486       vec_val_load = gvn.transform(new VectorLoadMaskNode(vec_val_load, TypeVect::makemask(masktype, num_elem)));
487     } else if (is_vector_shuffle(from_kls) && !vec_unbox->is_shuffle_to_vector()) {
488       assert(vec_unbox->bottom_type()->is_vect()->element_basic_type() == masktype, "expect shuffle type consistency");
489       vec_val_load = gvn.transform(new VectorLoadShuffleNode(vec_val_load, TypeVect::make(masktype, num_elem)));
490     }
491 
492     gvn.hash_delete(vec_unbox);
493     vec_unbox->disconnect_inputs(C);
494     C->gvn_replace_by(vec_unbox, vec_val_load);
495   }
496   C->remove_macro_node(vec_unbox);
497 }
498 
499 void PhaseVector::eliminate_vbox_alloc_node(VectorBoxAllocateNode* vbox_alloc) {
500   JVMState* jvms = clone_jvms(C, vbox_alloc);
501   GraphKit kit(jvms);
502   // Remove VBA, but leave a safepoint behind.
503   // Otherwise, it may end up with a loop without any safepoint polls.
504   kit.replace_call(vbox_alloc, kit.map(), true);
505   C->remove_macro_node(vbox_alloc);
506 }