
src/hotspot/share/c1/c1_GraphBuilder.cpp


  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"


  33 #include "ci/ciKlass.hpp"
  34 #include "ci/ciMemberName.hpp"
  35 #include "ci/ciSymbols.hpp"
  36 #include "ci/ciUtilities.inline.hpp"
  37 #include "classfile/javaClasses.hpp"
  38 #include "compiler/compilationPolicy.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "compiler/compilerEvent.hpp"
  41 #include "interpreter/bytecode.hpp"
  42 #include "jfr/jfrEvents.hpp"
  43 #include "memory/resourceArea.hpp"
  44 #include "oops/oop.inline.hpp"
  45 #include "runtime/sharedRuntime.hpp"
  46 #include "runtime/vm_version.hpp"
  47 #include "utilities/bitMap.inline.hpp"
  48 #include "utilities/checkedCast.hpp"
  49 #include "utilities/powerOfTwo.hpp"
  50 #include "utilities/macros.hpp"
  51 #if INCLUDE_JFR
  52 #include "jfr/jfr.hpp"

 708         }
 709 #endif
 710         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 711         return result;
 712       }
 713     }
 714     return load;
 715   }
 716 
 717   // Record this newly allocated object
 718   void new_instance(NewInstance* object) {
 719     int index = _newobjects.length();
 720     _newobjects.append(object);
 721     if (_fields.at_grow(index, nullptr) == nullptr) {
 722       _fields.at_put(index, new FieldBuffer());
 723     } else {
 724       _fields.at(index)->kill();
 725     }
 726   }
 727 
 728   void store_value(Value value) {
 729     int index = _newobjects.find(value);
 730     if (index != -1) {
 731       // Stored a newly allocated object into another object.
 732       // Assume we've lost track of it as a separate slice of memory.
 733       // We could do better by keeping track of whether individual
 734       // fields could alias each other.
 735       _newobjects.remove_at(index);
 736       // pull out the field info and store it at the end of the field
 737       // info list so it can be reused later.
 738       _fields.append(_fields.at(index));
 739       _fields.remove_at(index);
 740     }
 741   }
 742 
 743   void kill() {
 744     _newobjects.trunc_to(0);
 745     _objects.trunc_to(0);
 746     _values.kill();
 747   }

1007         int offset = java_lang_boxing_object::value_offset(type);
1008         ciField* value_field = box_klass->get_field_by_offset(offset, false /*is_static*/);
1009         x = new LoadField(append(x), offset, value_field, false /*is_static*/, patch_state, false /*needs_patching*/);
1010         t = as_ValueType(type);
1011       } else {
1012         assert(is_reference_type(type), "not a reference: %s", type2name(type));
1013       }
1014     }
1015 
1016     push(t, append(x));
1017   } else {
1018     BAILOUT("could not resolve a constant");
1019   }
1020 }
1021 
1022 
1023 void GraphBuilder::load_local(ValueType* type, int index) {
1024   Value x = state()->local_at(index);
1025   assert(x != nullptr && !x->type()->is_illegal(), "access of illegal local variable");
1026   push(type, x);
1027 }
1028 
1029 
1030 void GraphBuilder::store_local(ValueType* type, int index) {
1031   Value x = pop(type);
1032   store_local(state(), x, index);



1033 }
1034 
1035 
1036 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
1037   if (parsing_jsr()) {
1038     // We need to do additional tracking of the location of the return
1039     // address for jsrs since we don't handle arbitrary jsr/ret
1040     // constructs. Here we are figuring out in which circumstances we
1041     // need to bail out.
1042     if (x->type()->is_address()) {
1043       scope_data()->set_jsr_return_address_local(index);
1044 
1045       // Also check parent jsrs (if any) at this time to see whether
1046       // they are using this local. We don't handle skipping over a
1047       // ret.
1048       for (ScopeData* cur_scope_data = scope_data()->parent();
1049            cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1050            cur_scope_data = cur_scope_data->parent()) {
1051         if (cur_scope_data->jsr_return_address_local() == index) {
1052           BAILOUT("subroutine overwrites return address from previous subroutine");
1053         }
1054       }
1055     } else if (index == scope_data()->jsr_return_address_local()) {
1056       scope_data()->set_jsr_return_address_local(-1);
1057     }
1058   }
1059 
1060   state->store_local(index, round_fp(x));



1061 }
1062 
1063 
1064 void GraphBuilder::load_indexed(BasicType type) {
1065   // In case of in-block code motion in range check elimination
1066   ValueStack* state_before = copy_state_indexed_access();
1067   compilation()->set_has_access_indexed(true);
1068   Value index = ipop();
1069   Value array = apop();
1070   Value length = nullptr;
1071   if (CSEArrayLength ||
1072       (array->as_Constant() != nullptr) ||
1073       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1074       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1075       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1076     length = append(new ArrayLength(array, state_before));
1077   }
1078   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1079 }
1080 
1081 
1082 void GraphBuilder::store_indexed(BasicType type) {
1083   // In case of in-block code motion in range check elimination
1084   ValueStack* state_before = copy_state_indexed_access();
1085   compilation()->set_has_access_indexed(true);
1086   Value value = pop(as_ValueType(type));
1087   Value index = ipop();
1088   Value array = apop();
1089   Value length = nullptr;
1090   if (CSEArrayLength ||
1091       (array->as_Constant() != nullptr) ||
1092       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1093       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1094       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1095     length = append(new ArrayLength(array, state_before));
1096   }
1097   ciType* array_type = array->declared_type();
1098   bool check_boolean = false;
1099   if (array_type != nullptr) {
1100     if (array_type->is_loaded() &&
1101       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1102       assert(type == T_BYTE, "boolean store uses bastore");
1103       Value mask = append(new Constant(new IntConstant(1)));
1104       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1105     }
1106   } else if (type == T_BYTE) {
1107     check_boolean = true;
1108   }
1109   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1110   append(result);
1111   _memory->store_value(value);
1112 
1113   if (type == T_OBJECT && is_profiling()) {
1114     // Note that we'd collect profile data in this method if we wanted it.
1115     compilation()->set_would_profile(true);
1116 
1117     if (profile_checkcasts()) {
1118       result->set_profiled_method(method());
1119       result->set_profiled_bci(bci());
1120       result->set_should_profile(true);
1121     }
1122   }



1123 }
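
The T_BOOLEAN handling above normalizes stores into boolean arrays to 0 or 1: when the array's element type is known to be boolean, an explicit "value & 1" is emitted; when the static type is unknown, the check is deferred via check_boolean. A minimal standalone sketch of the masking effect (illustrative only, not HotSpot code; mask_boolean_store is a made-up name):

    #include <cstdint>

    // A bastore into a boolean[] keeps only bit 0 of the operand,
    // mirroring the LogicOp(Bytecodes::_iand, value, mask) emitted above.
    static inline int8_t mask_boolean_store(int32_t value) {
      return static_cast<int8_t>(value & 1);
    }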
1124 
1125 
1126 void GraphBuilder::stack_op(Bytecodes::Code code) {
1127   switch (code) {
1128     case Bytecodes::_pop:
1129       { state()->raw_pop();

1130       }
1131       break;
1132     case Bytecodes::_pop2:
1133       { state()->raw_pop();
1134         state()->raw_pop();


1135       }
1136       break;
1137     case Bytecodes::_dup:
1138       { Value w = state()->raw_pop();

1139         state()->raw_push(w);
1140         state()->raw_push(w);
1141       }
1142       break;
1143     case Bytecodes::_dup_x1:
1144       { Value w1 = state()->raw_pop();
1145         Value w2 = state()->raw_pop();

1146         state()->raw_push(w1);
1147         state()->raw_push(w2);
1148         state()->raw_push(w1);
1149       }
1150       break;
1151     case Bytecodes::_dup_x2:
1152       { Value w1 = state()->raw_pop();
1153         Value w2 = state()->raw_pop();
1154         Value w3 = state()->raw_pop();
1155         state()->raw_push(w1);
1156         state()->raw_push(w3);
1157         state()->raw_push(w2);
1158         state()->raw_push(w1);
1159       }
1160       break;
1161     case Bytecodes::_dup2:
1162       { Value w1 = state()->raw_pop();
1163         Value w2 = state()->raw_pop();


1164         state()->raw_push(w2);
1165         state()->raw_push(w1);
1166         state()->raw_push(w2);
1167         state()->raw_push(w1);
1168       }
1169       break;
1170     case Bytecodes::_dup2_x1:
1171       { Value w1 = state()->raw_pop();
1172         Value w2 = state()->raw_pop();
1173         Value w3 = state()->raw_pop();


1174         state()->raw_push(w2);
1175         state()->raw_push(w1);
1176         state()->raw_push(w3);
1177         state()->raw_push(w2);
1178         state()->raw_push(w1);
1179       }
1180       break;
1181     case Bytecodes::_dup2_x2:
1182       { Value w1 = state()->raw_pop();
1183         Value w2 = state()->raw_pop();
1184         Value w3 = state()->raw_pop();
1185         Value w4 = state()->raw_pop();


1186         state()->raw_push(w2);
1187         state()->raw_push(w1);
1188         state()->raw_push(w4);
1189         state()->raw_push(w3);
1190         state()->raw_push(w2);
1191         state()->raw_push(w1);
1192       }
1193       break;
1194     case Bytecodes::_swap:
1195       { Value w1 = state()->raw_pop();
1196         Value w2 = state()->raw_pop();
1197         state()->raw_push(w1);
1198         state()->raw_push(w2);
1199       }
1200       break;
1201     default:
1202       ShouldNotReachHere();
1203       break;
1204   }
1205 }
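
Each case above reimplements a JVM stack-manipulation bytecode by shuffling raw stack slots with raw_pop()/raw_push(). For reference, a minimal sketch of the dup_x1 effect on a plain stack (illustrative only; the std::vector stand-in is an assumption, not HotSpot code):

    #include <vector>

    // dup_x1: (..., w2, w1) becomes (..., w1, w2, w1),
    // mirroring the raw_pop()/raw_push() sequence in the case above.
    void dup_x1(std::vector<int>& stack) {
      int w1 = stack.back(); stack.pop_back();
      int w2 = stack.back(); stack.pop_back();
      stack.push_back(w1);
      stack.push_back(w2);
      stack.push_back(w1);
    }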

1293 
1294 
1295 void GraphBuilder::_goto(int from_bci, int to_bci) {
1296   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1297   if (is_profiling()) {
1298     compilation()->set_would_profile(true);
1299     x->set_profiled_bci(bci());
1300     if (profile_branches()) {
1301       x->set_profiled_method(method());
1302       x->set_should_profile(true);
1303     }
1304   }
1305   append(x);
1306 }
1307 
1308 
1309 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1310   BlockBegin* tsux = block_at(stream()->get_dest());
1311   BlockBegin* fsux = block_at(stream()->next_bci());
1312   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1313   // In case of loop invariant code motion or predicate insertion
1314   // before the body of a loop the state is needed
1315   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
1316 
1317   assert(i->as_Goto() == nullptr ||
1318          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1319          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1320          "safepoint state of Goto returned by canonicalizer incorrect");
1321 
1322   if (is_profiling()) {
1323     If* if_node = i->as_If();
1324     if (if_node != nullptr) {
1325       // Note that we'd collect profile data in this method if we wanted it.
1326       compilation()->set_would_profile(true);
1327       // At level 2 we need the proper bci to count backedges
1328       if_node->set_profiled_bci(bci());
1329       if (profile_branches()) {
1330         // Successors can be rotated by the canonicalizer, check for this case.
1331         if_node->set_profiled_method(method());
1332         if_node->set_should_profile(true);
1333         if (if_node->tsux() == fsux) {
1334           if_node->set_swapped(true);
1335         }

1546 
1547   if (needs_check) {
1548     // Perform the registration of finalizable objects.
1549     ValueStack* state_before = copy_state_for_exception();
1550     load_local(objectType, 0);
1551     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1552                                state()->pop_arguments(1),
1553                                true, state_before, true));
1554   }
1555 }
1556 
1557 
1558 void GraphBuilder::method_return(Value x, bool ignore_return) {
1559   if (RegisterFinalizersAtInit &&
1560       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1561     call_register_finalizer();
1562   }
1563 
1564   // The conditions for a memory barrier are described in Parse::do_exits().
1565   bool need_mem_bar = false;
1566   if (method()->name() == ciSymbols::object_initializer_name() &&
1567        (scope()->wrote_final() ||
1568          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1569          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1570     need_mem_bar = true;
1571   }
1572 
1573   BasicType bt = method()->return_type()->basic_type();
1574   switch (bt) {
1575     case T_BYTE:
1576     {
1577       Value shift = append(new Constant(new IntConstant(24)));
1578       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1579       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1580       break;
1581     }
1582     case T_SHORT:
1583     {
1584       Value shift = append(new Constant(new IntConstant(16)));
1585       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1586       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1697   // Attach dimension info to stable arrays.
1698   if (FoldStableValues &&
1699       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1700     ciArray* array = field_value.as_object()->as_array();
1701     jint dimension = field->type()->as_array_klass()->dimension();
1702     value = new StableArrayConstant(array, dimension);
1703   }
1704 
1705   switch (field_type) {
1706     case T_ARRAY:
1707     case T_OBJECT:
1708       if (field_value.as_object()->should_be_constant()) {
1709         return new Constant(value);
1710       }
1711       return nullptr; // Not a constant.
1712     default:
1713       return new Constant(value);
1714   }
1715 }
1716 
1717 void GraphBuilder::access_field(Bytecodes::Code code) {
1718   bool will_link;
1719   ciField* field = stream()->get_field(will_link);
1720   ciInstanceKlass* holder = field->holder();
1721   BasicType field_type = field->type()->basic_type();
1722   ValueType* type = as_ValueType(field_type);

1723   // call will_link again to determine if the field is valid.
1724   const bool needs_patching = !holder->is_loaded() ||
1725                               !field->will_link(method(), code) ||
1726                               PatchALot;
1727 
1728   ValueStack* state_before = nullptr;
1729   if (!holder->is_initialized() || needs_patching) {
1730     // save state before instruction for debug info when
1731     // deoptimization happens during patching
1732     state_before = copy_state_before();
1733   }
1734 
1735   Value obj = nullptr;
1736   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1737     if (state_before != nullptr) {
1738       // build a patching constant
1739       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1740     } else {
1741       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1742     }
1743   }
1744 
1745   if (field->is_final() && (code == Bytecodes::_putfield)) {
1746     scope()->set_wrote_final();
1747   }
1748 
1749   if (code == Bytecodes::_putfield) {
1750     scope()->set_wrote_fields();
1751     if (field->is_volatile()) {
1752       scope()->set_wrote_volatile();
1753     }
1754   }
1755 
1756   const int offset = !needs_patching ? field->offset_in_bytes() : -1;
1757   switch (code) {
1758     case Bytecodes::_getstatic: {
1759       // check for compile-time constants, i.e., initialized static final fields
1760       Value constant = nullptr;
1761       if (field->is_static_constant() && !PatchALot) {
1762         ciConstant field_value = field->constant_value();
1763         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1764                "stable static w/ default value shouldn't be a constant");
1765         constant = make_constant(field_value, field);




1766       }
1767       if (constant != nullptr) {
1768         push(type, append(constant));
1769       } else {
1770         if (state_before == nullptr) {
1771           state_before = copy_state_for_exception();
1772         }
1773         push(type, append(new LoadField(append(obj), offset, field, true,
1774                                         state_before, needs_patching)));

1775       }
1776       break;
1777     }
1778     case Bytecodes::_putstatic: {
1779       Value val = pop(type);
1780       if (state_before == nullptr) {
1781         state_before = copy_state_for_exception();
1782       }
1783       if (field->type()->basic_type() == T_BOOLEAN) {
1784         Value mask = append(new Constant(new IntConstant(1)));
1785         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1786       }
1787       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1788       break;
1789     }
1790     case Bytecodes::_getfield: {
1791       // Check for compile-time constants, i.e., trusted final non-static fields.
1792       Value constant = nullptr;
1793       obj = apop();
1794       ObjectType* obj_type = obj->type()->as_ObjectType();
1795       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1796         ciObject* const_oop = obj_type->constant_value();
1797         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1798           ciConstant field_value = field->constant_value_of(const_oop);
1799           if (field_value.is_valid()) {
1800             constant = make_constant(field_value, field);
1801             // For CallSite objects add a dependency for invalidation of the optimization.
1802             if (field->is_call_site_target()) {
1803               ciCallSite* call_site = const_oop->as_call_site();
1804               if (!call_site->is_fully_initialized_constant_call_site()) {
1805                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1806                 dependency_recorder()->assert_call_site_target_value(call_site, target);
1807               }
1808             }
1809           }
1810         }
1811       }
1812       if (constant != nullptr) {
1813         push(type, append(constant));
1814       } else {
1815         if (state_before == nullptr) {
1816           state_before = copy_state_for_exception();
1817         }
1818         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1819         Value replacement = !needs_patching ? _memory->load(load) : load;
1820         if (replacement != load) {
1821           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1822           // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1823           // conversion. Emit an explicit conversion here to get the correct field value after the write.
1824           BasicType bt = field->type()->basic_type();
1825           switch (bt) {
1826           case T_BOOLEAN:
1827           case T_BYTE:
1828             replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1829             break;
1830           case T_CHAR:
1831             replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1832             break;
1833           case T_SHORT:
1834             replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1835             break;
1836           default:
1837             break;
1838           }
1839           push(type, replacement);
1840         } else {
1841           push(type, append(load));
1842         }
1843       }
1844       break;
1845     }
1846     case Bytecodes::_putfield: {
1847       Value val = pop(type);
1848       obj = apop();
1849       if (state_before == nullptr) {
1850         state_before = copy_state_for_exception();
1851       }
1852       if (field->type()->basic_type() == T_BOOLEAN) {
1853         Value mask = append(new Constant(new IntConstant(1)));
1854         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1855       }
1856       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1857       if (!needs_patching) store = _memory->store(store);
1858       if (store != nullptr) {
1859         append(store);
1860       }
1861       break;
1862     }
1863     default:
1864       ShouldNotReachHere();
1865       break;
1866   }
1867 }
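
When the MemoryBuffer (_memory->load()) replaces a getfield with a remembered store value, the Convert nodes above (i2b/i2c/i2s) re-apply the narrowing that the field store performs implicitly. A standalone sketch of that narrowing for a byte field (illustrative only, not HotSpot code; the values are made up):

    #include <cassert>
    #include <cstdint>

    int main() {
      int32_t stored = 0x1FF;                       // int value written to a byte field
      int8_t  field  = static_cast<int8_t>(stored); // the field keeps only the low 8 bits
      assert(static_cast<int32_t>(field) == -1);    // what a later load (i2b) would observe
      return 0;
    }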
1868 
1869 
1870 Dependencies* GraphBuilder::dependency_recorder() const {
1871   assert(DeoptC1, "need debug information");
1872   return compilation()->dependency_recorder();
1873 }
1874 
1875 // How many arguments do we want to profile?
1876 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1877   int n = 0;
1878   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1879   start = has_receiver ? 1 : 0;
1880   if (profile_arguments()) {
1881     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1882     if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1883       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1884     }
1885   }
1886   // If we are inlining then we need to collect arguments to profile parameters for the target
1887   if (profile_parameters() && target != nullptr) {
1888     if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {

1966       break;
1967     case Bytecodes::_invokehandle:
1968       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1969       break;
1970     default:
1971       break;
1972     }
1973   } else {
1974     if (bc_raw == Bytecodes::_invokehandle) {
1975       assert(!will_link, "should come here only for unlinked call");
1976       code = Bytecodes::_invokespecial;
1977     }
1978   }
1979 
1980   if (code == Bytecodes::_invokespecial) {
1981     // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1982     ciKlass* receiver_constraint = nullptr;
1983 
1984     if (bc_raw == Bytecodes::_invokeinterface) {
1985       receiver_constraint = holder;
1986     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1987       receiver_constraint = calling_klass;
1988     }
1989 
1990     if (receiver_constraint != nullptr) {
1991       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1992       Value receiver = state()->stack_at(index);
1993       CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
1994       // go to uncommon_trap when checkcast fails
1995       c->set_invokespecial_receiver_check();
1996       state()->stack_at_put(index, append_split(c));
1997     }
1998   }
1999 
2000   // Push appendix argument (MethodType, CallSite, etc.), if one.
2001   bool patch_for_appendix = false;
2002   int patching_appendix_arg = 0;
2003   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2004     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2005     apush(arg);
2006     patch_for_appendix = true;

2202       null_check(recv);
2203     }
2204 
2205     if (is_profiling()) {
2206       // Note that we'd collect profile data in this method if we wanted it.
2207       compilation()->set_would_profile(true);
2208 
2209       if (profile_calls()) {
2210         assert(cha_monomorphic_target == nullptr || exact_target == nullptr, "both can not be set");
2211         ciKlass* target_klass = nullptr;
2212         if (cha_monomorphic_target != nullptr) {
2213           target_klass = cha_monomorphic_target->holder();
2214         } else if (exact_target != nullptr) {
2215           target_klass = exact_target->holder();
2216         }
2217         profile_call(target, recv, target_klass, collect_args_for_profiling(args, nullptr, false), false);
2218       }
2219     }
2220   }
2221 
2222   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);

2223   // push result
2224   append_split(result);
2225 
2226   if (result_type != voidType) {
2227     push(result_type, round_fp(result));
2228   }
2229   if (profile_return() && result_type->is_object_kind()) {
2230     profile_return_type(result, target);
2231   }
2232 }
2233 
2234 
2235 void GraphBuilder::new_instance(int klass_index) {
2236   ValueStack* state_before = copy_state_exhandling();
2237   ciKlass* klass = stream()->get_klass();
2238   assert(klass->is_instance_klass(), "must be an instance klass");
2239   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2240   _memory->new_instance(new_instance);
2241   apush(append_split(new_instance));
2242 }
2243 
2244 
2245 void GraphBuilder::new_type_array() {
2246   ValueStack* state_before = copy_state_exhandling();
2247   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2248 }
2249 
2250 
2251 void GraphBuilder::new_object_array() {
2252   ciKlass* klass = stream()->get_klass();

2253   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2254   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2255   apush(append_split(n));
2256 }
2257 
2258 
2259 bool GraphBuilder::direct_compare(ciKlass* k) {
2260   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2261     ciInstanceKlass* ik = k->as_instance_klass();
2262     if (ik->is_final()) {
2263       return true;
2264     } else {
2265       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2266         // test class is leaf class
2267         dependency_recorder()->assert_leaf_type(ik);
2268         return true;
2269       }
2270     }
2271   }
2272   return false;
2273 }
2274 
2275 
2276 void GraphBuilder::check_cast(int klass_index) {
2277   ciKlass* klass = stream()->get_klass();

2278   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2279   CheckCast* c = new CheckCast(klass, apop(), state_before);
2280   apush(append_split(c));
2281   c->set_direct_compare(direct_compare(klass));
2282 
2283   if (is_profiling()) {
2284     // Note that we'd collect profile data in this method if we wanted it.
2285     compilation()->set_would_profile(true);
2286 
2287     if (profile_checkcasts()) {
2288       c->set_profiled_method(method());
2289       c->set_profiled_bci(bci());
2290       c->set_should_profile(true);
2291     }
2292   }
2293 }
2294 
2295 
2296 void GraphBuilder::instance_of(int klass_index) {
2297   ciKlass* klass = stream()->get_klass();
2298   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2299   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2300   ipush(append_split(i));
2301   i->set_direct_compare(direct_compare(klass));
2302 
2303   if (is_profiling()) {
2304     // Note that we'd collect profile data in this method if we wanted it.
2305     compilation()->set_would_profile(true);
2306 
2307     if (profile_checkcasts()) {
2308       i->set_profiled_method(method());
2309       i->set_profiled_bci(bci());
2310       i->set_should_profile(true);
2311     }
2312   }
2313 }
2314 
2315 
2316 void GraphBuilder::monitorenter(Value x, int bci) {
2317   // save state before locking in case of deoptimization after a NullPointerException
2318   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2319   compilation()->set_has_monitors(true);
2320   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2321   kill_all();
2322 }
2323 
2324 
2325 void GraphBuilder::monitorexit(Value x, int bci) {
2326   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2327   kill_all();
2328 }
2329 
2330 
2331 void GraphBuilder::new_multi_array(int dimensions) {
2332   ciKlass* klass = stream()->get_klass();
2333   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2334 
2335   Values* dims = new Values(dimensions, dimensions, nullptr);
2336   // fill in all dimensions
2337   int i = dimensions;
2338   while (i-- > 0) dims->at_put(i, ipop());
2339   // create array
2340   NewArray* n = new NewMultiArray(klass, dims, state_before);

2435   if (i1->can_trap()) {
2436     i1->set_exception_handlers(handle_exception(i1));
2437     assert(i1->exception_state() != nullptr || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2438   }
2439   return i1;
2440 }
2441 
2442 
2443 Instruction* GraphBuilder::append(Instruction* instr) {
2444   assert(instr->as_StateSplit() == nullptr || instr->as_BlockEnd() != nullptr, "wrong append used");
2445   return append_with_bci(instr, bci());
2446 }
2447 
2448 
2449 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2450   return append_with_bci(instr, bci());
2451 }
2452 
2453 
2454 void GraphBuilder::null_check(Value value) {
2455   if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2456     return;
2457   } else {
2458     Constant* con = value->as_Constant();
2459     if (con) {
2460       ObjectType* c = con->type()->as_ObjectType();
2461       if (c && c->is_loaded()) {
2462         ObjectConstant* oc = c->as_ObjectConstant();
2463         if (!oc || !oc->value()->is_null_object()) {
2464           return;
2465         }
2466       }
2467     }

2468   }
2469   append(new NullCheck(value, copy_state_for_exception()));
2470 }
2471 
2472 
2473 
2474 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2475   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2476     assert(instruction->exception_state() == nullptr
2477            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2478            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2479            "exception_state should be of exception kind");
2480     return new XHandlers();
2481   }
2482 
2483   XHandlers*  exception_handlers = new XHandlers();
2484   ScopeData*  cur_scope_data = scope_data();
2485   ValueStack* cur_state = instruction->state_before();
2486   ValueStack* prev_state = nullptr;
2487   int scope_count = 0;
2488 
2489   assert(cur_state != nullptr, "state_before must be set");
2490   do {
2491     int cur_bci = cur_state->bci();
2492     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2493     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");


2494 
2495     // join with all potential exception handlers
2496     XHandlers* list = cur_scope_data->xhandlers();
2497     const int n = list->length();
2498     for (int i = 0; i < n; i++) {
2499       XHandler* h = list->handler_at(i);
2500       if (h->covers(cur_bci)) {
2501         // h is a potential exception handler => join it
2502         compilation()->set_has_exception_handlers(true);
2503 
2504         BlockBegin* entry = h->entry_block();
2505         if (entry == block()) {
2506           // It's acceptable for an exception handler to cover itself
2507           // but we don't handle that in the parser currently.  It's
2508           // very rare so we bailout instead of trying to handle it.
2509           BAILOUT_("exception handler covers itself", exception_handlers);
2510         }
2511         assert(entry->bci() == h->handler_bci(), "must match");
2512         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2513 

2961       case Bytecodes::_invokevirtual  : // fall through
2962       case Bytecodes::_invokespecial  : // fall through
2963       case Bytecodes::_invokestatic   : // fall through
2964       case Bytecodes::_invokedynamic  : // fall through
2965       case Bytecodes::_invokeinterface: invoke(code); break;
2966       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
2967       case Bytecodes::_newarray       : new_type_array(); break;
2968       case Bytecodes::_anewarray      : new_object_array(); break;
2969       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
2970       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
2971       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
2972       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
2973       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
2974       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
2975       case Bytecodes::_wide           : ShouldNotReachHere(); break;
2976       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
2977       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
2978       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
2979       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
2980       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;


2981       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", nullptr);
2982       default                         : ShouldNotReachHere(); break;
2983     }
2984 
2985     if (log != nullptr)
2986       log->clear_context(); // skip marker if nothing was printed
2987 
2988     // save current bci to setup Goto at the end
2989     prev_bci = s.cur_bci();
2990 
2991   }
2992   CHECK_BAILOUT_(nullptr);
2993   // stop processing of this block (see try_inline_full)
2994   if (_skip_block) {
2995     _skip_block = false;
2996     assert(_last && _last->as_BlockEnd(), "");
2997     return _last->as_BlockEnd();
2998   }
2999   // if there are any, check if last instruction is a BlockEnd instruction
3000   BlockEnd* end = last()->as_BlockEnd();

3249   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3250 
3251   assert(state->caller_state() == nullptr, "should be top scope");
3252   state->clear_locals();
3253   Goto* g = new Goto(target, false);
3254   append(g);
3255   _osr_entry->set_end(g);
3256   target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3257 
3258   scope_data()->set_stream(nullptr);
3259 }
3260 
3261 
3262 ValueStack* GraphBuilder::state_at_entry() {
3263   ValueStack* state = new ValueStack(scope(), nullptr);
3264 
3265   // Set up locals for receiver
3266   int idx = 0;
3267   if (!method()->is_static()) {
3268     // we should always see the receiver
3269     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));

3270     idx = 1;
3271   }
3272 
3273   // Set up locals for incoming arguments
3274   ciSignature* sig = method()->signature();
3275   for (int i = 0; i < sig->count(); i++) {
3276     ciType* type = sig->type_at(i);
3277     BasicType basic_type = type->basic_type();
3278     // don't allow T_ARRAY to propagate into locals types
3279     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3280     ValueType* vt = as_ValueType(basic_type);
3281     state->store_local(idx, new Local(type, vt, idx, false));
3282     idx += type->size();
3283   }
3284 
3285   // lock synchronized method
3286   if (method()->is_synchronized()) {
3287     state->lock(nullptr);
3288   }
3289 
3290   return state;
3291 }
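
state_at_entry() above assigns local slots in signature order, with the receiver (if any) in slot 0 and each argument advancing the index by its size, so longs and doubles occupy two slots. A small standalone sketch of that accounting (illustrative only; the example signature and names are made up):

    #include <cstdio>

    int main() {
      // Instance method with signature (JI)V: receiver, long, int.
      const int sizes[] = {1 /*this*/, 2 /*long*/, 1 /*int*/};
      int idx = 0;
      for (int s : sizes) {
        std::printf("local starts at slot %d\n", idx);
        idx += s;  // mirrors idx += type->size() above
      }
      return 0;    // slots 0, 1, 3
    }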
3292 
3293 
3294 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3295   : _scope_data(nullptr)
3296   , _compilation(compilation)
3297   , _memory(new MemoryBuffer())
3298   , _inline_bailout_msg(nullptr)
3299   , _instruction_count(0)
3300   , _osr_entry(nullptr)


3301 {
3302   int osr_bci = compilation->osr_bci();
3303 
3304   // determine entry points and bci2block mapping
3305   BlockListBuilder blm(compilation, scope, osr_bci);
3306   CHECK_BAILOUT();
3307 
3308   BlockList* bci2block = blm.bci2block();
3309   BlockBegin* start_block = bci2block->at(0);
3310 
3311   push_root_scope(scope, bci2block, start_block);
3312 
3313   // setup state for std entry
3314   _initial_state = state_at_entry();
3315   start_block->merge(_initial_state, compilation->has_irreducible_loops());
3316 
3317   // End nulls still exist here
3318 
3319   // complete graph
3320   _vmap        = new ValueMap();

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciFlatArrayKlass.hpp"
  34 #include "ci/ciInlineKlass.hpp"
  35 #include "ci/ciKlass.hpp"
  36 #include "ci/ciMemberName.hpp"
  37 #include "ci/ciSymbols.hpp"
  38 #include "ci/ciUtilities.inline.hpp"
  39 #include "classfile/javaClasses.hpp"
  40 #include "compiler/compilationPolicy.hpp"
  41 #include "compiler/compileBroker.hpp"
  42 #include "compiler/compilerEvent.hpp"
  43 #include "interpreter/bytecode.hpp"
  44 #include "jfr/jfrEvents.hpp"
  45 #include "memory/resourceArea.hpp"
  46 #include "oops/oop.inline.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/vm_version.hpp"
  49 #include "utilities/bitMap.inline.hpp"
  50 #include "utilities/checkedCast.hpp"
  51 #include "utilities/powerOfTwo.hpp"
  52 #include "utilities/macros.hpp"
  53 #if INCLUDE_JFR
  54 #include "jfr/jfr.hpp"

 710         }
 711 #endif
 712         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 713         return result;
 714       }
 715     }
 716     return load;
 717   }
 718 
 719   // Record this newly allocated object
 720   void new_instance(NewInstance* object) {
 721     int index = _newobjects.length();
 722     _newobjects.append(object);
 723     if (_fields.at_grow(index, nullptr) == nullptr) {
 724       _fields.at_put(index, new FieldBuffer());
 725     } else {
 726       _fields.at(index)->kill();
 727     }
 728   }
 729 
 730   // Record this newly allocated object
 731   void new_instance(NewInlineTypeInstance* object) {
 732     int index = _newobjects.length();
 733     _newobjects.append(object);
 734     if (_fields.at_grow(index, nullptr) == nullptr) {
 735       _fields.at_put(index, new FieldBuffer());
 736     } else {
 737       _fields.at(index)->kill();
 738     }
 739   }
 740 
 741   void store_value(Value value) {
 742     int index = _newobjects.find(value);
 743     if (index != -1) {
 744       // Stored a newly allocated object into another object.
 745       // Assume we've lost track of it as a separate slice of memory.
 746       // We could do better by keeping track of whether individual
 747       // fields could alias each other.
 748       _newobjects.remove_at(index);
 749       // pull out the field info and store it at the end of the field
 750       // info list so it can be reused later.
 751       _fields.append(_fields.at(index));
 752       _fields.remove_at(index);
 753     }
 754   }
 755 
 756   void kill() {
 757     _newobjects.trunc_to(0);
 758     _objects.trunc_to(0);
 759     _values.kill();
 760   }

1020         int offset = java_lang_boxing_object::value_offset(type);
1021         ciField* value_field = box_klass->get_field_by_offset(offset, false /*is_static*/);
1022         x = new LoadField(append(x), offset, value_field, false /*is_static*/, patch_state, false /*needs_patching*/);
1023         t = as_ValueType(type);
1024       } else {
1025         assert(is_reference_type(type), "not a reference: %s", type2name(type));
1026       }
1027     }
1028 
1029     push(t, append(x));
1030   } else {
1031     BAILOUT("could not resolve a constant");
1032   }
1033 }
1034 
1035 
1036 void GraphBuilder::load_local(ValueType* type, int index) {
1037   Value x = state()->local_at(index);
1038   assert(x != nullptr && !x->type()->is_illegal(), "access of illegal local variable");
1039   push(type, x);
1040   if (x->as_NewInlineTypeInstance() != nullptr && x->as_NewInlineTypeInstance()->in_larval_state()) {
1041     if (x->as_NewInlineTypeInstance()->on_stack_count() == 1) {
1042       x->as_NewInlineTypeInstance()->set_not_larva_anymore();
1043     } else {
1044       x->as_NewInlineTypeInstance()->increment_on_stack_count();
1045     }
1046   }
1047 }
1048 
1049 
1050 void GraphBuilder::store_local(ValueType* type, int index) {
1051   Value x = pop(type);
1052   store_local(state(), x, index);
1053   if (x->as_NewInlineTypeInstance() != nullptr) {
1054     x->as_NewInlineTypeInstance()->set_local_index(index);
1055   }
1056 }
1057 
1058 
1059 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
1060   if (parsing_jsr()) {
1061     // We need to do additional tracking of the location of the return
1062     // address for jsrs since we don't handle arbitrary jsr/ret
1063     // constructs. Here we are figuring out in which circumstances we
1064     // need to bail out.
1065     if (x->type()->is_address()) {
1066       scope_data()->set_jsr_return_address_local(index);
1067 
1068       // Also check parent jsrs (if any) at this time to see whether
1069       // they are using this local. We don't handle skipping over a
1070       // ret.
1071       for (ScopeData* cur_scope_data = scope_data()->parent();
1072            cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1073            cur_scope_data = cur_scope_data->parent()) {
1074         if (cur_scope_data->jsr_return_address_local() == index) {
1075           BAILOUT("subroutine overwrites return address from previous subroutine");
1076         }
1077       }
1078     } else if (index == scope_data()->jsr_return_address_local()) {
1079       scope_data()->set_jsr_return_address_local(-1);
1080     }
1081   }
1082 
1083   state->store_local(index, round_fp(x));
1084   if (x->as_NewInlineTypeInstance() != nullptr) {
1085     x->as_NewInlineTypeInstance()->set_local_index(index);
1086   }
1087 }
1088 
1089 
1090 void GraphBuilder::load_indexed(BasicType type) {
1091   // In case of in-block code motion in range check elimination
1092   ValueStack* state_before = nullptr;
1093   int array_idx = state()->stack_size() - 2;
1094   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1095     // Save the entire state and re-execute on deopt when accessing flat arrays
1096     state_before = copy_state_before();
1097     state_before->set_should_reexecute(true);
1098   } else {
1099     state_before = copy_state_indexed_access();
1100   }
1101   compilation()->set_has_access_indexed(true);
1102   Value index = ipop();
1103   Value array = apop();
1104   Value length = nullptr;
1105   if (CSEArrayLength ||
1106       (array->as_Constant() != nullptr) ||
1107       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1108       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1109       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1110     length = append(new ArrayLength(array, state_before));
1111   }
1112 
1113   bool need_membar = false;
1114   LoadIndexed* load_indexed = nullptr;
1115   Instruction* result = nullptr;
1116   if (array->is_loaded_flat_array()) {
1117     ciType* array_type = array->declared_type();
1118     ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1119 
1120     bool can_delay_access = false;
1121     ciBytecodeStream s(method());
1122     s.force_bci(bci());
1123     s.next();
1124     if (s.cur_bc() == Bytecodes::_getfield) {
1125       bool will_link;
1126       ciField* next_field = s.get_field(will_link);
1127       bool next_needs_patching = !next_field->holder()->is_initialized() ||
1128                                  !next_field->will_link(method(), Bytecodes::_getfield) ||
1129                                  PatchALot;
1130       can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1131     }
1132     if (can_delay_access) {
1133       // potentially optimizable array access, storing information for delayed decision
1134       LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1135       DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1136       li->set_delayed(dli);
1137       set_pending_load_indexed(dli);
1138       return; // Nothing else to do for now
1139     } else {
1140       if (elem_klass->is_empty()) {
1141         // No need to create a new instance, the default instance will be used instead
1142         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1143         apush(append(load_indexed));
1144       } else {
1145         NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(elem_klass, state_before);
1146         _memory->new_instance(new_instance);
1147         apush(append_split(new_instance));
1148         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1149         load_indexed->set_vt(new_instance);
1150         // The LoadIndexed node will initialise this instance by copying from
1151         // the flat field.  Ensure these stores are visible before any
1152         // subsequent store that publishes this reference.
1153         need_membar = true;
1154       }
1155     }
1156   } else {
1157     load_indexed = new LoadIndexed(array, index, length, type, state_before);
1158     if (profile_array_accesses() && is_reference_type(type)) {
1159       compilation()->set_would_profile(true);
1160       load_indexed->set_should_profile(true);
1161       load_indexed->set_profiled_method(method());
1162       load_indexed->set_profiled_bci(bci());
1163     }
1164   }
1165   result = append(load_indexed);
1166   if (need_membar) {
1167     append(new MemBar(lir_membar_storestore));
1168   }
1169   assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1170   if (!array->is_loaded_flat_array()) {
1171     push(as_ValueType(type), result);
1172   }
1173 }
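
The need_membar path above inserts a storestore barrier because the freshly allocated instance is initialized by copying from the flat array element; those copies must become visible before any later store can publish the reference. A generic, portable sketch of that ordering requirement (illustrative only, not HotSpot code; Payload, g_published and copy_and_publish are made-up names):

    #include <atomic>

    struct Payload { int a; int b; };
    std::atomic<Payload*> g_published{nullptr};

    void copy_and_publish(Payload* p) {
      p->a = 1;  // analogous to LoadIndexed copying the flat element's fields
      p->b = 2;
      // Release ordering provides (at least) the storestore effect used above:
      // the field copies are visible no later than the published pointer.
      g_published.store(p, std::memory_order_release);
    }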
1174 
1175 
1176 void GraphBuilder::store_indexed(BasicType type) {
1177   // In case of in-block code motion in range check elimination
1178   ValueStack* state_before = nullptr;
1179   int array_idx = state()->stack_size() - 3;
1180   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1181     // Save the entire state and re-execute on deopt when accessing flat arrays
1182     state_before = copy_state_before();
1183     state_before->set_should_reexecute(true);
1184   } else {
1185     state_before = copy_state_indexed_access();
1186   }
1187   compilation()->set_has_access_indexed(true);
1188   Value value = pop(as_ValueType(type));
1189   Value index = ipop();
1190   Value array = apop();
1191   Value length = nullptr;
1192   if (CSEArrayLength ||
1193       (array->as_Constant() != nullptr) ||
1194       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1195       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1196       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1197     length = append(new ArrayLength(array, state_before));
1198   }
1199   ciType* array_type = array->declared_type();
1200   bool check_boolean = false;
1201   if (array_type != nullptr) {
1202     if (array_type->is_loaded() &&
1203       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1204       assert(type == T_BYTE, "boolean store uses bastore");
1205       Value mask = append(new Constant(new IntConstant(1)));
1206       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1207     }
1208   } else if (type == T_BYTE) {
1209     check_boolean = true;
1210   }



1211 
1212   StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1213   if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
1214     compilation()->set_would_profile(true);
1215     store_indexed->set_should_profile(true);
1216     store_indexed->set_profiled_method(method());
1217     store_indexed->set_profiled_bci(bci());



1218   }
1219   Instruction* result = append(store_indexed);
1220   assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1221   _memory->store_value(value);
1222 }
1223 

1224 void GraphBuilder::stack_op(Bytecodes::Code code) {
1225   switch (code) {
1226     case Bytecodes::_pop:
1227       { Value w = state()->raw_pop();
1228         update_larva_stack_count(w);
1229       }
1230       break;
1231     case Bytecodes::_pop2:
1232       { Value w1 = state()->raw_pop();
1233         Value w2 = state()->raw_pop();
1234         update_larva_stack_count(w1);
1235         update_larva_stack_count(w2);
1236       }
1237       break;
1238     case Bytecodes::_dup:
1239       { Value w = state()->raw_pop();
1240         update_larval_state(w);
1241         state()->raw_push(w);
1242         state()->raw_push(w);
1243       }
1244       break;
1245     case Bytecodes::_dup_x1:
1246       { Value w1 = state()->raw_pop();
1247         Value w2 = state()->raw_pop();
1248         update_larval_state(w1);
1249         state()->raw_push(w1);
1250         state()->raw_push(w2);
1251         state()->raw_push(w1);
1252       }
1253       break;
1254     case Bytecodes::_dup_x2:
1255       { Value w1 = state()->raw_pop();
1256         Value w2 = state()->raw_pop();
1257         Value w3 = state()->raw_pop();
1258         // special handling for the dup_x2/pop sequence (see JDK-8251046)
1259         if (w1 != nullptr && w1->as_NewInlineTypeInstance() != nullptr) {
1260           ciBytecodeStream s(method());
1261           s.force_bci(bci());
1262           s.next();
1263           if (s.cur_bc() != Bytecodes::_pop) {
1264             w1->as_NewInlineTypeInstance()->set_not_larva_anymore();
1265           } else {
1266             w1->as_NewInlineTypeInstance()->increment_on_stack_count();
1267           }
1268         }
1269         state()->raw_push(w1);
1270         state()->raw_push(w3);
1271         state()->raw_push(w2);
1272         state()->raw_push(w1);
1273       }
1274       break;
1275     case Bytecodes::_dup2:
1276       { Value w1 = state()->raw_pop();
1277         Value w2 = state()->raw_pop();
1278         update_larval_state(w1);
1279         update_larval_state(w2);
1280         state()->raw_push(w2);
1281         state()->raw_push(w1);
1282         state()->raw_push(w2);
1283         state()->raw_push(w1);
1284       }
1285       break;
1286     case Bytecodes::_dup2_x1:
1287       { Value w1 = state()->raw_pop();
1288         Value w2 = state()->raw_pop();
1289         Value w3 = state()->raw_pop();
1290         update_larval_state(w1);
1291         update_larval_state(w2);
1292         state()->raw_push(w2);
1293         state()->raw_push(w1);
1294         state()->raw_push(w3);
1295         state()->raw_push(w2);
1296         state()->raw_push(w1);
1297       }
1298       break;
1299     case Bytecodes::_dup2_x2:
1300       { Value w1 = state()->raw_pop();
1301         Value w2 = state()->raw_pop();
1302         Value w3 = state()->raw_pop();
1303         Value w4 = state()->raw_pop();
1304         update_larval_state(w1);
1305         update_larval_state(w2);
1306         state()->raw_push(w2);
1307         state()->raw_push(w1);
1308         state()->raw_push(w4);
1309         state()->raw_push(w3);
1310         state()->raw_push(w2);
1311         state()->raw_push(w1);
1312       }
1313       break;
1314     case Bytecodes::_swap:
1315       { Value w1 = state()->raw_pop();
1316         Value w2 = state()->raw_pop();
1317         state()->raw_push(w1);
1318         state()->raw_push(w2);
1319       }
1320       break;
1321     default:
1322       ShouldNotReachHere();
1323       break;
1324   }
1325 }

1413 
1414 
1415 void GraphBuilder::_goto(int from_bci, int to_bci) {
1416   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1417   if (is_profiling()) {
1418     compilation()->set_would_profile(true);
1419     x->set_profiled_bci(bci());
1420     if (profile_branches()) {
1421       x->set_profiled_method(method());
1422       x->set_should_profile(true);
1423     }
1424   }
1425   append(x);
1426 }
1427 
1428 
1429 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1430   BlockBegin* tsux = block_at(stream()->get_dest());
1431   BlockBegin* fsux = block_at(stream()->next_bci());
1432   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1433 
1434   bool subst_check = false;
1435   if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1436     ValueType* left_vt = x->type();
1437     ValueType* right_vt = y->type();
1438     if (left_vt->is_object()) {
1439       assert(right_vt->is_object(), "must be");
1440       ciKlass* left_klass = x->as_loaded_klass_or_null();
1441       ciKlass* right_klass = y->as_loaded_klass_or_null();
1442 
1443       if (left_klass == nullptr || right_klass == nullptr) {
1444         // The klass is still unloaded, or came from a Phi node. Go slow case;
1445         subst_check = true;
1446       } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1447         // Either operand may be a value object, but we're not sure. Go slow case;
1448         subst_check = true;
1449       } else {
1450         // No need to do substitutability check
1451       }
1452     }
1453   }
1454   if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1455       is_profiling() && profile_branches()) {
1456     compilation()->set_would_profile(true);
1457     append(new ProfileACmpTypes(method(), bci(), x, y));
1458   }
1459 
1460   // In case of loop invariant code motion or predicate insertion
1461   // before the body of a loop the state is needed
1462   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
1463 
1464   assert(i->as_Goto() == nullptr ||
1465          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1466          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1467          "safepoint state of Goto returned by canonicalizer incorrect");
1468 
1469   if (is_profiling()) {
1470     If* if_node = i->as_If();
1471     if (if_node != nullptr) {
1472       // Note that we'd collect profile data in this method if we wanted it.
1473       compilation()->set_would_profile(true);
1474       // At level 2 we need the proper bci to count backedges
1475       if_node->set_profiled_bci(bci());
1476       if (profile_branches()) {
1477         // Successors can be rotated by the canonicalizer, check for this case.
1478         if_node->set_profiled_method(method());
1479         if_node->set_should_profile(true);
1480         if (if_node->tsux() == fsux) {
1481           if_node->set_swapped(true);
1482         }

1693 
1694   if (needs_check) {
1695     // Perform the registration of finalizable objects.
1696     ValueStack* state_before = copy_state_for_exception();
1697     load_local(objectType, 0);
1698     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1699                                state()->pop_arguments(1),
1700                                true, state_before, true));
1701   }
1702 }
1703 
1704 
1705 void GraphBuilder::method_return(Value x, bool ignore_return) {
1706   if (RegisterFinalizersAtInit &&
1707       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1708     call_register_finalizer();
1709   }
1710 
1711   // The conditions for a memory barrier are described in Parse::do_exits().
1712   bool need_mem_bar = false;
1713   if ((method()->is_object_constructor() || method()->is_static_vnew_factory()) &&
1714        (scope()->wrote_final() ||
1715          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1716          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1717     need_mem_bar = true;
1718   }
1719 
1720   BasicType bt = method()->return_type()->basic_type();
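       // Subword results are computed as ints; narrow them here so the caller observes a value
       // of the declared return type (e.g. T_BYTE is sign-extended via shl 24 / shr 24).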
1721   switch (bt) {
1722     case T_BYTE:
1723     {
1724       Value shift = append(new Constant(new IntConstant(24)));
1725       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1726       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1727       break;
1728     }
1729     case T_SHORT:
1730     {
1731       Value shift = append(new Constant(new IntConstant(16)));
1732       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1733       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1844   // Attach dimension info to stable arrays.
1845   if (FoldStableValues &&
1846       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1847     ciArray* array = field_value.as_object()->as_array();
1848     jint dimension = field->type()->as_array_klass()->dimension();
1849     value = new StableArrayConstant(array, dimension);
1850   }
1851 
1852   switch (field_type) {
1853     case T_ARRAY:
1854     case T_OBJECT:
1855       if (field_value.as_object()->should_be_constant()) {
1856         return new Constant(value);
1857       }
1858       return nullptr; // Not a constant.
1859     default:
1860       return new Constant(value);
1861   }
1862 }
1863 
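     // Copy the payload of a flat value object field by field from src (at src_off) to dest (at
     // dest_off); the inline klass's non-static field list already contains the leaf fields of
     // nested flat fields, so a single loop suffices.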
1864 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1865   for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
1866     ciField* inner_field = vk->nonstatic_field_at(i);
1867     assert(!inner_field->is_flat(), "the iteration over nested fields is handled by the loop itself");
1868     int off = inner_field->offset_in_bytes() - vk->first_field_offset();
1869     LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
1870     Value replacement = append(load);
1871     StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
1872     store->set_enclosing_field(enclosing_field);
1873     append(store);
1874   }
1875 }
1876 
1877 void GraphBuilder::access_field(Bytecodes::Code code) {
1878   bool will_link;
1879   ciField* field = stream()->get_field(will_link);
1880   ciInstanceKlass* holder = field->holder();
1881   BasicType field_type = field->type()->basic_type();
1882   ValueType* type = as_ValueType(field_type);
1883 
1884   // call will_link again to determine if the field is valid.
1885   const bool needs_patching = !holder->is_loaded() ||
1886                               !field->will_link(method(), code) ||
1887                               (!field->is_flat() && PatchALot);
1888 
1889   ValueStack* state_before = nullptr;
1890   if (!holder->is_initialized() || needs_patching) {
1891     // save state before instruction for debug info when
1892     // deoptimization happens during patching
1893     state_before = copy_state_before();
1894   }
1895 
1896   Value obj = nullptr;
1897   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1898     if (state_before != nullptr) {
1899       // build a patching constant
1900       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1901     } else {
1902       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1903     }
1904   }
1905 
1906   if (field->is_final() && code == Bytecodes::_putfield) {
1907     scope()->set_wrote_final();
1908   }
1909 
1910   if (code == Bytecodes::_putfield) {
1911     scope()->set_wrote_fields();
1912     if (field->is_volatile()) {
1913       scope()->set_wrote_volatile();
1914     }
1915   }
1916 
1917   int offset = !needs_patching ? field->offset_in_bytes() : -1;
1918   switch (code) {
1919     case Bytecodes::_getstatic: {
1920       // check for compile-time constants, i.e., initialized static final fields
1921       Value constant = nullptr;
1922       if (field->is_static_constant() && !PatchALot) {
1923         ciConstant field_value = field->constant_value();
1924         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1925                "stable static w/ default value shouldn't be a constant");
1926         constant = make_constant(field_value, field);
1927       } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1928                  field->type()->as_inline_klass()->is_empty()) {
1929         // Loading from a field of an empty inline type. Just return the default instance.
1930         constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1931       }
1932       if (constant != nullptr) {
1933         push(type, append(constant));
1934       } else {
1935         if (state_before == nullptr) {
1936           state_before = copy_state_for_exception();
1937         }
1938         LoadField* load_field = new LoadField(append(obj), offset, field, true,
1939                                         state_before, needs_patching);
1940         push(type, append(load_field));
1941       }
1942       break;
1943     }
1944     case Bytecodes::_putstatic: {
1945       Value val = pop(type);
1946       if (state_before == nullptr) {
1947         state_before = copy_state_for_exception();
1948       }
1949       if (field_type == T_BOOLEAN) {
1950         Value mask = append(new Constant(new IntConstant(1)));
1951         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1952       }
1953       if (field->is_null_free()) {
1954         null_check(val);
1955       }
1956       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1957         // Storing to a field of an empty inline type. Ignore.
1958         break;
1959       }
1960       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1961       break;
1962     }
1963     case Bytecodes::_getfield: {
1964       // Check for compile-time constants, i.e., trusted final non-static fields.
1965       Value constant = nullptr;
1966       if (state_before == nullptr && field->is_flat()) {
1967         // Save the entire state and re-execute on deopt when accessing flat fields
1968         assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1969         state_before = copy_state_before();
1970       }
1971       if (!has_pending_field_access() && !has_pending_load_indexed()) {
1972         obj = apop();
1973         ObjectType* obj_type = obj->type()->as_ObjectType();
1974         if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
1975             && field->type()->as_inline_klass()->is_empty()) {
1976           // Loading from a field of an empty inline type. Just return the default instance.
1977           null_check(obj);
1978           constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1979         } else if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
1980           ciObject* const_oop = obj_type->constant_value();
1981           if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1982             ciConstant field_value = field->constant_value_of(const_oop);
1983             if (field_value.is_valid()) {
1984               if (field->is_null_free() && field_value.is_null_or_zero()) {
1985                 // Non-flat inline type field. Replace null by the default value.
1986                 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1987               } else {
1988                 constant = make_constant(field_value, field);
1989               }
1990               // For CallSite objects add a dependency for invalidation of the optimization.
1991               if (field->is_call_site_target()) {
1992                 ciCallSite* call_site = const_oop->as_call_site();
1993                 if (!call_site->is_fully_initialized_constant_call_site()) {
1994                   ciMethodHandle* target = field_value.as_object()->as_method_handle();
1995                   dependency_recorder()->assert_call_site_target_value(call_site, target);
1996                 }
1997               }
1998             }
1999           }
2000         }
2001       }
2002       if (constant != nullptr) {
2003         push(type, append(constant));
2004       } else {
2005         if (state_before == nullptr) {
2006           state_before = copy_state_for_exception();
2007         }
2008         if (!field->is_flat()) {
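               // If a flat field access was delayed (see the is_flat branch below), fold its base
               // object and accumulated offset into this load instead of materializing the
               // intermediate value object.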
2009           if (has_pending_field_access()) {
2010             assert(!needs_patching, "Can't patch delayed field access");
2011             obj = pending_field_access()->obj();
2012             offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
2013             field = pending_field_access()->holder()->get_field_by_offset(offset, false);
2014             assert(field != nullptr, "field not found");
2015             set_pending_field_access(nullptr);
2016           } else if (has_pending_load_indexed()) {
2017             assert(!needs_patching, "Can't patch delayed field access");
2018             pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2019             LoadIndexed* li = pending_load_indexed()->load_instr();
2020             li->set_type(type);
2021             push(type, append(li));
2022             set_pending_load_indexed(nullptr);




2023             break;
2024           }
2025           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
2026           Value replacement = !needs_patching ? _memory->load(load) : load;
2027           if (replacement != load) {
2028             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
2029             // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
2030             // conversion. Emit an explicit conversion here to get the correct field value after the write.
2031             switch (field_type) {
2032             case T_BOOLEAN:
2033             case T_BYTE:
2034               replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
2035               break;
2036             case T_CHAR:
2037               replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
2038               break;
2039             case T_SHORT:
2040               replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
2041               break;
2042             default:
2043               break;
2044             }
2045             push(type, replacement);
2046           } else {
2047             push(type, append(load));
2048           }
2049         } else {
2050           // Look at the next bytecode to check if we can delay the field access
2051           bool can_delay_access = false;
2052           ciBytecodeStream s(method());
2053           s.force_bci(bci());
2054           s.next();
2055           if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
2056             ciField* next_field = s.get_field(will_link);
2057             bool next_needs_patching = !next_field->holder()->is_loaded() ||
2058                                        !next_field->will_link(method(), Bytecodes::_getfield) ||
2059                                        PatchALot;
2060             can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
2061           }
2062           if (can_delay_access) {
2063             if (has_pending_load_indexed()) {
2064               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2065             } else if (has_pending_field_access()) {
2066               pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
2067             } else {
2068               null_check(obj);
2069               DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes());
2070               set_pending_field_access(dfa);
2071             }
2072           } else {
2073             ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2074             scope()->set_wrote_final();
2075             scope()->set_wrote_fields();
2076             bool need_membar = false;
2077             if (inline_klass->is_initialized() && inline_klass->is_empty()) {
2078               apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
2079               if (has_pending_field_access()) {
2080                 set_pending_field_access(nullptr);
2081               } else if (has_pending_load_indexed()) {
2082                 set_pending_load_indexed(nullptr);
2083               }
2084             } else if (has_pending_load_indexed()) {
2085               assert(!needs_patching, "Can't patch delayed field access");
2086               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2087               NewInlineTypeInstance* vt = new NewInlineTypeInstance(inline_klass, pending_load_indexed()->state_before());
2088               _memory->new_instance(vt);
2089               pending_load_indexed()->load_instr()->set_vt(vt);
2090               apush(append_split(vt));
2091               append(pending_load_indexed()->load_instr());
2092               set_pending_load_indexed(nullptr);
2093               need_membar = true;
2094             } else {
2095               NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(inline_klass, state_before);
2096               _memory->new_instance(new_instance);
2097               apush(append_split(new_instance));
2098               assert(!needs_patching, "Can't patch flat inline type field access");
2099               if (has_pending_field_access()) {
2100                 copy_inline_content(inline_klass, pending_field_access()->obj(),
2101                                     pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->first_field_offset(),
2102                                     new_instance, inline_klass->first_field_offset(), state_before);
2103                 set_pending_field_access(nullptr);
2104               } else {
2105                 copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->first_field_offset(), state_before);
2106               }
2107               need_membar = true;
2108             }
2109             if (need_membar) {
2110               // If we allocated a new instance, ensure the stores to copy the
2111               // field contents are visible before any subsequent store that
2112               // publishes this reference.
2113               append(new MemBar(lir_membar_storestore));
2114             }
2115           }
2116         }
2117       }
2118       break;
2119     }
2120     case Bytecodes::_putfield: {
2121       Value val = pop(type);
2122       obj = apop();
2123       if (state_before == nullptr) {
2124         state_before = copy_state_for_exception();
2125       }
2126       if (field_type == T_BOOLEAN) {
2127         Value mask = append(new Constant(new IntConstant(1)));
2128         val = append(new LogicOp(Bytecodes::_iand, val, mask));
2129       }
2130       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2131         // Storing to a field of an empty inline type. Ignore.
2132         null_check(obj);
2133         null_check(val);
2134       } else if (!field->is_flat()) {
2135         if (field->is_null_free()) {
2136           null_check(val);
2137         }
2138         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2139         if (!needs_patching) store = _memory->store(store);
2140         if (store != nullptr) {
2141           append(store);
2142         }
2143       } else {
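             // Flat field: the value object's payload is embedded in the holder, so copy it into
             // obj field by field instead of storing a reference.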
2144         assert(!needs_patching, "Can't patch flat inline type field access");
2145         ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2146         copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
2147       }
2148       break;
2149     }
2150     default:
2151       ShouldNotReachHere();
2152       break;
2153   }
2154 }
2155 
2156 // Baseline version of withfield, allocate every time
2157 void GraphBuilder::withfield(int field_index) {
2158   // Save the entire state and re-execute on deopt
2159   ValueStack* state_before = copy_state_before();
2160   state_before->set_should_reexecute(true);
2161 
2162   bool will_link;
2163   ciField* field_modify = stream()->get_field(will_link);
2164   ciInstanceKlass* holder = field_modify->holder();
2165   BasicType field_type = field_modify->type()->basic_type();
2166   ValueType* type = as_ValueType(field_type);
2167   Value val = pop(type);
2168   Value obj = apop();
2169   null_check(obj);
2170 
2171   if (!holder->is_loaded() || !holder->is_inlinetype() || !will_link) {
2172     apush(append_split(new Deoptimize(holder, state_before)));
2173     return;
2174   }
2175 
2176   // call will_link again to determine if the field is valid.
2177   const bool needs_patching = !field_modify->will_link(method(), Bytecodes::_withfield) ||
2178                               (!field_modify->is_flat() && PatchALot);
2179   const int offset_modify = !needs_patching ? field_modify->offset_in_bytes() : -1;
2180 
2181   scope()->set_wrote_final();
2182   scope()->set_wrote_fields();
2183 
2184   NewInlineTypeInstance* new_instance;
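       // Reuse the buffer if the source object is still in its larval (under construction) state;
       // otherwise allocate a fresh instance and copy over every field that is not being modified.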
2185   if (obj->as_NewInlineTypeInstance() != nullptr && obj->as_NewInlineTypeInstance()->in_larval_state()) {
2186     new_instance = obj->as_NewInlineTypeInstance();
2187     apush(append_split(new_instance));
2188   } else {
2189     new_instance = new NewInlineTypeInstance(holder->as_inline_klass(), state_before);
2190     _memory->new_instance(new_instance);
2191     apush(append_split(new_instance));
2192 
2193     // Initialize fields which are not modified
2194     for (int i = 0; i < holder->nof_nonstatic_fields(); i++) {
2195       ciField* field = holder->nonstatic_field_at(i);
2196       int offset = field->offset_in_bytes();
2197       // Don't use offset_modify here; it might be set to -1 if needs_patching
2198       if (offset != field_modify->offset_in_bytes()) {
2199         if (field->is_flat()) {
2200           ciInlineKlass* vk = field->type()->as_inline_klass();
2201           if (!vk->is_empty()) {
2202             copy_inline_content(vk, obj, offset, new_instance, vk->first_field_offset(), state_before, field);
2203           }
2204         } else {
2205           LoadField* load = new LoadField(obj, offset, field, false, state_before, false);
2206           Value replacement = append(load);
2207           StoreField* store = new StoreField(new_instance, offset, field, replacement, false, state_before, false);
2208           append(store);
2209         }
2210       }
2211     }
2212   }
2213 
2214   // Field to modify
2215   if (field_type == T_BOOLEAN) {
2216     Value mask = append(new Constant(new IntConstant(1)));
2217     val = append(new LogicOp(Bytecodes::_iand, val, mask));
2218   }
2219   if (field_modify->is_null_free()) {
2220     null_check(val);
2221   }
2222   if (field_modify->is_flat()) {
2223     assert(!needs_patching, "Can't patch flat inline type field access");
2224     ciInlineKlass* vk = field_modify->type()->as_inline_klass();
2225     if (!vk->is_empty()) {
2226       copy_inline_content(vk, val, vk->first_field_offset(), new_instance, offset_modify, state_before, field_modify);
2227     }
2228   } else {
2229     StoreField* store = new StoreField(new_instance, offset_modify, field_modify, val, false, state_before, needs_patching);
2230     append(store);
2231   }
2232 }
2233 
2234 Dependencies* GraphBuilder::dependency_recorder() const {
2235   assert(DeoptC1, "need debug information");
2236   return compilation()->dependency_recorder();
2237 }
2238 
2239 // How many arguments do we want to profile?
2240 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2241   int n = 0;
2242   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2243   start = has_receiver ? 1 : 0;
2244   if (profile_arguments()) {
2245     ciProfileData* data = method()->method_data()->bci_to_data(bci());
2246     if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2247       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2248     }
2249   }
2250   // If we are inlining then we need to collect arguments to profile parameters for the target
2251   if (profile_parameters() && target != nullptr) {
2252     if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {

2330       break;
2331     case Bytecodes::_invokehandle:
2332       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2333       break;
2334     default:
2335       break;
2336     }
2337   } else {
2338     if (bc_raw == Bytecodes::_invokehandle) {
2339       assert(!will_link, "should come here only for unlinked call");
2340       code = Bytecodes::_invokespecial;
2341     }
2342   }
2343 
2344   if (code == Bytecodes::_invokespecial) {
2345     // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2346     ciKlass* receiver_constraint = nullptr;
2347 
2348     if (bc_raw == Bytecodes::_invokeinterface) {
2349       receiver_constraint = holder;
2350     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2351       receiver_constraint = calling_klass;
2352     }
2353 
2354     if (receiver_constraint != nullptr) {
2355       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2356       Value receiver = state()->stack_at(index);
2357       CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2358       // go to uncommon_trap when checkcast fails
2359       c->set_invokespecial_receiver_check();
2360       state()->stack_at_put(index, append_split(c));
2361     }
2362   }
2363 
2364   // Push the appendix argument (MethodType, CallSite, etc.), if there is one.
2365   bool patch_for_appendix = false;
2366   int patching_appendix_arg = 0;
2367   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2368     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2369     apush(arg);
2370     patch_for_appendix = true;

2566       null_check(recv);
2567     }
2568 
2569     if (is_profiling()) {
2570       // Note that we'd collect profile data in this method if we wanted it.
2571       compilation()->set_would_profile(true);
2572 
2573       if (profile_calls()) {
2574         assert(cha_monomorphic_target == nullptr || exact_target == nullptr, "both can not be set");
2575         ciKlass* target_klass = nullptr;
2576         if (cha_monomorphic_target != nullptr) {
2577           target_klass = cha_monomorphic_target->holder();
2578         } else if (exact_target != nullptr) {
2579           target_klass = exact_target->holder();
2580         }
2581         profile_call(target, recv, target_klass, collect_args_for_profiling(args, nullptr, false), false);
2582       }
2583     }
2584   }
2585 
2586   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before,
2587                               declared_signature->returns_null_free_inline_type());
2588   // push result
2589   append_split(result);
2590 
2591   if (result_type != voidType) {
2592     push(result_type, round_fp(result));
2593   }
2594   if (profile_return() && result_type->is_object_kind()) {
2595     profile_return_type(result, target);
2596   }
2597 }
2598 
2599 
2600 void GraphBuilder::new_instance(int klass_index) {
2601   ValueStack* state_before = copy_state_exhandling();
2602   ciKlass* klass = stream()->get_klass();
2603   assert(klass->is_instance_klass(), "must be an instance klass");
2604   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2605   _memory->new_instance(new_instance);
2606   apush(append_split(new_instance));
2607 }
2608 
2609 void GraphBuilder::default_value(int klass_index) {
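       // aconst_init: push the default instance of the inline class, or deoptimize at runtime if
       // the class is unresolved, not an inline type, or not yet initialized.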
2610   bool will_link;
2611   ciKlass* klass = stream()->get_klass(will_link);
2612   if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2613       klass->as_inline_klass()->is_initialized()) {
2614     ciInlineKlass* vk = klass->as_inline_klass();
2615     apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2616   } else {
2617     apush(append_split(new Deoptimize(klass, copy_state_before())));
2618   }
2619 }
2620 
2621 void GraphBuilder::new_type_array() {
2622   ValueStack* state_before = copy_state_exhandling();
2623   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2624 }
2625 
2626 
2627 void GraphBuilder::new_object_array() {
2628   ciKlass* klass = stream()->get_klass();
2629   bool null_free = stream()->has_Q_signature();
2630   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2631   NewArray* n = new NewObjectArray(klass, ipop(), state_before, null_free);
2632   apush(append_split(n));
2633 }
2634 
2635 
2636 bool GraphBuilder::direct_compare(ciKlass* k) {
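     // A type check can be lowered to a direct klass pointer comparison when the class is final,
     // or when CHA shows it is a leaf type (recorded as a compilation dependency).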
2637   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2638     ciInstanceKlass* ik = k->as_instance_klass();
2639     if (ik->is_final()) {
2640       return true;
2641     } else {
2642       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2643         // test class is leaf class
2644         dependency_recorder()->assert_leaf_type(ik);
2645         return true;
2646       }
2647     }
2648   }
2649   return false;
2650 }
2651 
2652 
2653 void GraphBuilder::check_cast(int klass_index) {
2654   ciKlass* klass = stream()->get_klass();
2655   bool null_free = stream()->has_Q_signature();
2656   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2657   CheckCast* c = new CheckCast(klass, apop(), state_before, null_free);
2658   apush(append_split(c));
2659   c->set_direct_compare(direct_compare(klass));
2660 
2661   if (is_profiling()) {
2662     // Note that we'd collect profile data in this method if we wanted it.
2663     compilation()->set_would_profile(true);
2664 
2665     if (profile_checkcasts()) {
2666       c->set_profiled_method(method());
2667       c->set_profiled_bci(bci());
2668       c->set_should_profile(true);
2669     }
2670   }
2671 }
2672 
2673 
2674 void GraphBuilder::instance_of(int klass_index) {
2675   ciKlass* klass = stream()->get_klass();
2676   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2677   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2678   ipush(append_split(i));
2679   i->set_direct_compare(direct_compare(klass));
2680 
2681   if (is_profiling()) {
2682     // Note that we'd collect profile data in this method if we wanted it.
2683     compilation()->set_would_profile(true);
2684 
2685     if (profile_checkcasts()) {
2686       i->set_profiled_method(method());
2687       i->set_profiled_bci(bci());
2688       i->set_should_profile(true);
2689     }
2690   }
2691 }
2692 
2693 
2694 void GraphBuilder::monitorenter(Value x, int bci) {
2695   bool maybe_inlinetype = false;
2696   if (bci == InvocationEntryBci) {
2697     // Called by GraphBuilder::inline_sync_entry.
2698 #ifdef ASSERT
2699     ciType* obj_type = x->declared_type();
2700     assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2701 #endif
2702   } else {
2703     // We are compiling a monitorenter bytecode
2704     if (EnableValhalla) {
2705       ciType* obj_type = x->declared_type();
2706       if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
2707         // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2708         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
2709         maybe_inlinetype = true;
2710       }
2711     }
2712   }
2713 
2714   // save state before locking in case of deoptimization after a NullPointerException
2715   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2716   compilation()->set_has_monitors(true);
2717   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
2718   kill_all();
2719 }
2720 
2721 
2722 void GraphBuilder::monitorexit(Value x, int bci) {
2723   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2724   kill_all();
2725 }
2726 
2727 
2728 void GraphBuilder::new_multi_array(int dimensions) {
2729   ciKlass* klass = stream()->get_klass();
2730   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2731 
2732   Values* dims = new Values(dimensions, dimensions, nullptr);
2733   // fill in all dimensions
2734   int i = dimensions;
2735   while (i-- > 0) dims->at_put(i, ipop());
2736   // create array
2737   NewArray* n = new NewMultiArray(klass, dims, state_before);

2832   if (i1->can_trap()) {
2833     i1->set_exception_handlers(handle_exception(i1));
2834     assert(i1->exception_state() != nullptr || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2835   }
2836   return i1;
2837 }
2838 
2839 
2840 Instruction* GraphBuilder::append(Instruction* instr) {
2841   assert(instr->as_StateSplit() == nullptr || instr->as_BlockEnd() != nullptr, "wrong append used");
2842   return append_with_bci(instr, bci());
2843 }
2844 
2845 
2846 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2847   return append_with_bci(instr, bci());
2848 }
2849 
2850 
2851 void GraphBuilder::null_check(Value value) {
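       // Elide the explicit null check when the value is known to be non-null: freshly allocated
       // objects/arrays, non-null object constants, and values marked null-free.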
2852   if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr || value->as_NewInlineTypeInstance() != nullptr) {
2853     return;
2854   } else {
2855     Constant* con = value->as_Constant();
2856     if (con) {
2857       ObjectType* c = con->type()->as_ObjectType();
2858       if (c && c->is_loaded()) {
2859         ObjectConstant* oc = c->as_ObjectConstant();
2860         if (!oc || !oc->value()->is_null_object()) {
2861           return;
2862         }
2863       }
2864     }
2865     if (value->is_null_free()) return;
2866   }
2867   append(new NullCheck(value, copy_state_for_exception()));
2868 }
2869 
2870 
2871 
2872 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
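       // Fast path: no handler is in scope and the instruction either needs no exception state or
       // already has one, so an empty handler list suffices.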
2873   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2874     assert(instruction->exception_state() == nullptr
2875            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2876            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2877            "exception_state should be of exception kind");
2878     return new XHandlers();
2879   }
2880 
2881   XHandlers*  exception_handlers = new XHandlers();
2882   ScopeData*  cur_scope_data = scope_data();
2883   ValueStack* cur_state = instruction->state_before();
2884   ValueStack* prev_state = nullptr;
2885   int scope_count = 0;
2886 
2887   assert(cur_state != nullptr, "state_before must be set");
2888   do {
2889     int cur_bci = cur_state->bci();
2890     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2891     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2892            || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2893 
2894 
2895     // join with all potential exception handlers
2896     XHandlers* list = cur_scope_data->xhandlers();
2897     const int n = list->length();
2898     for (int i = 0; i < n; i++) {
2899       XHandler* h = list->handler_at(i);
2900       if (h->covers(cur_bci)) {
2901         // h is a potential exception handler => join it
2902         compilation()->set_has_exception_handlers(true);
2903 
2904         BlockBegin* entry = h->entry_block();
2905         if (entry == block()) {
2906           // It's acceptable for an exception handler to cover itself
2907           // but we don't handle that in the parser currently.  It's
2908           // very rare, so we bail out instead of trying to handle it.
2909           BAILOUT_("exception handler covers itself", exception_handlers);
2910         }
2911         assert(entry->bci() == h->handler_bci(), "must match");
2912         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2913 

3361       case Bytecodes::_invokevirtual  : // fall through
3362       case Bytecodes::_invokespecial  : // fall through
3363       case Bytecodes::_invokestatic   : // fall through
3364       case Bytecodes::_invokedynamic  : // fall through
3365       case Bytecodes::_invokeinterface: invoke(code); break;
3366       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
3367       case Bytecodes::_newarray       : new_type_array(); break;
3368       case Bytecodes::_anewarray      : new_object_array(); break;
3369       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
3370       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
3371       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
3372       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
3373       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
3374       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
3375       case Bytecodes::_wide           : ShouldNotReachHere(); break;
3376       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
3377       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
3378       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
3379       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
3380       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
3381       case Bytecodes::_aconst_init    : default_value(s.get_index_u2()); break;
3382       case Bytecodes::_withfield      : withfield(s.get_index_u2()); break;
3383       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", nullptr);
3384       default                         : ShouldNotReachHere(); break;
3385     }
3386 
3387     if (log != nullptr)
3388       log->clear_context(); // skip marker if nothing was printed
3389 
3390     // save current bci to set up Goto at the end
3391     prev_bci = s.cur_bci();
3392 
3393   }
3394   CHECK_BAILOUT_(nullptr);
3395   // stop processing of this block (see try_inline_full)
3396   if (_skip_block) {
3397     _skip_block = false;
3398     assert(_last && _last->as_BlockEnd(), "");
3399     return _last->as_BlockEnd();
3400   }
3401   // check whether the last instruction, if any, is a BlockEnd instruction
3402   BlockEnd* end = last()->as_BlockEnd();

3651   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3652 
3653   assert(state->caller_state() == nullptr, "should be top scope");
3654   state->clear_locals();
3655   Goto* g = new Goto(target, false);
3656   append(g);
3657   _osr_entry->set_end(g);
3658   target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3659 
3660   scope_data()->set_stream(nullptr);
3661 }
3662 
3663 
3664 ValueStack* GraphBuilder::state_at_entry() {
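       // Build the entry state: the receiver for non-static methods, the incoming arguments mapped
       // to locals, and a lock slot for synchronized methods.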
3665   ValueStack* state = new ValueStack(scope(), nullptr);
3666 
3667   // Set up locals for receiver
3668   int idx = 0;
3669   if (!method()->is_static()) {
3670     // we should always see the receiver
3671     state->store_local(idx, new Local(method()->holder(), objectType, idx,
3672              /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3673     idx = 1;
3674   }
3675 
3676   // Set up locals for incoming arguments
3677   ciSignature* sig = method()->signature();
3678   for (int i = 0; i < sig->count(); i++) {
3679     ciType* type = sig->type_at(i);
3680     BasicType basic_type = type->basic_type();
3681     // don't allow T_ARRAY to propagate into the types of locals
3682     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3683     ValueType* vt = as_ValueType(basic_type);
3684     state->store_local(idx, new Local(type, vt, idx, false, sig->is_null_free_at(i)));
3685     idx += type->size();
3686   }
3687 
3688   // lock synchronized method
3689   if (method()->is_synchronized()) {
3690     state->lock(nullptr);
3691   }
3692 
3693   return state;
3694 }
3695 
3696 
3697 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3698   : _scope_data(nullptr)
3699   , _compilation(compilation)
3700   , _memory(new MemoryBuffer())
3701   , _inline_bailout_msg(nullptr)
3702   , _instruction_count(0)
3703   , _osr_entry(nullptr)
3704   , _pending_field_access(nullptr)
3705   , _pending_load_indexed(nullptr)
3706 {
3707   int osr_bci = compilation->osr_bci();
3708 
3709   // determine entry points and bci2block mapping
3710   BlockListBuilder blm(compilation, scope, osr_bci);
3711   CHECK_BAILOUT();
3712 
3713   BlockList* bci2block = blm.bci2block();
3714   BlockBegin* start_block = bci2block->at(0);
3715 
3716   push_root_scope(scope, bci2block, start_block);
3717 
3718   // setup state for std entry
3719   _initial_state = state_at_entry();
3720   start_block->merge(_initial_state, compilation->has_irreducible_loops());
3721 
3722   // End nulls still exist here
3723 
3724   // complete graph
3725   _vmap        = new ValueMap();