src/hotspot/share/c1/c1_GraphBuilder.cpp

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"


  33 #include "ci/ciKlass.hpp"
  34 #include "ci/ciMemberName.hpp"
  35 #include "ci/ciSymbols.hpp"
  36 #include "ci/ciUtilities.inline.hpp"
  37 #include "classfile/javaClasses.hpp"
  38 #include "compiler/compilationPolicy.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "compiler/compilerEvent.hpp"
  41 #include "interpreter/bytecode.hpp"
  42 #include "jfr/jfrEvents.hpp"
  43 #include "memory/resourceArea.hpp"
  44 #include "oops/oop.inline.hpp"
  45 #include "runtime/sharedRuntime.hpp"
  46 #include "runtime/vm_version.hpp"
  47 #include "utilities/bitMap.inline.hpp"
  48 #include "utilities/powerOfTwo.hpp"
  49 
  50 class BlockListBuilder {
  51  private:
  52   Compilation* _compilation;

 687         }
 688 #endif
 689         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 690         return result;
 691       }
 692     }
 693     return load;
 694   }
 695 
 696   // Record this newly allocated object
 697   void new_instance(NewInstance* object) {
 698     int index = _newobjects.length();
 699     _newobjects.append(object);
 700     if (_fields.at_grow(index, NULL) == NULL) {
 701       _fields.at_put(index, new FieldBuffer());
 702     } else {
 703       _fields.at(index)->kill();
 704     }
 705   }
 706 
 707   void store_value(Value value) {
 708     int index = _newobjects.find(value);
 709     if (index != -1) {
 710       // stored a newly allocated object into another object.
 711       // Assume we've lost track of it as a separate slice of memory.
 712       // We could do better by keeping track of whether individual
 713       // fields could alias each other.
 714       _newobjects.remove_at(index);
 715       // pull out the field info and store it at the end of the
 716       // field info list so it can be reused later.
 717       _fields.append(_fields.at(index));
 718       _fields.remove_at(index);
 719     }
 720   }
 721 
 722   void kill() {
 723     _newobjects.trunc_to(0);
 724     _objects.trunc_to(0);
 725     _values.kill();
 726   }

 984         int offset = java_lang_boxing_object::value_offset(type);
 985         ciField* value_field = box_klass->get_field_by_offset(offset, false /*is_static*/);
 986         x = new LoadField(append(x), offset, value_field, false /*is_static*/, patch_state, false /*needs_patching*/);
 987         t = as_ValueType(type);
 988       } else {
 989         assert(is_reference_type(type), "not a reference: %s", type2name(type));
 990       }
 991     }
 992 
 993     push(t, append(x));
 994   } else {
 995     BAILOUT("could not resolve a constant");
 996   }
 997 }
 998 
 999 
1000 void GraphBuilder::load_local(ValueType* type, int index) {
1001   Value x = state()->local_at(index);
1002   assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
1003   push(type, x);
1004 }
1005 
1006 
1007 void GraphBuilder::store_local(ValueType* type, int index) {
1008   Value x = pop(type);
1009   store_local(state(), x, index);



1010 }
1011 
1012 
1013 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
1014   if (parsing_jsr()) {
1015     // We need to do additional tracking of the location of the return
1016     // address for jsrs since we don't handle arbitrary jsr/ret
1017     // constructs. Here we are figuring out in which circumstances we
1018     // need to bail out.
1019     if (x->type()->is_address()) {
1020       scope_data()->set_jsr_return_address_local(index);
1021 
1022       // Also check parent jsrs (if any) at this time to see whether
1023       // they are using this local. We don't handle skipping over a
1024       // ret.
1025       for (ScopeData* cur_scope_data = scope_data()->parent();
1026            cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1027            cur_scope_data = cur_scope_data->parent()) {
1028         if (cur_scope_data->jsr_return_address_local() == index) {
1029           BAILOUT("subroutine overwrites return address from previous subroutine");
1030         }
1031       }
1032     } else if (index == scope_data()->jsr_return_address_local()) {
1033       scope_data()->set_jsr_return_address_local(-1);
1034     }
1035   }
1036 
1037   state->store_local(index, round_fp(x));



1038 }
1039 
1040 
1041 void GraphBuilder::load_indexed(BasicType type) {
 1042   // In case of in-block code motion in range check elimination
1043   ValueStack* state_before = copy_state_indexed_access();
1044   compilation()->set_has_access_indexed(true);
1045   Value index = ipop();
1046   Value array = apop();
1047   Value length = NULL;
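       // When the array length is statically known (constant array/field, or an
       // allocation with a constant length), or CSEArrayLength is set, append an
       // explicit ArrayLength so the access below carries a length for its bounds check.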
1048   if (CSEArrayLength ||
1049       (array->as_Constant() != NULL) ||
1050       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1051       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1052       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1053     length = append(new ArrayLength(array, state_before));
1054   }
1055   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1056 }
1057 
1058 
1059 void GraphBuilder::store_indexed(BasicType type) {
 1060   // In case of in-block code motion in range check elimination
1061   ValueStack* state_before = copy_state_indexed_access();
1062   compilation()->set_has_access_indexed(true);
1063   Value value = pop(as_ValueType(type));
1064   Value index = ipop();
1065   Value array = apop();
1066   Value length = NULL;
1067   if (CSEArrayLength ||
1068       (array->as_Constant() != NULL) ||
1069       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1070       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1071       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1072     length = append(new ArrayLength(array, state_before));
1073   }
1074   ciType* array_type = array->declared_type();
1075   bool check_boolean = false;
1076   if (array_type != NULL) {
1077     if (array_type->is_loaded() &&
1078       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1079       assert(type == T_BYTE, "boolean store uses bastore");
1080       Value mask = append(new Constant(new IntConstant(1)));
1081       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1082     }
1083   } else if (type == T_BYTE) {
1084     check_boolean = true;
1085   }
1086   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1087   append(result);
1088   _memory->store_value(value);
1089 
1090   if (type == T_OBJECT && is_profiling()) {
1091     // Note that we'd collect profile data in this method if we wanted it.
1092     compilation()->set_would_profile(true);
1093 
1094     if (profile_checkcasts()) {
1095       result->set_profiled_method(method());
1096       result->set_profiled_bci(bci());
1097       result->set_should_profile(true);
1098     }
1099   }



1100 }
1101 
1102 
1103 void GraphBuilder::stack_op(Bytecodes::Code code) {
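       // The dup/pop/swap family operates on raw stack slots without type checks,
       // matching the JVM's untyped stack-manipulation semantics.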
1104   switch (code) {
1105     case Bytecodes::_pop:
1106       { state()->raw_pop();

1107       }
1108       break;
1109     case Bytecodes::_pop2:
1110       { state()->raw_pop();
1111         state()->raw_pop();


1112       }
1113       break;
1114     case Bytecodes::_dup:
1115       { Value w = state()->raw_pop();

1116         state()->raw_push(w);
1117         state()->raw_push(w);
1118       }
1119       break;
1120     case Bytecodes::_dup_x1:
1121       { Value w1 = state()->raw_pop();
1122         Value w2 = state()->raw_pop();

1123         state()->raw_push(w1);
1124         state()->raw_push(w2);
1125         state()->raw_push(w1);
1126       }
1127       break;
1128     case Bytecodes::_dup_x2:
1129       { Value w1 = state()->raw_pop();
1130         Value w2 = state()->raw_pop();
1131         Value w3 = state()->raw_pop();
1132         state()->raw_push(w1);
1133         state()->raw_push(w3);
1134         state()->raw_push(w2);
1135         state()->raw_push(w1);
1136       }
1137       break;
1138     case Bytecodes::_dup2:
1139       { Value w1 = state()->raw_pop();
1140         Value w2 = state()->raw_pop();


1141         state()->raw_push(w2);
1142         state()->raw_push(w1);
1143         state()->raw_push(w2);
1144         state()->raw_push(w1);
1145       }
1146       break;
1147     case Bytecodes::_dup2_x1:
1148       { Value w1 = state()->raw_pop();
1149         Value w2 = state()->raw_pop();
1150         Value w3 = state()->raw_pop();


1151         state()->raw_push(w2);
1152         state()->raw_push(w1);
1153         state()->raw_push(w3);
1154         state()->raw_push(w2);
1155         state()->raw_push(w1);
1156       }
1157       break;
1158     case Bytecodes::_dup2_x2:
1159       { Value w1 = state()->raw_pop();
1160         Value w2 = state()->raw_pop();
1161         Value w3 = state()->raw_pop();
1162         Value w4 = state()->raw_pop();


1163         state()->raw_push(w2);
1164         state()->raw_push(w1);
1165         state()->raw_push(w4);
1166         state()->raw_push(w3);
1167         state()->raw_push(w2);
1168         state()->raw_push(w1);
1169       }
1170       break;
1171     case Bytecodes::_swap:
1172       { Value w1 = state()->raw_pop();
1173         Value w2 = state()->raw_pop();
1174         state()->raw_push(w1);
1175         state()->raw_push(w2);
1176       }
1177       break;
1178     default:
1179       ShouldNotReachHere();
1180       break;
1181   }
1182 }

1270 
1271 
1272 void GraphBuilder::_goto(int from_bci, int to_bci) {
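   // A goto whose target bci is not greater than the current bci is a backward
   // branch, so the Goto is created as a safepoint.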
1273   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1274   if (is_profiling()) {
1275     compilation()->set_would_profile(true);
1276     x->set_profiled_bci(bci());
1277     if (profile_branches()) {
1278       x->set_profiled_method(method());
1279       x->set_should_profile(true);
1280     }
1281   }
1282   append(x);
1283 }
1284 
1285 
1286 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1287   BlockBegin* tsux = block_at(stream()->get_dest());
1288   BlockBegin* fsux = block_at(stream()->next_bci());
1289   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1290   // In case of loop invariant code motion or predicate insertion
 1291   // before the body of a loop, the state is needed
1292   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : NULL, is_bb));
1293 
1294   assert(i->as_Goto() == NULL ||
1295          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1296          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1297          "safepoint state of Goto returned by canonicalizer incorrect");
1298 
1299   if (is_profiling()) {
1300     If* if_node = i->as_If();
1301     if (if_node != NULL) {
1302       // Note that we'd collect profile data in this method if we wanted it.
1303       compilation()->set_would_profile(true);
1304       // At level 2 we need the proper bci to count backedges
1305       if_node->set_profiled_bci(bci());
1306       if (profile_branches()) {
1307         // Successors can be rotated by the canonicalizer, check for this case.
1308         if_node->set_profiled_method(method());
1309         if_node->set_should_profile(true);
1310         if (if_node->tsux() == fsux) {
1311           if_node->set_swapped(true);
1312         }

1523 
1524   if (needs_check) {
1525     // Perform the registration of finalizable objects.
1526     ValueStack* state_before = copy_state_for_exception();
1527     load_local(objectType, 0);
1528     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1529                                state()->pop_arguments(1),
1530                                true, state_before, true));
1531   }
1532 }
1533 
1534 
1535 void GraphBuilder::method_return(Value x, bool ignore_return) {
1536   if (RegisterFinalizersAtInit &&
1537       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1538     call_register_finalizer();
1539   }
1540 
1541   // The conditions for a memory barrier are described in Parse::do_exits().
1542   bool need_mem_bar = false;
1543   if (method()->name() == ciSymbols::object_initializer_name() &&
1544        (scope()->wrote_final() ||
1545          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1546          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1547     need_mem_bar = true;
1548   }
1549 
1550   BasicType bt = method()->return_type()->basic_type();
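   // The value on the expression stack is a full int; narrow it to the declared
   // sub-int return type (the shift pair below sign-extends byte/short results).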
1551   switch (bt) {
1552     case T_BYTE:
1553     {
1554       Value shift = append(new Constant(new IntConstant(24)));
1555       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1556       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1557       break;
1558     }
1559     case T_SHORT:
1560     {
1561       Value shift = append(new Constant(new IntConstant(16)));
1562       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1563       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1674   // Attach dimension info to stable arrays.
1675   if (FoldStableValues &&
1676       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1677     ciArray* array = field_value.as_object()->as_array();
1678     jint dimension = field->type()->as_array_klass()->dimension();
1679     value = new StableArrayConstant(array, dimension);
1680   }
1681 
1682   switch (field_type) {
1683     case T_ARRAY:
1684     case T_OBJECT:
1685       if (field_value.as_object()->should_be_constant()) {
1686         return new Constant(value);
1687       }
1688       return NULL; // Not a constant.
1689     default:
1690       return new Constant(value);
1691   }
1692 }
1693 
1694 void GraphBuilder::access_field(Bytecodes::Code code) {
1695   bool will_link;
1696   ciField* field = stream()->get_field(will_link);
1697   ciInstanceKlass* holder = field->holder();
1698   BasicType field_type = field->type()->basic_type();
1699   ValueType* type = as_ValueType(field_type);

1700   // call will_link again to determine if the field is valid.
1701   const bool needs_patching = !holder->is_loaded() ||
1702                               !field->will_link(method(), code) ||
1703                               PatchALot;
1704 
1705   ValueStack* state_before = NULL;
1706   if (!holder->is_initialized() || needs_patching) {
1707     // save state before instruction for debug info when
1708     // deoptimization happens during patching
1709     state_before = copy_state_before();
1710   }
1711 
1712   Value obj = NULL;
1713   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1714     if (state_before != NULL) {
1715       // build a patching constant
1716       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1717     } else {
1718       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1719     }
1720   }
1721 
1722   if (field->is_final() && (code == Bytecodes::_putfield)) {
1723     scope()->set_wrote_final();
1724   }
1725 
1726   if (code == Bytecodes::_putfield) {
1727     scope()->set_wrote_fields();
1728     if (field->is_volatile()) {
1729       scope()->set_wrote_volatile();
1730     }
1731   }
1732 
1733   const int offset = !needs_patching ? field->offset() : -1;
1734   switch (code) {
1735     case Bytecodes::_getstatic: {
1736       // check for compile-time constants, i.e., initialized static final fields
1737       Value constant = NULL;
1738       if (field->is_static_constant() && !PatchALot) {
1739         ciConstant field_value = field->constant_value();
1740         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1741                "stable static w/ default value shouldn't be a constant");
1742         constant = make_constant(field_value, field);




1743       }
1744       if (constant != NULL) {
1745         push(type, append(constant));
1746       } else {
1747         if (state_before == NULL) {
1748           state_before = copy_state_for_exception();
1749         }
1750         push(type, append(new LoadField(append(obj), offset, field, true,
1751                                         state_before, needs_patching)));

1752       }
1753       break;
1754     }
1755     case Bytecodes::_putstatic: {
1756       Value val = pop(type);
1757       if (state_before == NULL) {
1758         state_before = copy_state_for_exception();
1759       }
1760       if (field->type()->basic_type() == T_BOOLEAN) {
1761         Value mask = append(new Constant(new IntConstant(1)));
1762         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1763       }




1764       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1765       break;
1766     }
1767     case Bytecodes::_getfield: {
1768       // Check for compile-time constants, i.e., trusted final non-static fields.
1769       Value constant = NULL;
1770       obj = apop();
1771       ObjectType* obj_type = obj->type()->as_ObjectType();
1772       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1773         ciObject* const_oop = obj_type->constant_value();
1774         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1775           ciConstant field_value = field->constant_value_of(const_oop);
1776           if (field_value.is_valid()) {
1777             constant = make_constant(field_value, field);
1778             // For CallSite objects add a dependency for invalidation of the optimization.
1779             if (field->is_call_site_target()) {
1780               ciCallSite* call_site = const_oop->as_call_site();
1781               if (!call_site->is_fully_initialized_constant_call_site()) {
1782                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1783                 dependency_recorder()->assert_call_site_target_value(call_site, target);
1784               }
1785             }
1786           }
1787         }
1788       }
1789       if (constant != NULL) {
1790         push(type, append(constant));
1791       } else {
1792         if (state_before == NULL) {
1793           state_before = copy_state_for_exception();
1794         }
1795         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1796         Value replacement = !needs_patching ? _memory->load(load) : load;
1797         if (replacement != load) {
 1798           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1799           // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1800           // conversion. Emit an explicit conversion here to get the correct field value after the write.
1801           BasicType bt = field->type()->basic_type();
1802           switch (bt) {
1803           case T_BOOLEAN:
1804           case T_BYTE:
1805             replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1806             break;
1807           case T_CHAR:
1808             replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1809             break;
1810           case T_SHORT:
1811             replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1812             break;
1813           default:
1814             break;
1815           }
1816           push(type, replacement);
1817         } else {
1818           push(type, append(load));
1819         }
1820       }
1821       break;
1822     }
1823     case Bytecodes::_putfield: {
1824       Value val = pop(type);
1825       obj = apop();
1826       if (state_before == NULL) {
1827         state_before = copy_state_for_exception();
1828       }
1829       if (field->type()->basic_type() == T_BOOLEAN) {
1830         Value mask = append(new Constant(new IntConstant(1)));
1831         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1832       }
1833       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1834       if (!needs_patching) store = _memory->store(store);
1835       if (store != NULL) {
1836         append(store);
1837       }
1838       break;
1839     }
1840     default:
1841       ShouldNotReachHere();
1842       break;
1843   }
1844 }
1845 
1846 
1847 Dependencies* GraphBuilder::dependency_recorder() const {
1848   assert(DeoptC1, "need debug information");
1849   return compilation()->dependency_recorder();
1850 }
1851 
1852 // How many arguments do we want to profile?
1853 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1854   int n = 0;
1855   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1856   start = has_receiver ? 1 : 0;
1857   if (profile_arguments()) {
1858     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1859     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1860       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1861     }
1862   }
1863   // If we are inlining then we need to collect arguments to profile parameters for the target
1864   if (profile_parameters() && target != NULL) {
1865     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {

1943       break;
1944     case Bytecodes::_invokehandle:
1945       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1946       break;
1947     default:
1948       break;
1949     }
1950   } else {
1951     if (bc_raw == Bytecodes::_invokehandle) {
1952       assert(!will_link, "should come here only for unlinked call");
1953       code = Bytecodes::_invokespecial;
1954     }
1955   }
1956 
1957   if (code == Bytecodes::_invokespecial) {
1958     // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1959     ciKlass* receiver_constraint = nullptr;
1960 
1961     if (bc_raw == Bytecodes::_invokeinterface) {
1962       receiver_constraint = holder;
1963     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1964       receiver_constraint = calling_klass;
1965     }
1966 
1967     if (receiver_constraint != nullptr) {
1968       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1969       Value receiver = state()->stack_at(index);
1970       CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
1971       // go to uncommon_trap when checkcast fails
1972       c->set_invokespecial_receiver_check();
1973       state()->stack_at_put(index, append_split(c));
1974     }
1975   }
1976 
1977   // Push appendix argument (MethodType, CallSite, etc.), if one.
1978   bool patch_for_appendix = false;
1979   int patching_appendix_arg = 0;
1980   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
1981     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
1982     apush(arg);
1983     patch_for_appendix = true;

2178       null_check(recv);
2179     }
2180 
2181     if (is_profiling()) {
2182       // Note that we'd collect profile data in this method if we wanted it.
2183       compilation()->set_would_profile(true);
2184 
2185       if (profile_calls()) {
2186         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
2187         ciKlass* target_klass = NULL;
2188         if (cha_monomorphic_target != NULL) {
2189           target_klass = cha_monomorphic_target->holder();
2190         } else if (exact_target != NULL) {
2191           target_klass = exact_target->holder();
2192         }
2193         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2194       }
2195     }
2196   }
2197 
2198   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);

2199   // push result
2200   append_split(result);
2201 
2202   if (result_type != voidType) {
2203     push(result_type, round_fp(result));
2204   }
2205   if (profile_return() && result_type->is_object_kind()) {
2206     profile_return_type(result, target);
2207   }
2208 }
2209 
2210 
2211 void GraphBuilder::new_instance(int klass_index) {
2212   ValueStack* state_before = copy_state_exhandling();
2213   bool will_link;
2214   ciKlass* klass = stream()->get_klass(will_link);
2215   assert(klass->is_instance_klass(), "must be an instance klass");
2216   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2217   _memory->new_instance(new_instance);
2218   apush(append_split(new_instance));
2219 }
2220 
2221 
2222 void GraphBuilder::new_type_array() {
2223   ValueStack* state_before = copy_state_exhandling();
2224   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2225 }
2226 
2227 
2228 void GraphBuilder::new_object_array() {
2229   bool will_link;
2230   ciKlass* klass = stream()->get_klass(will_link);

2231   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2232   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2233   apush(append_split(n));
2234 }
2235 
2236 
2237 bool GraphBuilder::direct_compare(ciKlass* k) {
2238   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2239     ciInstanceKlass* ik = k->as_instance_klass();
2240     if (ik->is_final()) {
2241       return true;
2242     } else {
2243       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2244         // test class is leaf class
2245         dependency_recorder()->assert_leaf_type(ik);
2246         return true;
2247       }
2248     }
2249   }
2250   return false;
2251 }
2252 
2253 
2254 void GraphBuilder::check_cast(int klass_index) {
2255   bool will_link;
2256   ciKlass* klass = stream()->get_klass(will_link);

2257   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2258   CheckCast* c = new CheckCast(klass, apop(), state_before);
2259   apush(append_split(c));
2260   c->set_direct_compare(direct_compare(klass));
2261 
2262   if (is_profiling()) {
2263     // Note that we'd collect profile data in this method if we wanted it.
2264     compilation()->set_would_profile(true);
2265 
2266     if (profile_checkcasts()) {
2267       c->set_profiled_method(method());
2268       c->set_profiled_bci(bci());
2269       c->set_should_profile(true);
2270     }
2271   }
2272 }
2273 
2274 
2275 void GraphBuilder::instance_of(int klass_index) {
2276   bool will_link;
2277   ciKlass* klass = stream()->get_klass(will_link);
2278   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2279   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2280   ipush(append_split(i));
2281   i->set_direct_compare(direct_compare(klass));
2282 
2283   if (is_profiling()) {
2284     // Note that we'd collect profile data in this method if we wanted it.
2285     compilation()->set_would_profile(true);
2286 
2287     if (profile_checkcasts()) {
2288       i->set_profiled_method(method());
2289       i->set_profiled_bci(bci());
2290       i->set_should_profile(true);
2291     }
2292   }
2293 }
2294 
2295 
2296 void GraphBuilder::monitorenter(Value x, int bci) {
2297   // save state before locking in case of deoptimization after a NullPointerException
2298   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2299   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2300   kill_all();
2301 }
2302 
2303 
2304 void GraphBuilder::monitorexit(Value x, int bci) {
2305   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2306   kill_all();
2307 }
2308 
2309 
2310 void GraphBuilder::new_multi_array(int dimensions) {
2311   bool will_link;
2312   ciKlass* klass = stream()->get_klass(will_link);
2313   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2314 
2315   Values* dims = new Values(dimensions, dimensions, NULL);
2316   // fill in all dimensions
2317   int i = dimensions;
2318   while (i-- > 0) dims->at_put(i, ipop());
2319   // create array

2415   if (i1->can_trap()) {
2416     i1->set_exception_handlers(handle_exception(i1));
2417     assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2418   }
2419   return i1;
2420 }
2421 
2422 
2423 Instruction* GraphBuilder::append(Instruction* instr) {
2424   assert(instr->as_StateSplit() == NULL || instr->as_BlockEnd() != NULL, "wrong append used");
2425   return append_with_bci(instr, bci());
2426 }
2427 
2428 
2429 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2430   return append_with_bci(instr, bci());
2431 }
2432 
2433 
2434 void GraphBuilder::null_check(Value value) {
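   // Fresh allocations and loaded non-null object constants can never be null,
   // so no explicit NullCheck is appended for them.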
2435   if (value->as_NewArray() != NULL || value->as_NewInstance() != NULL) {
2436     return;
2437   } else {
2438     Constant* con = value->as_Constant();
2439     if (con) {
2440       ObjectType* c = con->type()->as_ObjectType();
2441       if (c && c->is_loaded()) {
2442         ObjectConstant* oc = c->as_ObjectConstant();
2443         if (!oc || !oc->value()->is_null_object()) {
2444           return;
2445         }
2446       }
2447     }

2448   }
2449   append(new NullCheck(value, copy_state_for_exception()));
2450 }
2451 
2452 
2453 
2454 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2455   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
2456     assert(instruction->exception_state() == NULL
2457            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2458            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2459            "exception_state should be of exception kind");
2460     return new XHandlers();
2461   }
2462 
2463   XHandlers*  exception_handlers = new XHandlers();
2464   ScopeData*  cur_scope_data = scope_data();
2465   ValueStack* cur_state = instruction->state_before();
2466   ValueStack* prev_state = NULL;
2467   int scope_count = 0;
2468 
2469   assert(cur_state != NULL, "state_before must be set");
2470   do {
2471     int cur_bci = cur_state->bci();
2472     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2473     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");


2474 
2475     // join with all potential exception handlers
2476     XHandlers* list = cur_scope_data->xhandlers();
2477     const int n = list->length();
2478     for (int i = 0; i < n; i++) {
2479       XHandler* h = list->handler_at(i);
2480       if (h->covers(cur_bci)) {
2481         // h is a potential exception handler => join it
2482         compilation()->set_has_exception_handlers(true);
2483 
2484         BlockBegin* entry = h->entry_block();
2485         if (entry == block()) {
2486           // It's acceptable for an exception handler to cover itself
2487           // but we don't handle that in the parser currently.  It's
 2488           // very rare, so we bail out instead of trying to handle it.
2489           BAILOUT_("exception handler covers itself", exception_handlers);
2490         }
2491         assert(entry->bci() == h->handler_bci(), "must match");
2492         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2493 

2941       case Bytecodes::_invokevirtual  : // fall through
2942       case Bytecodes::_invokespecial  : // fall through
2943       case Bytecodes::_invokestatic   : // fall through
2944       case Bytecodes::_invokedynamic  : // fall through
2945       case Bytecodes::_invokeinterface: invoke(code); break;
2946       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
2947       case Bytecodes::_newarray       : new_type_array(); break;
2948       case Bytecodes::_anewarray      : new_object_array(); break;
2949       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
2950       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
2951       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
2952       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
2953       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
2954       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
2955       case Bytecodes::_wide           : ShouldNotReachHere(); break;
2956       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
2957       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
2958       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
2959       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
2960       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;


2961       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
2962       default                         : ShouldNotReachHere(); break;
2963     }
2964 
2965     if (log != NULL)
2966       log->clear_context(); // skip marker if nothing was printed
2967 
2968     // save current bci to setup Goto at the end
2969     prev_bci = s.cur_bci();
2970 
2971   }
2972   CHECK_BAILOUT_(NULL);
2973   // stop processing of this block (see try_inline_full)
2974   if (_skip_block) {
2975     _skip_block = false;
2976     assert(_last && _last->as_BlockEnd(), "");
2977     return _last->as_BlockEnd();
2978   }
2979   // if there are any, check if last instruction is a BlockEnd instruction
2980   BlockEnd* end = last()->as_BlockEnd();

3229   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3230 
3231   assert(state->caller_state() == NULL, "should be top scope");
3232   state->clear_locals();
3233   Goto* g = new Goto(target, false);
3234   append(g);
3235   _osr_entry->set_end(g);
3236   target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3237 
3238   scope_data()->set_stream(NULL);
3239 }
3240 
3241 
3242 ValueStack* GraphBuilder::state_at_entry() {
3243   ValueStack* state = new ValueStack(scope(), NULL);
3244 
3245   // Set up locals for receiver
3246   int idx = 0;
3247   if (!method()->is_static()) {
3248     // we should always see the receiver
3249     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));

3250     idx = 1;
3251   }
3252 
3253   // Set up locals for incoming arguments
3254   ciSignature* sig = method()->signature();
3255   for (int i = 0; i < sig->count(); i++) {
3256     ciType* type = sig->type_at(i);
3257     BasicType basic_type = type->basic_type();
3258     // don't allow T_ARRAY to propagate into locals types
3259     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3260     ValueType* vt = as_ValueType(basic_type);
3261     state->store_local(idx, new Local(type, vt, idx, false));
3262     idx += type->size();
3263   }
3264 
3265   // lock synchronized method
3266   if (method()->is_synchronized()) {
3267     state->lock(NULL);
3268   }
3269 
3270   return state;
3271 }
3272 
3273 
3274 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3275   : _scope_data(NULL)
3276   , _compilation(compilation)
3277   , _memory(new MemoryBuffer())
3278   , _inline_bailout_msg(NULL)
3279   , _instruction_count(0)
3280   , _osr_entry(NULL)


3281 {
3282   int osr_bci = compilation->osr_bci();
3283 
3284   // determine entry points and bci2block mapping
3285   BlockListBuilder blm(compilation, scope, osr_bci);
3286   CHECK_BAILOUT();
3287 
3288   BlockList* bci2block = blm.bci2block();
3289   BlockBegin* start_block = bci2block->at(0);
3290 
3291   push_root_scope(scope, bci2block, start_block);
3292 
3293   // setup state for std entry
3294   _initial_state = state_at_entry();
3295   start_block->merge(_initial_state, compilation->has_irreducible_loops());
3296 
3297   // End nulls still exist here
3298 
3299   // complete graph
3300   _vmap        = new ValueMap();

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciFlatArrayKlass.hpp"
  34 #include "ci/ciInlineKlass.hpp"
  35 #include "ci/ciKlass.hpp"
  36 #include "ci/ciMemberName.hpp"
  37 #include "ci/ciSymbols.hpp"
  38 #include "ci/ciUtilities.inline.hpp"
  39 #include "classfile/javaClasses.hpp"
  40 #include "compiler/compilationPolicy.hpp"
  41 #include "compiler/compileBroker.hpp"
  42 #include "compiler/compilerEvent.hpp"
  43 #include "interpreter/bytecode.hpp"
  44 #include "jfr/jfrEvents.hpp"
  45 #include "memory/resourceArea.hpp"
  46 #include "oops/oop.inline.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/vm_version.hpp"
  49 #include "utilities/bitMap.inline.hpp"
  50 #include "utilities/powerOfTwo.hpp"
  51 
  52 class BlockListBuilder {
  53  private:
  54   Compilation* _compilation;

 689         }
 690 #endif
 691         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 692         return result;
 693       }
 694     }
 695     return load;
 696   }
 697 
 698   // Record this newly allocated object
 699   void new_instance(NewInstance* object) {
 700     int index = _newobjects.length();
 701     _newobjects.append(object);
 702     if (_fields.at_grow(index, NULL) == NULL) {
 703       _fields.at_put(index, new FieldBuffer());
 704     } else {
 705       _fields.at(index)->kill();
 706     }
 707   }
 708 
 709   // Record this newly allocated object
 710   void new_instance(NewInlineTypeInstance* object) {
 711     int index = _newobjects.length();
 712     _newobjects.append(object);
 713     if (_fields.at_grow(index, NULL) == NULL) {
 714       _fields.at_put(index, new FieldBuffer());
 715     } else {
 716       _fields.at(index)->kill();
 717     }
 718   }
 719 
 720   void store_value(Value value) {
 721     int index = _newobjects.find(value);
 722     if (index != -1) {
 723       // stored a newly allocated object into another object.
 724       // Assume we've lost track of it as a separate slice of memory.
 725       // We could do better by keeping track of whether individual
 726       // fields could alias each other.
 727       _newobjects.remove_at(index);
 728       // pull out the field info and store it at the end of the
 729       // field info list so it can be reused later.
 730       _fields.append(_fields.at(index));
 731       _fields.remove_at(index);
 732     }
 733   }
 734 
 735   void kill() {
 736     _newobjects.trunc_to(0);
 737     _objects.trunc_to(0);
 738     _values.kill();
 739   }

 997         int offset = java_lang_boxing_object::value_offset(type);
 998         ciField* value_field = box_klass->get_field_by_offset(offset, false /*is_static*/);
 999         x = new LoadField(append(x), offset, value_field, false /*is_static*/, patch_state, false /*needs_patching*/);
1000         t = as_ValueType(type);
1001       } else {
1002         assert(is_reference_type(type), "not a reference: %s", type2name(type));
1003       }
1004     }
1005 
1006     push(t, append(x));
1007   } else {
1008     BAILOUT("could not resolve a constant");
1009   }
1010 }
1011 
1012 
1013 void GraphBuilder::load_local(ValueType* type, int index) {
1014   Value x = state()->local_at(index);
1015   assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
1016   push(type, x);
1017   if (x->as_NewInlineTypeInstance() != NULL && x->as_NewInlineTypeInstance()->in_larval_state()) {
1018     if (x->as_NewInlineTypeInstance()->on_stack_count() == 1) {
1019       x->as_NewInlineTypeInstance()->set_not_larva_anymore();
1020     } else {
1021       x->as_NewInlineTypeInstance()->increment_on_stack_count();
1022     }
1023   }
1024 }
1025 
1026 
1027 void GraphBuilder::store_local(ValueType* type, int index) {
1028   Value x = pop(type);
1029   store_local(state(), x, index);
1030   if (x->as_NewInlineTypeInstance() != NULL) {
1031     x->as_NewInlineTypeInstance()->set_local_index(index);
1032   }
1033 }
1034 
1035 
1036 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
1037   if (parsing_jsr()) {
1038     // We need to do additional tracking of the location of the return
1039     // address for jsrs since we don't handle arbitrary jsr/ret
1040     // constructs. Here we are figuring out in which circumstances we
1041     // need to bail out.
1042     if (x->type()->is_address()) {
1043       scope_data()->set_jsr_return_address_local(index);
1044 
1045       // Also check parent jsrs (if any) at this time to see whether
1046       // they are using this local. We don't handle skipping over a
1047       // ret.
1048       for (ScopeData* cur_scope_data = scope_data()->parent();
1049            cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1050            cur_scope_data = cur_scope_data->parent()) {
1051         if (cur_scope_data->jsr_return_address_local() == index) {
1052           BAILOUT("subroutine overwrites return address from previous subroutine");
1053         }
1054       }
1055     } else if (index == scope_data()->jsr_return_address_local()) {
1056       scope_data()->set_jsr_return_address_local(-1);
1057     }
1058   }
1059 
1060   state->store_local(index, round_fp(x));
1061   if (x->as_NewInlineTypeInstance() != NULL) {
1062     x->as_NewInlineTypeInstance()->set_local_index(index);
1063   }
1064 }
1065 
1066 
1067 void GraphBuilder::load_indexed(BasicType type) {
 1068   // In case of in-block code motion in range check elimination
1069   ValueStack* state_before = NULL;
1070   int array_idx = state()->stack_size() - 2;
1071   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flattened_array()) {
1072     // Save the entire state and re-execute on deopt when accessing flattened arrays
1073     state_before = copy_state_before();
1074     state_before->set_should_reexecute(true);
1075   } else {
1076     state_before = copy_state_indexed_access();
1077   }
1078   compilation()->set_has_access_indexed(true);
1079   Value index = ipop();
1080   Value array = apop();
1081   Value length = NULL;
1082   if (CSEArrayLength ||
1083       (array->as_Constant() != NULL) ||
1084       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1085       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1086       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1087     length = append(new ArrayLength(array, state_before));
1088   }
1089 
1090   bool need_membar = false;
1091   LoadIndexed* load_indexed = NULL;
1092   Instruction* result = NULL;
1093   if (array->is_loaded_flattened_array()) {
1094     ciType* array_type = array->declared_type();
1095     ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1096 
1097     bool can_delay_access = false;
1098     ciBytecodeStream s(method());
1099     s.force_bci(bci());
1100     s.next();
1101     if (s.cur_bc() == Bytecodes::_getfield) {
1102       bool will_link;
1103       ciField* next_field = s.get_field(will_link);
1104       bool next_needs_patching = !next_field->holder()->is_loaded() ||
1105                                  !next_field->will_link(method(), Bytecodes::_getfield) ||
1106                                  PatchALot;
1107       can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1108     }
1109     if (can_delay_access) {
1110       // potentially optimizable array access, storing information for delayed decision
1111       LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1112       DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1113       li->set_delayed(dli);
1114       set_pending_load_indexed(dli);
1115       return; // Nothing else to do for now
1116     } else {
1117       if (elem_klass->is_empty()) {
1118         // No need to create a new instance, the default instance will be used instead
1119         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1120         apush(append(load_indexed));
1121       } else {
1122         NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(elem_klass, state_before);
1123         _memory->new_instance(new_instance);
1124         apush(append_split(new_instance));
1125         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1126         load_indexed->set_vt(new_instance);
1127         // The LoadIndexed node will initialise this instance by copying from
1128         // the flattened field.  Ensure these stores are visible before any
1129         // subsequent store that publishes this reference.
1130         need_membar = true;
1131       }
1132     }
1133   } else {
1134     load_indexed = new LoadIndexed(array, index, length, type, state_before);
1135     if (profile_array_accesses() && is_reference_type(type)) {
1136       compilation()->set_would_profile(true);
1137       load_indexed->set_should_profile(true);
1138       load_indexed->set_profiled_method(method());
1139       load_indexed->set_profiled_bci(bci());
1140     }
1141   }
1142   result = append(load_indexed);
1143   if (need_membar) {
1144     append(new MemBar(lir_membar_storestore));
1145   }
1146   assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1147   if (!array->is_loaded_flattened_array()) {
1148     push(as_ValueType(type), result);
1149   }
1150 }
1151 
1152 
1153 void GraphBuilder::store_indexed(BasicType type) {
 1154   // In case of in-block code motion in range check elimination
1155   ValueStack* state_before = NULL;
1156   int array_idx = state()->stack_size() - 3;
1157   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flattened_array()) {
1158     // Save the entire state and re-execute on deopt when accessing flattened arrays
1159     state_before = copy_state_before();
1160     state_before->set_should_reexecute(true);
1161   } else {
1162     state_before = copy_state_indexed_access();
1163   }
1164   compilation()->set_has_access_indexed(true);
1165   Value value = pop(as_ValueType(type));
1166   Value index = ipop();
1167   Value array = apop();
1168   Value length = NULL;
1169   if (CSEArrayLength ||
1170       (array->as_Constant() != NULL) ||
1171       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1172       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1173       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1174     length = append(new ArrayLength(array, state_before));
1175   }
1176   ciType* array_type = array->declared_type();
1177   bool check_boolean = false;
1178   if (array_type != NULL) {
1179     if (array_type->is_loaded() &&
1180       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1181       assert(type == T_BYTE, "boolean store uses bastore");
1182       Value mask = append(new Constant(new IntConstant(1)));
1183       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1184     }
1185   } else if (type == T_BYTE) {
1186     check_boolean = true;
1187   }



1188 
1189   StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1190   if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flattened_array()) {
1191     compilation()->set_would_profile(true);
1192     store_indexed->set_should_profile(true);
1193     store_indexed->set_profiled_method(method());
1194     store_indexed->set_profiled_bci(bci());



1195   }
1196   Instruction* result = append(store_indexed);
1197   assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1198   _memory->store_value(value);
1199 }
1200 

1201 void GraphBuilder::stack_op(Bytecodes::Code code) {
1202   switch (code) {
1203     case Bytecodes::_pop:
1204       { Value w = state()->raw_pop();
1205         update_larva_stack_count(w);
1206       }
1207       break;
1208     case Bytecodes::_pop2:
1209       { Value w1 = state()->raw_pop();
1210         Value w2 = state()->raw_pop();
1211         update_larva_stack_count(w1);
1212         update_larva_stack_count(w2);
1213       }
1214       break;
1215     case Bytecodes::_dup:
1216       { Value w = state()->raw_pop();
1217         update_larval_state(w);
1218         state()->raw_push(w);
1219         state()->raw_push(w);
1220       }
1221       break;
1222     case Bytecodes::_dup_x1:
1223       { Value w1 = state()->raw_pop();
1224         Value w2 = state()->raw_pop();
1225         update_larval_state(w1);
1226         state()->raw_push(w1);
1227         state()->raw_push(w2);
1228         state()->raw_push(w1);
1229       }
1230       break;
1231     case Bytecodes::_dup_x2:
1232       { Value w1 = state()->raw_pop();
1233         Value w2 = state()->raw_pop();
1234         Value w3 = state()->raw_pop();
1235         // special handling for the dup_x2/pop sequence (see JDK-8251046)
1236         if (w1 != NULL && w1->as_NewInlineTypeInstance() != NULL) {
1237           ciBytecodeStream s(method());
1238           s.force_bci(bci());
1239           s.next();
1240           if (s.cur_bc() != Bytecodes::_pop) {
1241             w1->as_NewInlineTypeInstance()->set_not_larva_anymore();
 1242           } else {
 1243             w1->as_NewInlineTypeInstance()->increment_on_stack_count();
 1244           }
1245         }
1246         state()->raw_push(w1);
1247         state()->raw_push(w3);
1248         state()->raw_push(w2);
1249         state()->raw_push(w1);
1250       }
1251       break;
1252     case Bytecodes::_dup2:
1253       { Value w1 = state()->raw_pop();
1254         Value w2 = state()->raw_pop();
1255         update_larval_state(w1);
1256         update_larval_state(w2);
1257         state()->raw_push(w2);
1258         state()->raw_push(w1);
1259         state()->raw_push(w2);
1260         state()->raw_push(w1);
1261       }
1262       break;
1263     case Bytecodes::_dup2_x1:
1264       { Value w1 = state()->raw_pop();
1265         Value w2 = state()->raw_pop();
1266         Value w3 = state()->raw_pop();
1267         update_larval_state(w1);
1268         update_larval_state(w2);
1269         state()->raw_push(w2);
1270         state()->raw_push(w1);
1271         state()->raw_push(w3);
1272         state()->raw_push(w2);
1273         state()->raw_push(w1);
1274       }
1275       break;
1276     case Bytecodes::_dup2_x2:
1277       { Value w1 = state()->raw_pop();
1278         Value w2 = state()->raw_pop();
1279         Value w3 = state()->raw_pop();
1280         Value w4 = state()->raw_pop();
1281         update_larval_state(w1);
1282         update_larval_state(w2);
1283         state()->raw_push(w2);
1284         state()->raw_push(w1);
1285         state()->raw_push(w4);
1286         state()->raw_push(w3);
1287         state()->raw_push(w2);
1288         state()->raw_push(w1);
1289       }
1290       break;
1291     case Bytecodes::_swap:
1292       { Value w1 = state()->raw_pop();
1293         Value w2 = state()->raw_pop();
1294         state()->raw_push(w1);
1295         state()->raw_push(w2);
1296       }
1297       break;
1298     default:
1299       ShouldNotReachHere();
1300       break;
1301   }
1302 }

1390 
1391 
1392 void GraphBuilder::_goto(int from_bci, int to_bci) {
1393   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1394   if (is_profiling()) {
1395     compilation()->set_would_profile(true);
1396     x->set_profiled_bci(bci());
1397     if (profile_branches()) {
1398       x->set_profiled_method(method());
1399       x->set_should_profile(true);
1400     }
1401   }
1402   append(x);
1403 }
1404 
1405 
1406 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1407   BlockBegin* tsux = block_at(stream()->get_dest());
1408   BlockBegin* fsux = block_at(stream()->next_bci());
1409   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1410 
1411   bool subst_check = false;
1412   if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1413     ValueType* left_vt = x->type();
1414     ValueType* right_vt = y->type();
1415     if (left_vt->is_object()) {
1416       assert(right_vt->is_object(), "must be");
1417       ciKlass* left_klass = x->as_loaded_klass_or_null();
1418       ciKlass* right_klass = y->as_loaded_klass_or_null();
1419 
1420       if (left_klass == NULL || right_klass == NULL) {
1421         // The klass is still unloaded, or came from a Phi node. Go slow case;
1422         subst_check = true;
1423       } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1424         // Either operand may be a value object, but we're not sure. Go slow case;
1425         subst_check = true;
1426       } else {
1427         // No need to do substitutability check
1428       }
1429     }
1430   }
1431   if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1432       is_profiling() && profile_branches()) {
1433     compilation()->set_would_profile(true);
1434     append(new ProfileACmpTypes(method(), bci(), x, y));
1435   }
1436 
1437   // In case of loop invariant code motion or predicate insertion
 1438   // before the body of a loop, the state is needed
1439   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : NULL, is_bb, subst_check));
1440 
1441   assert(i->as_Goto() == NULL ||
1442          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1443          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1444          "safepoint state of Goto returned by canonicalizer incorrect");
1445 
1446   if (is_profiling()) {
1447     If* if_node = i->as_If();
1448     if (if_node != NULL) {
1449       // Note that we'd collect profile data in this method if we wanted it.
1450       compilation()->set_would_profile(true);
1451       // At level 2 we need the proper bci to count backedges
1452       if_node->set_profiled_bci(bci());
1453       if (profile_branches()) {
1454         // Successors can be rotated by the canonicalizer, check for this case.
1455         if_node->set_profiled_method(method());
1456         if_node->set_should_profile(true);
1457         if (if_node->tsux() == fsux) {
1458           if_node->set_swapped(true);
1459         }

1670 
1671   if (needs_check) {
1672     // Perform the registration of finalizable objects.
1673     ValueStack* state_before = copy_state_for_exception();
1674     load_local(objectType, 0);
1675     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1676                                state()->pop_arguments(1),
1677                                true, state_before, true));
1678   }
1679 }
1680 
1681 
1682 void GraphBuilder::method_return(Value x, bool ignore_return) {
1683   if (RegisterFinalizersAtInit &&
1684       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1685     call_register_finalizer();
1686   }
1687 
1688   // The conditions for a memory barrier are described in Parse::do_exits().
1689   bool need_mem_bar = false;
1690   if ((method()->is_object_constructor() || method()->is_static_init_factory()) &&
1691        (scope()->wrote_final() ||
1692          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1693          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1694     need_mem_bar = true;
1695   }
1696 
1697   BasicType bt = method()->return_type()->basic_type();
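       // The value on the expression stack is a full int; narrow it back to the declared
       // sub-int return type (e.g. sign-extend byte/short via shift pairs) before returning.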
1698   switch (bt) {
1699     case T_BYTE:
1700     {
1701       Value shift = append(new Constant(new IntConstant(24)));
1702       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1703       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1704       break;
1705     }
1706     case T_SHORT:
1707     {
1708       Value shift = append(new Constant(new IntConstant(16)));
1709       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1710       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1821   // Attach dimension info to stable arrays.
1822   if (FoldStableValues &&
1823       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1824     ciArray* array = field_value.as_object()->as_array();
1825     jint dimension = field->type()->as_array_klass()->dimension();
1826     value = new StableArrayConstant(array, dimension);
1827   }
1828 
1829   switch (field_type) {
1830     case T_ARRAY:
1831     case T_OBJECT:
1832       if (field_value.as_object()->should_be_constant()) {
1833         return new Constant(value);
1834       }
1835       return NULL; // Not a constant.
1836     default:
1837       return new Constant(value);
1838   }
1839 }
1840 
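     // Copy the non-static field contents of inline klass 'vk' from 'src' (at src_off) to 'dest'
     // (at dest_off), one leaf field at a time via matching LoadField/StoreField pairs.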
1841 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1842   for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
1843     ciField* inner_field = vk->nonstatic_field_at(i);
1844     assert(!inner_field->is_flattened(), "the iteration over nested fields is handled by the loop itself");
1845     int off = inner_field->offset() - vk->first_field_offset();
1846     LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
1847     Value replacement = append(load);
1848     StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
1849     store->set_enclosing_field(enclosing_field);
1850     append(store);
1851   }
1852 }
1853 
1854 void GraphBuilder::access_field(Bytecodes::Code code) {
1855   bool will_link;
1856   ciField* field = stream()->get_field(will_link);
1857   ciInstanceKlass* holder = field->holder();
1858   BasicType field_type = field->type()->basic_type();
1859   ValueType* type = as_ValueType(field_type);
1860 
1861   // call will_link again to determine if the field is valid.
1862   const bool needs_patching = !holder->is_loaded() ||
1863                               !field->will_link(method(), code) ||
1864                               (!field->is_flattened() && PatchALot);
1865 
1866   ValueStack* state_before = NULL;
1867   if (!holder->is_initialized() || needs_patching) {
1868     // save state before instruction for debug info when
1869     // deoptimization happens during patching
1870     state_before = copy_state_before();
1871   }
1872 
1873   Value obj = NULL;
1874   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1875     if (state_before != NULL) {
1876       // build a patching constant
1877       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1878     } else {
1879       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1880     }
1881   }
1882 
1883   if (field->is_final() && code == Bytecodes::_putfield) {
1884     scope()->set_wrote_final();
1885   }
1886 
1887   if (code == Bytecodes::_putfield) {
1888     scope()->set_wrote_fields();
1889     if (field->is_volatile()) {
1890       scope()->set_wrote_volatile();
1891     }
1892   }
1893 
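       // If patching is needed the field offset is not known yet; use -1 as a placeholder until
       // the field is resolved.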
1894   int offset = !needs_patching ? field->offset() : -1;
1895   switch (code) {
1896     case Bytecodes::_getstatic: {
1897       // check for compile-time constants, i.e., initialized static final fields
1898       Value constant = NULL;
1899       if (field->is_static_constant() && !PatchALot) {
1900         ciConstant field_value = field->constant_value();
1901         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1902                "stable static w/ default value shouldn't be a constant");
1903         constant = make_constant(field_value, field);
1904       } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1905                  field->type()->as_inline_klass()->is_empty()) {
1906         // Loading from a field of an empty inline type. Just return the default instance.
1907         constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1908       }
1909       if (constant != NULL) {
1910         push(type, append(constant));
1911       } else {
1912         if (state_before == NULL) {
1913           state_before = copy_state_for_exception();
1914         }
1915         LoadField* load_field = new LoadField(append(obj), offset, field, true,
1916                                         state_before, needs_patching);
1917         push(type, append(load_field));
1918       }
1919       break;
1920     }
1921     case Bytecodes::_putstatic: {
1922       Value val = pop(type);
1923       if (state_before == NULL) {
1924         state_before = copy_state_for_exception();
1925       }
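           // Booleans are stored as 0/1; mask the incoming int value down to its lowest bit.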
1926       if (field_type == T_BOOLEAN) {
1927         Value mask = append(new Constant(new IntConstant(1)));
1928         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1929       }
1930       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1931         // Storing to a field of an empty inline type. Ignore.
1932         break;
1933       }
1934       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1935       break;
1936     }
1937     case Bytecodes::_getfield: {
1938       // Check for compile-time constants, i.e., trusted final non-static fields.
1939       Value constant = NULL;
1940       if (state_before == NULL && field->is_flattened()) {
1941         // Save the entire state and re-execute on deopt when accessing flattened fields
1942         assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1943         state_before = copy_state_before();
1944       }
1945       if (!has_pending_field_access() && !has_pending_load_indexed()) {
1946         obj = apop();
1947         ObjectType* obj_type = obj->type()->as_ObjectType();
1948         if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
1949             && field->type()->as_inline_klass()->is_empty()) {
1950           // Loading from a field of an empty inline type. Just return the default instance.
1951           null_check(obj);
1952           constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1953         } else if (field->is_constant() && !field->is_flattened() && obj_type->is_constant() && !PatchALot) {
1954           ciObject* const_oop = obj_type->constant_value();
1955           if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1956             ciConstant field_value = field->constant_value_of(const_oop);
1957             if (field_value.is_valid()) {
1958               if (field->is_null_free() && field_value.is_null_or_zero()) {
1959                 // Non-flattened inline type field. Replace null with the default value.
1960                 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1961               } else {
1962                 constant = make_constant(field_value, field);
1963               }
1964               // For CallSite objects add a dependency for invalidation of the optimization.
1965               if (field->is_call_site_target()) {
1966                 ciCallSite* call_site = const_oop->as_call_site();
1967                 if (!call_site->is_fully_initialized_constant_call_site()) {
1968                   ciMethodHandle* target = field_value.as_object()->as_method_handle();
1969                   dependency_recorder()->assert_call_site_target_value(call_site, target);
1970                 }
1971               }
1972             }
1973           }
1974         }
1975       }
1976       if (constant != NULL) {
1977         push(type, append(constant));
1978       } else {
1979         if (state_before == NULL) {
1980           state_before = copy_state_for_exception();
1981         }
1982         if (!field->is_flattened()) {
1983           if (has_pending_field_access()) {
1984             assert(!needs_patching, "Can't patch delayed field access");
1985             obj = pending_field_access()->obj();
1986             offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
1987             field = pending_field_access()->holder()->get_field_by_offset(offset, false);
1988             assert(field != NULL, "field not found");
1989             set_pending_field_access(NULL);
1990           } else if (has_pending_load_indexed()) {
1991             assert(!needs_patching, "Can't patch delayed field access");
1992             pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
1993             LoadIndexed* li = pending_load_indexed()->load_instr();
1994             li->set_type(type);
1995             push(type, append(li));
1996             set_pending_load_indexed(NULL);




1997             break;
1998           }
1999           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
2000           Value replacement = !needs_patching ? _memory->load(load) : load;
2001           if (replacement != load) {
2002             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
2003             // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
2004             // conversion. Emit an explicit conversion here to get the correct field value after the write.
2005             switch (field_type) {
2006             case T_BOOLEAN:
2007             case T_BYTE:
2008               replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
2009               break;
2010             case T_CHAR:
2011               replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
2012               break;
2013             case T_SHORT:
2014               replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
2015               break;
2016             default:
2017               break;
2018             }
2019             push(type, replacement);
2020           } else {
2021             push(type, append(load));
2022           }
2023         } else {
2024           // Look at the next bytecode to check if we can delay the field access
2025           bool can_delay_access = false;
2026           ciBytecodeStream s(method());
2027           s.force_bci(bci());
2028           s.next();
2029           if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
2030             ciField* next_field = s.get_field(will_link);
2031             bool next_needs_patching = !next_field->holder()->is_loaded() ||
2032                                        !next_field->will_link(method(), Bytecodes::_getfield) ||
2033                                        PatchALot;
2034             can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
2035           }
2036           if (can_delay_access) {
2037             if (has_pending_load_indexed()) {
2038               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2039             } else if (has_pending_field_access()) {
2040               pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
2041             } else {
2042               null_check(obj);
2043               DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset());
2044               set_pending_field_access(dfa);
2045             }
2046           } else {
2047             ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2048             scope()->set_wrote_final();
2049             scope()->set_wrote_fields();
2050             bool need_membar = false;
2051             if (inline_klass->is_initialized() && inline_klass->is_empty()) {
2052               apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
2053               if (has_pending_field_access()) {
2054                 set_pending_field_access(NULL);
2055               } else if (has_pending_load_indexed()) {
2056                 set_pending_load_indexed(NULL);
2057               }
2058             } else if (has_pending_load_indexed()) {
2059               assert(!needs_patching, "Can't patch delayed field access");
2060               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2061               NewInlineTypeInstance* vt = new NewInlineTypeInstance(inline_klass, pending_load_indexed()->state_before());
2062               _memory->new_instance(vt);
2063               pending_load_indexed()->load_instr()->set_vt(vt);
2064               apush(append_split(vt));
2065               append(pending_load_indexed()->load_instr());
2066               set_pending_load_indexed(NULL);
2067               need_membar = true;
2068             } else {
2069               NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(inline_klass, state_before);
2070               _memory->new_instance(new_instance);
2071               apush(append_split(new_instance));
2072               assert(!needs_patching, "Can't patch flattened inline type field access");
2073               if (has_pending_field_access()) {
2074                 copy_inline_content(inline_klass, pending_field_access()->obj(),
2075                                     pending_field_access()->offset() + field->offset() - field->holder()->as_inline_klass()->first_field_offset(),
2076                                     new_instance, inline_klass->first_field_offset(), state_before);
2077                 set_pending_field_access(NULL);
2078               } else {
2079                 copy_inline_content(inline_klass, obj, field->offset(), new_instance, inline_klass->first_field_offset(), state_before);
2080               }
2081               need_membar = true;
2082             }
2083             if (need_membar) {
2084               // If we allocated a new instance, ensure the stores that copy the
2085               // field contents are visible before any subsequent store that
2086               // publishes this reference.
2087               append(new MemBar(lir_membar_storestore));
2088             }
2089           }
2090         }
2091       }
2092       break;
2093     }
2094     case Bytecodes::_putfield: {
2095       Value val = pop(type);
2096       obj = apop();
2097       if (state_before == NULL) {
2098         state_before = copy_state_for_exception();
2099       }
2100       if (field_type == T_BOOLEAN) {
2101         Value mask = append(new Constant(new IntConstant(1)));
2102         val = append(new LogicOp(Bytecodes::_iand, val, mask));
2103       }
2104       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2105         // Storing to a field of an empty inline type. Ignore.
2106         null_check(obj);
2107       } else if (!field->is_flattened()) {
2108         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2109         if (!needs_patching) store = _memory->store(store);
2110         if (store != NULL) {
2111           append(store);
2112         }
2113       } else {
2114         assert(!needs_patching, "Can't patch flattened inline type field access");
2115         ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2116         copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
2117       }
2118       break;
2119     }
2120     default:
2121       ShouldNotReachHere();
2122       break;
2123   }
2124 }
2125 
2126 // Baseline version of withfield: allocate a new instance every time
2127 void GraphBuilder::withfield(int field_index) {
2128   // Save the entire state and re-execute on deopt
2129   ValueStack* state_before = copy_state_before();
2130   state_before->set_should_reexecute(true);
2131 
2132   bool will_link;
2133   ciField* field_modify = stream()->get_field(will_link);
2134   ciInstanceKlass* holder = field_modify->holder();
2135   BasicType field_type = field_modify->type()->basic_type();
2136   ValueType* type = as_ValueType(field_type);
2137   Value val = pop(type);
2138   Value obj = apop();
2139   null_check(obj);
2140 
2141   if (!holder->is_loaded() || !holder->is_inlinetype() || !will_link) {
2142     apush(append_split(new Deoptimize(holder, state_before)));
2143     return;
2144   }
2145 
2146   // call will_link again to determine if the field is valid.
2147   const bool needs_patching = !field_modify->will_link(method(), Bytecodes::_withfield) ||
2148                               (!field_modify->is_flattened() && PatchALot);
2149   const int offset_modify = !needs_patching ? field_modify->offset() : -1;
2150 
2151   scope()->set_wrote_final();
2152   scope()->set_wrote_fields();
2153 
2154   NewInlineTypeInstance* new_instance;
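       // If the receiver is already a larval (under-construction) inline instance, keep writing
       // into it; otherwise allocate a fresh instance and copy over all fields that are not modified.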
2155   if (obj->as_NewInlineTypeInstance() != NULL && obj->as_NewInlineTypeInstance()->in_larval_state()) {
2156     new_instance = obj->as_NewInlineTypeInstance();
2157     apush(append_split(new_instance));
2158   } else {
2159     new_instance = new NewInlineTypeInstance(holder->as_inline_klass(), state_before);
2160     _memory->new_instance(new_instance);
2161     apush(append_split(new_instance));
2162 
2163     // Initialize fields which are not modified
2164     for (int i = 0; i < holder->nof_nonstatic_fields(); i++) {
2165       ciField* field = holder->nonstatic_field_at(i);
2166       int offset = field->offset();
2167       // Don't use offset_modify here; it might be set to -1 if needs_patching is true
2168       if (offset != field_modify->offset()) {
2169         if (field->is_flattened()) {
2170           ciInlineKlass* vk = field->type()->as_inline_klass();
2171           if (!vk->is_empty()) {
2172             copy_inline_content(vk, obj, offset, new_instance, vk->first_field_offset(), state_before, field);
2173           }
2174         } else {
2175           LoadField* load = new LoadField(obj, offset, field, false, state_before, false);
2176           Value replacement = append(load);
2177           StoreField* store = new StoreField(new_instance, offset, field, replacement, false, state_before, false);
2178           append(store);
2179         }
2180       }
2181     }
2182   }
2183 
2184   // Field to modify
2185   if (field_type == T_BOOLEAN) {
2186     Value mask = append(new Constant(new IntConstant(1)));
2187     val = append(new LogicOp(Bytecodes::_iand, val, mask));
2188   }
2189   if (field_modify->is_flattened()) {
2190     assert(!needs_patching, "Can't patch flattened inline type field access");
2191     ciInlineKlass* vk = field_modify->type()->as_inline_klass();
2192     if (!vk->is_empty()) {
2193       copy_inline_content(vk, val, vk->first_field_offset(), new_instance, offset_modify, state_before, field_modify);
2194     }
2195   } else {
2196     StoreField* store = new StoreField(new_instance, offset_modify, field_modify, val, false, state_before, needs_patching);
2197     append(store);
2198   }
2199 }
2200 
2201 Dependencies* GraphBuilder::dependency_recorder() const {
2202   assert(DeoptC1, "need debug information");
2203   return compilation()->dependency_recorder();
2204 }
2205 
2206 // How many arguments do we want to profile?
2207 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2208   int n = 0;
2209   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2210   start = has_receiver ? 1 : 0;
2211   if (profile_arguments()) {
2212     ciProfileData* data = method()->method_data()->bci_to_data(bci());
2213     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2214       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2215     }
2216   }
2217   // If we are inlining then we need to collect arguments to profile parameters for the target
2218   if (profile_parameters() && target != NULL) {
2219     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {

2297       break;
2298     case Bytecodes::_invokehandle:
2299       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2300       break;
2301     default:
2302       break;
2303     }
2304   } else {
2305     if (bc_raw == Bytecodes::_invokehandle) {
2306       assert(!will_link, "should come here only for unlinked call");
2307       code = Bytecodes::_invokespecial;
2308     }
2309   }
2310 
2311   if (code == Bytecodes::_invokespecial) {
2312     // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2313     ciKlass* receiver_constraint = nullptr;
2314 
2315     if (bc_raw == Bytecodes::_invokeinterface) {
2316       receiver_constraint = holder;
2317     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2318       receiver_constraint = calling_klass;
2319     }
2320 
2321     if (receiver_constraint != nullptr) {
2322       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2323       Value receiver = state()->stack_at(index);
2324       CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2325       // go to uncommon_trap when checkcast fails
2326       c->set_invokespecial_receiver_check();
2327       state()->stack_at_put(index, append_split(c));
2328     }
2329   }
2330 
2331   // Push the appendix argument (MethodType, CallSite, etc.), if there is one.
2332   bool patch_for_appendix = false;
2333   int patching_appendix_arg = 0;
2334   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2335     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2336     apush(arg);
2337     patch_for_appendix = true;

2532       null_check(recv);
2533     }
2534 
2535     if (is_profiling()) {
2536       // Note that we'd collect profile data in this method if we wanted it.
2537       compilation()->set_would_profile(true);
2538 
2539       if (profile_calls()) {
2540         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
2541         ciKlass* target_klass = NULL;
2542         if (cha_monomorphic_target != NULL) {
2543           target_klass = cha_monomorphic_target->holder();
2544         } else if (exact_target != NULL) {
2545           target_klass = exact_target->holder();
2546         }
2547         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2548       }
2549     }
2550   }
2551 
2552   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before,
2553                               declared_signature->returns_null_free_inline_type());
2554   // push result
2555   append_split(result);
2556 
2557   if (result_type != voidType) {
2558     push(result_type, round_fp(result));
2559   }
2560   if (profile_return() && result_type->is_object_kind()) {
2561     profile_return_type(result, target);
2562   }
2563 }
2564 
2565 
2566 void GraphBuilder::new_instance(int klass_index) {
2567   ValueStack* state_before = copy_state_exhandling();
2568   bool will_link;
2569   ciKlass* klass = stream()->get_klass(will_link);
2570   assert(klass->is_instance_klass(), "must be an instance klass");
2571   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2572   _memory->new_instance(new_instance);
2573   apush(append_split(new_instance));
2574 }
2575 
2576 void GraphBuilder::default_value(int klass_index) {
2577   bool will_link;
2578   ciKlass* klass = stream()->get_klass(will_link);
2579   if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2580       klass->as_inline_klass()->is_initialized()) {
2581     ciInlineKlass* vk = klass->as_inline_klass();
2582     apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2583   } else {
2584     apush(append_split(new Deoptimize(klass, copy_state_before())));
2585   }
2586 }
2587 
2588 void GraphBuilder::new_type_array() {
2589   ValueStack* state_before = copy_state_exhandling();
2590   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2591 }
2592 
2593 
2594 void GraphBuilder::new_object_array() {
2595   bool will_link;
2596   ciKlass* klass = stream()->get_klass(will_link);
2597   bool null_free = stream()->has_Q_signature();
2598   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2599   NewArray* n = new NewObjectArray(klass, ipop(), state_before, null_free);
2600   apush(append_split(n));
2601 }
2602 
2603 
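     // A type check against 'k' can be compiled as a direct klass pointer comparison when 'k' is a
     // loaded final instance klass, or (relying on CHA) a leaf class with no subclasses and not an interface.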
2604 bool GraphBuilder::direct_compare(ciKlass* k) {
2605   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2606     ciInstanceKlass* ik = k->as_instance_klass();
2607     if (ik->is_final()) {
2608       return true;
2609     } else {
2610       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2611         // test class is leaf class
2612         dependency_recorder()->assert_leaf_type(ik);
2613         return true;
2614       }
2615     }
2616   }
2617   return false;
2618 }
2619 
2620 
2621 void GraphBuilder::check_cast(int klass_index) {
2622   bool will_link;
2623   ciKlass* klass = stream()->get_klass(will_link);
2624   bool null_free = stream()->has_Q_signature();
2625   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2626   CheckCast* c = new CheckCast(klass, apop(), state_before, null_free);
2627   apush(append_split(c));
2628   c->set_direct_compare(direct_compare(klass));
2629 
2630   if (is_profiling()) {
2631     // Note that we'd collect profile data in this method if we wanted it.
2632     compilation()->set_would_profile(true);
2633 
2634     if (profile_checkcasts()) {
2635       c->set_profiled_method(method());
2636       c->set_profiled_bci(bci());
2637       c->set_should_profile(true);
2638     }
2639   }
2640 }
2641 
2642 
2643 void GraphBuilder::instance_of(int klass_index) {
2644   bool will_link;
2645   ciKlass* klass = stream()->get_klass(will_link);
2646   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2647   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2648   ipush(append_split(i));
2649   i->set_direct_compare(direct_compare(klass));
2650 
2651   if (is_profiling()) {
2652     // Note that we'd collect profile data in this method if we wanted it.
2653     compilation()->set_would_profile(true);
2654 
2655     if (profile_checkcasts()) {
2656       i->set_profiled_method(method());
2657       i->set_profiled_bci(bci());
2658       i->set_should_profile(true);
2659     }
2660   }
2661 }
2662 
2663 
2664 void GraphBuilder::monitorenter(Value x, int bci) {
2665   bool maybe_inlinetype = false;
2666   if (bci == InvocationEntryBci) {
2667     // Called by GraphBuilder::inline_sync_entry.
2668 #ifdef ASSERT
2669     ciType* obj_type = x->declared_type();
2670     assert(obj_type == NULL || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2671 #endif
2672   } else {
2673     // We are compiling a monitorenter bytecode
2674     if (EnableValhalla) {
2675       ciType* obj_type = x->declared_type();
2676       if (obj_type == NULL || obj_type->as_klass()->can_be_inline_klass()) {
2677         // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2678         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
2679         maybe_inlinetype = true;
2680       }
2681     }
2682   }
2683 
2684   // save state before locking in case of deoptimization after a NullPointerException
2685   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2686   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
2687   kill_all();
2688 }
2689 
2690 
2691 void GraphBuilder::monitorexit(Value x, int bci) {
2692   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2693   kill_all();
2694 }
2695 
2696 
2697 void GraphBuilder::new_multi_array(int dimensions) {
2698   bool will_link;
2699   ciKlass* klass = stream()->get_klass(will_link);
2700   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2701 
2702   Values* dims = new Values(dimensions, dimensions, NULL);
2703   // fill in all dimensions: the counts are popped from the stack, with the last dimension's count on top
2704   int i = dimensions;
2705   while (i-- > 0) dims->at_put(i, ipop());
2706   // create array

2802   if (i1->can_trap()) {
2803     i1->set_exception_handlers(handle_exception(i1));
2804     assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2805   }
2806   return i1;
2807 }
2808 
2809 
2810 Instruction* GraphBuilder::append(Instruction* instr) {
2811   assert(instr->as_StateSplit() == NULL || instr->as_BlockEnd() != NULL, "wrong append used");
2812   return append_with_bci(instr, bci());
2813 }
2814 
2815 
2816 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2817   return append_with_bci(instr, bci());
2818 }
2819 
2820 
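     // Append an explicit NullCheck only when 'value' might actually be null: freshly allocated
     // objects, loaded non-null constants and null-free values never need one.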
2821 void GraphBuilder::null_check(Value value) {
2822   if (value->as_NewArray() != NULL || value->as_NewInstance() != NULL || value->as_NewInlineTypeInstance() != NULL) {
2823     return;
2824   } else {
2825     Constant* con = value->as_Constant();
2826     if (con) {
2827       ObjectType* c = con->type()->as_ObjectType();
2828       if (c && c->is_loaded()) {
2829         ObjectConstant* oc = c->as_ObjectConstant();
2830         if (!oc || !oc->value()->is_null_object()) {
2831           return;
2832         }
2833       }
2834     }
2835     if (value->is_null_free()) return;
2836   }
2837   append(new NullCheck(value, copy_state_for_exception()));
2838 }
2839 
2840 
2841 
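     // Collect the exception handlers that cover this instruction by walking the scope chain from
     // its state_before, joining every handler whose range covers the current bci.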
2842 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2843   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
2844     assert(instruction->exception_state() == NULL
2845            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2846            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2847            "exception_state should be of exception kind");
2848     return new XHandlers();
2849   }
2850 
2851   XHandlers*  exception_handlers = new XHandlers();
2852   ScopeData*  cur_scope_data = scope_data();
2853   ValueStack* cur_state = instruction->state_before();
2854   ValueStack* prev_state = NULL;
2855   int scope_count = 0;
2856 
2857   assert(cur_state != NULL, "state_before must be set");
2858   do {
2859     int cur_bci = cur_state->bci();
2860     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2861     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2862            || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2863 
2864 
2865     // join with all potential exception handlers
2866     XHandlers* list = cur_scope_data->xhandlers();
2867     const int n = list->length();
2868     for (int i = 0; i < n; i++) {
2869       XHandler* h = list->handler_at(i);
2870       if (h->covers(cur_bci)) {
2871         // h is a potential exception handler => join it
2872         compilation()->set_has_exception_handlers(true);
2873 
2874         BlockBegin* entry = h->entry_block();
2875         if (entry == block()) {
2876           // It's acceptable for an exception handler to cover itself,
2877           // but we don't handle that in the parser currently. It's
2878           // very rare, so we bail out instead of trying to handle it.
2879           BAILOUT_("exception handler covers itself", exception_handlers);
2880         }
2881         assert(entry->bci() == h->handler_bci(), "must match");
2882         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2883 

3331       case Bytecodes::_invokevirtual  : // fall through
3332       case Bytecodes::_invokespecial  : // fall through
3333       case Bytecodes::_invokestatic   : // fall through
3334       case Bytecodes::_invokedynamic  : // fall through
3335       case Bytecodes::_invokeinterface: invoke(code); break;
3336       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
3337       case Bytecodes::_newarray       : new_type_array(); break;
3338       case Bytecodes::_anewarray      : new_object_array(); break;
3339       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
3340       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
3341       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
3342       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
3343       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
3344       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
3345       case Bytecodes::_wide           : ShouldNotReachHere(); break;
3346       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
3347       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
3348       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
3349       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
3350       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
3351       case Bytecodes::_aconst_init    : default_value(s.get_index_u2()); break;
3352       case Bytecodes::_withfield      : withfield(s.get_index_u2()); break;
3353       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
3354       default                         : ShouldNotReachHere(); break;
3355     }
3356 
3357     if (log != NULL)
3358       log->clear_context(); // skip marker if nothing was printed
3359 
3360     // save the current bci to set up the Goto at the end
3361     prev_bci = s.cur_bci();
3362 
3363   }
3364   CHECK_BAILOUT_(NULL);
3365   // stop processing of this block (see try_inline_full)
3366   if (_skip_block) {
3367     _skip_block = false;
3368     assert(_last && _last->as_BlockEnd(), "");
3369     return _last->as_BlockEnd();
3370   }
3371   // if any instructions were appended, check whether the last one is a BlockEnd instruction
3372   BlockEnd* end = last()->as_BlockEnd();

3621   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3622 
3623   assert(state->caller_state() == NULL, "should be top scope");
3624   state->clear_locals();
3625   Goto* g = new Goto(target, false);
3626   append(g);
3627   _osr_entry->set_end(g);
3628   target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3629 
3630   scope_data()->set_stream(NULL);
3631 }
3632 
3633 
3634 ValueStack* GraphBuilder::state_at_entry() {
3635   ValueStack* state = new ValueStack(scope(), NULL);
3636 
3637   // Set up locals for receiver
3638   int idx = 0;
3639   if (!method()->is_static()) {
3640     // we should always see the receiver
3641     state->store_local(idx, new Local(method()->holder(), objectType, idx,
3642              /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3643     idx = 1;
3644   }
3645 
3646   // Set up locals for incoming arguments
3647   ciSignature* sig = method()->signature();
3648   for (int i = 0; i < sig->count(); i++) {
3649     ciType* type = sig->type_at(i);
3650     BasicType basic_type = type->basic_type();
3651     // don't allow T_ARRAY to propagate into locals types
3652     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3653     ValueType* vt = as_ValueType(basic_type);
3654     state->store_local(idx, new Local(type, vt, idx, false, sig->is_null_free_at(i)));
3655     idx += type->size();
3656   }
3657 
3658   // lock synchronized method
3659   if (method()->is_synchronized()) {
3660     state->lock(NULL);
3661   }
3662 
3663   return state;
3664 }
3665 
3666 
3667 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3668   : _scope_data(NULL)
3669   , _compilation(compilation)
3670   , _memory(new MemoryBuffer())
3671   , _inline_bailout_msg(NULL)
3672   , _instruction_count(0)
3673   , _osr_entry(NULL)
3674   , _pending_field_access(NULL)
3675   , _pending_load_indexed(NULL)
3676 {
3677   int osr_bci = compilation->osr_bci();
3678 
3679   // determine entry points and bci2block mapping
3680   BlockListBuilder blm(compilation, scope, osr_bci);
3681   CHECK_BAILOUT();
3682 
3683   BlockList* bci2block = blm.bci2block();
3684   BlockBegin* start_block = bci2block->at(0);
3685 
3686   push_root_scope(scope, bci2block, start_block);
3687 
3688   // setup state for std entry
3689   _initial_state = state_at_entry();
3690   start_block->merge(_initial_state, compilation->has_irreducible_loops());
3691 
3692   // End nulls still exist here
3693 
3694   // complete graph
3695   _vmap        = new ValueMap();