src/hotspot/share/c1/c1_GraphBuilder.cpp (old version)

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciKlass.hpp"
  34 #include "ci/ciMemberName.hpp"
  35 #include "ci/ciSymbols.hpp"
  36 #include "ci/ciUtilities.inline.hpp"
  37 #include "compiler/compilationPolicy.hpp"
  38 #include "compiler/compileBroker.hpp"
  39 #include "compiler/compilerEvent.hpp"
  40 #include "interpreter/bytecode.hpp"
  41 #include "jfr/jfrEvents.hpp"
  42 #include "memory/resourceArea.hpp"
  43 #include "oops/oop.inline.hpp"
  44 #include "runtime/sharedRuntime.hpp"
  45 #include "runtime/vm_version.hpp"
  46 #include "utilities/bitMap.inline.hpp"
  47 #include "utilities/powerOfTwo.hpp"
  48 
  49 class BlockListBuilder {
  50  private:
  51   Compilation* _compilation;
  52   IRScope*     _scope;

 640         }
 641 #endif
 642         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 643         return result;
 644       }
 645     }
 646     return load;
 647   }
 648 
 649   // Record this newly allocated object
 650   void new_instance(NewInstance* object) {
 651     int index = _newobjects.length();
 652     _newobjects.append(object);
 653     if (_fields.at_grow(index, NULL) == NULL) {
 654       _fields.at_put(index, new FieldBuffer());
 655     } else {
 656       _fields.at(index)->kill();
 657     }
 658   }
 659 
 660   void store_value(Value value) {
 661     int index = _newobjects.find(value);
 662     if (index != -1) {
 663       // Stored a newly allocated object into another object.
 664       // Assume we've lost track of it as a separate slice of memory.
 665       // We could do better by keeping track of whether individual
 666       // fields could alias each other.
 667       _newobjects.remove_at(index);
 668       // Pull out the field info and store it at the end of the field
 669       // info list so it can be reused later.
 670       _fields.append(_fields.at(index));
 671       _fields.remove_at(index);
 672     }
 673   }
 674 
 675   void kill() {
 676     _newobjects.trunc_to(0);
 677     _objects.trunc_to(0);
 678     _values.kill();
 679   }
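
The grow-or-reuse pattern in new_instance() above is worth spelling out: a FieldBuffer slot is allocated the first time its index is used and recycled with kill() afterwards. Below is a minimal standalone sketch of the same pattern; FieldBufferSketch and MemoryBufferSketch are illustrative stand-ins, not HotSpot types.

    #include <cassert>
    #include <vector>

    struct FieldBufferSketch {
      bool killed = false;
      void kill() { killed = true; }  // drop any cached field values
    };

    struct MemoryBufferSketch {
      std::vector<void*>              newobjects;
      std::vector<FieldBufferSketch*> fields;

      void new_instance(void* object) {
        size_t index = newobjects.size();
        newobjects.push_back(object);
        if (fields.size() <= index || fields[index] == nullptr) {
          fields.resize(index + 1, nullptr);
          fields[index] = new FieldBufferSketch();  // first use of this slot
        } else {
          fields[index]->kill();  // recycle the buffer left by a prior object
        }
      }
    };

    int main() {
      MemoryBufferSketch m;
      int a, b;
      m.new_instance(&a);
      m.new_instance(&b);
      assert(m.fields.size() == 2 && !m.fields[1]->killed);
      return 0;
    }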

 922         }
 923         break;
 924        }
 925       default       : ShouldNotReachHere();
 926     }
 927     Value x;
 928     if (patch_state != NULL) {
 929       x = new Constant(t, patch_state);
 930     } else {
 931       x = new Constant(t);
 932     }
 933     push(t, append(x));
 934   }
 935 }
 936 
 937 
 938 void GraphBuilder::load_local(ValueType* type, int index) {
 939   Value x = state()->local_at(index);
 940   assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
 941   push(type, x);
 942 }
 943 
 944 
 945 void GraphBuilder::store_local(ValueType* type, int index) {
 946   Value x = pop(type);
 947   store_local(state(), x, index);
 948 }
 949 
 950 
 951 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
 952   if (parsing_jsr()) {
 953     // We need to do additional tracking of the location of the return
 954     // address for jsrs since we don't handle arbitrary jsr/ret
 955     // constructs. Here we are figuring out in which circumstances we
 956     // need to bail out.
 957     if (x->type()->is_address()) {
 958       scope_data()->set_jsr_return_address_local(index);
 959 
 960       // Also check parent jsrs (if any) at this time to see whether
 961       // they are using this local. We don't handle skipping over a
 962       // ret.
 963       for (ScopeData* cur_scope_data = scope_data()->parent();
 964            cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
 965            cur_scope_data = cur_scope_data->parent()) {
 966         if (cur_scope_data->jsr_return_address_local() == index) {
 967           BAILOUT("subroutine overwrites return address from previous subroutine");
 968         }
 969       }
 970     } else if (index == scope_data()->jsr_return_address_local()) {
 971       scope_data()->set_jsr_return_address_local(-1);
 972     }
 973   }
 974 
 975   state->store_local(index, round_fp(x));
 976 }
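
The jsr bailout above reduces to a walk over enclosing jsr scopes, each of which records the local index holding its return address; a collision means an inner subroutine would overwrite an outer one's return address. A minimal sketch under that assumption (the helper name is hypothetical):

    #include <cassert>
    #include <vector>

    // One entry per enclosing jsr scope: the local holding its return address.
    bool overwrites_parent_return_address(const std::vector<int>& parent_ret_locals,
                                          int index) {
      for (int local : parent_ret_locals) {
        if (local == index) return true;  // same local reused -> bail out
      }
      return false;
    }

    int main() {
      assert(overwrites_parent_return_address({4, 7}, 7));
      assert(!overwrites_parent_return_address({4, 7}, 5));
      return 0;
    }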
 977 
 978 
 979 void GraphBuilder::load_indexed(BasicType type) {
 980   // In case of in-block code motion in range check elimination
 981   ValueStack* state_before = copy_state_indexed_access();
 982   compilation()->set_has_access_indexed(true);
 983   Value index = ipop();
 984   Value array = apop();
 985   Value length = NULL;
 986   if (CSEArrayLength ||
 987       (array->as_Constant() != NULL) ||
 988       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
 989       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
 990       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
 991     length = append(new ArrayLength(array, state_before));
 992   }
 993   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
 994 }
 995 
 996 
 997 void GraphBuilder::store_indexed(BasicType type) {
 999   // In case of in-block code motion in range check elimination
 999   ValueStack* state_before = copy_state_indexed_access();
1000   compilation()->set_has_access_indexed(true);
1001   Value value = pop(as_ValueType(type));
1002   Value index = ipop();
1003   Value array = apop();
1004   Value length = NULL;
1005   if (CSEArrayLength ||
1006       (array->as_Constant() != NULL) ||
1007       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1008       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1009       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1010     length = append(new ArrayLength(array, state_before));
1011   }
1012   ciType* array_type = array->declared_type();
1013   bool check_boolean = false;
1014   if (array_type != NULL) {
1015     if (array_type->is_loaded() &&
1016       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1017       assert(type == T_BYTE, "boolean store uses bastore");
1018       Value mask = append(new Constant(new IntConstant(1)));
1019       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1020     }
1021   } else if (type == T_BYTE) {
1022     check_boolean = true;
1023   }
1024   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1025   append(result);
1026   _memory->store_value(value);
1027 
1028   if (type == T_OBJECT && is_profiling()) {
1029     // Note that we'd collect profile data in this method if we wanted it.
1030     compilation()->set_would_profile(true);
1031 
1032     if (profile_checkcasts()) {
1033       result->set_profiled_method(method());
1034       result->set_profiled_bci(bci());
1035       result->set_should_profile(true);
1036     }
1037   }
1038 }
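
The boolean handling above follows from bastore semantics: a boolean[] element must hold only 0 or 1, so when the element type is statically known to be T_BOOLEAN the builder masks the stored int with 1 (and when the array type is unknown it defers via check_boolean). A trivial sketch of the masking:

    #include <cassert>

    int main() {
      int value  = 0x102;      // arbitrary int on the operand stack
      int stored = value & 1;  // the "value & 1" mask emitted for boolean arrays
      assert(stored == 0);
      assert((3 & 1) == 1);    // odd values store true
      return 0;
    }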
1039 
1040 
1041 void GraphBuilder::stack_op(Bytecodes::Code code) {
1042   switch (code) {
1043     case Bytecodes::_pop:
1044       { state()->raw_pop();
1045       }
1046       break;
1047     case Bytecodes::_pop2:
1048       { state()->raw_pop();
1049         state()->raw_pop();
1050       }
1051       break;
1052     case Bytecodes::_dup:
1053       { Value w = state()->raw_pop();
1054         state()->raw_push(w);
1055         state()->raw_push(w);
1056       }
1057       break;
1058     case Bytecodes::_dup_x1:
1059       { Value w1 = state()->raw_pop();
1060         Value w2 = state()->raw_pop();
1061         state()->raw_push(w1);
1062         state()->raw_push(w2);
1063         state()->raw_push(w1);
1064       }
1065       break;
1066     case Bytecodes::_dup_x2:
1067       { Value w1 = state()->raw_pop();
1068         Value w2 = state()->raw_pop();
1069         Value w3 = state()->raw_pop();
1070         state()->raw_push(w1);
1071         state()->raw_push(w3);
1072         state()->raw_push(w2);
1073         state()->raw_push(w1);
1074       }
1075       break;
1076     case Bytecodes::_dup2:
1077       { Value w1 = state()->raw_pop();
1078         Value w2 = state()->raw_pop();
1079         state()->raw_push(w2);
1080         state()->raw_push(w1);
1081         state()->raw_push(w2);
1082         state()->raw_push(w1);
1083       }
1084       break;
1085     case Bytecodes::_dup2_x1:
1086       { Value w1 = state()->raw_pop();
1087         Value w2 = state()->raw_pop();
1088         Value w3 = state()->raw_pop();
1089         state()->raw_push(w2);
1090         state()->raw_push(w1);
1091         state()->raw_push(w3);
1092         state()->raw_push(w2);
1093         state()->raw_push(w1);
1094       }
1095       break;
1096     case Bytecodes::_dup2_x2:
1097       { Value w1 = state()->raw_pop();
1098         Value w2 = state()->raw_pop();
1099         Value w3 = state()->raw_pop();
1100         Value w4 = state()->raw_pop();
1101         state()->raw_push(w2);
1102         state()->raw_push(w1);
1103         state()->raw_push(w4);
1104         state()->raw_push(w3);
1105         state()->raw_push(w2);
1106         state()->raw_push(w1);
1107       }
1108       break;
1109     case Bytecodes::_swap:
1110       { Value w1 = state()->raw_pop();
1111         Value w2 = state()->raw_pop();
1112         state()->raw_push(w1);
1113         state()->raw_push(w2);
1114       }
1115       break;
1116     default:
1117       ShouldNotReachHere();
1118       break;
1119   }
1120 }
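
The raw_pop()/raw_push() sequences above implement the JVM stack-manipulation bytecodes literally. A standalone model of the dup_x2 case, with w1 on top: {w3, w2, w1} becomes {w1, w3, w2, w1}.

    #include <cassert>
    #include <vector>

    int main() {
      std::vector<int> stack = {3, 2, 1};  // bottom..top: w3, w2, w1
      int w1 = stack.back(); stack.pop_back();
      int w2 = stack.back(); stack.pop_back();
      int w3 = stack.back(); stack.pop_back();
      stack.push_back(w1);                 // dup_x2 reordering
      stack.push_back(w3);
      stack.push_back(w2);
      stack.push_back(w1);
      assert((stack == std::vector<int>{1, 3, 2, 1}));
      return 0;
    }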

1208 
1209 
1210 void GraphBuilder::_goto(int from_bci, int to_bci) {
1211   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1212   if (is_profiling()) {
1213     compilation()->set_would_profile(true);
1214     x->set_profiled_bci(bci());
1215     if (profile_branches()) {
1216       x->set_profiled_method(method());
1217       x->set_should_profile(true);
1218     }
1219   }
1220   append(x);
1221 }
1222 
1223 
1224 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1225   BlockBegin* tsux = block_at(stream()->get_dest());
1226   BlockBegin* fsux = block_at(stream()->next_bci());
1227   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
 1228   // In case of loop invariant code motion or predicate insertion
 1229   // before the body of a loop, the state is needed
1230   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : NULL, is_bb));
1231 
1232   assert(i->as_Goto() == NULL ||
1233          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1234          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1235          "safepoint state of Goto returned by canonicalizer incorrect");
1236 
1237   if (is_profiling()) {
1238     If* if_node = i->as_If();
1239     if (if_node != NULL) {
1240       // Note that we'd collect profile data in this method if we wanted it.
1241       compilation()->set_would_profile(true);
1242       // At level 2 we need the proper bci to count backedges
1243       if_node->set_profiled_bci(bci());
1244       if (profile_branches()) {
1245         // Successors can be rotated by the canonicalizer, check for this case.
1246         if_node->set_profiled_method(method());
1247         if_node->set_should_profile(true);
1248         if (if_node->tsux() == fsux) {
1249           if_node->set_swapped(true);
1250         }

1461 
1462   if (needs_check) {
1463     // Perform the registration of finalizable objects.
1464     ValueStack* state_before = copy_state_for_exception();
1465     load_local(objectType, 0);
1466     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1467                                state()->pop_arguments(1),
1468                                true, state_before, true));
1469   }
1470 }
1471 
1472 
1473 void GraphBuilder::method_return(Value x, bool ignore_return) {
1474   if (RegisterFinalizersAtInit &&
1475       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1476     call_register_finalizer();
1477   }
1478 
1479   // The conditions for a memory barrier are described in Parse::do_exits().
1480   bool need_mem_bar = false;
1481   if (method()->name() == ciSymbols::object_initializer_name() &&
1482        (scope()->wrote_final() ||
1483          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1484          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1485     need_mem_bar = true;
1486   }
1487 
1488   BasicType bt = method()->return_type()->basic_type();
1489   switch (bt) {
1490     case T_BYTE:
1491     {
1492       Value shift = append(new Constant(new IntConstant(24)));
1493       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1494       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1495       break;
1496     }
1497     case T_SHORT:
1498     {
1499       Value shift = append(new Constant(new IntConstant(16)));
1500       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1501       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1612   // Attach dimension info to stable arrays.
1613   if (FoldStableValues &&
1614       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1615     ciArray* array = field_value.as_object()->as_array();
1616     jint dimension = field->type()->as_array_klass()->dimension();
1617     value = new StableArrayConstant(array, dimension);
1618   }
1619 
1620   switch (field_type) {
1621     case T_ARRAY:
1622     case T_OBJECT:
1623       if (field_value.as_object()->should_be_constant()) {
1624         return new Constant(value);
1625       }
1626       return NULL; // Not a constant.
1627     default:
1628       return new Constant(value);
1629   }
1630 }
1631 
1632 void GraphBuilder::access_field(Bytecodes::Code code) {
1633   bool will_link;
1634   ciField* field = stream()->get_field(will_link);
1635   ciInstanceKlass* holder = field->holder();
1636   BasicType field_type = field->type()->basic_type();
1637   ValueType* type = as_ValueType(field_type);
1638   // call will_link again to determine if the field is valid.
1639   const bool needs_patching = !holder->is_loaded() ||
1640                               !field->will_link(method(), code) ||
1641                               PatchALot;
1642 
1643   ValueStack* state_before = NULL;
1644   if (!holder->is_initialized() || needs_patching) {
1645     // save state before instruction for debug info when
1646     // deoptimization happens during patching
1647     state_before = copy_state_before();
1648   }
1649 
1650   Value obj = NULL;
1651   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1652     if (state_before != NULL) {
1653       // build a patching constant
1654       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1655     } else {
1656       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1657     }
1658   }
1659 
1660   if (field->is_final() && (code == Bytecodes::_putfield)) {
1661     scope()->set_wrote_final();
1662   }
1663 
1664   if (code == Bytecodes::_putfield) {
1665     scope()->set_wrote_fields();
1666     if (field->is_volatile()) {
1667       scope()->set_wrote_volatile();
1668     }
1669   }
1670 
1671   const int offset = !needs_patching ? field->offset() : -1;
1672   switch (code) {
1673     case Bytecodes::_getstatic: {
1674       // check for compile-time constants, i.e., initialized static final fields
1675       Value constant = NULL;
1676       if (field->is_static_constant() && !PatchALot) {
1677         ciConstant field_value = field->constant_value();
1678         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1679                "stable static w/ default value shouldn't be a constant");
1680         constant = make_constant(field_value, field);
1681       }
1682       if (constant != NULL) {
1683         push(type, append(constant));
1684       } else {
1685         if (state_before == NULL) {
1686           state_before = copy_state_for_exception();
1687         }
1688         push(type, append(new LoadField(append(obj), offset, field, true,
1689                                         state_before, needs_patching)));
1690       }
1691       break;
1692     }
1693     case Bytecodes::_putstatic: {
1694       Value val = pop(type);
1695       if (state_before == NULL) {
1696         state_before = copy_state_for_exception();
1697       }
1698       if (field->type()->basic_type() == T_BOOLEAN) {
1699         Value mask = append(new Constant(new IntConstant(1)));
1700         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1701       }
1702       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1703       break;
1704     }
1705     case Bytecodes::_getfield: {
1706       // Check for compile-time constants, i.e., trusted final non-static fields.
1707       Value constant = NULL;
1708       obj = apop();
1709       ObjectType* obj_type = obj->type()->as_ObjectType();
1710       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1711         ciObject* const_oop = obj_type->constant_value();
1712         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1713           ciConstant field_value = field->constant_value_of(const_oop);
1714           if (field_value.is_valid()) {
1715             constant = make_constant(field_value, field);
1716             // For CallSite objects add a dependency for invalidation of the optimization.
1717             if (field->is_call_site_target()) {
1718               ciCallSite* call_site = const_oop->as_call_site();
1719               if (!call_site->is_fully_initialized_constant_call_site()) {
1720                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1721                 dependency_recorder()->assert_call_site_target_value(call_site, target);
1722               }
1723             }
1724           }
1725         }
1726       }
1727       if (constant != NULL) {
1728         push(type, append(constant));
1729       } else {
1730         if (state_before == NULL) {
1731           state_before = copy_state_for_exception();
1732         }
1733         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1734         Value replacement = !needs_patching ? _memory->load(load) : load;
1735         if (replacement != load) {
 1736           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1737           // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1738           // conversion. Emit an explicit conversion here to get the correct field value after the write.
1739           BasicType bt = field->type()->basic_type();
1740           switch (bt) {
1741           case T_BOOLEAN:
1742           case T_BYTE:
1743             replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1744             break;
1745           case T_CHAR:
1746             replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1747             break;
1748           case T_SHORT:
1749             replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1750             break;
1751           default:
1752             break;
1753           }
1754           push(type, replacement);
1755         } else {
1756           push(type, append(load));
1757         }
1758       }
1759       break;
1760     }
1761     case Bytecodes::_putfield: {
1762       Value val = pop(type);
1763       obj = apop();
1764       if (state_before == NULL) {
1765         state_before = copy_state_for_exception();
1766       }
1767       if (field->type()->basic_type() == T_BOOLEAN) {
1768         Value mask = append(new Constant(new IntConstant(1)));
1769         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1770       }
1771       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1772       if (!needs_patching) store = _memory->store(store);
1773       if (store != NULL) {
1774         append(store);
1775       }
1776       break;
1777     }
1778     default:
1779       ShouldNotReachHere();
1780       break;
1781   }
1782 }
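
The explicit i2b/i2c/i2s conversions above exist because load elimination may replace a LoadField with the value that was stored, which has not been through the field's implicit narrowing. A sketch of the byte case, showing that the replayed value must be narrowed exactly as the field store narrowed it:

    #include <cassert>
    #include <cstdint>

    int main() {
      int stored = 0x1FF;                          // int written by putfield
      int8_t field = static_cast<int8_t>(stored);  // what the byte field holds
      int replayed = static_cast<int8_t>(stored);  // the builder's explicit i2b
      assert(replayed == field && replayed == -1); // without i2b we'd see 0x1FF
      return 0;
    }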
1783 
1784 
1785 Dependencies* GraphBuilder::dependency_recorder() const {
1786   assert(DeoptC1, "need debug information");
1787   return compilation()->dependency_recorder();
1788 }
1789 
1790 // How many arguments do we want to profile?
1791 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1792   int n = 0;
1793   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1794   start = has_receiver ? 1 : 0;
1795   if (profile_arguments()) {
1796     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1797     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1798       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1799     }
1800   }
1801   // If we are inlining then we need to collect arguments to profile parameters for the target
1802   if (profile_parameters() && target != NULL) {
1803     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {

1849   ciKlass*              holder = stream()->get_declared_method_holder();
1850   const Bytecodes::Code bc_raw = stream()->cur_bc_raw();
1851   assert(declared_signature != NULL, "cannot be null");
1852   assert(will_link == target->is_loaded(), "");
1853 
1854   ciInstanceKlass* klass = target->holder();
1855   assert(!target->is_loaded() || klass->is_loaded(), "loaded target must imply loaded klass");
1856 
1857   // check if CHA possible: if so, change the code to invoke_special
1858   ciInstanceKlass* calling_klass = method()->holder();
1859   ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
1860   ciInstanceKlass* actual_recv = callee_holder;
1861 
1862   CompileLog* log = compilation()->log();
1863   if (log != NULL)
1864       log->elem("call method='%d' instr='%s'",
1865                 log->identify(target),
1866                 Bytecodes::name(code));
1867 
1868   // invoke-special-super
1869   if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer()) {
1870     ciInstanceKlass* sender_klass = calling_klass;
1871     if (sender_klass->is_interface()) {
1872       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1873       Value receiver = state()->stack_at(index);
1874       CheckCast* c = new CheckCast(sender_klass, receiver, copy_state_before());
1875       c->set_invokespecial_receiver_check();
1876       state()->stack_at_put(index, append_split(c));
1877     }
1878   }
1879 
1880   // Some methods are obviously bindable without any type checks so
1881   // convert them directly to an invokespecial or invokestatic.
1882   if (target->is_loaded() && !target->is_abstract() && target->can_be_statically_bound()) {
1883     switch (bc_raw) {
1884     case Bytecodes::_invokevirtual:
1885       code = Bytecodes::_invokespecial;
1886       break;
1887     case Bytecodes::_invokehandle:
1888       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1889       break;

2099       null_check(recv);
2100     }
2101 
2102     if (is_profiling()) {
2103       // Note that we'd collect profile data in this method if we wanted it.
2104       compilation()->set_would_profile(true);
2105 
2106       if (profile_calls()) {
2107         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
2108         ciKlass* target_klass = NULL;
2109         if (cha_monomorphic_target != NULL) {
2110           target_klass = cha_monomorphic_target->holder();
2111         } else if (exact_target != NULL) {
2112           target_klass = exact_target->holder();
2113         }
2114         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2115       }
2116     }
2117   }
2118 
2119   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2120   // push result
2121   append_split(result);
2122 
2123   if (result_type != voidType) {
2124     push(result_type, round_fp(result));
2125   }
2126   if (profile_return() && result_type->is_object_kind()) {
2127     profile_return_type(result, target);
2128   }
2129 }
2130 
2131 
2132 void GraphBuilder::new_instance(int klass_index) {
2133   ValueStack* state_before = copy_state_exhandling();
2134   bool will_link;
2135   ciKlass* klass = stream()->get_klass(will_link);
2136   assert(klass->is_instance_klass(), "must be an instance klass");
2137   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2138   _memory->new_instance(new_instance);
2139   apush(append_split(new_instance));
2140 }
2141 
2142 
2143 void GraphBuilder::new_type_array() {
2144   ValueStack* state_before = copy_state_exhandling();
2145   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2146 }
2147 
2148 
2149 void GraphBuilder::new_object_array() {
2150   bool will_link;
2151   ciKlass* klass = stream()->get_klass(will_link);
2152   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2153   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2154   apush(append_split(n));
2155 }
2156 
2157 
2158 bool GraphBuilder::direct_compare(ciKlass* k) {
2159   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2160     ciInstanceKlass* ik = k->as_instance_klass();
2161     if (ik->is_final()) {
2162       return true;
2163     } else {
2164       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2165         // test class is leaf class
2166         dependency_recorder()->assert_leaf_type(ik);
2167         return true;
2168       }
2169     }
2170   }
2171   return false;
2172 }
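
direct_compare() above lets a type check compile to a single klass-pointer compare when the tested class cannot have subclasses: either it is final, or CHA currently sees no subclass and a leaf-type dependency is recorded. A condensed sketch folding the predicates into booleans (it omits the UseSlowPath and DeoptC1 gates for brevity):

    #include <cassert>

    bool direct_compare_sketch(bool loaded, bool instance_klass, bool is_final,
                               bool use_cha, bool has_subklass, bool is_interface) {
      if (!loaded || !instance_klass) return false;
      if (is_final) return true;                          // no subclasses possible
      return use_cha && !has_subklass && !is_interface;   // leaf type under CHA
    }

    int main() {
      assert(direct_compare_sketch(true, true, true,  false, false, false));
      assert(direct_compare_sketch(true, true, false, true,  false, false));
      assert(!direct_compare_sketch(true, true, false, true,  true,  false));
      return 0;
    }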
2173 
2174 
2175 void GraphBuilder::check_cast(int klass_index) {
2176   bool will_link;
2177   ciKlass* klass = stream()->get_klass(will_link);
2178   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2179   CheckCast* c = new CheckCast(klass, apop(), state_before);
2180   apush(append_split(c));
2181   c->set_direct_compare(direct_compare(klass));
2182 
2183   if (is_profiling()) {
2184     // Note that we'd collect profile data in this method if we wanted it.
2185     compilation()->set_would_profile(true);
2186 
2187     if (profile_checkcasts()) {
2188       c->set_profiled_method(method());
2189       c->set_profiled_bci(bci());
2190       c->set_should_profile(true);
2191     }
2192   }
2193 }
2194 
2195 
2196 void GraphBuilder::instance_of(int klass_index) {
2197   bool will_link;
2198   ciKlass* klass = stream()->get_klass(will_link);
2199   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2200   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2201   ipush(append_split(i));
2202   i->set_direct_compare(direct_compare(klass));
2203 
2204   if (is_profiling()) {
2205     // Note that we'd collect profile data in this method if we wanted it.
2206     compilation()->set_would_profile(true);
2207 
2208     if (profile_checkcasts()) {
2209       i->set_profiled_method(method());
2210       i->set_profiled_bci(bci());
2211       i->set_should_profile(true);
2212     }
2213   }
2214 }
2215 
2216 
2217 void GraphBuilder::monitorenter(Value x, int bci) {
2218   // save state before locking in case of deoptimization after a NullPointerException
2219   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2220   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2221   kill_all();
2222 }
2223 
2224 
2225 void GraphBuilder::monitorexit(Value x, int bci) {
2226   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2227   kill_all();
2228 }
2229 
2230 
2231 void GraphBuilder::new_multi_array(int dimensions) {
2232   bool will_link;
2233   ciKlass* klass = stream()->get_klass(will_link);
2234   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2235 
2236   Values* dims = new Values(dimensions, dimensions, NULL);
2237   // fill in all dimensions
2238   int i = dimensions;
2239   while (i-- > 0) dims->at_put(i, ipop());
2240   // create array

2374 
2375 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2376   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
2377     assert(instruction->exception_state() == NULL
2378            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2379            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2380            "exception_state should be of exception kind");
2381     return new XHandlers();
2382   }
2383 
2384   XHandlers*  exception_handlers = new XHandlers();
2385   ScopeData*  cur_scope_data = scope_data();
2386   ValueStack* cur_state = instruction->state_before();
2387   ValueStack* prev_state = NULL;
2388   int scope_count = 0;
2389 
2390   assert(cur_state != NULL, "state_before must be set");
2391   do {
2392     int cur_bci = cur_state->bci();
2393     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2394     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2395 
2396     // join with all potential exception handlers
2397     XHandlers* list = cur_scope_data->xhandlers();
2398     const int n = list->length();
2399     for (int i = 0; i < n; i++) {
2400       XHandler* h = list->handler_at(i);
2401       if (h->covers(cur_bci)) {
2402         // h is a potential exception handler => join it
2403         compilation()->set_has_exception_handlers(true);
2404 
2405         BlockBegin* entry = h->entry_block();
2406         if (entry == block()) {
2407           // It's acceptable for an exception handler to cover itself
2408           // but we don't handle that in the parser currently.  It's
 2409           // very rare, so we bail out instead of trying to handle it.
2410           BAILOUT_("exception handler covers itself", exception_handlers);
2411         }
2412         assert(entry->bci() == h->handler_bci(), "must match");
2413         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2414 

2862       case Bytecodes::_invokevirtual  : // fall through
2863       case Bytecodes::_invokespecial  : // fall through
2864       case Bytecodes::_invokestatic   : // fall through
2865       case Bytecodes::_invokedynamic  : // fall through
2866       case Bytecodes::_invokeinterface: invoke(code); break;
2867       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
2868       case Bytecodes::_newarray       : new_type_array(); break;
2869       case Bytecodes::_anewarray      : new_object_array(); break;
2870       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
2871       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
2872       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
2873       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
2874       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
2875       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
2876       case Bytecodes::_wide           : ShouldNotReachHere(); break;
2877       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
2878       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
2879       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
2880       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
2881       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
2882       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
2883       default                         : ShouldNotReachHere(); break;
2884     }
2885 
2886     if (log != NULL)
2887       log->clear_context(); // skip marker if nothing was printed
2888 
 2889     // save current bci to set up the Goto at the end
2890     prev_bci = s.cur_bci();
2891 
2892   }
2893   CHECK_BAILOUT_(NULL);
2894   // stop processing of this block (see try_inline_full)
2895   if (_skip_block) {
2896     _skip_block = false;
2897     assert(_last && _last->as_BlockEnd(), "");
2898     return _last->as_BlockEnd();
2899   }
 2900   // if any instructions were appended, check whether the last is a BlockEnd
2901   BlockEnd* end = last()->as_BlockEnd();

3150   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3151 
3152   assert(state->caller_state() == NULL, "should be top scope");
3153   state->clear_locals();
3154   Goto* g = new Goto(target, false);
3155   append(g);
3156   _osr_entry->set_end(g);
3157   target->merge(_osr_entry->end()->state());
3158 
3159   scope_data()->set_stream(NULL);
3160 }
3161 
3162 
3163 ValueStack* GraphBuilder::state_at_entry() {
3164   ValueStack* state = new ValueStack(scope(), NULL);
3165 
3166   // Set up locals for receiver
3167   int idx = 0;
3168   if (!method()->is_static()) {
3169     // we should always see the receiver
3170     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
3171     idx = 1;
3172   }
3173 
3174   // Set up locals for incoming arguments
3175   ciSignature* sig = method()->signature();
3176   for (int i = 0; i < sig->count(); i++) {
3177     ciType* type = sig->type_at(i);
3178     BasicType basic_type = type->basic_type();
 3179     // don't allow T_ARRAY to propagate into local variable types
3180     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3181     ValueType* vt = as_ValueType(basic_type);
3182     state->store_local(idx, new Local(type, vt, idx, false));
3183     idx += type->size();
3184   }
3185 
3186   // lock synchronized method
3187   if (method()->is_synchronized()) {
3188     state->lock(NULL);
3189   }
3190 
3191   return state;
3192 }
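
state_at_entry() above assigns local slots in the standard JVM layout: the receiver (if any) occupies slot 0, and each argument takes the next free slot, with long/double occupying two. A small sketch of the index arithmetic for a hypothetical (JI)V instance method:

    #include <cassert>
    #include <vector>

    int main() {
      std::vector<int> arg_slots = {2, 1};  // long = 2 slots, int = 1 slot
      int idx = 1;                          // slot 0 holds the receiver
      std::vector<int> assigned;
      for (int size : arg_slots) { assigned.push_back(idx); idx += size; }
      assert(assigned[0] == 1 && assigned[1] == 3 && idx == 4);
      return 0;
    }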
3193 
3194 
3195 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3196   : _scope_data(NULL)
3197   , _compilation(compilation)
3198   , _memory(new MemoryBuffer())
3199   , _inline_bailout_msg(NULL)
3200   , _instruction_count(0)
3201   , _osr_entry(NULL)
3202 {
3203   int osr_bci = compilation->osr_bci();
3204 
3205   // determine entry points and bci2block mapping
3206   BlockListBuilder blm(compilation, scope, osr_bci);
3207   CHECK_BAILOUT();
3208 
3209   BlockList* bci2block = blm.bci2block();
3210   BlockBegin* start_block = bci2block->at(0);
3211 
3212   push_root_scope(scope, bci2block, start_block);
3213 
3214   // setup state for std entry
3215   _initial_state = state_at_entry();
3216   start_block->merge(_initial_state);
3217 
3218   // complete graph
3219   _vmap        = new ValueMap();
3220   switch (scope->method()->intrinsic_id()) {
3221   case vmIntrinsics::_dabs          : // fall through

src/hotspot/share/c1/c1_GraphBuilder.cpp (new version)

  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "c1/c1_CFGPrinter.hpp"
  27 #include "c1/c1_Canonicalizer.hpp"
  28 #include "c1/c1_Compilation.hpp"
  29 #include "c1/c1_GraphBuilder.hpp"
  30 #include "c1/c1_InstructionPrinter.hpp"
  31 #include "ci/ciCallSite.hpp"
  32 #include "ci/ciField.hpp"
  33 #include "ci/ciFlatArrayKlass.hpp"
  34 #include "ci/ciInlineKlass.hpp"
  35 #include "ci/ciKlass.hpp"
  36 #include "ci/ciMemberName.hpp"
  37 #include "ci/ciSymbols.hpp"
  38 #include "ci/ciUtilities.inline.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/compileBroker.hpp"
  41 #include "compiler/compilerEvent.hpp"
  42 #include "interpreter/bytecode.hpp"
  43 #include "jfr/jfrEvents.hpp"
  44 #include "memory/resourceArea.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 #include "runtime/vm_version.hpp"
  48 #include "utilities/bitMap.inline.hpp"
  49 #include "utilities/powerOfTwo.hpp"
  50 
  51 class BlockListBuilder {
  52  private:
  53   Compilation* _compilation;
  54   IRScope*     _scope;

 642         }
 643 #endif
 644         assert(result->type()->tag() == load->type()->tag(), "wrong types");
 645         return result;
 646       }
 647     }
 648     return load;
 649   }
 650 
 651   // Record this newly allocated object
 652   void new_instance(NewInstance* object) {
 653     int index = _newobjects.length();
 654     _newobjects.append(object);
 655     if (_fields.at_grow(index, NULL) == NULL) {
 656       _fields.at_put(index, new FieldBuffer());
 657     } else {
 658       _fields.at(index)->kill();
 659     }
 660   }
 661 
 662   // Record this newly allocated object
 663   void new_instance(NewInlineTypeInstance* object) {
 664     int index = _newobjects.length();
 665     _newobjects.append(object);
 666     if (_fields.at_grow(index, NULL) == NULL) {
 667       _fields.at_put(index, new FieldBuffer());
 668     } else {
 669       _fields.at(index)->kill();
 670     }
 671   }
 672 
 673   void store_value(Value value) {
 674     int index = _newobjects.find(value);
 675     if (index != -1) {
 676       // Stored a newly allocated object into another object.
 677       // Assume we've lost track of it as a separate slice of memory.
 678       // We could do better by keeping track of whether individual
 679       // fields could alias each other.
 680       _newobjects.remove_at(index);
 681       // Pull out the field info and store it at the end of the field
 682       // info list so it can be reused later.
 683       _fields.append(_fields.at(index));
 684       _fields.remove_at(index);
 685     }
 686   }
 687 
 688   void kill() {
 689     _newobjects.trunc_to(0);
 690     _objects.trunc_to(0);
 691     _values.kill();
 692   }

 935         }
 936         break;
 937        }
 938       default       : ShouldNotReachHere();
 939     }
 940     Value x;
 941     if (patch_state != NULL) {
 942       x = new Constant(t, patch_state);
 943     } else {
 944       x = new Constant(t);
 945     }
 946     push(t, append(x));
 947   }
 948 }
 949 
 950 
 951 void GraphBuilder::load_local(ValueType* type, int index) {
 952   Value x = state()->local_at(index);
 953   assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
 954   push(type, x);
 955   if (x->as_NewInlineTypeInstance() != NULL && x->as_NewInlineTypeInstance()->in_larval_state()) {
 956     if (x->as_NewInlineTypeInstance()->on_stack_count() == 1) {
 957       x->as_NewInlineTypeInstance()->set_not_larva_anymore();
 958     } else {
 959       x->as_NewInlineTypeInstance()->increment_on_stack_count();
 960     }
 961   }
 962 }
 963 
 964 
 965 void GraphBuilder::store_local(ValueType* type, int index) {
 966   Value x = pop(type);
 967   store_local(state(), x, index);
 968   if (x->as_NewInlineTypeInstance() != NULL) {
 969     x->as_NewInlineTypeInstance()->set_local_index(index);
 970   }
 971 }
 972 
 973 
 974 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
 975   if (parsing_jsr()) {
 976     // We need to do additional tracking of the location of the return
 977     // address for jsrs since we don't handle arbitrary jsr/ret
 978     // constructs. Here we are figuring out in which circumstances we
 979     // need to bail out.
 980     if (x->type()->is_address()) {
 981       scope_data()->set_jsr_return_address_local(index);
 982 
 983       // Also check parent jsrs (if any) at this time to see whether
 984       // they are using this local. We don't handle skipping over a
 985       // ret.
 986       for (ScopeData* cur_scope_data = scope_data()->parent();
 987            cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
 988            cur_scope_data = cur_scope_data->parent()) {
 989         if (cur_scope_data->jsr_return_address_local() == index) {
 990           BAILOUT("subroutine overwrites return address from previous subroutine");
 991         }
 992       }
 993     } else if (index == scope_data()->jsr_return_address_local()) {
 994       scope_data()->set_jsr_return_address_local(-1);
 995     }
 996   }
 997 
 998   state->store_local(index, round_fp(x));
 999   if (x->as_NewInlineTypeInstance() != NULL) {
1000     x->as_NewInlineTypeInstance()->set_local_index(index);
1001   }
1002 }
1003 
1004 
1005 void GraphBuilder::load_indexed(BasicType type) {
 1006   // In case of in-block code motion in range check elimination
1007   ValueStack* state_before = NULL;
1008   int array_idx = state()->stack_size() - 2;
1009   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flattened_array()) {
1010     // Save the entire state and re-execute on deopt when accessing flattened arrays
1011     state_before = copy_state_before();
1012     state_before->set_should_reexecute(true);
1013   } else {
1014     state_before = copy_state_indexed_access();
1015   }
1016   compilation()->set_has_access_indexed(true);
1017   Value index = ipop();
1018   Value array = apop();
1019   Value length = NULL;
1020   if (CSEArrayLength ||
1021       (array->as_Constant() != NULL) ||
1022       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1023       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1024       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1025     length = append(new ArrayLength(array, state_before));
1026   }
1027 
1028   bool need_membar = false;
1029   LoadIndexed* load_indexed = NULL;
1030   Instruction* result = NULL;
1031   if (array->is_loaded_flattened_array()) {
1032     ciType* array_type = array->declared_type();
1033     ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1034 
1035     bool can_delay_access = false;
1036     ciBytecodeStream s(method());
1037     s.force_bci(bci());
1038     s.next();
1039     if (s.cur_bc() == Bytecodes::_getfield) {
1040       bool will_link;
1041       ciField* next_field = s.get_field(will_link);
1042       bool next_needs_patching = !next_field->holder()->is_loaded() ||
1043                                  !next_field->will_link(method(), Bytecodes::_getfield) ||
1044                                  PatchALot;
1045       can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1046     }
1047     if (can_delay_access) {
1048       // potentially optimizable array access, storing information for delayed decision
1049       LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1050       DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1051       li->set_delayed(dli);
1052       set_pending_load_indexed(dli);
1053       return; // Nothing else to do for now
1054     } else {
1055       if (elem_klass->is_empty()) {
1056         // No need to create a new instance, the default instance will be used instead
1057         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1058         apush(append(load_indexed));
1059       } else {
1060         NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(elem_klass, state_before);
1061         _memory->new_instance(new_instance);
1062         apush(append_split(new_instance));
1063         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1064         load_indexed->set_vt(new_instance);
1065         // The LoadIndexed node will initialise this instance by copying from
1066         // the flattened field.  Ensure these stores are visible before any
1067         // subsequent store that publishes this reference.
1068         need_membar = true;
1069       }
1070     }
1071   } else {
1072     load_indexed = new LoadIndexed(array, index, length, type, state_before);
1073     if (profile_array_accesses() && is_reference_type(type)) {
1074       compilation()->set_would_profile(true);
1075       load_indexed->set_should_profile(true);
1076       load_indexed->set_profiled_method(method());
1077       load_indexed->set_profiled_bci(bci());
1078     }
1079   }
1080   result = append(load_indexed);
1081   if (need_membar) {
1082     append(new MemBar(lir_membar_storestore));
1083   }
1084   assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1085   if (!array->is_loaded_flattened_array()) {
1086     push(as_ValueType(type), result);
1087   }
1088 }
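
The delayed-access path in the patched load_indexed() above hinges on a one-bytecode peek: a flattened element load is deferred only when it directly feeds a getfield that needs no patching. A simplified model of that decision; the enum and helper below are illustrative, not HotSpot code:

    #include <cassert>
    #include <vector>

    enum Bc { AALOAD, GETFIELD, POP };

    bool can_delay(const std::vector<Bc>& code, size_t pc, bool next_needs_patching) {
      // delay only when the loaded element is consumed by a getfield right away
      return pc + 1 < code.size() && code[pc + 1] == GETFIELD && !next_needs_patching;
    }

    int main() {
      std::vector<Bc> code = {AALOAD, GETFIELD, POP};
      assert(can_delay(code, 0, false));
      assert(!can_delay(code, 0, true));   // patching required: take the eager path
      assert(!can_delay(code, 1, false));  // next bytecode is not a getfield
      return 0;
    }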
1089 
1090 
1091 void GraphBuilder::store_indexed(BasicType type) {
 1092   // In case of in-block code motion in range check elimination
1093   ValueStack* state_before = NULL;
1094   int array_idx = state()->stack_size() - 3;
1095   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flattened_array()) {
1096     // Save the entire state and re-execute on deopt when accessing flattened arrays
1097     state_before = copy_state_before();
1098     state_before->set_should_reexecute(true);
1099   } else {
1100     state_before = copy_state_indexed_access();
1101   }
1102   compilation()->set_has_access_indexed(true);
1103   Value value = pop(as_ValueType(type));
1104   Value index = ipop();
1105   Value array = apop();
1106   Value length = NULL;
1107   if (CSEArrayLength ||
1108       (array->as_Constant() != NULL) ||
1109       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1110       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1111       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1112     length = append(new ArrayLength(array, state_before));
1113   }
1114   ciType* array_type = array->declared_type();
1115   bool check_boolean = false;
1116   if (array_type != NULL) {
1117     if (array_type->is_loaded() &&
1118       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1119       assert(type == T_BYTE, "boolean store uses bastore");
1120       Value mask = append(new Constant(new IntConstant(1)));
1121       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1122     }
1123   } else if (type == T_BYTE) {
1124     check_boolean = true;
1125   }
1126 
1127   StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1128   if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flattened_array()) {
1129     compilation()->set_would_profile(true);
1130     store_indexed->set_should_profile(true);
1131     store_indexed->set_profiled_method(method());
1132     store_indexed->set_profiled_bci(bci());
1133   }
1134   Instruction* result = append(store_indexed);
1135   assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1136   _memory->store_value(value);
1137 }
1138 
1139 void GraphBuilder::stack_op(Bytecodes::Code code) {
1140   switch (code) {
1141     case Bytecodes::_pop:
1142       { Value w = state()->raw_pop();
1143         update_larva_stack_count(w);
1144       }
1145       break;
1146     case Bytecodes::_pop2:
1147       { Value w1 = state()->raw_pop();
1148         Value w2 = state()->raw_pop();
1149         update_larva_stack_count(w1);
1150         update_larva_stack_count(w2);
1151       }
1152       break;
1153     case Bytecodes::_dup:
1154       { Value w = state()->raw_pop();
1155         update_larval_state(w);
1156         state()->raw_push(w);
1157         state()->raw_push(w);
1158       }
1159       break;
1160     case Bytecodes::_dup_x1:
1161       { Value w1 = state()->raw_pop();
1162         Value w2 = state()->raw_pop();
1163         update_larval_state(w1);
1164         state()->raw_push(w1);
1165         state()->raw_push(w2);
1166         state()->raw_push(w1);
1167       }
1168       break;
1169     case Bytecodes::_dup_x2:
1170       { Value w1 = state()->raw_pop();
1171         Value w2 = state()->raw_pop();
1172         Value w3 = state()->raw_pop();
1173         // special handling for the dup_x2/pop sequence (see JDK-8251046)
1174         if (w1 != NULL && w1->as_NewInlineTypeInstance() != NULL) {
1175           ciBytecodeStream s(method());
1176           s.force_bci(bci());
1177           s.next();
1178           if (s.cur_bc() != Bytecodes::_pop) {
1179             w1->as_NewInlineTypeInstance()->set_not_larva_anymore();
 1180           } else {
 1181             w1->as_NewInlineTypeInstance()->increment_on_stack_count();
 1182           }
1183         }
1184         state()->raw_push(w1);
1185         state()->raw_push(w3);
1186         state()->raw_push(w2);
1187         state()->raw_push(w1);
1188       }
1189       break;
1190     case Bytecodes::_dup2:
1191       { Value w1 = state()->raw_pop();
1192         Value w2 = state()->raw_pop();
1193         update_larval_state(w1);
1194         update_larval_state(w2);
1195         state()->raw_push(w2);
1196         state()->raw_push(w1);
1197         state()->raw_push(w2);
1198         state()->raw_push(w1);
1199       }
1200       break;
1201     case Bytecodes::_dup2_x1:
1202       { Value w1 = state()->raw_pop();
1203         Value w2 = state()->raw_pop();
1204         Value w3 = state()->raw_pop();
1205         update_larval_state(w1);
1206         update_larval_state(w2);
1207         state()->raw_push(w2);
1208         state()->raw_push(w1);
1209         state()->raw_push(w3);
1210         state()->raw_push(w2);
1211         state()->raw_push(w1);
1212       }
1213       break;
1214     case Bytecodes::_dup2_x2:
1215       { Value w1 = state()->raw_pop();
1216         Value w2 = state()->raw_pop();
1217         Value w3 = state()->raw_pop();
1218         Value w4 = state()->raw_pop();
1219         update_larval_state(w1);
1220         update_larval_state(w2);
1221         state()->raw_push(w2);
1222         state()->raw_push(w1);
1223         state()->raw_push(w4);
1224         state()->raw_push(w3);
1225         state()->raw_push(w2);
1226         state()->raw_push(w1);
1227       }
1228       break;
1229     case Bytecodes::_swap:
1230       { Value w1 = state()->raw_pop();
1231         Value w2 = state()->raw_pop();
1232         state()->raw_push(w1);
1233         state()->raw_push(w2);
1234       }
1235       break;
1236     default:
1237       ShouldNotReachHere();
1238       break;
1239   }
1240 }
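
The update_larval_state()/update_larva_stack_count() calls in the patched stack_op() keep a reference count for a still-larval inline instance on the operand stack; their bodies are outside this hunk, so the following is a simplified model of the apparent bookkeeping, in which dup-style bytecodes add references and the instance leaves the larval state when the last reference is consumed:

    #include <cassert>

    struct LarvalSketch {
      int  on_stack_count = 1;
      bool larval = true;
      void add_stack_ref() { if (larval) ++on_stack_count; }  // dup and friends
      void consume_ref() {                                    // pop or other use
        if (!larval) return;
        if (on_stack_count == 1) larval = false; else --on_stack_count;
      }
    };

    int main() {
      LarvalSketch v;
      v.add_stack_ref();  // dup: two references on the stack
      v.consume_ref();    // first consumer
      assert(v.larval);
      v.consume_ref();    // last consumer ends the larval phase
      assert(!v.larval);
      return 0;
    }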

1328 
1329 
1330 void GraphBuilder::_goto(int from_bci, int to_bci) {
1331   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1332   if (is_profiling()) {
1333     compilation()->set_would_profile(true);
1334     x->set_profiled_bci(bci());
1335     if (profile_branches()) {
1336       x->set_profiled_method(method());
1337       x->set_should_profile(true);
1338     }
1339   }
1340   append(x);
1341 }
1342 
1343 
1344 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1345   BlockBegin* tsux = block_at(stream()->get_dest());
1346   BlockBegin* fsux = block_at(stream()->next_bci());
1347   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1348 
1349   bool subst_check = false;
1350   if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1351     ValueType* left_vt = x->type();
1352     ValueType* right_vt = y->type();
1353     if (left_vt->is_object()) {
1354       assert(right_vt->is_object(), "must be");
1355       ciKlass* left_klass = x->as_loaded_klass_or_null();
1356       ciKlass* right_klass = y->as_loaded_klass_or_null();
1357 
1358       if (left_klass == NULL || right_klass == NULL) {
 1359         // The klass is still unloaded, or came from a Phi node. Go to the slow case.
1360         subst_check = true;
1361       } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
 1362         // Either operand may be a value object, but we're not sure. Go to the slow case.
1363         subst_check = true;
1364       } else {
1365         // No need to do substitutability check
1366       }
1367     }
1368   }
1369   if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1370       is_profiling() && profile_branches()) {
1371     compilation()->set_would_profile(true);
1372     append(new ProfileACmpTypes(method(), bci(), x, y));
1373   }
1374 
 1375   // In case of loop invariant code motion or predicate insertion
 1376   // before the body of a loop, the state is needed
1377   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : NULL, is_bb, subst_check));
1378 
1379   assert(i->as_Goto() == NULL ||
1380          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1381          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1382          "safepoint state of Goto returned by canonicalizer incorrect");
1383 
1384   if (is_profiling()) {
1385     If* if_node = i->as_If();
1386     if (if_node != NULL) {
1387       // Note that we'd collect profile data in this method if we wanted it.
1388       compilation()->set_would_profile(true);
1389       // At level 2 we need the proper bci to count backedges
1390       if_node->set_profiled_bci(bci());
1391       if (profile_branches()) {
1392         // Successors can be rotated by the canonicalizer, check for this case.
1393         if_node->set_profiled_method(method());
1394         if_node->set_should_profile(true);
1395         if (if_node->tsux() == fsux) {
1396           if_node->set_swapped(true);
1397         }

1608 
1609   if (needs_check) {
1610     // Perform the registration of finalizable objects.
1611     ValueStack* state_before = copy_state_for_exception();
1612     load_local(objectType, 0);
1613     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1614                                state()->pop_arguments(1),
1615                                true, state_before, true));
1616   }
1617 }
1618 
1619 
1620 void GraphBuilder::method_return(Value x, bool ignore_return) {
1621   if (RegisterFinalizersAtInit &&
1622       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1623     call_register_finalizer();
1624   }
1625 
1626   // The conditions for a memory barrier are described in Parse::do_exits().
1627   bool need_mem_bar = false;
1628   if ((method()->is_object_constructor() || method()->is_static_init_factory()) &&
1629        (scope()->wrote_final() ||
1630          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1631          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1632     need_mem_bar = true;
1633   }
1634 
1635   BasicType bt = method()->return_type()->basic_type();
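       // Normalize sub-int return values so the caller always observes a valid
       // value of the declared type, e.g. a byte is sign-extended via (x << 24) >> 24.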
1636   switch (bt) {
1637     case T_BYTE:
1638     {
1639       Value shift = append(new Constant(new IntConstant(24)));
1640       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1641       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1642       break;
1643     }
1644     case T_SHORT:
1645     {
1646       Value shift = append(new Constant(new IntConstant(16)));
1647       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1648       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1759   // Attach dimension info to stable arrays.
1760   if (FoldStableValues &&
1761       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1762     ciArray* array = field_value.as_object()->as_array();
1763     jint dimension = field->type()->as_array_klass()->dimension();
1764     value = new StableArrayConstant(array, dimension);
1765   }
1766 
1767   switch (field_type) {
1768     case T_ARRAY:
1769     case T_OBJECT:
1770       if (field_value.as_object()->should_be_constant()) {
1771         return new Constant(value);
1772       }
1773       return NULL; // Not a constant.
1774     default:
1775       return new Constant(value);
1776   }
1777 }
1778 
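     // Copy all non-static fields of the inline klass vk from src (at src_off)
     // to dest (at dest_off) by emitting a load/store pair for each field.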
1779 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1780   assert(vk->nof_nonstatic_fields() > 0, "Empty inline type access should be removed");
1781   for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
1782     ciField* inner_field = vk->nonstatic_field_at(i);
1783     assert(!inner_field->is_flattened(), "the iteration over nested fields is handled by the loop itself");
1784     int off = inner_field->offset() - vk->first_field_offset();
1785     LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
1786     Value replacement = append(load);
1787     StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
1788     store->set_enclosing_field(enclosing_field);
1789     append(store);
1790   }
1791 }
1792 
1793 void GraphBuilder::access_field(Bytecodes::Code code) {
1794   bool will_link;
1795   ciField* field = stream()->get_field(will_link);
1796   ciInstanceKlass* holder = field->holder();
1797   BasicType field_type = field->type()->basic_type();
1798   ValueType* type = as_ValueType(field_type);
1799 
1800   // call will_link again to determine if the field is valid.
1801   const bool needs_patching = !holder->is_loaded() ||
1802                               !field->will_link(method(), code) ||
1803                               (!field->is_flattened() && PatchALot);
1804 
1805   ValueStack* state_before = NULL;
1806   if (!holder->is_initialized() || needs_patching) {
1807     // save state before instruction for debug info when
1808     // deoptimization happens during patching
1809     state_before = copy_state_before();
1810   }
1811 
1812   Value obj = NULL;
1813   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1814     if (state_before != NULL) {
1815       // build a patching constant
1816       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1817     } else {
1818       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1819     }
1820   }
1821 
1822   if (field->is_final() && code == Bytecodes::_putfield) {
1823     scope()->set_wrote_final();
1824   }
1825 
1826   if (code == Bytecodes::_putfield) {
1827     scope()->set_wrote_fields();
1828     if (field->is_volatile()) {
1829       scope()->set_wrote_volatile();
1830     }
1831   }
1832 
1833   int offset = !needs_patching ? field->offset() : -1;
1834   switch (code) {
1835     case Bytecodes::_getstatic: {
1836       // check for compile-time constants, i.e., initialized static final fields
1837       Value constant = NULL;
1838       if (field->is_static_constant() && !PatchALot) {
1839         ciConstant field_value = field->constant_value();
1840         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1841                "stable static w/ default value shouldn't be a constant");
1842         constant = make_constant(field_value, field);
1843       } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1844                  field->type()->as_inline_klass()->is_empty()) {
1845         // Loading from a field of an empty inline type. Just return the default instance.
1846         constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1847       }
1848       if (constant != NULL) {
1849         push(type, append(constant));
1850       } else {
1851         if (state_before == NULL) {
1852           state_before = copy_state_for_exception();
1853         }
1854         LoadField* load_field = new LoadField(append(obj), offset, field, true,
1855                                         state_before, needs_patching);
1856         push(type, append(load_field));
1857       }
1858       break;
1859     }
1860     case Bytecodes::_putstatic: {
1861       Value val = pop(type);
1862       if (state_before == NULL) {
1863         state_before = copy_state_for_exception();
1864       }
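           // Booleans are stored as 0 or 1; mask the incoming int down to its lowest bit.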
1865       if (field_type == T_BOOLEAN) {
1866         Value mask = append(new Constant(new IntConstant(1)));
1867         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1868       }
1869       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1870         // Storing to a field of an empty inline type. Ignore.
1871         break;
1872       }
1873       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1874       break;
1875     }
1876     case Bytecodes::_getfield: {
1877       // Check for compile-time constants, i.e., trusted final non-static fields.
1878       Value constant = NULL;
1879       if (state_before == NULL && field->is_flattened()) {
1880         // Save the entire state and re-execute on deopt when accessing flattened fields
1881         assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1882         state_before = copy_state_before();
1883       }
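           // If a field access or indexed load is pending, the receiver is not on the
           // stack; it is supplied by the delayed access handled below.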
1884       if (!has_pending_field_access() && !has_pending_load_indexed()) {
1885         obj = apop();
1886         ObjectType* obj_type = obj->type()->as_ObjectType();
1887         if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1888           // Loading from a field of an empty inline type. Just return the default instance.
1889           null_check(obj);
1890           constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1891         } else if (field->is_constant() && !field->is_flattened() && obj_type->is_constant() && !PatchALot) {
1892           ciObject* const_oop = obj_type->constant_value();
1893           if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1894             ciConstant field_value = field->constant_value_of(const_oop);
1895             if (field_value.is_valid()) {
1896               if (field->is_null_free() && field_value.is_null_or_zero()) {
1897                 // Non-flattened inline type field. Replace null by the default value.
1898                 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1899               } else {
1900                 constant = make_constant(field_value, field);
1901               }
1902               // For CallSite objects add a dependency for invalidation of the optimization.
1903               if (field->is_call_site_target()) {
1904                 ciCallSite* call_site = const_oop->as_call_site();
1905                 if (!call_site->is_fully_initialized_constant_call_site()) {
1906                   ciMethodHandle* target = field_value.as_object()->as_method_handle();
1907                   dependency_recorder()->assert_call_site_target_value(call_site, target);
1908                 }
1909               }
1910             }
1911           }
1912         }
1913       }
1914       if (constant != NULL) {
1915         push(type, append(constant));
1916       } else {
1917         if (state_before == NULL) {
1918           state_before = copy_state_for_exception();
1919         }
1920         if (!field->is_flattened()) {
1921           if (has_pending_field_access()) {
1922             assert(!needs_patching, "Can't patch delayed field access");
1923             obj = pending_field_access()->obj();
1924             offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
1925             field = pending_field_access()->holder()->get_field_by_offset(offset, false);
1926             assert(field != NULL, "field not found");
1927             set_pending_field_access(NULL);
1928           } else if (has_pending_load_indexed()) {
1929             assert(!needs_patching, "Can't patch delayed field access");
1930             pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
1931             LoadIndexed* li = pending_load_indexed()->load_instr();
1932             li->set_type(type);
1933             push(type, append(li));
1934             set_pending_load_indexed(NULL);




1935             break;
1936           }
1937           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
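               // The memory buffer may forward a previously stored or loaded value,
               // eliminating a redundant load.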
1938           Value replacement = !needs_patching ? _memory->load(load) : load;
1939           if (replacement != load) {
1940             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1941             // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1942             // conversion. Emit an explicit conversion here to get the correct field value after the write.
1943             switch (field_type) {
1944             case T_BOOLEAN:
1945             case T_BYTE:
1946               replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
1947               break;
1948             case T_CHAR:
1949               replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
1950               break;
1951             case T_SHORT:
1952               replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
1953               break;
1954             default:
1955               break;
1956             }
1957             push(type, replacement);
1958           } else {
1959             push(type, append(load));
1960           }
1961         } else {
1962           // Look at the next bytecode to check if we can delay the field access
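               // Delaying lets a chain of getfields on flattened fields be folded into
               // a single access, avoiding allocation of intermediate buffered values.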
1963           bool can_delay_access = false;
1964           ciBytecodeStream s(method());
1965           s.force_bci(bci());
1966           s.next();
1967           if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
1968             ciField* next_field = s.get_field(will_link);
1969             bool next_needs_patching = !next_field->holder()->is_loaded() ||
1970                                        !next_field->will_link(method(), Bytecodes::_getfield) ||
1971                                        PatchALot;
1972             can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1973           }
1974           if (can_delay_access) {
1975             if (has_pending_load_indexed()) {
1976               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
1977             } else if (has_pending_field_access()) {
1978               pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
1979             } else {
1980               null_check(obj);
1981               DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset());
1982               set_pending_field_access(dfa);
1983             }
1984           } else {
1985             ciInlineKlass* inline_klass = field->type()->as_inline_klass();
1986             scope()->set_wrote_final();
1987             scope()->set_wrote_fields();
1988             bool need_membar = false;
1989             if (inline_klass->is_empty()) {
1990               apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
1991               if (has_pending_field_access()) {
1992                 set_pending_field_access(NULL);
1993               } else if (has_pending_load_indexed()) {
1994                 set_pending_load_indexed(NULL);
1995               }
1996             } else if (has_pending_load_indexed()) {
1997               assert(!needs_patching, "Can't patch delayed field access");
1998               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
1999               NewInlineTypeInstance* vt = new NewInlineTypeInstance(inline_klass, pending_load_indexed()->state_before());
2000               _memory->new_instance(vt);
2001               pending_load_indexed()->load_instr()->set_vt(vt);
2002               apush(append_split(vt));
2003               append(pending_load_indexed()->load_instr());
2004               set_pending_load_indexed(NULL);
2005               need_membar = true;
2006             } else {
2007               NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(inline_klass, state_before);
2008               _memory->new_instance(new_instance);
2009               apush(append_split(new_instance));
2010               assert(!needs_patching, "Can't patch flattened inline type field access");
2011               if (has_pending_field_access()) {
2012                 copy_inline_content(inline_klass, pending_field_access()->obj(),
2013                                     pending_field_access()->offset() + field->offset() - field->holder()->as_inline_klass()->first_field_offset(),
2014                                     new_instance, inline_klass->first_field_offset(), state_before);
2015                 set_pending_field_access(NULL);
2016               } else {
2017                 copy_inline_content(inline_klass, obj, field->offset(), new_instance, inline_klass->first_field_offset(), state_before);
2018               }
2019               need_membar = true;
2020             }
2021             if (need_membar) {
2022               // If we allocated a new instance, ensure the stores that copy the
2023               // field contents are visible before any subsequent store that
2024               // publishes this reference.
2025               append(new MemBar(lir_membar_storestore));
2026             }
2027           }
2028         }
2029       }
2030       break;
2031     }
2032     case Bytecodes::_putfield: {
2033       Value val = pop(type);
2034       obj = apop();
2035       if (state_before == NULL) {
2036         state_before = copy_state_for_exception();
2037       }
2038       if (field_type == T_BOOLEAN) {
2039         Value mask = append(new Constant(new IntConstant(1)));
2040         val = append(new LogicOp(Bytecodes::_iand, val, mask));
2041       }
2042       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2043         // Storing to a field of an empty inline type. Ignore.
2044         null_check(obj);
2045       } else if (!field->is_flattened()) {
2046         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2047         if (!needs_patching) store = _memory->store(store);
2048         if (store != NULL) {
2049           append(store);
2050         }
2051       } else {
2052         assert(!needs_patching, "Can't patch flattened inline type field access");
2053         ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2054         copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
2055       }
2056       break;
2057     }
2058     default:
2059       ShouldNotReachHere();
2060       break;
2061   }
2062 }
2063 
2064 // Baseline version of withfield: allocate every time.
2065 void GraphBuilder::withfield(int field_index) {
2066   // Save the entire state and re-execute on deopt
2067   ValueStack* state_before = copy_state_before();
2068   state_before->set_should_reexecute(true);
2069 
2070   bool will_link;
2071   ciField* field_modify = stream()->get_field(will_link);
2072   ciInstanceKlass* holder = field_modify->holder();
2073   BasicType field_type = field_modify->type()->basic_type();
2074   ValueType* type = as_ValueType(field_type);
2075   Value val = pop(type);
2076   Value obj = apop();
2077 
2078   if (!holder->is_loaded() || !holder->is_inlinetype()) {
2079     apush(append_split(new Deoptimize(holder, state_before)));
2080     return;
2081   }
2082 
2083   // call will_link again to determine if the field is valid.
2084   const bool needs_patching = !field_modify->will_link(method(), Bytecodes::_withfield) ||
2085                               (!field_modify->is_flattened() && PatchALot);
2086   const int offset_modify = !needs_patching ? field_modify->offset() : -1;
2087 
2088   scope()->set_wrote_final();
2089   scope()->set_wrote_fields();
2090 
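       // If the source object is still in larval (under construction) state, update it
       // in place; otherwise allocate a new instance and copy over every field that is
       // not being modified.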
2091   NewInlineTypeInstance* new_instance;
2092   if (obj->as_NewInlineTypeInstance() != NULL && obj->as_NewInlineTypeInstance()->in_larval_state()) {
2093     new_instance = obj->as_NewInlineTypeInstance();
2094     apush(append_split(new_instance));
2095   } else {
2096     new_instance = new NewInlineTypeInstance(holder->as_inline_klass(), state_before);
2097     _memory->new_instance(new_instance);
2098     apush(append_split(new_instance));
2099 
2100     // Initialize fields which are not modified
2101     for (int i = 0; i < holder->nof_nonstatic_fields(); i++) {
2102       ciField* field = holder->nonstatic_field_at(i);
2103       int offset = field->offset();
2104       // Don't use offset_modify here; it might be set to -1 if needs_patching is true.
2105       if (offset != field_modify->offset()) {
2106         if (field->is_flattened()) {
2107           ciInlineKlass* vk = field->type()->as_inline_klass();
2108           if (!vk->is_empty()) {
2109             copy_inline_content(vk, obj, offset, new_instance, vk->first_field_offset(), state_before, field);
2110           }
2111         } else {
2112           LoadField* load = new LoadField(obj, offset, field, false, state_before, false);
2113           Value replacement = append(load);
2114           StoreField* store = new StoreField(new_instance, offset, field, replacement, false, state_before, false);
2115           append(store);
2116         }
2117       }
2118     }
2119   }
2120 
2121   // Field to modify
2122   if (field_type == T_BOOLEAN) {
2123     Value mask = append(new Constant(new IntConstant(1)));
2124     val = append(new LogicOp(Bytecodes::_iand, val, mask));
2125   }
2126   if (field_modify->is_flattened()) {
2127     assert(!needs_patching, "Can't patch flattened inline type field access");
2128     ciInlineKlass* vk = field_modify->type()->as_inline_klass();
2129     if (!vk->is_empty()) {
2130       copy_inline_content(vk, val, vk->first_field_offset(), new_instance, offset_modify, state_before, field_modify);
2131     }
2132   } else {
2133     StoreField* store = new StoreField(new_instance, offset_modify, field_modify, val, false, state_before, needs_patching);
2134     append(store);
2135   }
2136 }
2137 
2138 Dependencies* GraphBuilder::dependency_recorder() const {
2139   assert(DeoptC1, "need debug information");
2140   return compilation()->dependency_recorder();
2141 }
2142 
2143 // How many arguments do we want to profile?
2144 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2145   int n = 0;
2146   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2147   start = has_receiver ? 1 : 0;
2148   if (profile_arguments()) {
2149     ciProfileData* data = method()->method_data()->bci_to_data(bci());
2150     if (data != NULL && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2151       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2152     }
2153   }
2154   // If we are inlining, we also need to collect arguments so we can profile parameters for the target.
2155   if (profile_parameters() && target != NULL) {
2156     if (target->method_data() != NULL && target->method_data()->parameters_type_data() != NULL) {

2202   ciKlass*              holder = stream()->get_declared_method_holder();
2203   const Bytecodes::Code bc_raw = stream()->cur_bc_raw();
2204   assert(declared_signature != NULL, "cannot be null");
2205   assert(will_link == target->is_loaded(), "");
2206 
2207   ciInstanceKlass* klass = target->holder();
2208   assert(!target->is_loaded() || klass->is_loaded(), "loaded target must imply loaded klass");
2209 
2210   // check if CHA possible: if so, change the code to invoke_special
2211   ciInstanceKlass* calling_klass = method()->holder();
2212   ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
2213   ciInstanceKlass* actual_recv = callee_holder;
2214 
2215   CompileLog* log = compilation()->log();
2216   if (log != NULL)
2217       log->elem("call method='%d' instr='%s'",
2218                 log->identify(target),
2219                 Bytecodes::name(code));
2220 
2221   // invoke-special-super
2222   if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor()) {
2223     ciInstanceKlass* sender_klass = calling_klass;
2224     if (sender_klass->is_interface()) {
2225       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2226       Value receiver = state()->stack_at(index);
2227       CheckCast* c = new CheckCast(sender_klass, receiver, copy_state_before());
2228       c->set_invokespecial_receiver_check();
2229       state()->stack_at_put(index, append_split(c));
2230     }
2231   }
2232 
2233   // Some methods are obviously bindable without any type checks, so
2234   // convert them directly to an invokespecial or invokestatic.
2235   if (target->is_loaded() && !target->is_abstract() && target->can_be_statically_bound()) {
2236     switch (bc_raw) {
2237     case Bytecodes::_invokevirtual:
2238       code = Bytecodes::_invokespecial;
2239       break;
2240     case Bytecodes::_invokehandle:
2241       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2242       break;

2452       null_check(recv);
2453     }
2454 
2455     if (is_profiling()) {
2456       // Note that we'd collect profile data in this method if we wanted it.
2457       compilation()->set_would_profile(true);
2458 
2459       if (profile_calls()) {
2460         assert(cha_monomorphic_target == NULL || exact_target == NULL, "both cannot be set");
2461         ciKlass* target_klass = NULL;
2462         if (cha_monomorphic_target != NULL) {
2463           target_klass = cha_monomorphic_target->holder();
2464         } else if (exact_target != NULL) {
2465           target_klass = exact_target->holder();
2466         }
2467         profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2468       }
2469     }
2470   }
2471 
2472   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before,
2473                               declared_signature->returns_null_free_inline_type());
2474   // push result
2475   append_split(result);
2476 
2477   if (result_type != voidType) {
2478     push(result_type, round_fp(result));
2479   }
2480   if (profile_return() && result_type->is_object_kind()) {
2481     profile_return_type(result, target);
2482   }
2483 }
2484 
2485 
2486 void GraphBuilder::new_instance(int klass_index) {
2487   ValueStack* state_before = copy_state_exhandling();
2488   bool will_link;
2489   ciKlass* klass = stream()->get_klass(will_link);
2490   assert(klass->is_instance_klass(), "must be an instance klass");
2491   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2492   _memory->new_instance(new_instance);
2493   apush(append_split(new_instance));
2494 }
2495 
2496 void GraphBuilder::default_value(int klass_index) {
2497   bool will_link;
2498   ciKlass* klass = stream()->get_klass(will_link);
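       // defaultvalue pushes the all-fields-default instance of the inline klass;
       // deoptimize if the klass is not yet resolved and initialized.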
2499   if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2500       klass->as_inline_klass()->is_initialized()) {
2501     ciInlineKlass* vk = klass->as_inline_klass();
2502     apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2503   } else {
2504     apush(append_split(new Deoptimize(klass, copy_state_before())));
2505   }
2506 }
2507 
2508 void GraphBuilder::new_type_array() {
2509   ValueStack* state_before = copy_state_exhandling();
2510   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2511 }
2512 
2513 
2514 void GraphBuilder::new_object_array() {
2515   bool will_link;
2516   ciKlass* klass = stream()->get_klass(will_link);
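       // A Q-descriptor for the element type makes the array null-free.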
2517   bool null_free = stream()->has_Q_signature();
2518   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2519   NewArray* n = new NewObjectArray(klass, ipop(), state_before, null_free);
2520   apush(append_split(n));
2521 }
2522 
2523 
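     // A type check against k can be implemented as a single klass-pointer compare
     // if k is a final class, or a leaf class protected by a CHA dependency.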
2524 bool GraphBuilder::direct_compare(ciKlass* k) {
2525   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2526     ciInstanceKlass* ik = k->as_instance_klass();
2527     if (ik->is_final()) {
2528       return true;
2529     } else {
2530       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2531         // test class is leaf class
2532         dependency_recorder()->assert_leaf_type(ik);
2533         return true;
2534       }
2535     }
2536   }
2537   return false;
2538 }
2539 
2540 
2541 void GraphBuilder::check_cast(int klass_index) {
2542   bool will_link;
2543   ciKlass* klass = stream()->get_klass(will_link);
2544   bool null_free = stream()->has_Q_signature();
2545   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2546   CheckCast* c = new CheckCast(klass, apop(), state_before, null_free);
2547   apush(append_split(c));
2548   c->set_direct_compare(direct_compare(klass));
2549 
2550   if (is_profiling()) {
2551     // Note that we'd collect profile data in this method if we wanted it.
2552     compilation()->set_would_profile(true);
2553 
2554     if (profile_checkcasts()) {
2555       c->set_profiled_method(method());
2556       c->set_profiled_bci(bci());
2557       c->set_should_profile(true);
2558     }
2559   }
2560 }
2561 
2562 
2563 void GraphBuilder::instance_of(int klass_index) {
2564   bool will_link;
2565   ciKlass* klass = stream()->get_klass(will_link);
2566   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2567   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2568   ipush(append_split(i));
2569   i->set_direct_compare(direct_compare(klass));
2570 
2571   if (is_profiling()) {
2572     // Note that we'd collect profile data in this method if we wanted it.
2573     compilation()->set_would_profile(true);
2574 
2575     if (profile_checkcasts()) {
2576       i->set_profiled_method(method());
2577       i->set_profiled_bci(bci());
2578       i->set_should_profile(true);
2579     }
2580   }
2581 }
2582 
2583 
2584 void GraphBuilder::monitorenter(Value x, int bci) {
2585   bool maybe_inlinetype = false;
2586   if (bci == InvocationEntryBci) {
2587     // Called by GraphBuilder::inline_sync_entry.
2588 #ifdef ASSERT
2589     ciType* obj_type = x->declared_type();
2590     assert(obj_type == NULL || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2591 #endif
2592   } else {
2593     // We are compiling a monitorenter bytecode
2594     if (EnableValhalla) {
2595       ciType* obj_type = x->declared_type();
2596       if (obj_type == NULL || obj_type->as_klass()->can_be_inline_klass()) {
2597         // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2598         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
2599         maybe_inlinetype = true;
2600       }
2601     }
2602   }
2603 
2604   // save state before locking in case of deoptimization after a NullPointerException
2605   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2606   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
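       // Locking acts as a memory barrier; conservatively invalidate all cached values.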
2607   kill_all();
2608 }
2609 
2610 
2611 void GraphBuilder::monitorexit(Value x, int bci) {
2612   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2613   kill_all();
2614 }
2615 
2616 
2617 void GraphBuilder::new_multi_array(int dimensions) {
2618   bool will_link;
2619   ciKlass* klass = stream()->get_klass(will_link);
2620   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2621 
2622   Values* dims = new Values(dimensions, dimensions, NULL);
2623   // fill in all dimensions
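       // sizes are popped in reverse order: the top of the stack holds the last (innermost) dimension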
2624   int i = dimensions;
2625   while (i-- > 0) dims->at_put(i, ipop());
2626   // create array

2760 
2761 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2762   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
2763     assert(instruction->exception_state() == NULL
2764            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2765            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2766            "exception_state should be of exception kind");
2767     return new XHandlers();
2768   }
2769 
2770   XHandlers*  exception_handlers = new XHandlers();
2771   ScopeData*  cur_scope_data = scope_data();
2772   ValueStack* cur_state = instruction->state_before();
2773   ValueStack* prev_state = NULL;
2774   int scope_count = 0;
2775 
2776   assert(cur_state != NULL, "state_before must be set");
2777   do {
2778     int cur_bci = cur_state->bci();
2779     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2780     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2781            || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2782 
2783 
2784     // join with all potential exception handlers
2785     XHandlers* list = cur_scope_data->xhandlers();
2786     const int n = list->length();
2787     for (int i = 0; i < n; i++) {
2788       XHandler* h = list->handler_at(i);
2789       if (h->covers(cur_bci)) {
2790         // h is a potential exception handler => join it
2791         compilation()->set_has_exception_handlers(true);
2792 
2793         BlockBegin* entry = h->entry_block();
2794         if (entry == block()) {
2795         // It's acceptable for an exception handler to cover itself,
2796         // but we don't handle that in the parser currently. It's
2797         // very rare, so we bail out instead of trying to handle it.
2798           BAILOUT_("exception handler covers itself", exception_handlers);
2799         }
2800         assert(entry->bci() == h->handler_bci(), "must match");
2801         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2802 

3250       case Bytecodes::_invokevirtual  : // fall through
3251       case Bytecodes::_invokespecial  : // fall through
3252       case Bytecodes::_invokestatic   : // fall through
3253       case Bytecodes::_invokedynamic  : // fall through
3254       case Bytecodes::_invokeinterface: invoke(code); break;
3255       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
3256       case Bytecodes::_newarray       : new_type_array(); break;
3257       case Bytecodes::_anewarray      : new_object_array(); break;
3258       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
3259       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
3260       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
3261       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
3262       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
3263       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
3264       case Bytecodes::_wide           : ShouldNotReachHere(); break;
3265       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
3266       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
3267       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
3268       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
3269       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
3270       case Bytecodes::_defaultvalue   : default_value(s.get_index_u2()); break;
3271       case Bytecodes::_withfield      : withfield(s.get_index_u2()); break;
3272       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
3273       default                         : ShouldNotReachHere(); break;
3274     }
3275 
3276     if (log != NULL)
3277       log->clear_context(); // skip marker if nothing was printed
3278 
3279     // save current bci to set up Goto at the end
3280     prev_bci = s.cur_bci();
3281 
3282   }
3283   CHECK_BAILOUT_(NULL);
3284   // stop processing of this block (see try_inline_full)
3285   if (_skip_block) {
3286     _skip_block = false;
3287     assert(_last && _last->as_BlockEnd(), "");
3288     return _last->as_BlockEnd();
3289   }
3290   // if there are any instructions, check whether the last one is a BlockEnd
3291   BlockEnd* end = last()->as_BlockEnd();

3540   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3541 
3542   assert(state->caller_state() == NULL, "should be top scope");
3543   state->clear_locals();
3544   Goto* g = new Goto(target, false);
3545   append(g);
3546   _osr_entry->set_end(g);
3547   target->merge(_osr_entry->end()->state());
3548 
3549   scope_data()->set_stream(NULL);
3550 }
3551 
3552 
3553 ValueStack* GraphBuilder::state_at_entry() {
3554   ValueStack* state = new ValueStack(scope(), NULL);
3555 
3556   // Set up locals for receiver
3557   int idx = 0;
3558   if (!method()->is_static()) {
3559     // we should always see the receiver
3560     state->store_local(idx, new Local(method()->holder(), objectType, idx,
3561              /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3562     idx = 1;
3563   }
3564 
3565   // Set up locals for incoming arguments
3566   ciSignature* sig = method()->signature();
3567   for (int i = 0; i < sig->count(); i++) {
3568     ciType* type = sig->type_at(i);
3569     BasicType basic_type = type->basic_type();
3570     // don't allow T_ARRAY to propagate into the types of locals
3571     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3572     ValueType* vt = as_ValueType(basic_type);
3573     state->store_local(idx, new Local(type, vt, idx, false, sig->is_null_free_at(i)));
3574     idx += type->size();
3575   }
3576 
3577   // lock synchronized method
3578   if (method()->is_synchronized()) {
3579     state->lock(NULL);
3580   }
3581 
3582   return state;
3583 }
3584 
3585 
3586 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3587   : _scope_data(NULL)
3588   , _compilation(compilation)
3589   , _memory(new MemoryBuffer())
3590   , _inline_bailout_msg(NULL)
3591   , _instruction_count(0)
3592   , _osr_entry(NULL)
3593   , _pending_field_access(NULL)
3594   , _pending_load_indexed(NULL)
3595 {
3596   int osr_bci = compilation->osr_bci();
3597 
3598   // determine entry points and bci2block mapping
3599   BlockListBuilder blm(compilation, scope, osr_bci);
3600   CHECK_BAILOUT();
3601 
3602   BlockList* bci2block = blm.bci2block();
3603   BlockBegin* start_block = bci2block->at(0);
3604 
3605   push_root_scope(scope, bci2block, start_block);
3606 
3607   // setup state for std entry
3608   _initial_state = state_at_entry();
3609   start_block->merge(_initial_state);
3610 
3611   // complete graph
3612   _vmap        = new ValueMap();
3613   switch (scope->method()->intrinsic_id()) {
3614   case vmIntrinsics::_dabs          : // fall through