src/hotspot/share/c1/c1_GraphBuilder.cpp

  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "c1/c1_CFGPrinter.hpp"
  26 #include "c1/c1_Canonicalizer.hpp"
  27 #include "c1/c1_Compilation.hpp"
  28 #include "c1/c1_GraphBuilder.hpp"
  29 #include "c1/c1_InstructionPrinter.hpp"
  30 #include "ci/ciCallSite.hpp"
  31 #include "ci/ciField.hpp"
  32 #include "ci/ciKlass.hpp"
  33 #include "ci/ciMemberName.hpp"
  34 #include "ci/ciSymbols.hpp"
  35 #include "ci/ciUtilities.inline.hpp"
  36 #include "classfile/javaClasses.hpp"
  37 #include "compiler/compilationPolicy.hpp"
  38 #include "compiler/compileBroker.hpp"
  39 #include "compiler/compilerEvent.hpp"
  40 #include "interpreter/bytecode.hpp"
  41 #include "jfr/jfrEvents.hpp"
  42 #include "memory/resourceArea.hpp"
  43 #include "runtime/sharedRuntime.hpp"
  44 #include "utilities/checkedCast.hpp"
  45 #include "utilities/macros.hpp"
  46 #if INCLUDE_JFR
  47 #include "jfr/jfr.hpp"
  48 #endif
  49 
  50 class BlockListBuilder {
  51  private:

1041       // they are using this local. We don't handle skipping over a
1042       // ret.
1043       for (ScopeData* cur_scope_data = scope_data()->parent();
1044            cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1045            cur_scope_data = cur_scope_data->parent()) {
1046         if (cur_scope_data->jsr_return_address_local() == index) {
1047           BAILOUT("subroutine overwrites return address from previous subroutine");
1048         }
1049       }
1050     } else if (index == scope_data()->jsr_return_address_local()) {
1051       scope_data()->set_jsr_return_address_local(-1);
1052     }
1053   }
1054 
1055   state->store_local(index, round_fp(x));
1056 }
1057 
1058 
1059 void GraphBuilder::load_indexed(BasicType type) {
1060   // In case of in-block code motion in range check elimination
1061   ValueStack* state_before = copy_state_indexed_access();
1062   compilation()->set_has_access_indexed(true);
1063   Value index = ipop();
1064   Value array = apop();
1065   Value length = nullptr;
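       // Materialize an explicit ArrayLength when the length is trivially known
       // (constant array, fresh allocation with a constant length) or when
       // CSEArrayLength forces it; the LoadIndexed below can then reuse this
       // length for its range check.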
1066   if (CSEArrayLength ||
1067       (array->as_Constant() != nullptr) ||
1068       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1069       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1070       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1071     length = append(new ArrayLength(array, state_before));
1072   }
1073   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1074 }
1075 
1076 
1077 void GraphBuilder::store_indexed(BasicType type) {
1078   // In case of in-block code motion in range check elimination
1079   ValueStack* state_before = copy_state_indexed_access();
1080   compilation()->set_has_access_indexed(true);
1081   Value value = pop(as_ValueType(type));
1082   Value index = ipop();
1083   Value array = apop();
1084   Value length = nullptr;
1085   if (CSEArrayLength ||
1086       (array->as_Constant() != nullptr) ||
1087       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1088       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1089       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1090     length = append(new ArrayLength(array, state_before));
1091   }
1092   ciType* array_type = array->declared_type();
1093   bool check_boolean = false;
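       // If the array is statically known to be a boolean[], mask the stored value
       // to 0/1 right here; if the element type is unknown for a T_BYTE store, defer
       // the masking to the StoreIndexed via check_boolean.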
1094   if (array_type != nullptr) {
1095     if (array_type->is_loaded() &&
1096       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1097       assert(type == T_BYTE, "boolean store uses bastore");
1098       Value mask = append(new Constant(new IntConstant(1)));
1099       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1100     }
1101   } else if (type == T_BYTE) {
1102     check_boolean = true;
1103   }
1104   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1105   append(result);
1106   _memory->store_value(value);
1107 
1108   if (type == T_OBJECT && is_profiling()) {
1109     // Note that we'd collect profile data in this method if we wanted it.
1110     compilation()->set_would_profile(true);
1111 
1112     if (profile_checkcasts()) {
1113       result->set_profiled_method(method());
1114       result->set_profiled_bci(bci());
1115       result->set_should_profile(true);
1116     }
1117   }
1118 }
1119 
1120 
1121 void GraphBuilder::stack_op(Bytecodes::Code code) {
1122   switch (code) {
1123     case Bytecodes::_pop:
1124       { state()->raw_pop();
1125       }
1126       break;
1127     case Bytecodes::_pop2:
1128       { state()->raw_pop();
1129         state()->raw_pop();
1130       }
1131       break;
1132     case Bytecodes::_dup:
1133       { Value w = state()->raw_pop();
1134         state()->raw_push(w);
1135         state()->raw_push(w);
1136       }
1137       break;
1138     case Bytecodes::_dup_x1:
1139       { Value w1 = state()->raw_pop();
1140         Value w2 = state()->raw_pop();
1141         state()->raw_push(w1);
1142         state()->raw_push(w2);
1143         state()->raw_push(w1);
1144       }
1145       break;
1146     case Bytecodes::_dup_x2:
1147       { Value w1 = state()->raw_pop();
1148         Value w2 = state()->raw_pop();
1149         Value w3 = state()->raw_pop();

1288 
1289 
1290 void GraphBuilder::_goto(int from_bci, int to_bci) {
1291   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1292   if (is_profiling()) {
1293     compilation()->set_would_profile(true);
1294     x->set_profiled_bci(bci());
1295     if (profile_branches()) {
1296       x->set_profiled_method(method());
1297       x->set_should_profile(true);
1298     }
1299   }
1300   append(x);
1301 }
1302 
1303 
1304 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1305   BlockBegin* tsux = block_at(stream()->get_dest());
1306   BlockBegin* fsux = block_at(stream()->next_bci());
1307   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
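       // A successor at a lower bci is a backward branch, so this If ends a basic
       // block that must act as a safepoint and needs the state before the branch.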
1308   // In case of loop invariant code motion or predicate insertion
1309   // before the body of a loop the state is needed
1310   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
1311 
1312   assert(i->as_Goto() == nullptr ||
1313          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == (tsux->bci() < stream()->cur_bci())) ||
1314          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == (fsux->bci() < stream()->cur_bci())),
1315          "safepoint state of Goto returned by canonicalizer incorrect");
1316 
1317   if (is_profiling()) {
1318     If* if_node = i->as_If();
1319     if (if_node != nullptr) {
1320       // Note that we'd collect profile data in this method if we wanted it.
1321       compilation()->set_would_profile(true);
1322       // At level 2 we need the proper bci to count backedges
1323       if_node->set_profiled_bci(bci());
1324       if (profile_branches()) {
1325         // Successors can be rotated by the canonicalizer, check for this case.
1326         if_node->set_profiled_method(method());
1327         if_node->set_should_profile(true);
1328         if (if_node->tsux() == fsux) {
1329           if_node->set_swapped(true);
1330         }

1545   }
1546 
1547   if (needs_check) {
1548     // Perform the registration of finalizable objects.
1549     ValueStack* state_before = copy_state_for_exception();
1550     load_local(objectType, 0);
1551     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1552                                state()->pop_arguments(1),
1553                                true, state_before, true));
1554   }
1555 }
1556 
1557 
1558 void GraphBuilder::method_return(Value x, bool ignore_return) {
1559   if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1560     call_register_finalizer();
1561   }
1562 
1563   // The conditions for a memory barrier are described in Parse::do_exits().
1564   bool need_mem_bar = false;
1565   if (method()->name() == ciSymbols::object_initializer_name() &&
1566        (scope()->wrote_final() || scope()->wrote_stable() ||
1567          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1568          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1569     need_mem_bar = true;
1570   }
1571 
1572   BasicType bt = method()->return_type()->basic_type();
1573   switch (bt) {
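       // Normalize sub-int return values to their canonical int form, e.g. sign-extend
       // a T_BYTE result with a 24-bit shift pair.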
1574     case T_BYTE:
1575     {
1576       Value shift = append(new Constant(new IntConstant(24)));
1577       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1578       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1579       break;
1580     }
1581     case T_SHORT:
1582     {
1583       Value shift = append(new Constant(new IntConstant(16)));
1584       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1585       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1696   // Attach dimension info to stable arrays.
1697   if (FoldStableValues &&
1698       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1699     ciArray* array = field_value.as_object()->as_array();
1700     jint dimension = field->type()->as_array_klass()->dimension();
1701     value = new StableArrayConstant(array, dimension);
1702   }
1703 
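       // Object and array values are embedded as constants only when should_be_constant()
       // allows it; primitive values always fold.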
1704   switch (field_type) {
1705     case T_ARRAY:
1706     case T_OBJECT:
1707       if (field_value.as_object()->should_be_constant()) {
1708         return new Constant(value);
1709       }
1710       return nullptr; // Not a constant.
1711     default:
1712       return new Constant(value);
1713   }
1714 }
1715 
1716 void GraphBuilder::access_field(Bytecodes::Code code) {
1717   bool will_link;
1718   ciField* field = stream()->get_field(will_link);
1719   ciInstanceKlass* holder = field->holder();
1720   BasicType field_type = field->type()->basic_type();
1721   ValueType* type = as_ValueType(field_type);

1722   // call will_link again to determine if the field is valid.
1723   const bool needs_patching = !holder->is_loaded() ||
1724                               !field->will_link(method(), code) ||
1725                               PatchALot;
1726 
1727   ValueStack* state_before = nullptr;
1728   if (!holder->is_initialized() || needs_patching) {
1729     // save state before instruction for debug info when
1730     // deoptimization happens during patching
1731     state_before = copy_state_before();
1732   }
1733 
1734   Value obj = nullptr;
1735   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1736     if (state_before != nullptr) {
1737       // build a patching constant
1738       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1739     } else {
1740       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1741     }
1742   }
1743 
1744   if (code == Bytecodes::_putfield) {
1745     scope()->set_wrote_fields();
1746     if (field->is_volatile()) {
1747       scope()->set_wrote_volatile();
1748     }
1749     if (field->is_final()) {
1750       scope()->set_wrote_final();
1751     }
1752     if (field->is_stable()) {
1753       scope()->set_wrote_stable();
1754     }
1755   }
1756 
1757   const int offset = !needs_patching ? field->offset_in_bytes() : -1;
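       // When patching is needed the field offset is unknown at compile time; -1 is a
       // placeholder that is fixed up once the patching code resolves the field.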
1758   switch (code) {
1759     case Bytecodes::_getstatic: {
1760       // check for compile-time constants, i.e., initialized static final fields
1761       Value constant = nullptr;
1762       if (field->is_static_constant() && !PatchALot) {
1763         ciConstant field_value = field->constant_value();
1764         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1765                "stable static w/ default value shouldn't be a constant");
1766         constant = make_constant(field_value, field);
1767       }
1768       if (constant != nullptr) {
1769         push(type, append(constant));
1770       } else {
1771         if (state_before == nullptr) {
1772           state_before = copy_state_for_exception();
1773         }
1774         push(type, append(new LoadField(append(obj), offset, field, true,
1775                                         state_before, needs_patching)));

1776       }
1777       break;
1778     }
1779     case Bytecodes::_putstatic: {
1780       Value val = pop(type);
1781       if (state_before == nullptr) {
1782         state_before = copy_state_for_exception();
1783       }
1784       if (field->type()->basic_type() == T_BOOLEAN) {
1785         Value mask = append(new Constant(new IntConstant(1)));
1786         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1787       }
1788       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1789       break;
1790     }
1791     case Bytecodes::_getfield: {
1792       // Check for compile-time constants, i.e., trusted final non-static fields.
1793       Value constant = nullptr;
1794       obj = apop();
1795       ObjectType* obj_type = obj->type()->as_ObjectType();
1796       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1797         ciObject* const_oop = obj_type->constant_value();
1798         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1799           ciConstant field_value = field->constant_value_of(const_oop);
1800           if (field_value.is_valid()) {
1801             constant = make_constant(field_value, field);
1802             // For CallSite objects add a dependency for invalidation of the optimization.
1803             if (field->is_call_site_target()) {
1804               ciCallSite* call_site = const_oop->as_call_site();
1805               if (!call_site->is_fully_initialized_constant_call_site()) {
1806                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1807                 dependency_recorder()->assert_call_site_target_value(call_site, target);
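                   // Record a dependency so the compiled code is invalidated if the
                   // CallSite target is changed later.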
1808               }
1809             }
1810           }
1811         }
1812       }
1813       if (constant != nullptr) {
1814         push(type, append(constant));
1815       } else {
1816         if (state_before == nullptr) {
1817           state_before = copy_state_for_exception();
1818         }
1819         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1820         Value replacement = !needs_patching ? _memory->load(load) : load;
1821         if (replacement != load) {
1822           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1823           // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1824           // conversion. Emit an explicit conversion here to get the correct field value after the write.
1825           BasicType bt = field->type()->basic_type();
1826           switch (bt) {
1827           case T_BOOLEAN:
1828           case T_BYTE:
1829             replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1830             break;
1831           case T_CHAR:
1832             replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1833             break;
1834           case T_SHORT:
1835             replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1836             break;
1837           default:
1838             break;
1839           }
1840           push(type, replacement);
1841         } else {
1842           push(type, append(load));
1843         }
1844       }
1845       break;
1846     }
1847     case Bytecodes::_putfield: {
1848       Value val = pop(type);
1849       obj = apop();
1850       if (state_before == nullptr) {
1851         state_before = copy_state_for_exception();
1852       }
1853       if (field->type()->basic_type() == T_BOOLEAN) {
1854         Value mask = append(new Constant(new IntConstant(1)));
1855         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1856       }
1857       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1858       if (!needs_patching) store = _memory->store(store);
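       // _memory->store() may eliminate a redundant store (returning nullptr), e.g. when
       // the same value was just stored to this field; only surviving stores are appended.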
1859       if (store != nullptr) {
1860         append(store);
1861       }
1862       break;
1863     }
1864     default:
1865       ShouldNotReachHere();
1866       break;
1867   }
1868 }
1869 
1870 
1871 Dependencies* GraphBuilder::dependency_recorder() const {
1872   assert(DeoptC1, "need debug information");
1873   return compilation()->dependency_recorder();
1874 }
1875 
1876 // How many arguments do we want to profile?
1877 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1878   int n = 0;
1879   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1880   start = has_receiver ? 1 : 0;
1881   if (profile_arguments()) {
1882     ciProfileData* data = method()->method_data()->bci_to_data(bci());
1883     if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1884       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1885     }
1886   }
1887   // If we are inlining then we need to collect arguments to profile parameters for the target
1888   if (profile_parameters() && target != nullptr) {
1889     if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
1890       // The receiver is profiled on method entry so it's included in

1967       break;
1968     case Bytecodes::_invokehandle:
1969       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1970       break;
1971     default:
1972       break;
1973     }
1974   } else {
1975     if (bc_raw == Bytecodes::_invokehandle) {
1976       assert(!will_link, "should come here only for unlinked call");
1977       code = Bytecodes::_invokespecial;
1978     }
1979   }
1980 
1981   if (code == Bytecodes::_invokespecial) {
1982     // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1983     ciKlass* receiver_constraint = nullptr;
1984 
1985     if (bc_raw == Bytecodes::_invokeinterface) {
1986       receiver_constraint = holder;
1987     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1988       receiver_constraint = calling_klass;
1989     }
1990 
1991     if (receiver_constraint != nullptr) {
1992       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1993       Value receiver = state()->stack_at(index);
1994       CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
1995       // go to uncommon_trap when checkcast fails
1996       c->set_invokespecial_receiver_check();
1997       state()->stack_at_put(index, append_split(c));
1998     }
1999   }
2000 
2001   // Push appendix argument (MethodType, CallSite, etc.), if one.
2002   bool patch_for_appendix = false;
2003   int patching_appendix_arg = 0;
2004   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2005     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2006     apush(arg);
2007     patch_for_appendix = true;

2223     }
2224   }
2225 
2226   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2227   // push result
2228   append_split(result);
2229 
2230   if (result_type != voidType) {
2231     push(result_type, round_fp(result));
2232   }
2233   if (profile_return() && result_type->is_object_kind()) {
2234     profile_return_type(result, target);
2235   }
2236 }
2237 
2238 
2239 void GraphBuilder::new_instance(int klass_index) {
2240   ValueStack* state_before = copy_state_exhandling();
2241   ciKlass* klass = stream()->get_klass();
2242   assert(klass->is_instance_klass(), "must be an instance klass");
2243   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2244   _memory->new_instance(new_instance);
2245   apush(append_split(new_instance));
2246 }
2247 
2248 
2249 void GraphBuilder::new_type_array() {
2250   ValueStack* state_before = copy_state_exhandling();
2251   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before, true)));
2252 }
2253 
2254 
2255 void GraphBuilder::new_object_array() {
2256   ciKlass* klass = stream()->get_klass();
2257   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2258   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2259   apush(append_split(n));
2260 }
2261 
2262 
2263 bool GraphBuilder::direct_compare(ciKlass* k) {
2264   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2265     ciInstanceKlass* ik = k->as_instance_klass();
2266     if (ik->is_final()) {
2267       return true;
2268     } else {

2301   ciKlass* klass = stream()->get_klass();
2302   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2303   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2304   ipush(append_split(i));
2305   i->set_direct_compare(direct_compare(klass));
2306 
2307   if (is_profiling()) {
2308     // Note that we'd collect profile data in this method if we wanted it.
2309     compilation()->set_would_profile(true);
2310 
2311     if (profile_checkcasts()) {
2312       i->set_profiled_method(method());
2313       i->set_profiled_bci(bci());
2314       i->set_should_profile(true);
2315     }
2316   }
2317 }
2318 
2319 
2320 void GraphBuilder::monitorenter(Value x, int bci) {
2321   // save state before locking in case of deoptimization after a NullPointerException
2322   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2323   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2324   kill_all();
2325 }
2326 
2327 
2328 void GraphBuilder::monitorexit(Value x, int bci) {
2329   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2330   kill_all();
2331 }
2332 
2333 
2334 void GraphBuilder::new_multi_array(int dimensions) {
2335   ciKlass* klass = stream()->get_klass();
2336   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2337 
2338   Values* dims = new Values(dimensions, dimensions, nullptr);
2339   // fill in all dimensions
2340   int i = dimensions;
2341   while (i-- > 0) dims->at_put(i, ipop());
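       // The dimension sizes were pushed outermost first, so popping in reverse leaves
       // dims->at(0) holding the outermost dimension.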
2342   // create array
2343   NewArray* n = new NewMultiArray(klass, dims, state_before);

2451 
2452 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2453   return append_with_bci(instr, bci());
2454 }
2455 
2456 
2457 void GraphBuilder::null_check(Value value) {
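       // Skip the explicit NullCheck for values that are provably non-null: freshly
       // allocated objects/arrays and loaded non-null object constants.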
2458   if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2459     return;
2460   } else {
2461     Constant* con = value->as_Constant();
2462     if (con) {
2463       ObjectType* c = con->type()->as_ObjectType();
2464       if (c && c->is_loaded()) {
2465         ObjectConstant* oc = c->as_ObjectConstant();
2466         if (!oc || !oc->value()->is_null_object()) {
2467           return;
2468         }
2469       }
2470     }

2471   }
2472   append(new NullCheck(value, copy_state_for_exception()));
2473 }
2474 
2475 
2476 
2477 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2478   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2479     assert(instruction->exception_state() == nullptr
2480            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2481            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2482            "exception_state should be of exception kind");
2483     return new XHandlers();
2484   }
2485 
2486   XHandlers*  exception_handlers = new XHandlers();
2487   ScopeData*  cur_scope_data = scope_data();
2488   ValueStack* cur_state = instruction->state_before();
2489   ValueStack* prev_state = nullptr;
2490   int scope_count = 0;
2491 
2492   assert(cur_state != nullptr, "state_before must be set");
2493   do {
2494     int cur_bci = cur_state->bci();
2495     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2496     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2497 
2498     // join with all potential exception handlers
2499     XHandlers* list = cur_scope_data->xhandlers();
2500     const int n = list->length();
2501     for (int i = 0; i < n; i++) {
2502       XHandler* h = list->handler_at(i);
2503       if (h->covers(cur_bci)) {
2504         // h is a potential exception handler => join it
2505         compilation()->set_has_exception_handlers(true);
2506 
2507         BlockBegin* entry = h->entry_block();
2508         if (entry == block()) {
2509           // It's acceptable for an exception handler to cover itself
2510           // but we don't handle that in the parser currently.  It's
2511           // very rare so we bail out instead of trying to handle it.
2512           BAILOUT_("exception handler covers itself", exception_handlers);
2513         }
2514         assert(entry->bci() == h->handler_bci(), "must match");
2515         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2516 

3258   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3259 
3260   assert(state->caller_state() == nullptr, "should be top scope");
3261   state->clear_locals();
3262   Goto* g = new Goto(target, false);
3263   append(g);
3264   _osr_entry->set_end(g);
3265   target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3266 
3267   scope_data()->set_stream(nullptr);
3268 }
3269 
3270 
3271 ValueStack* GraphBuilder::state_at_entry() {
3272   ValueStack* state = new ValueStack(scope(), nullptr);
3273 
3274   // Set up locals for receiver
3275   int idx = 0;
3276   if (!method()->is_static()) {
3277     // we should always see the receiver
3278     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));

3279     idx = 1;
3280   }
3281 
3282   // Set up locals for incoming arguments
3283   ciSignature* sig = method()->signature();
3284   for (int i = 0; i < sig->count(); i++) {
3285     ciType* type = sig->type_at(i);
3286     BasicType basic_type = type->basic_type();
3287     // don't allow T_ARRAY to propagate into locals types
3288     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3289     ValueType* vt = as_ValueType(basic_type);
3290     state->store_local(idx, new Local(type, vt, idx, false));
3291     idx += type->size();
3292   }
3293 
3294   // lock synchronized method
3295   if (method()->is_synchronized()) {
3296     state->lock(nullptr);
3297   }
3298 
3299   return state;
3300 }
3301 
3302 
3303 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3304   : _scope_data(nullptr)
3305   , _compilation(compilation)
3306   , _memory(new MemoryBuffer())
3307   , _inline_bailout_msg(nullptr)
3308   , _instruction_count(0)
3309   , _osr_entry(nullptr)
3310 {
3311   int osr_bci = compilation->osr_bci();
3312 
3313   // determine entry points and bci2block mapping
3314   BlockListBuilder blm(compilation, scope, osr_bci);
3315   CHECK_BAILOUT();
3316 
3317   BlockList* bci2block = blm.bci2block();
3318   BlockBegin* start_block = bci2block->at(0);
3319 
3320   push_root_scope(scope, bci2block, start_block);
3321 
3322   // setup state for std entry
3323   _initial_state = state_at_entry();
3324   start_block->merge(_initial_state, compilation->has_irreducible_loops());
3325 
3326   // End nulls still exist here
3327 
3328   // complete graph
3329   _vmap        = new ValueMap();

  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "c1/c1_CFGPrinter.hpp"
  26 #include "c1/c1_Canonicalizer.hpp"
  27 #include "c1/c1_Compilation.hpp"
  28 #include "c1/c1_GraphBuilder.hpp"
  29 #include "c1/c1_InstructionPrinter.hpp"
  30 #include "ci/ciCallSite.hpp"
  31 #include "ci/ciField.hpp"
  32 #include "ci/ciFlatArrayKlass.hpp"
  33 #include "ci/ciInlineKlass.hpp"
  34 #include "ci/ciKlass.hpp"
  35 #include "ci/ciMemberName.hpp"
  36 #include "ci/ciSymbols.hpp"
  37 #include "ci/ciUtilities.inline.hpp"
  38 #include "classfile/javaClasses.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/compileBroker.hpp"
  41 #include "compiler/compilerEvent.hpp"
  42 #include "interpreter/bytecode.hpp"
  43 #include "jfr/jfrEvents.hpp"
  44 #include "memory/resourceArea.hpp"
  45 #include "runtime/sharedRuntime.hpp"
  46 #include "utilities/checkedCast.hpp"
  47 #include "utilities/macros.hpp"
  48 #if INCLUDE_JFR
  49 #include "jfr/jfr.hpp"
  50 #endif
  51 
  52 class BlockListBuilder {
  53  private:

1043       // they are using this local. We don't handle skipping over a
1044       // ret.
1045       for (ScopeData* cur_scope_data = scope_data()->parent();
1046            cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1047            cur_scope_data = cur_scope_data->parent()) {
1048         if (cur_scope_data->jsr_return_address_local() == index) {
1049           BAILOUT("subroutine overwrites return address from previous subroutine");
1050         }
1051       }
1052     } else if (index == scope_data()->jsr_return_address_local()) {
1053       scope_data()->set_jsr_return_address_local(-1);
1054     }
1055   }
1056 
1057   state->store_local(index, round_fp(x));
1058 }
1059 
1060 
1061 void GraphBuilder::load_indexed(BasicType type) {
1062   // In case of in-block code motion in range check elimination
1063   ValueStack* state_before = nullptr;
1064   int array_idx = state()->stack_size() - 2;
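       // The array reference sits one slot below the index on the expression stack.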
1065   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1066     // Save the entire state and re-execute on deopt when accessing flat arrays
1067     state_before = copy_state_before();
1068     state_before->set_should_reexecute(true);
1069   } else {
1070     state_before = copy_state_indexed_access();
1071   }
1072   compilation()->set_has_access_indexed(true);
1073   Value index = ipop();
1074   Value array = apop();
1075   Value length = nullptr;
1076   if (CSEArrayLength ||
1077       (array->as_Constant() != nullptr) ||
1078       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1079       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1080       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1081     length = append(new ArrayLength(array, state_before));
1082   }
1083 
1084   bool need_membar = false;
1085   LoadIndexed* load_indexed = nullptr;
1086   Instruction* result = nullptr;
1087   if (array->is_loaded_flat_array()) {
1088     ciType* array_type = array->declared_type();
1089     ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1090 
1091     bool can_delay_access = false;
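         // Peek at the next bytecode: if it is a getfield on the flat element that needs
         // no patching, the element read can be delayed and folded into that field access
         // (see DelayedLoadIndexed below).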
1092     ciBytecodeStream s(method());
1093     s.force_bci(bci());
1094     s.next();
1095     if (s.cur_bc() == Bytecodes::_getfield) {
1096       bool will_link;
1097       ciField* next_field = s.get_field(will_link);
1098       bool next_needs_patching = !next_field->holder()->is_initialized() ||
1099                                  !next_field->will_link(method(), Bytecodes::_getfield) ||
1100                                  PatchALot;
1101       can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1102     }
1103     if (can_delay_access) {
1104       // potentially optimizable array access, storing information for delayed decision
1105       LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1106       DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1107       li->set_delayed(dli);
1108       set_pending_load_indexed(dli);
1109       return; // Nothing else to do for now
1110     } else {
1111       if (elem_klass->is_empty()) {
1112         // No need to create a new instance; the default instance will be used instead
1113         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1114         apush(append(load_indexed));
1115       } else {
1116         NewInstance* new_instance = new NewInstance(elem_klass, state_before, false, true);
1117         _memory->new_instance(new_instance);
1118         apush(append_split(new_instance));
1119         load_indexed = new LoadIndexed(array, index, length, type, state_before);
1120         load_indexed->set_vt(new_instance);
1121         // The LoadIndexed node will initialize this instance by copying from
1122         // the flat field.  Ensure these stores are visible before any
1123         // subsequent store that publishes this reference.
1124         need_membar = true;
1125       }
1126     }
1127   } else {
1128     load_indexed = new LoadIndexed(array, index, length, type, state_before);
1129     if (profile_array_accesses() && is_reference_type(type)) {
1130       compilation()->set_would_profile(true);
1131       load_indexed->set_should_profile(true);
1132       load_indexed->set_profiled_method(method());
1133       load_indexed->set_profiled_bci(bci());
1134     }
1135   }
1136   result = append(load_indexed);
1137   if (need_membar) {
1138     append(new MemBar(lir_membar_storestore));
1139   }
1140   assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1141   if (!array->is_loaded_flat_array()) {
1142     push(as_ValueType(type), result);
1143   }
1144 }
1145 
1146 
1147 void GraphBuilder::store_indexed(BasicType type) {
1148   // In case of in-block code motion in range check elimination
1149   ValueStack* state_before = nullptr;
1150   int array_idx = state()->stack_size() - 3;
1151   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1152     // Save the entire state and re-execute on deopt when accessing flat arrays
1153     state_before = copy_state_before();
1154     state_before->set_should_reexecute(true);
1155   } else {
1156     state_before = copy_state_indexed_access();
1157   }
1158   compilation()->set_has_access_indexed(true);
1159   Value value = pop(as_ValueType(type));
1160   Value index = ipop();
1161   Value array = apop();
1162   Value length = nullptr;
1163   if (CSEArrayLength ||
1164       (array->as_Constant() != nullptr) ||
1165       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1166       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1167       (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1168     length = append(new ArrayLength(array, state_before));
1169   }
1170   ciType* array_type = array->declared_type();
1171   bool check_boolean = false;
1172   if (array_type != nullptr) {
1173     if (array_type->is_loaded() &&
1174       array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1175       assert(type == T_BYTE, "boolean store uses bastore");
1176       Value mask = append(new Constant(new IntConstant(1)));
1177       value = append(new LogicOp(Bytecodes::_iand, value, mask));
1178     }
1179   } else if (type == T_BYTE) {
1180     check_boolean = true;
1181   }
1182 
1183   StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1184   if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
1185     compilation()->set_would_profile(true);
1186     store_indexed->set_should_profile(true);
1187     store_indexed->set_profiled_method(method());
1188     store_indexed->set_profiled_bci(bci());
1189   }
1190   Instruction* result = append(store_indexed);
1191   assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1192   _memory->store_value(value);
1193 }
1194 

1195 void GraphBuilder::stack_op(Bytecodes::Code code) {
1196   switch (code) {
1197     case Bytecodes::_pop:
1198       { Value w = state()->raw_pop();
1199       }
1200       break;
1201     case Bytecodes::_pop2:
1202       { Value w1 = state()->raw_pop();
1203         Value w2 = state()->raw_pop();
1204       }
1205       break;
1206     case Bytecodes::_dup:
1207       { Value w = state()->raw_pop();
1208         state()->raw_push(w);
1209         state()->raw_push(w);
1210       }
1211       break;
1212     case Bytecodes::_dup_x1:
1213       { Value w1 = state()->raw_pop();
1214         Value w2 = state()->raw_pop();
1215         state()->raw_push(w1);
1216         state()->raw_push(w2);
1217         state()->raw_push(w1);
1218       }
1219       break;
1220     case Bytecodes::_dup_x2:
1221       { Value w1 = state()->raw_pop();
1222         Value w2 = state()->raw_pop();
1223         Value w3 = state()->raw_pop();

1362 
1363 
1364 void GraphBuilder::_goto(int from_bci, int to_bci) {
1365   Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1366   if (is_profiling()) {
1367     compilation()->set_would_profile(true);
1368     x->set_profiled_bci(bci());
1369     if (profile_branches()) {
1370       x->set_profiled_method(method());
1371       x->set_should_profile(true);
1372     }
1373   }
1374   append(x);
1375 }
1376 
1377 
1378 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1379   BlockBegin* tsux = block_at(stream()->get_dest());
1380   BlockBegin* fsux = block_at(stream()->next_bci());
1381   bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1382 
1383   bool subst_check = false;
1384   if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1385     ValueType* left_vt = x->type();
1386     ValueType* right_vt = y->type();
1387     if (left_vt->is_object()) {
1388       assert(right_vt->is_object(), "must be");
1389       ciKlass* left_klass = x->as_loaded_klass_or_null();
1390       ciKlass* right_klass = y->as_loaded_klass_or_null();
1391 
1392       if (left_klass == nullptr || right_klass == nullptr) {
1393         // The klass is still unloaded, or came from a Phi node. Go slow case;
1394         subst_check = true;
1395       } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1396         // Either operand may be a value object, but we're not sure. Go slow case;
1397         subst_check = true;
1398       } else {
1399         // No need to do substitutability check
1400       }
1401     }
1402   }
1403   if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1404       is_profiling() && profile_branches()) {
1405     compilation()->set_would_profile(true);
1406     append(new ProfileACmpTypes(method(), bci(), x, y));
1407   }
1408 
1409   // In case of loop invariant code motion or predicate insertion
1410   // before the body of a loop the state is needed
1411   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
1412 
1413   assert(i->as_Goto() == nullptr ||
1414          (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == (tsux->bci() < stream()->cur_bci())) ||
1415          (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == (fsux->bci() < stream()->cur_bci())),
1416          "safepoint state of Goto returned by canonicalizer incorrect");
1417 
1418   if (is_profiling()) {
1419     If* if_node = i->as_If();
1420     if (if_node != nullptr) {
1421       // Note that we'd collect profile data in this method if we wanted it.
1422       compilation()->set_would_profile(true);
1423       // At level 2 we need the proper bci to count backedges
1424       if_node->set_profiled_bci(bci());
1425       if (profile_branches()) {
1426         // Successors can be rotated by the canonicalizer, check for this case.
1427         if_node->set_profiled_method(method());
1428         if_node->set_should_profile(true);
1429         if (if_node->tsux() == fsux) {
1430           if_node->set_swapped(true);
1431         }

1646   }
1647 
1648   if (needs_check) {
1649     // Perform the registration of finalizable objects.
1650     ValueStack* state_before = copy_state_for_exception();
1651     load_local(objectType, 0);
1652     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1653                                state()->pop_arguments(1),
1654                                true, state_before, true));
1655   }
1656 }
1657 
1658 
1659 void GraphBuilder::method_return(Value x, bool ignore_return) {
1660   if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1661     call_register_finalizer();
1662   }
1663 
1664   // The conditions for a memory barrier are described in Parse::do_exits().
1665   bool need_mem_bar = false;
1666   if (method()->is_object_constructor() &&
1667        (scope()->wrote_final() || scope()->wrote_stable() ||
1668          (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1669          (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1670     need_mem_bar = true;
1671   }
1672 
1673   BasicType bt = method()->return_type()->basic_type();
1674   switch (bt) {
1675     case T_BYTE:
1676     {
1677       Value shift = append(new Constant(new IntConstant(24)));
1678       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1679       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1680       break;
1681     }
1682     case T_SHORT:
1683     {
1684       Value shift = append(new Constant(new IntConstant(16)));
1685       x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1686       x = append(new ShiftOp(Bytecodes::_ishr, x, shift));

1797   // Attach dimension info to stable arrays.
1798   if (FoldStableValues &&
1799       field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1800     ciArray* array = field_value.as_object()->as_array();
1801     jint dimension = field->type()->as_array_klass()->dimension();
1802     value = new StableArrayConstant(array, dimension);
1803   }
1804 
1805   switch (field_type) {
1806     case T_ARRAY:
1807     case T_OBJECT:
1808       if (field_value.as_object()->should_be_constant()) {
1809         return new Constant(value);
1810       }
1811       return nullptr; // Not a constant.
1812     default:
1813       return new Constant(value);
1814   }
1815 }
1816 
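     // Copy the payload of an inline-type value field by field from src+src_off to
     // dest+dest_off, recursing into nested flat fields and copying the null marker of
     // nullable flat fields via Unsafe.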
1817 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1818   for (int i = 0; i < vk->nof_declared_nonstatic_fields(); i++) {
1819     ciField* field = vk->declared_nonstatic_field_at(i);
1820     int offset = field->offset_in_bytes() - vk->payload_offset();
1821     if (field->is_flat()) {
1822       bool needs_atomic_access = !field->is_null_free() || field->is_volatile();
1823       assert(!needs_atomic_access, "Atomic access in non-atomic container");
1824       copy_inline_content(field->type()->as_inline_klass(), src, src_off + offset, dest, dest_off + offset, state_before, enclosing_field);
1825       if (!field->is_null_free()) {
1826         // Nullable: copy the null marker using Unsafe because null markers are not real fields
1827         int null_marker_offset = field->null_marker_offset() - vk->payload_offset();
1828         Value offset = append(new Constant(new LongConstant(src_off + null_marker_offset)));
1829         Value nm = append(new UnsafeGet(T_BOOLEAN, src, offset, false));
1830         offset = append(new Constant(new LongConstant(dest_off + null_marker_offset)));
1831         append(new UnsafePut(T_BOOLEAN, dest, offset, nm, false));
1832       }
1833     } else {
1834       Value value = append(new LoadField(src, src_off + offset, field, false, state_before, false));
1835       StoreField* store = new StoreField(dest, dest_off + offset, field, value, false, state_before, false);
1836       store->set_enclosing_field(enclosing_field);
1837       append(store);
1838     }
1839   }
1840 }
1841 
1842 void GraphBuilder::access_field(Bytecodes::Code code) {
1843   bool will_link;
1844   ciField* field = stream()->get_field(will_link);
1845   ciInstanceKlass* holder = field->holder();
1846   BasicType field_type = field->type()->basic_type();
1847   ValueType* type = as_ValueType(field_type);
1848 
1849   // call will_link again to determine if the field is valid.
1850   const bool needs_patching = !holder->is_loaded() ||
1851                               !field->will_link(method(), code) ||
1852                               (!field->is_flat() && PatchALot);
1853 
1854   ValueStack* state_before = nullptr;
1855   if (!holder->is_initialized() || needs_patching) {
1856     // save state before instruction for debug info when
1857     // deoptimization happens during patching
1858     state_before = copy_state_before();
1859   }
1860 
1861   Value obj = nullptr;
1862   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1863     if (state_before != nullptr) {
1864       // build a patching constant
1865       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1866     } else {
1867       obj = new Constant(new InstanceConstant(holder->java_mirror()));
1868     }
1869   }
1870 
1871   if (code == Bytecodes::_putfield) {
1872     scope()->set_wrote_fields();
1873     if (field->is_volatile()) {
1874       scope()->set_wrote_volatile();
1875     }
1876     if (field->is_final()) {
1877       scope()->set_wrote_final();
1878     }
1879     if (field->is_stable()) {
1880       scope()->set_wrote_stable();
1881     }
1882   }
1883 
1884   int offset = !needs_patching ? field->offset_in_bytes() : -1;
1885   switch (code) {
1886     case Bytecodes::_getstatic: {
1887       // check for compile-time constants, i.e., initialized static final fields
1888       Value constant = nullptr;
1889       if (field->is_static_constant() && !PatchALot) {
1890         ciConstant field_value = field->constant_value();
1891         assert(!field->is_stable() || !field_value.is_null_or_zero(),
1892                "stable static w/ default value shouldn't be a constant");
1893         constant = make_constant(field_value, field);
1894       } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1895                  field->type()->as_inline_klass()->is_empty()) {
1896         // Loading from a field of an empty, null-free inline type. Just return the default instance.
1897         constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1898       }
1899       if (constant != nullptr) {
1900         push(type, append(constant));
1901       } else {
1902         if (state_before == nullptr) {
1903           state_before = copy_state_for_exception();
1904         }
1905         LoadField* load_field = new LoadField(append(obj), offset, field, true,
1906                                         state_before, needs_patching);
1907         push(type, append(load_field));
1908       }
1909       break;
1910     }
1911     case Bytecodes::_putstatic: {
1912       Value val = pop(type);
1913       if (state_before == nullptr) {
1914         state_before = copy_state_for_exception();
1915       }
1916       if (field_type == T_BOOLEAN) {
1917         Value mask = append(new Constant(new IntConstant(1)));
1918         val = append(new LogicOp(Bytecodes::_iand, val, mask));
1919       }
1920       if (field->is_null_free()) {
1921         null_check(val);
1922       }
1923       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1924         // Storing to a field of an empty, null-free inline type. Ignore.
1925         break;
1926       }
1927       append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1928       break;
1929     }
1930     case Bytecodes::_getfield: {
1931       // Check for compile-time constants, i.e., trusted final non-static fields.
1932       Value constant = nullptr;
1933       if (state_before == nullptr && field->is_flat()) {
1934         // Save the entire state and re-execute on deopt when accessing flat fields
1935         assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1936         state_before = copy_state_before();
1937       }
1938       if (!has_pending_field_access() && !has_pending_load_indexed()) {
1939         obj = apop();
1940         ObjectType* obj_type = obj->type()->as_ObjectType();
1941         if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
1942             && field->type()->as_inline_klass()->is_empty()) {
1943           // Loading from a field of an empty, null-free inline type. Just return the default instance.
1944           null_check(obj);
1945           constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1946         } else if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
1947           ciObject* const_oop = obj_type->constant_value();
1948           if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1949             ciConstant field_value = field->constant_value_of(const_oop);
1950             if (field_value.is_valid()) {
1951               if (field->is_null_free() && field_value.is_null_or_zero()) {
1952                 // Non-flat inline type field. Replace null with the default value.
1953                 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1954               } else {
1955                 constant = make_constant(field_value, field);
1956               }
1957               // For CallSite objects add a dependency for invalidation of the optimization.
1958               if (field->is_call_site_target()) {
1959                 ciCallSite* call_site = const_oop->as_call_site();
1960                 if (!call_site->is_fully_initialized_constant_call_site()) {
1961                   ciMethodHandle* target = field_value.as_object()->as_method_handle();
1962                   dependency_recorder()->assert_call_site_target_value(call_site, target);
1963                 }
1964               }
1965             }
1966           }
1967         }
1968       }
1969       if (constant != nullptr) {
1970         push(type, append(constant));
1971       } else {
1972         if (state_before == nullptr) {
1973           state_before = copy_state_for_exception();
1974         }
1975         if (!field->is_flat()) {
1976           if (has_pending_field_access()) {
1977             assert(!needs_patching, "Can't patch delayed field access");
1978             obj = pending_field_access()->obj();
1979             offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->payload_offset();
1980             field = pending_field_access()->holder()->get_field_by_offset(offset, false);
1981             assert(field != nullptr, "field not found");
1982             set_pending_field_access(nullptr);
1983           } else if (has_pending_load_indexed()) {
1984             assert(!needs_patching, "Can't patch delayed field access");
1985             pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->payload_offset());
1986             LoadIndexed* li = pending_load_indexed()->load_instr();
1987             li->set_type(type);
1988             push(type, append(li));
1989             set_pending_load_indexed(nullptr);
1990             break;
1991           }
1992           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1993           Value replacement = !needs_patching ? _memory->load(load) : load;
1994           if (replacement != load) {
1995             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1996             // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1997             // conversion. Emit an explicit conversion here to get the correct field value after the write.
1998             switch (field_type) {
1999             case T_BOOLEAN:
2000             case T_BYTE:
2001               replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
2002               break;
2003             case T_CHAR:
2004               replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
2005               break;
2006             case T_SHORT:
2007               replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
2008               break;
2009             default:
2010               break;
2011             }
2012             push(type, replacement);
2013           } else {
2014             push(type, append(load));
2015           }
2016         } else {
2017           // Flat field
2018           assert(!needs_patching, "Can't patch flat inline type field access");
2019           ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2020           bool is_naturally_atomic = inline_klass->nof_declared_nonstatic_fields() <= 1;
2021           bool needs_atomic_access = !field->is_null_free() || (field->is_volatile() && !is_naturally_atomic);
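             // Nullable flat fields, and volatile flat fields with more than one subfield,
             // must be read atomically as a whole rather than subfield by subfield.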
2022           if (needs_atomic_access) {
2023             assert(!has_pending_field_access(), "Pending field accesses are not supported");
2024             LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
2025             push(type, append(load));
2026           } else {
2027             assert(field->is_null_free(), "must be null-free");
2028             // Look at the next bytecode to check if we can delay the field access
2029             bool can_delay_access = false;
2030             ciBytecodeStream s(method());
2031             s.force_bci(bci());
2032             s.next();
2033             if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
2034               ciField* next_field = s.get_field(will_link);
2035               bool next_needs_patching = !next_field->holder()->is_loaded() ||
2036                                          !next_field->will_link(method(), Bytecodes::_getfield) ||
2037                                          PatchALot;
2038               // We can't update the offset for atomic accesses
2039               bool next_needs_atomic_access = !next_field->is_null_free() || next_field->is_volatile();
2040               can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching && !next_needs_atomic_access;
2041             }
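                 // With C1UseDelayedFlattenedFieldReads, consecutive getfields through flat fields
                 // (e.g. a.b.c) are folded into one access: the pending load is updated with the
                 // accumulated offset and only the final leaf field is actually loaded.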
2042             if (can_delay_access) {
2043               if (has_pending_load_indexed()) {
2044                 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->payload_offset());
2045               } else if (has_pending_field_access()) {
2046                 pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->payload_offset());
2047               } else {
2048                 null_check(obj);
2049                 DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes(), state_before);
2050                 set_pending_field_access(dfa);
2051               }
2052             } else {
2053               scope()->set_wrote_final();
2054               scope()->set_wrote_fields();
2055               bool need_membar = false;
2056               if (field->is_null_free() && inline_klass->is_initialized() && inline_klass->is_empty()) {
2057                 apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
2058                 if (has_pending_field_access()) {
2059                   set_pending_field_access(nullptr);
2060                 } else if (has_pending_load_indexed()) {
2061                   set_pending_load_indexed(nullptr);
2062                 }
2063               } else if (has_pending_load_indexed()) {
2064                 assert(!needs_patching, "Can't patch delayed field access");
2065                 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->payload_offset());
2066                 NewInstance* vt = new NewInstance(inline_klass, pending_load_indexed()->state_before(), false, true);
2067                 _memory->new_instance(vt);
2068                 pending_load_indexed()->load_instr()->set_vt(vt);
2069                 apush(append_split(vt));
2070                 append(pending_load_indexed()->load_instr());
2071                 set_pending_load_indexed(nullptr);
2072                 need_membar = true;
2073               } else {
2074                 if (has_pending_field_access()) {
2075                   state_before = pending_field_access()->state_before();
2076                 }
2077                 NewInstance* new_instance = new NewInstance(inline_klass, state_before, false, true);
2078                 _memory->new_instance(new_instance);
2079                 apush(append_split(new_instance));
2080                 if (has_pending_field_access()) {
2081                   copy_inline_content(inline_klass, pending_field_access()->obj(),
2082                                       pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->payload_offset(),
2083                                       new_instance, inline_klass->payload_offset(), state_before);
2084                   set_pending_field_access(nullptr);
2085                 } else {
2086                   copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->payload_offset(), state_before);
2087                 }
2088                 need_membar = true;
2089               }
2090               if (need_membar) {
2091                 // If we allocated a new instance ensure the stores to copy the
2092                 // field contents are visible before any subsequent store that
2093                 // publishes this reference.
2094                 append(new MemBar(lir_membar_storestore));
2095               }
2096             }
2097           }
2098         }
2099       }
2100       break;
2101     }
2102     case Bytecodes::_putfield: {
2103       Value val = pop(type);
2104       obj = apop();
2105       if (state_before == nullptr) {
2106         state_before = copy_state_for_exception();
2107       }
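           // Boolean fields must only ever hold 0 or 1, so mask the incoming int value down to its
           // lowest bit before storing it.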
2108       if (field_type == T_BOOLEAN) {
2109         Value mask = append(new Constant(new IntConstant(1)));
2110         val = append(new LogicOp(Bytecodes::_iand, val, mask));
2111       }
2112       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2113         // Storing to a field of an empty, null-free inline type. Ignore.
2114         null_check(obj);
2115         null_check(val);
2116       } else if (!field->is_flat()) {
2117         if (field->is_null_free()) {
2118           null_check(val);
2119         }
2120         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2121         if (!needs_patching) store = _memory->store(store);
2122         if (store != nullptr) {
2123           append(store);
2124         }
2125       } else {
2126         // Flat field
2127         assert(!needs_patching, "Can't patch flat inline type field access");
2128         ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2129         bool is_naturally_atomic = inline_klass->nof_declared_nonstatic_fields() <= 1;
2130         bool needs_atomic_access = !field->is_null_free() || (field->is_volatile() && !is_naturally_atomic);
2131         if (needs_atomic_access) {
2132           if (field->is_null_free()) {
2133             null_check(val);
2134           }
2135           append(new StoreField(obj, offset, field, val, false, state_before, needs_patching));
2136         } else {
2137           assert(field->is_null_free(), "must be null-free");
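               // Non-atomic flat store: copy the value's payload field by field into the flat field of obj.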
2138           copy_inline_content(inline_klass, val, inline_klass->payload_offset(), obj, offset, state_before, field);
2139         }
2140       }
2141       break;
2142     }
2143     default:
2144       ShouldNotReachHere();
2145       break;
2146   }
2147 }
2148 

2149 Dependencies* GraphBuilder::dependency_recorder() const {
2150   assert(DeoptC1, "need debug information");
2151   return compilation()->dependency_recorder();
2152 }
2153 
2154 // How many arguments do we want to profile?
2155 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2156   int n = 0;
2157   bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2158   start = has_receiver ? 1 : 0;
2159   if (profile_arguments()) {
2160     ciProfileData* data = method()->method_data()->bci_to_data(bci());
2161     if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2162       n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2163     }
2164   }
2165   // If we are inlining then we need to collect arguments to profile parameters for the target
2166   if (profile_parameters() && target != nullptr) {
2167     if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
2168       // The receiver is profiled on method entry so it's included in

2245       break;
2246     case Bytecodes::_invokehandle:
2247       code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2248       break;
2249     default:
2250       break;
2251     }
2252   } else {
2253     if (bc_raw == Bytecodes::_invokehandle) {
2254       assert(!will_link, "should come here only for unlinked call");
2255       code = Bytecodes::_invokespecial;
2256     }
2257   }
2258 
2259   if (code == Bytecodes::_invokespecial) {
2260     // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2261     ciKlass* receiver_constraint = nullptr;
2262 
2263     if (bc_raw == Bytecodes::_invokeinterface) {
2264       receiver_constraint = holder;
2265     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2266       receiver_constraint = calling_klass;
2267     }
2268 
2269     if (receiver_constraint != nullptr) {
2270       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2271       Value receiver = state()->stack_at(index);
2272       CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2273       // go to uncommon_trap when checkcast fails
2274       c->set_invokespecial_receiver_check();
2275       state()->stack_at_put(index, append_split(c));
2276     }
2277   }
2278 
2279   // Push appendix argument (MethodType, CallSite, etc.), if one.
2280   bool patch_for_appendix = false;
2281   int patching_appendix_arg = 0;
2282   if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2283     Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2284     apush(arg);
2285     patch_for_appendix = true;

2501     }
2502   }
2503 
2504   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2505   // push result
2506   append_split(result);
2507 
2508   if (result_type != voidType) {
2509     push(result_type, round_fp(result));
2510   }
2511   if (profile_return() && result_type->is_object_kind()) {
2512     profile_return_type(result, target);
2513   }
2514 }
2515 
2516 
2517 void GraphBuilder::new_instance(int klass_index) {
2518   ValueStack* state_before = copy_state_exhandling();
2519   ciKlass* klass = stream()->get_klass();
2520   assert(klass->is_instance_klass(), "must be an instance klass");
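       // An initialized, empty inline type has a single default instance, so 'new' of such a type
       // folds into a constant rather than a real allocation.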
2521   if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2522       klass->as_inline_klass()->is_initialized() && klass->as_inline_klass()->is_empty()) {
2523     ciInlineKlass* vk = klass->as_inline_klass();
2524     apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2525   } else {
2526     NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass(), false);
2527     _memory->new_instance(new_instance);
2528     apush(append_split(new_instance));
2529   }
2530 }
2531 

2532 void GraphBuilder::new_type_array() {
2533   ValueStack* state_before = copy_state_exhandling();
2534   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before, true)));
2535 }
2536 
2537 
2538 void GraphBuilder::new_object_array() {
2539   ciKlass* klass = stream()->get_klass();
2540   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2541   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2542   apush(append_split(n));
2543 }
2544 
2545 
2546 bool GraphBuilder::direct_compare(ciKlass* k) {
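       // A type check against a loaded final class needs no subtype walk: comparing the receiver's
       // klass pointer against the class is sufficient.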
2547   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2548     ciInstanceKlass* ik = k->as_instance_klass();
2549     if (ik->is_final()) {
2550       return true;
2551     } else {

2584   ciKlass* klass = stream()->get_klass();
2585   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2586   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2587   ipush(append_split(i));
2588   i->set_direct_compare(direct_compare(klass));
2589 
2590   if (is_profiling()) {
2591     // Note that we'd collect profile data in this method if we wanted it.
2592     compilation()->set_would_profile(true);
2593 
2594     if (profile_checkcasts()) {
2595       i->set_profiled_method(method());
2596       i->set_profiled_bci(bci());
2597       i->set_should_profile(true);
2598     }
2599   }
2600 }
2601 
2602 
2603 void GraphBuilder::monitorenter(Value x, int bci) {
2604   bool maybe_inlinetype = false;
2605   if (bci == InvocationEntryBci) {
2606     // Called by GraphBuilder::inline_sync_entry.
2607 #ifdef ASSERT
2608     ciType* obj_type = x->declared_type();
2609     assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2610 #endif
2611   } else {
2612     // We are compiling a monitorenter bytecode
2613     if (EnableValhalla) {
2614       ciType* obj_type = x->declared_type();
2615       if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
2616         // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2617         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
2618         maybe_inlinetype = true;
2619       }
2620     }
2621   }
2622 
2623   // save state before locking in case of deoptimization after a NullPointerException
2624   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2625   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
2626   kill_all();
2627 }
2628 
2629 
2630 void GraphBuilder::monitorexit(Value x, int bci) {
2631   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2632   kill_all();
2633 }
2634 
2635 
2636 void GraphBuilder::new_multi_array(int dimensions) {
2637   ciKlass* klass = stream()->get_klass();
2638   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2639 
2640   Values* dims = new Values(dimensions, dimensions, nullptr);
2641   // fill in all dimensions
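       // (dimension counts are popped innermost-first, so dims->at(0) receives the outermost dimension)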
2642   int i = dimensions;
2643   while (i-- > 0) dims->at_put(i, ipop());
2644   // create array
2645   NewArray* n = new NewMultiArray(klass, dims, state_before);

2753 
2754 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2755   return append_with_bci(instr, bci());
2756 }
2757 
2758 
2759 void GraphBuilder::null_check(Value value) {
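       // Values that are provably non-null (fresh allocations, non-null object constants, or values
       // already known to be null-free) do not need an explicit NullCheck.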
2760   if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2761     return;
2762   } else {
2763     Constant* con = value->as_Constant();
2764     if (con) {
2765       ObjectType* c = con->type()->as_ObjectType();
2766       if (c && c->is_loaded()) {
2767         ObjectConstant* oc = c->as_ObjectConstant();
2768         if (!oc || !oc->value()->is_null_object()) {
2769           return;
2770         }
2771       }
2772     }
2773     if (value->is_null_free()) return;
2774   }
2775   append(new NullCheck(value, copy_state_for_exception()));
2776 }
2777 
2778 
2779 
2780 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2781   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2782     assert(instruction->exception_state() == nullptr
2783            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2784            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2785            "exception_state should be of exception kind");
2786     return new XHandlers();
2787   }
2788 
2789   XHandlers*  exception_handlers = new XHandlers();
2790   ScopeData*  cur_scope_data = scope_data();
2791   ValueStack* cur_state = instruction->state_before();
2792   ValueStack* prev_state = nullptr;
2793   int scope_count = 0;
2794 
2795   assert(cur_state != nullptr, "state_before must be set");
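       // Walk outwards from the current scope, joining every handler whose range covers the bci at
       // which this instruction may throw; inlined scopes also contribute their callers' handlers.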
2796   do {
2797     int cur_bci = cur_state->bci();
2798     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2799     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2800            || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2801 
2802 
2803     // join with all potential exception handlers
2804     XHandlers* list = cur_scope_data->xhandlers();
2805     const int n = list->length();
2806     for (int i = 0; i < n; i++) {
2807       XHandler* h = list->handler_at(i);
2808       if (h->covers(cur_bci)) {
2809         // h is a potential exception handler => join it
2810         compilation()->set_has_exception_handlers(true);
2811 
2812         BlockBegin* entry = h->entry_block();
2813         if (entry == block()) {
2814           // It's acceptable for an exception handler to cover itself
2815           // but we don't handle that in the parser currently.  It's
2816           // very rare so we bail out instead of trying to handle it.
2817           BAILOUT_("exception handler covers itself", exception_handlers);
2818         }
2819         assert(entry->bci() == h->handler_bci(), "must match");
2820         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2821 

3563   // the storage for the OSR buffer is freed manually in the LIRGenerator.
3564 
3565   assert(state->caller_state() == nullptr, "should be top scope");
3566   state->clear_locals();
3567   Goto* g = new Goto(target, false);
3568   append(g);
3569   _osr_entry->set_end(g);
3570   target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3571 
3572   scope_data()->set_stream(nullptr);
3573 }
3574 
3575 
3576 ValueStack* GraphBuilder::state_at_entry() {
3577   ValueStack* state = new ValueStack(scope(), nullptr);
3578 
3579   // Set up locals for receiver
3580   int idx = 0;
3581   if (!method()->is_static()) {
3582     // we should always see the receiver
3583     state->store_local(idx, new Local(method()->holder(), objectType, idx,
3584              /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3585     idx = 1;
3586   }
3587 
3588   // Set up locals for incoming arguments
3589   ciSignature* sig = method()->signature();
3590   for (int i = 0; i < sig->count(); i++) {
3591     ciType* type = sig->type_at(i);
3592     BasicType basic_type = type->basic_type();
3593     // don't allow T_ARRAY to propagate into the types of locals
3594     if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3595     ValueType* vt = as_ValueType(basic_type);
3596     state->store_local(idx, new Local(type, vt, idx, false, false));
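         // longs and doubles occupy two local slots, so idx advances by the type's slot count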
3597     idx += type->size();
3598   }
3599 
3600   // lock synchronized method
3601   if (method()->is_synchronized()) {
3602     state->lock(nullptr);
3603   }
3604 
3605   return state;
3606 }
3607 
3608 
3609 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3610   : _scope_data(nullptr)
3611   , _compilation(compilation)
3612   , _memory(new MemoryBuffer())
3613   , _inline_bailout_msg(nullptr)
3614   , _instruction_count(0)
3615   , _osr_entry(nullptr)
3616   , _pending_field_access(nullptr)
3617   , _pending_load_indexed(nullptr)
3618 {
3619   int osr_bci = compilation->osr_bci();
3620 
3621   // determine entry points and bci2block mapping
3622   BlockListBuilder blm(compilation, scope, osr_bci);
3623   CHECK_BAILOUT();
3624 
3625   BlockList* bci2block = blm.bci2block();
3626   BlockBegin* start_block = bci2block->at(0);
3627 
3628   push_root_scope(scope, bci2block, start_block);
3629 
3630   // setup state for std entry
3631   _initial_state = state_at_entry();
3632   start_block->merge(_initial_state, compilation->has_irreducible_loops());
3633 
3634   // End nulls still exist here
3635 
3636   // complete graph
3637   _vmap        = new ValueMap();
< prev index next >