13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Canonicalizer.hpp"
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_GraphBuilder.hpp"
30 #include "c1/c1_InstructionPrinter.hpp"
31 #include "ci/ciCallSite.hpp"
32 #include "ci/ciField.hpp"
33 #include "ci/ciKlass.hpp"
34 #include "ci/ciMemberName.hpp"
35 #include "ci/ciSymbols.hpp"
36 #include "ci/ciUtilities.inline.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "compiler/compilationPolicy.hpp"
39 #include "compiler/compileBroker.hpp"
40 #include "compiler/compilerEvent.hpp"
41 #include "interpreter/bytecode.hpp"
42 #include "jfr/jfrEvents.hpp"
43 #include "memory/resourceArea.hpp"
44 #include "oops/oop.inline.hpp"
45 #include "runtime/sharedRuntime.hpp"
46 #include "runtime/vm_version.hpp"
47 #include "utilities/bitMap.inline.hpp"
48 #include "utilities/checkedCast.hpp"
49 #include "utilities/powerOfTwo.hpp"
50 #include "utilities/macros.hpp"
51 #if INCLUDE_JFR
52 #include "jfr/jfr.hpp"
1046 // they are using this local. We don't handle skipping over a
1047 // ret.
1048 for (ScopeData* cur_scope_data = scope_data()->parent();
1049 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1050 cur_scope_data = cur_scope_data->parent()) {
1051 if (cur_scope_data->jsr_return_address_local() == index) {
1052 BAILOUT("subroutine overwrites return address from previous subroutine");
1053 }
1054 }
1055 } else if (index == scope_data()->jsr_return_address_local()) {
1056 scope_data()->set_jsr_return_address_local(-1);
1057 }
1058 }
1059
1060 state->store_local(index, round_fp(x));
1061 }
1062
1063
1064 void GraphBuilder::load_indexed(BasicType type) {
1065   // In case of in-block code motion in range check elimination
1066 ValueStack* state_before = copy_state_indexed_access();
1067 compilation()->set_has_access_indexed(true);
1068 Value index = ipop();
1069 Value array = apop();
1070 Value length = nullptr;
1071 if (CSEArrayLength ||
1072 (array->as_Constant() != nullptr) ||
1073 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1074 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1075 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1076 length = append(new ArrayLength(array, state_before));
1077 }
1078 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1079 }
1080
1081
1082 void GraphBuilder::store_indexed(BasicType type) {
1083   // In case of in-block code motion in range check elimination
1084 ValueStack* state_before = copy_state_indexed_access();
1085 compilation()->set_has_access_indexed(true);
1086 Value value = pop(as_ValueType(type));
1087 Value index = ipop();
1088 Value array = apop();
1089 Value length = nullptr;
1090 if (CSEArrayLength ||
1091 (array->as_Constant() != nullptr) ||
1092 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1093 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1094 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1095 length = append(new ArrayLength(array, state_before));
1096 }
1097 ciType* array_type = array->declared_type();
1098 bool check_boolean = false;
1099 if (array_type != nullptr) {
1100 if (array_type->is_loaded() &&
1101 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1102 assert(type == T_BYTE, "boolean store uses bastore");
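           // A boolean element must be stored as 0 or 1 (bastore narrows the int value),
           // so mask the incoming value down to its lowest bit before the store.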
1103 Value mask = append(new Constant(new IntConstant(1)));
1104 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1105 }
1106 } else if (type == T_BYTE) {
1107 check_boolean = true;
1108 }
1109 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1110 append(result);
1111 _memory->store_value(value);
1112
1113 if (type == T_OBJECT && is_profiling()) {
1114 // Note that we'd collect profile data in this method if we wanted it.
1115 compilation()->set_would_profile(true);
1116
1117 if (profile_checkcasts()) {
1118 result->set_profiled_method(method());
1119 result->set_profiled_bci(bci());
1120 result->set_should_profile(true);
1121 }
1122 }
1123 }
1124
1125
1126 void GraphBuilder::stack_op(Bytecodes::Code code) {
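       // Untyped stack manipulation on raw stack slots; for example _dup_x1 rewrites
       // the stack ..., w2, w1 into ..., w1, w2, w1.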
1127 switch (code) {
1128 case Bytecodes::_pop:
1129 { state()->raw_pop();
1130 }
1131 break;
1132 case Bytecodes::_pop2:
1133 { state()->raw_pop();
1134 state()->raw_pop();
1135 }
1136 break;
1137 case Bytecodes::_dup:
1138 { Value w = state()->raw_pop();
1139 state()->raw_push(w);
1140 state()->raw_push(w);
1141 }
1142 break;
1143 case Bytecodes::_dup_x1:
1144 { Value w1 = state()->raw_pop();
1145 Value w2 = state()->raw_pop();
1146 state()->raw_push(w1);
1147 state()->raw_push(w2);
1148 state()->raw_push(w1);
1149 }
1150 break;
1151 case Bytecodes::_dup_x2:
1152 { Value w1 = state()->raw_pop();
1153 Value w2 = state()->raw_pop();
1154 Value w3 = state()->raw_pop();
1293
1294
1295 void GraphBuilder::_goto(int from_bci, int to_bci) {
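       // A backward branch (to_bci <= from_bci) may close a loop, so the Goto is created as a safepoint.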
1296 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1297 if (is_profiling()) {
1298 compilation()->set_would_profile(true);
1299 x->set_profiled_bci(bci());
1300 if (profile_branches()) {
1301 x->set_profiled_method(method());
1302 x->set_should_profile(true);
1303 }
1304 }
1305 append(x);
1306 }
1307
1308
1309 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1310 BlockBegin* tsux = block_at(stream()->get_dest());
1311 BlockBegin* fsux = block_at(stream()->next_bci());
1312 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1313   // In case of loop invariant code motion or predicate insertion
1314   // before the body of a loop, the state is needed
1315 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
1316
1317 assert(i->as_Goto() == nullptr ||
1318 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == (tsux->bci() < stream()->cur_bci())) ||
1319 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == (fsux->bci() < stream()->cur_bci())),
1320 "safepoint state of Goto returned by canonicalizer incorrect");
1321
1322 if (is_profiling()) {
1323 If* if_node = i->as_If();
1324 if (if_node != nullptr) {
1325 // Note that we'd collect profile data in this method if we wanted it.
1326 compilation()->set_would_profile(true);
1327 // At level 2 we need the proper bci to count backedges
1328 if_node->set_profiled_bci(bci());
1329 if (profile_branches()) {
1330         // Successors can be rotated by the canonicalizer; check for this case.
1331 if_node->set_profiled_method(method());
1332 if_node->set_should_profile(true);
1333 if (if_node->tsux() == fsux) {
1334 if_node->set_swapped(true);
1335 }
1550 }
1551
1552 if (needs_check) {
1553 // Perform the registration of finalizable objects.
1554 ValueStack* state_before = copy_state_for_exception();
1555 load_local(objectType, 0);
1556 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1557 state()->pop_arguments(1),
1558 true, state_before, true));
1559 }
1560 }
1561
1562
1563 void GraphBuilder::method_return(Value x, bool ignore_return) {
1564 if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1565 call_register_finalizer();
1566 }
1567
1568 // The conditions for a memory barrier are described in Parse::do_exits().
1569 bool need_mem_bar = false;
1570 if (method()->name() == ciSymbols::object_initializer_name() &&
1571 (scope()->wrote_final() || scope()->wrote_stable() ||
1572 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1573 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1574 need_mem_bar = true;
1575 }
1576
1577 BasicType bt = method()->return_type()->basic_type();
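       // Normalize sub-int return values: byte and short results are sign-extended from
       // their low 8/16 bits via a left shift followed by an arithmetic right shift.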
1578 switch (bt) {
1579 case T_BYTE:
1580 {
1581 Value shift = append(new Constant(new IntConstant(24)));
1582 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1583 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1584 break;
1585 }
1586 case T_SHORT:
1587 {
1588 Value shift = append(new Constant(new IntConstant(16)));
1589 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1590 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1701 // Attach dimension info to stable arrays.
1702 if (FoldStableValues &&
1703 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1704 ciArray* array = field_value.as_object()->as_array();
1705 jint dimension = field->type()->as_array_klass()->dimension();
1706 value = new StableArrayConstant(array, dimension);
1707 }
1708
1709 switch (field_type) {
1710 case T_ARRAY:
1711 case T_OBJECT:
1712 if (field_value.as_object()->should_be_constant()) {
1713 return new Constant(value);
1714 }
1715 return nullptr; // Not a constant.
1716 default:
1717 return new Constant(value);
1718 }
1719 }
1720
1721 void GraphBuilder::access_field(Bytecodes::Code code) {
1722 bool will_link;
1723 ciField* field = stream()->get_field(will_link);
1724 ciInstanceKlass* holder = field->holder();
1725 BasicType field_type = field->type()->basic_type();
1726 ValueType* type = as_ValueType(field_type);
1727 // call will_link again to determine if the field is valid.
1728 const bool needs_patching = !holder->is_loaded() ||
1729 !field->will_link(method(), code) ||
1730 PatchALot;
1731
1732 ValueStack* state_before = nullptr;
1733 if (!holder->is_initialized() || needs_patching) {
1734 // save state before instruction for debug info when
1735 // deoptimization happens during patching
1736 state_before = copy_state_before();
1737 }
1738
1739 Value obj = nullptr;
1740 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1741 if (state_before != nullptr) {
1742 // build a patching constant
1743 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1744 } else {
1745 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1746 }
1747 }
1748
1749 if (code == Bytecodes::_putfield) {
1750 scope()->set_wrote_fields();
1751 if (field->is_volatile()) {
1752 scope()->set_wrote_volatile();
1753 }
1754 if (field->is_final()) {
1755 scope()->set_wrote_final();
1756 }
1757 if (field->is_stable()) {
1758 scope()->set_wrote_stable();
1759 }
1760 }
1761
1762 const int offset = !needs_patching ? field->offset_in_bytes() : -1;
1763 switch (code) {
1764 case Bytecodes::_getstatic: {
1765 // check for compile-time constants, i.e., initialized static final fields
1766 Value constant = nullptr;
1767 if (field->is_static_constant() && !PatchALot) {
1768 ciConstant field_value = field->constant_value();
1769 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1770 "stable static w/ default value shouldn't be a constant");
1771 constant = make_constant(field_value, field);
1772 }
1773 if (constant != nullptr) {
1774 push(type, append(constant));
1775 } else {
1776 if (state_before == nullptr) {
1777 state_before = copy_state_for_exception();
1778 }
1779 push(type, append(new LoadField(append(obj), offset, field, true,
1780 state_before, needs_patching)));
1781 }
1782 break;
1783 }
1784 case Bytecodes::_putstatic: {
1785 Value val = pop(type);
1786 if (state_before == nullptr) {
1787 state_before = copy_state_for_exception();
1788 }
1789 if (field->type()->basic_type() == T_BOOLEAN) {
1790 Value mask = append(new Constant(new IntConstant(1)));
1791 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1792 }
1793 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1794 break;
1795 }
1796 case Bytecodes::_getfield: {
1797 // Check for compile-time constants, i.e., trusted final non-static fields.
1798 Value constant = nullptr;
1799 obj = apop();
1800 ObjectType* obj_type = obj->type()->as_ObjectType();
1801 if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1802 ciObject* const_oop = obj_type->constant_value();
1803 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1804 ciConstant field_value = field->constant_value_of(const_oop);
1805 if (field_value.is_valid()) {
1806 constant = make_constant(field_value, field);
1807 // For CallSite objects add a dependency for invalidation of the optimization.
1808 if (field->is_call_site_target()) {
1809 ciCallSite* call_site = const_oop->as_call_site();
1810 if (!call_site->is_fully_initialized_constant_call_site()) {
1811 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1812 dependency_recorder()->assert_call_site_target_value(call_site, target);
1813 }
1814 }
1815 }
1816 }
1817 }
1818 if (constant != nullptr) {
1819 push(type, append(constant));
1820 } else {
1821 if (state_before == nullptr) {
1822 state_before = copy_state_for_exception();
1823 }
1824 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1825 Value replacement = !needs_patching ? _memory->load(load) : load;
1826 if (replacement != load) {
1827           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1828 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1829 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1830 BasicType bt = field->type()->basic_type();
1831 switch (bt) {
1832 case T_BOOLEAN:
1833 case T_BYTE:
1834 replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1835 break;
1836 case T_CHAR:
1837 replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1838 break;
1839 case T_SHORT:
1840 replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1841 break;
1842 default:
1843 break;
1844 }
1845 push(type, replacement);
1846 } else {
1847 push(type, append(load));
1848 }
1849 }
1850 break;
1851 }
1852 case Bytecodes::_putfield: {
1853 Value val = pop(type);
1854 obj = apop();
1855 if (state_before == nullptr) {
1856 state_before = copy_state_for_exception();
1857 }
1858 if (field->type()->basic_type() == T_BOOLEAN) {
1859 Value mask = append(new Constant(new IntConstant(1)));
1860 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1861 }
1862 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1863 if (!needs_patching) store = _memory->store(store);
1864 if (store != nullptr) {
1865 append(store);
1866 }
1867 break;
1868 }
1869 default:
1870 ShouldNotReachHere();
1871 break;
1872 }
1873 }
1874
1875
1876 Dependencies* GraphBuilder::dependency_recorder() const {
1877 assert(DeoptC1, "need debug information");
1878 return compilation()->dependency_recorder();
1879 }
1880
1881 // How many arguments do we want to profile?
1882 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1883 int n = 0;
1884 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1885 start = has_receiver ? 1 : 0;
1886 if (profile_arguments()) {
1887 ciProfileData* data = method()->method_data()->bci_to_data(bci());
1888 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1889 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1890 }
1891 }
1892 // If we are inlining then we need to collect arguments to profile parameters for the target
1893 if (profile_parameters() && target != nullptr) {
1894 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
1895 // The receiver is profiled on method entry so it's included in
1972 break;
1973 case Bytecodes::_invokehandle:
1974 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1975 break;
1976 default:
1977 break;
1978 }
1979 } else {
1980 if (bc_raw == Bytecodes::_invokehandle) {
1981 assert(!will_link, "should come here only for unlinked call");
1982 code = Bytecodes::_invokespecial;
1983 }
1984 }
1985
1986 if (code == Bytecodes::_invokespecial) {
1987 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1988 ciKlass* receiver_constraint = nullptr;
1989
1990 if (bc_raw == Bytecodes::_invokeinterface) {
1991 receiver_constraint = holder;
1992 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1993 receiver_constraint = calling_klass;
1994 }
1995
1996 if (receiver_constraint != nullptr) {
1997 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1998 Value receiver = state()->stack_at(index);
1999 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2000 // go to uncommon_trap when checkcast fails
2001 c->set_invokespecial_receiver_check();
2002 state()->stack_at_put(index, append_split(c));
2003 }
2004 }
2005
2006   // Push appendix argument (MethodType, CallSite, etc.), if there is one.
2007 bool patch_for_appendix = false;
2008 int patching_appendix_arg = 0;
2009 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2010 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2011 apush(arg);
2012 patch_for_appendix = true;
2228 }
2229 }
2230
2231 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2232 // push result
2233 append_split(result);
2234
2235 if (result_type != voidType) {
2236 push(result_type, round_fp(result));
2237 }
2238 if (profile_return() && result_type->is_object_kind()) {
2239 profile_return_type(result, target);
2240 }
2241 }
2242
2243
2244 void GraphBuilder::new_instance(int klass_index) {
2245 ValueStack* state_before = copy_state_exhandling();
2246 ciKlass* klass = stream()->get_klass();
2247 assert(klass->is_instance_klass(), "must be an instance klass");
2248 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2249 _memory->new_instance(new_instance);
2250 apush(append_split(new_instance));
2251 }
2252
2253
2254 void GraphBuilder::new_type_array() {
2255 ValueStack* state_before = copy_state_exhandling();
2256 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before, true)));
2257 }
2258
2259
2260 void GraphBuilder::new_object_array() {
2261 ciKlass* klass = stream()->get_klass();
2262 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2263 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2264 apush(append_split(n));
2265 }
2266
2267
2268 bool GraphBuilder::direct_compare(ciKlass* k) {
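       // A single klass-pointer comparison is enough when the checked class is loaded and
       // final, since no subclass can ever reach this check.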
2269 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2270 ciInstanceKlass* ik = k->as_instance_klass();
2271 if (ik->is_final()) {
2272 return true;
2273 } else {
2306 ciKlass* klass = stream()->get_klass();
2307 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2308 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2309 ipush(append_split(i));
2310 i->set_direct_compare(direct_compare(klass));
2311
2312 if (is_profiling()) {
2313 // Note that we'd collect profile data in this method if we wanted it.
2314 compilation()->set_would_profile(true);
2315
2316 if (profile_checkcasts()) {
2317 i->set_profiled_method(method());
2318 i->set_profiled_bci(bci());
2319 i->set_should_profile(true);
2320 }
2321 }
2322 }
2323
2324
2325 void GraphBuilder::monitorenter(Value x, int bci) {
2326 // save state before locking in case of deoptimization after a NullPointerException
2327 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2328 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2329 kill_all();
2330 }
2331
2332
2333 void GraphBuilder::monitorexit(Value x, int bci) {
2334 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2335 kill_all();
2336 }
2337
2338
2339 void GraphBuilder::new_multi_array(int dimensions) {
2340 ciKlass* klass = stream()->get_klass();
2341 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2342
2343 Values* dims = new Values(dimensions, dimensions, nullptr);
2344 // fill in all dimensions
2345 int i = dimensions;
2346 while (i-- > 0) dims->at_put(i, ipop());
2347 // create array
2348 NewArray* n = new NewMultiArray(klass, dims, state_before);
2456
2457 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2458 return append_with_bci(instr, bci());
2459 }
2460
2461
2462 void GraphBuilder::null_check(Value value) {
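       // Elide the explicit null check when the value is provably non-null: a freshly
       // allocated array/instance, or a loaded object constant other than the null object.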
2463 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2464 return;
2465 } else {
2466 Constant* con = value->as_Constant();
2467 if (con) {
2468 ObjectType* c = con->type()->as_ObjectType();
2469 if (c && c->is_loaded()) {
2470 ObjectConstant* oc = c->as_ObjectConstant();
2471 if (!oc || !oc->value()->is_null_object()) {
2472 return;
2473 }
2474 }
2475 }
2476 }
2477 append(new NullCheck(value, copy_state_for_exception()));
2478 }
2479
2480
2481
2482 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2483 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2484 assert(instruction->exception_state() == nullptr
2485 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2486 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2487 "exception_state should be of exception kind");
2488 return new XHandlers();
2489 }
2490
2491 XHandlers* exception_handlers = new XHandlers();
2492 ScopeData* cur_scope_data = scope_data();
2493 ValueStack* cur_state = instruction->state_before();
2494 ValueStack* prev_state = nullptr;
2495 int scope_count = 0;
2496
2497 assert(cur_state != nullptr, "state_before must be set");
2498 do {
2499 int cur_bci = cur_state->bci();
2500 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2501 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2502
2503 // join with all potential exception handlers
2504 XHandlers* list = cur_scope_data->xhandlers();
2505 const int n = list->length();
2506 for (int i = 0; i < n; i++) {
2507 XHandler* h = list->handler_at(i);
2508 if (h->covers(cur_bci)) {
2509 // h is a potential exception handler => join it
2510 compilation()->set_has_exception_handlers(true);
2511
2512 BlockBegin* entry = h->entry_block();
2513 if (entry == block()) {
2514           // It's acceptable for an exception handler to cover itself,
2515           // but we don't handle that in the parser currently. It's
2516           // very rare, so we bail out instead of trying to handle it.
2517 BAILOUT_("exception handler covers itself", exception_handlers);
2518 }
2519 assert(entry->bci() == h->handler_bci(), "must match");
2520 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2521
3263 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3264
3265 assert(state->caller_state() == nullptr, "should be top scope");
3266 state->clear_locals();
3267 Goto* g = new Goto(target, false);
3268 append(g);
3269 _osr_entry->set_end(g);
3270 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3271
3272 scope_data()->set_stream(nullptr);
3273 }
3274
3275
3276 ValueStack* GraphBuilder::state_at_entry() {
3277 ValueStack* state = new ValueStack(scope(), nullptr);
3278
3279 // Set up locals for receiver
3280 int idx = 0;
3281 if (!method()->is_static()) {
3282 // we should always see the receiver
3283 state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
3284 idx = 1;
3285 }
3286
3287 // Set up locals for incoming arguments
3288 ciSignature* sig = method()->signature();
3289 for (int i = 0; i < sig->count(); i++) {
3290 ciType* type = sig->type_at(i);
3291 BasicType basic_type = type->basic_type();
3292     // don't allow T_ARRAY to propagate into the types of locals
3293 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3294 ValueType* vt = as_ValueType(basic_type);
3295 state->store_local(idx, new Local(type, vt, idx, false));
3296 idx += type->size();
3297 }
3298
3299 // lock synchronized method
3300 if (method()->is_synchronized()) {
3301 state->lock(nullptr);
3302 }
3303
3304 return state;
3305 }
3306
3307
3308 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3309 : _scope_data(nullptr)
3310 , _compilation(compilation)
3311 , _memory(new MemoryBuffer())
3312 , _inline_bailout_msg(nullptr)
3313 , _instruction_count(0)
3314 , _osr_entry(nullptr)
3315 {
3316 int osr_bci = compilation->osr_bci();
3317
3318 // determine entry points and bci2block mapping
3319 BlockListBuilder blm(compilation, scope, osr_bci);
3320 CHECK_BAILOUT();
3321
3322 BlockList* bci2block = blm.bci2block();
3323 BlockBegin* start_block = bci2block->at(0);
3324
3325 push_root_scope(scope, bci2block, start_block);
3326
3327 // setup state for std entry
3328 _initial_state = state_at_entry();
3329 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3330
3331 // End nulls still exist here
3332
3333 // complete graph
3334 _vmap = new ValueMap();
|
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Canonicalizer.hpp"
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_GraphBuilder.hpp"
30 #include "c1/c1_InstructionPrinter.hpp"
31 #include "ci/ciCallSite.hpp"
32 #include "ci/ciField.hpp"
33 #include "ci/ciFlatArrayKlass.hpp"
34 #include "ci/ciInlineKlass.hpp"
35 #include "ci/ciKlass.hpp"
36 #include "ci/ciMemberName.hpp"
37 #include "ci/ciSymbols.hpp"
38 #include "ci/ciUtilities.inline.hpp"
39 #include "classfile/javaClasses.hpp"
40 #include "compiler/compilationPolicy.hpp"
41 #include "compiler/compileBroker.hpp"
42 #include "compiler/compilerEvent.hpp"
43 #include "interpreter/bytecode.hpp"
44 #include "jfr/jfrEvents.hpp"
45 #include "memory/resourceArea.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/vm_version.hpp"
49 #include "utilities/bitMap.inline.hpp"
50 #include "utilities/checkedCast.hpp"
51 #include "utilities/powerOfTwo.hpp"
52 #include "utilities/macros.hpp"
53 #if INCLUDE_JFR
54 #include "jfr/jfr.hpp"
1048 // they are using this local. We don't handle skipping over a
1049 // ret.
1050 for (ScopeData* cur_scope_data = scope_data()->parent();
1051 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1052 cur_scope_data = cur_scope_data->parent()) {
1053 if (cur_scope_data->jsr_return_address_local() == index) {
1054 BAILOUT("subroutine overwrites return address from previous subroutine");
1055 }
1056 }
1057 } else if (index == scope_data()->jsr_return_address_local()) {
1058 scope_data()->set_jsr_return_address_local(-1);
1059 }
1060 }
1061
1062 state->store_local(index, round_fp(x));
1063 }
1064
1065
1066 void GraphBuilder::load_indexed(BasicType type) {
1067   // In case of in-block code motion in range check elimination
1068 ValueStack* state_before = nullptr;
1069 int array_idx = state()->stack_size() - 2;
1070 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1071 // Save the entire state and re-execute on deopt when accessing flat arrays
1072 state_before = copy_state_before();
1073 state_before->set_should_reexecute(true);
1074 } else {
1075 state_before = copy_state_indexed_access();
1076 }
1077 compilation()->set_has_access_indexed(true);
1078 Value index = ipop();
1079 Value array = apop();
1080 Value length = nullptr;
1081 if (CSEArrayLength ||
1082 (array->as_Constant() != nullptr) ||
1083 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1084 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1085 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1086 length = append(new ArrayLength(array, state_before));
1087 }
1088
1089 bool need_membar = false;
1090 LoadIndexed* load_indexed = nullptr;
1091 Instruction* result = nullptr;
1092 if (array->is_loaded_flat_array()) {
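         // The element is stored flat in the array: either materialize a new buffer instance
         // filled from the flat element, reuse the default instance for an empty inline klass,
         // or delay the load in the hope of folding it into a following getfield.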
1093 ciType* array_type = array->declared_type();
1094 ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1095
1096 bool can_delay_access = false;
1097 ciBytecodeStream s(method());
1098 s.force_bci(bci());
1099 s.next();
1100 if (s.cur_bc() == Bytecodes::_getfield) {
1101 bool will_link;
1102 ciField* next_field = s.get_field(will_link);
1103 bool next_needs_patching = !next_field->holder()->is_initialized() ||
1104 !next_field->will_link(method(), Bytecodes::_getfield) ||
1105 PatchALot;
1106 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1107 }
1108 if (can_delay_access) {
1109       // Potentially optimizable array access; store the information needed to decide later.
1110 LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1111 DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1112 li->set_delayed(dli);
1113 set_pending_load_indexed(dli);
1114 return; // Nothing else to do for now
1115 } else {
1116 if (elem_klass->is_empty()) {
1117 // No need to create a new instance, the default instance will be used instead
1118 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1119 apush(append(load_indexed));
1120 } else {
1121 NewInstance* new_instance = new NewInstance(elem_klass, state_before, false, true);
1122 _memory->new_instance(new_instance);
1123 apush(append_split(new_instance));
1124 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1125 load_indexed->set_vt(new_instance);
1126         // The LoadIndexed node will initialize this instance by copying from
1127         // the flat field. Ensure these stores are visible before any
1128         // subsequent store that publishes this reference.
1129 need_membar = true;
1130 }
1131 }
1132 } else {
1133 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1134 if (profile_array_accesses() && is_reference_type(type)) {
1135 compilation()->set_would_profile(true);
1136 load_indexed->set_should_profile(true);
1137 load_indexed->set_profiled_method(method());
1138 load_indexed->set_profiled_bci(bci());
1139 }
1140 }
1141 result = append(load_indexed);
1142 if (need_membar) {
1143 append(new MemBar(lir_membar_storestore));
1144 }
1145 assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1146 if (!array->is_loaded_flat_array()) {
1147 push(as_ValueType(type), result);
1148 }
1149 }
1150
1151
1152 void GraphBuilder::store_indexed(BasicType type) {
1153   // In case of in-block code motion in range check elimination
1154 ValueStack* state_before = nullptr;
1155 int array_idx = state()->stack_size() - 3;
1156 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1157 // Save the entire state and re-execute on deopt when accessing flat arrays
1158 state_before = copy_state_before();
1159 state_before->set_should_reexecute(true);
1160 } else {
1161 state_before = copy_state_indexed_access();
1162 }
1163 compilation()->set_has_access_indexed(true);
1164 Value value = pop(as_ValueType(type));
1165 Value index = ipop();
1166 Value array = apop();
1167 Value length = nullptr;
1168 if (CSEArrayLength ||
1169 (array->as_Constant() != nullptr) ||
1170 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1171 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1172 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1173 length = append(new ArrayLength(array, state_before));
1174 }
1175 ciType* array_type = array->declared_type();
1176 bool check_boolean = false;
1177 if (array_type != nullptr) {
1178 if (array_type->is_loaded() &&
1179 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1180 assert(type == T_BYTE, "boolean store uses bastore");
1181 Value mask = append(new Constant(new IntConstant(1)));
1182 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1183 }
1184 } else if (type == T_BYTE) {
1185 check_boolean = true;
1186 }
1187
1188 StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1189 if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
1190 compilation()->set_would_profile(true);
1191 store_indexed->set_should_profile(true);
1192 store_indexed->set_profiled_method(method());
1193 store_indexed->set_profiled_bci(bci());
1194 }
1195 Instruction* result = append(store_indexed);
1196 assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1197 _memory->store_value(value);
1198 }
1199
1200 void GraphBuilder::stack_op(Bytecodes::Code code) {
1201 switch (code) {
1202 case Bytecodes::_pop:
1203 { Value w = state()->raw_pop();
1204 }
1205 break;
1206 case Bytecodes::_pop2:
1207 { Value w1 = state()->raw_pop();
1208 Value w2 = state()->raw_pop();
1209 }
1210 break;
1211 case Bytecodes::_dup:
1212 { Value w = state()->raw_pop();
1213 state()->raw_push(w);
1214 state()->raw_push(w);
1215 }
1216 break;
1217 case Bytecodes::_dup_x1:
1218 { Value w1 = state()->raw_pop();
1219 Value w2 = state()->raw_pop();
1220 state()->raw_push(w1);
1221 state()->raw_push(w2);
1222 state()->raw_push(w1);
1223 }
1224 break;
1225 case Bytecodes::_dup_x2:
1226 { Value w1 = state()->raw_pop();
1227 Value w2 = state()->raw_pop();
1228 Value w3 = state()->raw_pop();
1367
1368
1369 void GraphBuilder::_goto(int from_bci, int to_bci) {
1370 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1371 if (is_profiling()) {
1372 compilation()->set_would_profile(true);
1373 x->set_profiled_bci(bci());
1374 if (profile_branches()) {
1375 x->set_profiled_method(method());
1376 x->set_should_profile(true);
1377 }
1378 }
1379 append(x);
1380 }
1381
1382
1383 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1384 BlockBegin* tsux = block_at(stream()->get_dest());
1385 BlockBegin* fsux = block_at(stream()->next_bci());
1386 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1387
1388 bool subst_check = false;
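       // A reference comparison on operands that might be value objects cannot be a plain
       // pointer check; decide here whether an acmp substitutability check is required.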
1389 if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1390 ValueType* left_vt = x->type();
1391 ValueType* right_vt = y->type();
1392 if (left_vt->is_object()) {
1393 assert(right_vt->is_object(), "must be");
1394 ciKlass* left_klass = x->as_loaded_klass_or_null();
1395 ciKlass* right_klass = y->as_loaded_klass_or_null();
1396
1397 if (left_klass == nullptr || right_klass == nullptr) {
1398         // The klass is still unloaded, or came from a Phi node. Go slow case.
1399 subst_check = true;
1400 } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1401 // Either operand may be a value object, but we're not sure. Go slow case;
1402         // Either operand may be a value object, but we're not sure. Go slow case.
1403 } else {
1404 // No need to do substitutability check
1405 }
1406 }
1407 }
1408 if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1409 is_profiling() && profile_branches()) {
1410 compilation()->set_would_profile(true);
1411 append(new ProfileACmpTypes(method(), bci(), x, y));
1412 }
1413
1414   // In case of loop invariant code motion or predicate insertion
1415   // before the body of a loop, the state is needed
1416 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
1417
1418 assert(i->as_Goto() == nullptr ||
1419 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == (tsux->bci() < stream()->cur_bci())) ||
1420 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == (fsux->bci() < stream()->cur_bci())),
1421 "safepoint state of Goto returned by canonicalizer incorrect");
1422
1423 if (is_profiling()) {
1424 If* if_node = i->as_If();
1425 if (if_node != nullptr) {
1426 // Note that we'd collect profile data in this method if we wanted it.
1427 compilation()->set_would_profile(true);
1428 // At level 2 we need the proper bci to count backedges
1429 if_node->set_profiled_bci(bci());
1430 if (profile_branches()) {
1431         // Successors can be rotated by the canonicalizer; check for this case.
1432 if_node->set_profiled_method(method());
1433 if_node->set_should_profile(true);
1434 if (if_node->tsux() == fsux) {
1435 if_node->set_swapped(true);
1436 }
1651 }
1652
1653 if (needs_check) {
1654 // Perform the registration of finalizable objects.
1655 ValueStack* state_before = copy_state_for_exception();
1656 load_local(objectType, 0);
1657 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1658 state()->pop_arguments(1),
1659 true, state_before, true));
1660 }
1661 }
1662
1663
1664 void GraphBuilder::method_return(Value x, bool ignore_return) {
1665 if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1666 call_register_finalizer();
1667 }
1668
1669 // The conditions for a memory barrier are described in Parse::do_exits().
1670 bool need_mem_bar = false;
1671 if (method()->is_object_constructor() &&
1672 (scope()->wrote_final() || scope()->wrote_stable() ||
1673 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1674 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1675 need_mem_bar = true;
1676 }
1677
1678 BasicType bt = method()->return_type()->basic_type();
1679 switch (bt) {
1680 case T_BYTE:
1681 {
1682 Value shift = append(new Constant(new IntConstant(24)));
1683 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1684 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1685 break;
1686 }
1687 case T_SHORT:
1688 {
1689 Value shift = append(new Constant(new IntConstant(16)));
1690 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1691 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1802 // Attach dimension info to stable arrays.
1803 if (FoldStableValues &&
1804 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1805 ciArray* array = field_value.as_object()->as_array();
1806 jint dimension = field->type()->as_array_klass()->dimension();
1807 value = new StableArrayConstant(array, dimension);
1808 }
1809
1810 switch (field_type) {
1811 case T_ARRAY:
1812 case T_OBJECT:
1813 if (field_value.as_object()->should_be_constant()) {
1814 return new Constant(value);
1815 }
1816 return nullptr; // Not a constant.
1817 default:
1818 return new Constant(value);
1819 }
1820 }
1821
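     // Copy an inline-type value field by field from src+src_off to dest+dest_off.
     // nonstatic_field_at() iterates the already flattened field list, so no nested flat fields occur here.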
1822 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1823 for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
1824 ciField* inner_field = vk->nonstatic_field_at(i);
1825 assert(!inner_field->is_flat(), "the iteration over nested fields is handled by the loop itself");
1826 int off = inner_field->offset_in_bytes() - vk->first_field_offset();
1827 LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
1828 Value replacement = append(load);
1829 StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
1830 store->set_enclosing_field(enclosing_field);
1831 append(store);
1832 }
1833 }
1834
1835 void GraphBuilder::access_field(Bytecodes::Code code) {
1836 bool will_link;
1837 ciField* field = stream()->get_field(will_link);
1838 ciInstanceKlass* holder = field->holder();
1839 BasicType field_type = field->type()->basic_type();
1840 ValueType* type = as_ValueType(field_type);
1841
1842 // call will_link again to determine if the field is valid.
1843 const bool needs_patching = !holder->is_loaded() ||
1844 !field->will_link(method(), code) ||
1845 (!field->is_flat() && PatchALot);
1846
1847 ValueStack* state_before = nullptr;
1848 if (!holder->is_initialized() || needs_patching) {
1849 // save state before instruction for debug info when
1850 // deoptimization happens during patching
1851 state_before = copy_state_before();
1852 }
1853
1854 Value obj = nullptr;
1855 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1856 if (state_before != nullptr) {
1857 // build a patching constant
1858 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1859 } else {
1860 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1861 }
1862 }
1863
1864 if (code == Bytecodes::_putfield) {
1865 scope()->set_wrote_fields();
1866 if (field->is_volatile()) {
1867 scope()->set_wrote_volatile();
1868 }
1869 if (field->is_final()) {
1870 scope()->set_wrote_final();
1871 }
1872 if (field->is_stable()) {
1873 scope()->set_wrote_stable();
1874 }
1875 }
1876
1877 int offset = !needs_patching ? field->offset_in_bytes() : -1;
1878 switch (code) {
1879 case Bytecodes::_getstatic: {
1880 // check for compile-time constants, i.e., initialized static final fields
1881 Value constant = nullptr;
1882 if (field->is_static_constant() && !PatchALot) {
1883 ciConstant field_value = field->constant_value();
1884 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1885 "stable static w/ default value shouldn't be a constant");
1886 constant = make_constant(field_value, field);
1887 } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1888 field->type()->as_inline_klass()->is_empty()) {
1889 // Loading from a field of an empty inline type. Just return the default instance.
1890 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1891 }
1892 if (constant != nullptr) {
1893 push(type, append(constant));
1894 } else {
1895 if (state_before == nullptr) {
1896 state_before = copy_state_for_exception();
1897 }
1898 LoadField* load_field = new LoadField(append(obj), offset, field, true,
1899 state_before, needs_patching);
1900 push(type, append(load_field));
1901 }
1902 break;
1903 }
1904 case Bytecodes::_putstatic: {
1905 Value val = pop(type);
1906 if (state_before == nullptr) {
1907 state_before = copy_state_for_exception();
1908 }
1909 if (field_type == T_BOOLEAN) {
1910 Value mask = append(new Constant(new IntConstant(1)));
1911 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1912 }
1913 if (field->is_null_free()) {
1914 null_check(val);
1915 }
1916 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1917 // Storing to a field of an empty inline type. Ignore.
1918 break;
1919 }
1920 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1921 break;
1922 }
1923 case Bytecodes::_getfield: {
1924 // Check for compile-time constants, i.e., trusted final non-static fields.
1925 Value constant = nullptr;
1926 if (state_before == nullptr && field->is_flat()) {
1927 // Save the entire state and re-execute on deopt when accessing flat fields
1928 assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1929 state_before = copy_state_before();
1930 }
1931 if (!has_pending_field_access() && !has_pending_load_indexed()) {
1932 obj = apop();
1933 ObjectType* obj_type = obj->type()->as_ObjectType();
1934 if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
1935 && field->type()->as_inline_klass()->is_empty()) {
1936 // Loading from a field of an empty inline type. Just return the default instance.
1937 null_check(obj);
1938 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1939 } else if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
1940 ciObject* const_oop = obj_type->constant_value();
1941 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1942 ciConstant field_value = field->constant_value_of(const_oop);
1943 if (field_value.is_valid()) {
1944 if (field->is_null_free() && field_value.is_null_or_zero()) {
1945 // Non-flat inline type field. Replace null by the default value.
1946 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1947 } else {
1948 constant = make_constant(field_value, field);
1949 }
1950 // For CallSite objects add a dependency for invalidation of the optimization.
1951 if (field->is_call_site_target()) {
1952 ciCallSite* call_site = const_oop->as_call_site();
1953 if (!call_site->is_fully_initialized_constant_call_site()) {
1954 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1955 dependency_recorder()->assert_call_site_target_value(call_site, target);
1956 }
1957 }
1958 }
1959 }
1960 }
1961 }
1962 if (constant != nullptr) {
1963 push(type, append(constant));
1964 } else {
1965 if (state_before == nullptr) {
1966 state_before = copy_state_for_exception();
1967 }
1968 if (!field->is_flat()) {
1969 if (has_pending_field_access()) {
1970 assert(!needs_patching, "Can't patch delayed field access");
1971 obj = pending_field_access()->obj();
1972 offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
1973 field = pending_field_access()->holder()->get_field_by_offset(offset, false);
1974 assert(field != nullptr, "field not found");
1975 set_pending_field_access(nullptr);
1976 } else if (has_pending_load_indexed()) {
1977 assert(!needs_patching, "Can't patch delayed field access");
1978 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
1979 LoadIndexed* li = pending_load_indexed()->load_instr();
1980 li->set_type(type);
1981 push(type, append(li));
1982 set_pending_load_indexed(nullptr);
1983 break;
1984 }
1985 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1986 Value replacement = !needs_patching ? _memory->load(load) : load;
1987 if (replacement != load) {
1988           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1989 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1990 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1991 switch (field_type) {
1992 case T_BOOLEAN:
1993 case T_BYTE:
1994 replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
1995 break;
1996 case T_CHAR:
1997 replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
1998 break;
1999 case T_SHORT:
2000 replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
2001 break;
2002 default:
2003 break;
2004 }
2005 push(type, replacement);
2006 } else {
2007 push(type, append(load));
2008 }
2009 } else { // field is flat
2010 // Look at the next bytecode to check if we can delay the field access
2011 bool can_delay_access = false;
2012 ciBytecodeStream s(method());
2013 s.force_bci(bci());
2014 s.next();
2015 if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
2016 ciField* next_field = s.get_field(will_link);
2017 bool next_needs_patching = !next_field->holder()->is_loaded() ||
2018 !next_field->will_link(method(), Bytecodes::_getfield) ||
2019 PatchALot;
2020 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
2021 }
2022 if (can_delay_access) {
2023 if (has_pending_load_indexed()) {
2024 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2025 } else if (has_pending_field_access()) {
2026 pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
2027 } else {
2028 null_check(obj);
2029 DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes(), state_before);
2030 set_pending_field_access(dfa);
2031 }
2032 } else {
2033 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2034 scope()->set_wrote_final();
2035 scope()->set_wrote_fields();
2036 bool need_membar = false;
2037 if (inline_klass->is_initialized() && inline_klass->is_empty()) {
2038 apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
2039 if (has_pending_field_access()) {
2040 set_pending_field_access(nullptr);
2041 } else if (has_pending_load_indexed()) {
2042 set_pending_load_indexed(nullptr);
2043 }
2044 } else if (has_pending_load_indexed()) {
2045 assert(!needs_patching, "Can't patch delayed field access");
2046 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2047 NewInstance* vt = new NewInstance(inline_klass, pending_load_indexed()->state_before(), false, true);
2048 _memory->new_instance(vt);
2049 pending_load_indexed()->load_instr()->set_vt(vt);
2050 apush(append_split(vt));
2051 append(pending_load_indexed()->load_instr());
2052 set_pending_load_indexed(nullptr);
2053 need_membar = true;
2054 } else {
2055 if (has_pending_field_access()) {
2056 state_before = pending_field_access()->state_before();
2057 }
2058 NewInstance* new_instance = new NewInstance(inline_klass, state_before, false, true);
2059 _memory->new_instance(new_instance);
2060 apush(append_split(new_instance));
2061 assert(!needs_patching, "Can't patch flat inline type field access");
2062 if (has_pending_field_access()) {
2063 copy_inline_content(inline_klass, pending_field_access()->obj(),
2064 pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->first_field_offset(),
2065 new_instance, inline_klass->first_field_offset(), state_before);
2066 set_pending_field_access(nullptr);
2067 } else {
2068 copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->first_field_offset(), state_before);
2069 }
2070 need_membar = true;
2071 }
2072 if (need_membar) {
2073           // If we allocated a new instance, ensure the stores that copy the
2074           // field contents are visible before any subsequent store that
2075           // publishes this reference.
2076 append(new MemBar(lir_membar_storestore));
2077 }
2078 }
2079 }
2080 }
2081 break;
2082 }
2083 case Bytecodes::_putfield: {
2084 Value val = pop(type);
2085 obj = apop();
2086 if (state_before == nullptr) {
2087 state_before = copy_state_for_exception();
2088 }
2089 if (field_type == T_BOOLEAN) {
2090 Value mask = append(new Constant(new IntConstant(1)));
2091 val = append(new LogicOp(Bytecodes::_iand, val, mask));
2092 }
2093 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2094 // Storing to a field of an empty inline type. Ignore.
2095 null_check(obj);
2096 null_check(val);
2097 } else if (!field->is_flat()) {
2098 if (field->is_null_free()) {
2099 null_check(val);
2100 }
2101 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2102 if (!needs_patching) store = _memory->store(store);
2103 if (store != nullptr) {
2104 append(store);
2105 }
2106 } else {
2107 assert(!needs_patching, "Can't patch flat inline type field access");
2108 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2109 copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
2110 }
2111 break;
2112 }
2113 default:
2114 ShouldNotReachHere();
2115 break;
2116 }
2117 }
2118
2119 Dependencies* GraphBuilder::dependency_recorder() const {
2120 assert(DeoptC1, "need debug information");
2121 return compilation()->dependency_recorder();
2122 }
2123
2124 // How many arguments do we want to profile?
2125 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2126 int n = 0;
2127 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2128 start = has_receiver ? 1 : 0;
2129 if (profile_arguments()) {
2130 ciProfileData* data = method()->method_data()->bci_to_data(bci());
2131 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2132 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2133 }
2134 }
2135 // If we are inlining then we need to collect arguments to profile parameters for the target
2136 if (profile_parameters() && target != nullptr) {
2137 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
2138 // The receiver is profiled on method entry so it's included in
2215 break;
2216 case Bytecodes::_invokehandle:
2217 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2218 break;
2219 default:
2220 break;
2221 }
2222 } else {
2223 if (bc_raw == Bytecodes::_invokehandle) {
2224 assert(!will_link, "should come here only for unlinked call");
2225 code = Bytecodes::_invokespecial;
2226 }
2227 }
2228
2229 if (code == Bytecodes::_invokespecial) {
2230 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2231 ciKlass* receiver_constraint = nullptr;
2232
2233 if (bc_raw == Bytecodes::_invokeinterface) {
2234 receiver_constraint = holder;
2235 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2236 receiver_constraint = calling_klass;
2237 }
2238
2239 if (receiver_constraint != nullptr) {
2240 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2241 Value receiver = state()->stack_at(index);
2242 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2243 // go to uncommon_trap when checkcast fails
2244 c->set_invokespecial_receiver_check();
2245 state()->stack_at_put(index, append_split(c));
2246 }
2247 }
2248
2249   // Push appendix argument (MethodType, CallSite, etc.), if there is one.
2250 bool patch_for_appendix = false;
2251 int patching_appendix_arg = 0;
2252 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2253 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2254 apush(arg);
2255 patch_for_appendix = true;
2471 }
2472 }
2473
2474 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2475 // push result
2476 append_split(result);
2477
2478 if (result_type != voidType) {
2479 push(result_type, round_fp(result));
2480 }
2481 if (profile_return() && result_type->is_object_kind()) {
2482 profile_return_type(result, target);
2483 }
2484 }
2485
2486
2487 void GraphBuilder::new_instance(int klass_index) {
2488 ValueStack* state_before = copy_state_exhandling();
2489 ciKlass* klass = stream()->get_klass();
2490 assert(klass->is_instance_klass(), "must be an instance klass");
2491 if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2492 klass->as_inline_klass()->is_initialized() && klass->as_inline_klass()->is_empty()) {
2493 ciInlineKlass* vk = klass->as_inline_klass();
2494 apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2495 } else {
2496 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass(), false);
2497 _memory->new_instance(new_instance);
2498 apush(append_split(new_instance));
2499 }
2500 }
2501
2502 void GraphBuilder::new_type_array() {
2503 ValueStack* state_before = copy_state_exhandling();
2504 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before, true)));
2505 }
2506
2507
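// Note on the state_before pattern used by the allocation and type-check bytecodes
// below: if the klass is still unresolved (or PatchALot forces patching), the full
// state before the bytecode is kept so the patching slow path can re-execute it;
// otherwise a lighter exception-handling snapshot (copy_state_exhandling) suffices.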
2508 void GraphBuilder::new_object_array() {
2509 ciKlass* klass = stream()->get_klass();
2510 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2511 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2512 apush(append_split(n));
2513 }
2514
2515
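// Sketch: direct_compare returns true when an instanceof/checkcast against 'k' can be
// lowered to a single klass-pointer comparison. A loaded, final instance klass is the
// simplest such case, since it has no subclasses and the check reduces to an exact
// type match.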
2516 bool GraphBuilder::direct_compare(ciKlass* k) {
2517 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2518 ciInstanceKlass* ik = k->as_instance_klass();
2519 if (ik->is_final()) {
2520 return true;
2521 } else {
2554 ciKlass* klass = stream()->get_klass();
2555 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2556 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2557 ipush(append_split(i));
2558 i->set_direct_compare(direct_compare(klass));
2559
2560 if (is_profiling()) {
2561 // Note that we'd collect profile data in this method if we wanted it.
2562 compilation()->set_would_profile(true);
2563
2564 if (profile_checkcasts()) {
2565 i->set_profiled_method(method());
2566 i->set_profiled_bci(bci());
2567 i->set_should_profile(true);
2568 }
2569 }
2570 }
2571
2572
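// Note: monitorenter is reached either from inline_sync_entry (bci == InvocationEntryBci,
// locking the receiver or class mirror of a synchronized method) or from an explicit
// monitorenter bytecode. Under Valhalla, locking on an inline type is illegal; if the
// static type cannot rule that out, maybe_inlinetype is set so the runtime check can
// throw IllegalMonitorStateException.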
2573 void GraphBuilder::monitorenter(Value x, int bci) {
2574 bool maybe_inlinetype = false;
2575 if (bci == InvocationEntryBci) {
2576 // Called by GraphBuilder::inline_sync_entry.
2577 #ifdef ASSERT
2578 ciType* obj_type = x->declared_type();
2579 assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2580 #endif
2581 } else {
2582 // We are compiling a monitorenter bytecode
2583 if (EnableValhalla) {
2584 ciType* obj_type = x->declared_type();
2585 if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
2586 // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2587 // and throw IllegalMonitorStateException (IMSE). (obj_type is null for Phi nodes, so let's just be conservative).
2588 maybe_inlinetype = true;
2589 }
2590 }
2591 }
2592
2593 // save state before locking in case of deoptimization after a NullPointerException
2594 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2595 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
2596 kill_all();
2597 }
2598
2599
2600 void GraphBuilder::monitorexit(Value x, int bci) {
2601 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2602 kill_all();
2603 }
2604
2605
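// Note: multianewarray pops its dimension sizes so that the outermost dimension ends up
// at dims->at(0). E.g. for a (hypothetical) 'new int[2][3]', the stack holds 2 below 3;
// the loop below stores 3 at index 1 and then 2 at index 0.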
2606 void GraphBuilder::new_multi_array(int dimensions) {
2607 ciKlass* klass = stream()->get_klass();
2608 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2609
2610 Values* dims = new Values(dimensions, dimensions, nullptr);
2611 // fill in all dimensions
2612 int i = dimensions;
2613 while (i-- > 0) dims->at_put(i, ipop());
2614 // create array
2615 NewArray* n = new NewMultiArray(klass, dims, state_before);
2723
2724 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2725 return append_with_bci(instr, bci());
2726 }
2727
2728
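// Note: an explicit NullCheck is only appended when the value can actually be null.
// Freshly allocated arrays and instances, loaded non-null object constants, and values
// statically known to be null-free are all exempt.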
2729 void GraphBuilder::null_check(Value value) {
2730 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2731 return;
2732 } else {
2733 Constant* con = value->as_Constant();
2734 if (con) {
2735 ObjectType* c = con->type()->as_ObjectType();
2736 if (c && c->is_loaded()) {
2737 ObjectConstant* oc = c->as_ObjectConstant();
2738 if (!oc || !oc->value()->is_null_object()) {
2739 return;
2740 }
2741 }
2742 }
2743 if (value->is_null_free()) return;
2744 }
2745 append(new NullCheck(value, copy_state_for_exception()));
2746 }
2747
2748
2749
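// Collects the exception handlers that may cover 'instruction'. The loop below walks the
// scope chain from the innermost scope outwards, joining every handler whose range covers
// the current bci; a handler that covers its own entry block is very rare and triggers a
// bailout rather than special handling.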
2750 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2751 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2752 assert(instruction->exception_state() == nullptr
2753 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2754 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2755 "exception_state should be of exception kind");
2756 return new XHandlers();
2757 }
2758
2759 XHandlers* exception_handlers = new XHandlers();
2760 ScopeData* cur_scope_data = scope_data();
2761 ValueStack* cur_state = instruction->state_before();
2762 ValueStack* prev_state = nullptr;
2763 int scope_count = 0;
2764
2765 assert(cur_state != nullptr, "state_before must be set");
2766 do {
2767 int cur_bci = cur_state->bci();
2768 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2769 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2770 || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2771
2772
2773 // join with all potential exception handlers
2774 XHandlers* list = cur_scope_data->xhandlers();
2775 const int n = list->length();
2776 for (int i = 0; i < n; i++) {
2777 XHandler* h = list->handler_at(i);
2778 if (h->covers(cur_bci)) {
2779 // h is a potential exception handler => join it
2780 compilation()->set_has_exception_handlers(true);
2781
2782 BlockBegin* entry = h->entry_block();
2783 if (entry == block()) {
2784 // It's acceptable for an exception handler to cover itself,
2785 // but we don't handle that in the parser currently. It's
2786 // very rare, so we bail out instead of trying to handle it.
2787 BAILOUT_("exception handler covers itself", exception_handlers);
2788 }
2789 assert(entry->bci() == h->handler_bci(), "must match");
2790 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2791
3533 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3534
3535 assert(state->caller_state() == nullptr, "should be top scope");
3536 state->clear_locals();
3537 Goto* g = new Goto(target, false);
3538 append(g);
3539 _osr_entry->set_end(g);
3540 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3541
3542 scope_data()->set_stream(nullptr);
3543 }
3544
3545
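// Builds the ValueStack describing the method's state on entry: local 0 holds the
// receiver for non-static methods, the remaining locals are seeded from the signature
// (long/double take two slots via type->size()), and synchronized methods reserve a
// lock slot up front.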
3546 ValueStack* GraphBuilder::state_at_entry() {
3547 ValueStack* state = new ValueStack(scope(), nullptr);
3548
3549 // Set up locals for receiver
3550 int idx = 0;
3551 if (!method()->is_static()) {
3552 // we should always see the receiver
3553 state->store_local(idx, new Local(method()->holder(), objectType, idx,
3554 /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3555 idx = 1;
3556 }
3557
3558 // Set up locals for incoming arguments
3559 ciSignature* sig = method()->signature();
3560 for (int i = 0; i < sig->count(); i++) {
3561 ciType* type = sig->type_at(i);
3562 BasicType basic_type = type->basic_type();
3564 // don't allow T_ARRAY to propagate into the types of locals
3564 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3565 ValueType* vt = as_ValueType(basic_type);
3566 state->store_local(idx, new Local(type, vt, idx, false, false));
3567 idx += type->size();
3568 }
3569
3570 // lock synchronized method
3571 if (method()->is_synchronized()) {
3572 state->lock(nullptr);
3573 }
3574
3575 return state;
3576 }
3577
3578
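// The GraphBuilder constructor drives the whole build: BlockListBuilder computes the
// entry points and the bci -> BlockBegin mapping, the root scope is pushed, the entry
// state is merged into the start block, and the blocks are then parsed into the graph.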
3579 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3580 : _scope_data(nullptr)
3581 , _compilation(compilation)
3582 , _memory(new MemoryBuffer())
3583 , _inline_bailout_msg(nullptr)
3584 , _instruction_count(0)
3585 , _osr_entry(nullptr)
3586 , _pending_field_access(nullptr)
3587 , _pending_load_indexed(nullptr)
3588 {
3589 int osr_bci = compilation->osr_bci();
3590
3591 // determine entry points and bci2block mapping
3592 BlockListBuilder blm(compilation, scope, osr_bci);
3593 CHECK_BAILOUT();
3594
3595 BlockList* bci2block = blm.bci2block();
3596 BlockBegin* start_block = bci2block->at(0);
3597
3598 push_root_scope(scope, bci2block, start_block);
3599
3600 // set up the state for the standard entry
3601 _initial_state = state_at_entry();
3602 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3603
3604 // BlockEnd pointers of the blocks are still null at this point
3605
3606 // complete graph
3607 _vmap = new ValueMap();