13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Canonicalizer.hpp"
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_GraphBuilder.hpp"
30 #include "c1/c1_InstructionPrinter.hpp"
31 #include "ci/ciCallSite.hpp"
32 #include "ci/ciField.hpp"
33 #include "ci/ciKlass.hpp"
34 #include "ci/ciMemberName.hpp"
35 #include "ci/ciSymbols.hpp"
36 #include "ci/ciUtilities.inline.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "compiler/compilationPolicy.hpp"
39 #include "compiler/compileBroker.hpp"
40 #include "compiler/compilerEvent.hpp"
41 #include "interpreter/bytecode.hpp"
42 #include "jfr/jfrEvents.hpp"
43 #include "memory/resourceArea.hpp"
44 #include "oops/oop.inline.hpp"
45 #include "runtime/sharedRuntime.hpp"
46 #include "runtime/vm_version.hpp"
47 #include "utilities/bitMap.inline.hpp"
48 #include "utilities/checkedCast.hpp"
49 #include "utilities/powerOfTwo.hpp"
50 #include "utilities/macros.hpp"
51 #if INCLUDE_JFR
52 #include "jfr/jfr.hpp"
1046 // they are using this local. We don't handle skipping over a
1047 // ret.
1048 for (ScopeData* cur_scope_data = scope_data()->parent();
1049 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1050 cur_scope_data = cur_scope_data->parent()) {
1051 if (cur_scope_data->jsr_return_address_local() == index) {
1052 BAILOUT("subroutine overwrites return address from previous subroutine");
1053 }
1054 }
1055 } else if (index == scope_data()->jsr_return_address_local()) {
1056 scope_data()->set_jsr_return_address_local(-1);
1057 }
1058 }
1059
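// round_fp() only matters on platforms where FP values are kept in
// wider-than-declared registers (historically x87 strictfp handling);
// on all other targets it returns the value unchanged.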
1060 state->store_local(index, round_fp(x));
1061 }
1062
1063
1064 void GraphBuilder::load_indexed(BasicType type) {
1065 // In case of in-block code motion in range check elimination
1066 ValueStack* state_before = copy_state_indexed_access();
1067 compilation()->set_has_access_indexed(true);
1068 Value index = ipop();
1069 Value array = apop();
1070 Value length = nullptr;
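// Append an explicit ArrayLength only when it is likely to pay off: either
// array-length CSE is enabled, or the array (and hence its length) is a
// compile-time constant.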
1071 if (CSEArrayLength ||
1072 (array->as_Constant() != nullptr) ||
1073 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1074 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1075 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1076 length = append(new ArrayLength(array, state_before));
1077 }
1078 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1079 }
1080
1081
1082 void GraphBuilder::store_indexed(BasicType type) {
1083 // In case of in-block code motion in range check elimination
1084 ValueStack* state_before = copy_state_indexed_access();
1085 compilation()->set_has_access_indexed(true);
1086 Value value = pop(as_ValueType(type));
1087 Value index = ipop();
1088 Value array = apop();
1089 Value length = nullptr;
1090 if (CSEArrayLength ||
1091 (array->as_Constant() != nullptr) ||
1092 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1093 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1094 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1095 length = append(new ArrayLength(array, state_before));
1096 }
1097 ciType* array_type = array->declared_type();
1098 bool check_boolean = false;
1099 if (array_type != nullptr) {
1100 if (array_type->is_loaded() &&
1101 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1102 assert(type == T_BYTE, "boolean store uses bastore");
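// A boolean element must read back as 0 or 1, so narrow the incoming int
// to its lowest bit before the store.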
1103 Value mask = append(new Constant(new IntConstant(1)));
1104 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1105 }
1106 } else if (type == T_BYTE) {
1107 check_boolean = true;
1108 }
1109 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1110 append(result);
1111 _memory->store_value(value);
1112
1113 if (type == T_OBJECT && is_profiling()) {
1114 // Note that we'd collect profile data in this method if we wanted it.
1115 compilation()->set_would_profile(true);
1116
1117 if (profile_checkcasts()) {
1118 result->set_profiled_method(method());
1119 result->set_profiled_bci(bci());
1120 result->set_should_profile(true);
1121 }
1122 }
1123 }
1124
1125
1126 void GraphBuilder::stack_op(Bytecodes::Code code) {
1127 switch (code) {
1128 case Bytecodes::_pop:
1129 { state()->raw_pop();
1130 }
1131 break;
1132 case Bytecodes::_pop2:
1133 { state()->raw_pop();
1134 state()->raw_pop();
1135 }
1136 break;
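// The dup* cases below implement the JVMS stack transitions, e.g.
// dup_x1: ..., w2, w1 => ..., w1, w2, w1
// dup_x2: ..., w3, w2, w1 => ..., w1, w3, w2, w1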
1137 case Bytecodes::_dup:
1138 { Value w = state()->raw_pop();
1139 state()->raw_push(w);
1140 state()->raw_push(w);
1141 }
1142 break;
1143 case Bytecodes::_dup_x1:
1144 { Value w1 = state()->raw_pop();
1145 Value w2 = state()->raw_pop();
1146 state()->raw_push(w1);
1147 state()->raw_push(w2);
1148 state()->raw_push(w1);
1149 }
1150 break;
1151 case Bytecodes::_dup_x2:
1152 { Value w1 = state()->raw_pop();
1153 Value w2 = state()->raw_pop();
1154 Value w3 = state()->raw_pop();
1293
1294
1295 void GraphBuilder::_goto(int from_bci, int to_bci) {
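// to_bci <= from_bci means a backward branch, so the Goto is created as a
// safepoint to allow polling on the loop backedge.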
1296 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1297 if (is_profiling()) {
1298 compilation()->set_would_profile(true);
1299 x->set_profiled_bci(bci());
1300 if (profile_branches()) {
1301 x->set_profiled_method(method());
1302 x->set_should_profile(true);
1303 }
1304 }
1305 append(x);
1306 }
1307
1308
1309 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1310 BlockBegin* tsux = block_at(stream()->get_dest());
1311 BlockBegin* fsux = block_at(stream()->next_bci());
1312 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1313 // In case of loop invariant code motion or predicate insertion
1314 // before the body of a loop, the state is needed
1315 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
1316
1317 assert(i->as_Goto() == nullptr ||
1318 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1319 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1320 "safepoint state of Goto returned by canonicalizer incorrect");
1321
1322 if (is_profiling()) {
1323 If* if_node = i->as_If();
1324 if (if_node != nullptr) {
1325 // Note that we'd collect profile data in this method if we wanted it.
1326 compilation()->set_would_profile(true);
1327 // At level 2 we need the proper bci to count backedges
1328 if_node->set_profiled_bci(bci());
1329 if (profile_branches()) {
1330 // Successors can be rotated by the canonicalizer, check for this case.
1331 if_node->set_profiled_method(method());
1332 if_node->set_should_profile(true);
1333 if (if_node->tsux() == fsux) {
1334 if_node->set_swapped(true);
1335 }
1546
1547 if (needs_check) {
1548 // Perform the registration of finalizable objects.
1549 ValueStack* state_before = copy_state_for_exception();
1550 load_local(objectType, 0);
1551 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1552 state()->pop_arguments(1),
1553 true, state_before, true));
1554 }
1555 }
1556
1557
1558 void GraphBuilder::method_return(Value x, bool ignore_return) {
1559 if (RegisterFinalizersAtInit &&
1560 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1561 call_register_finalizer();
1562 }
1563
1564 // The conditions for a memory barrier are described in Parse::do_exits().
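// In short: a constructor that wrote final fields (or any fields with
// AlwaysSafeConstructors, or volatiles on IRIW-sensitive CPUs) must emit a
// barrier so those writes cannot be reordered past publication of the object.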
1565 bool need_mem_bar = false;
1566 if (method()->name() == ciSymbols::object_initializer_name() &&
1567 (scope()->wrote_final() ||
1568 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1569 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1570 need_mem_bar = true;
1571 }
1572
1573 BasicType bt = method()->return_type()->basic_type();
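// Sub-int return values are truncated to their declared width using the
// usual shift idiom; e.g. for T_BYTE, (x << 24) >> 24 sign-extends bit 7
// into the upper bits.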
1574 switch (bt) {
1575 case T_BYTE:
1576 {
1577 Value shift = append(new Constant(new IntConstant(24)));
1578 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1579 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1580 break;
1581 }
1582 case T_SHORT:
1583 {
1584 Value shift = append(new Constant(new IntConstant(16)));
1585 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1586 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1697 // Attach dimension info to stable arrays.
1698 if (FoldStableValues &&
1699 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1700 ciArray* array = field_value.as_object()->as_array();
1701 jint dimension = field->type()->as_array_klass()->dimension();
1702 value = new StableArrayConstant(array, dimension);
1703 }
1704
1705 switch (field_type) {
1706 case T_ARRAY:
1707 case T_OBJECT:
1708 if (field_value.as_object()->should_be_constant()) {
1709 return new Constant(value);
1710 }
1711 return nullptr; // Not a constant.
1712 default:
1713 return new Constant(value);
1714 }
1715 }
1716
1717 void GraphBuilder::access_field(Bytecodes::Code code) {
1718 bool will_link;
1719 ciField* field = stream()->get_field(will_link);
1720 ciInstanceKlass* holder = field->holder();
1721 BasicType field_type = field->type()->basic_type();
1722 ValueType* type = as_ValueType(field_type);
1723 // call will_link again to determine if the field is valid.
1724 const bool needs_patching = !holder->is_loaded() ||
1725 !field->will_link(method(), code) ||
1726 PatchALot;
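// PatchALot is a develop-mode stress flag that forces the runtime patching
// path even for fields that would link cleanly.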
1727
1728 ValueStack* state_before = nullptr;
1729 if (!holder->is_initialized() || needs_patching) {
1730 // save state before instruction for debug info when
1731 // deoptimization happens during patching
1732 state_before = copy_state_before();
1733 }
1734
1735 Value obj = nullptr;
1736 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1737 if (state_before != nullptr) {
1738 // build a patching constant
1739 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1740 } else {
1741 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1742 }
1743 }
1744
1745 if (field->is_final() && (code == Bytecodes::_putfield)) {
1746 scope()->set_wrote_final();
1747 }
1748
1749 if (code == Bytecodes::_putfield) {
1750 scope()->set_wrote_fields();
1751 if (field->is_volatile()) {
1752 scope()->set_wrote_volatile();
1753 }
1754 }
1755
1756 const int offset = !needs_patching ? field->offset_in_bytes() : -1;
1757 switch (code) {
1758 case Bytecodes::_getstatic: {
1759 // check for compile-time constants, i.e., initialized static final fields
1760 Value constant = nullptr;
1761 if (field->is_static_constant() && !PatchALot) {
1762 ciConstant field_value = field->constant_value();
1763 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1764 "stable static w/ default value shouldn't be a constant");
1765 constant = make_constant(field_value, field);
1766 }
1767 if (constant != nullptr) {
1768 push(type, append(constant));
1769 } else {
1770 if (state_before == nullptr) {
1771 state_before = copy_state_for_exception();
1772 }
1773 push(type, append(new LoadField(append(obj), offset, field, true,
1774 state_before, needs_patching)));
1775 }
1776 break;
1777 }
1778 case Bytecodes::_putstatic: {
1779 Value val = pop(type);
1780 if (state_before == nullptr) {
1781 state_before = copy_state_for_exception();
1782 }
1783 if (field->type()->basic_type() == T_BOOLEAN) {
1784 Value mask = append(new Constant(new IntConstant(1)));
1785 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1786 }
1787 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1788 break;
1789 }
1790 case Bytecodes::_getfield: {
1791 // Check for compile-time constants, i.e., trusted final non-static fields.
1792 Value constant = nullptr;
1793 obj = apop();
1794 ObjectType* obj_type = obj->type()->as_ObjectType();
1795 if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1796 ciObject* const_oop = obj_type->constant_value();
1797 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1798 ciConstant field_value = field->constant_value_of(const_oop);
1799 if (field_value.is_valid()) {
1800 constant = make_constant(field_value, field);
1801 // For CallSite objects add a dependency for invalidation of the optimization.
1802 if (field->is_call_site_target()) {
1803 ciCallSite* call_site = const_oop->as_call_site();
1804 if (!call_site->is_fully_initialized_constant_call_site()) {
1805 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1806 dependency_recorder()->assert_call_site_target_value(call_site, target);
1807 }
1808 }
1809 }
1810 }
1811 }
1812 if (constant != nullptr) {
1813 push(type, append(constant));
1814 } else {
1815 if (state_before == nullptr) {
1816 state_before = copy_state_for_exception();
1817 }
1818 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1819 Value replacement = !needs_patching ? _memory->load(load) : load;
1820 if (replacement != load) {
1821 assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1822 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1823 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1824 BasicType bt = field->type()->basic_type();
1825 switch (bt) {
1826 case T_BOOLEAN:
1827 case T_BYTE:
1828 replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1829 break;
1830 case T_CHAR:
1831 replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1832 break;
1833 case T_SHORT:
1834 replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1835 break;
1836 default:
1837 break;
1838 }
1839 push(type, replacement);
1840 } else {
1841 push(type, append(load));
1842 }
1843 }
1844 break;
1845 }
1846 case Bytecodes::_putfield: {
1847 Value val = pop(type);
1848 obj = apop();
1849 if (state_before == nullptr) {
1850 state_before = copy_state_for_exception();
1851 }
1852 if (field->type()->basic_type() == T_BOOLEAN) {
1853 Value mask = append(new Constant(new IntConstant(1)));
1854 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1855 }
1856 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1857 if (!needs_patching) store = _memory->store(store);
1858 if (store != nullptr) {
1859 append(store);
1860 }
1861 break;
1862 }
1863 default:
1864 ShouldNotReachHere();
1865 break;
1866 }
1867 }
1868
1869
1870 Dependencies* GraphBuilder::dependency_recorder() const {
1871 assert(DeoptC1, "need debug information");
1872 return compilation()->dependency_recorder();
1873 }
1874
1875 // How many arguments do we want to profile?
1876 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1877 int n = 0;
1878 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1879 start = has_receiver ? 1 : 0;
1880 if (profile_arguments()) {
1881 ciProfileData* data = method()->method_data()->bci_to_data(bci());
1882 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1883 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1884 }
1885 }
1886 // If we are inlining, then we need to collect arguments to profile parameters for the target
1887 if (profile_parameters() && target != nullptr) {
1888 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
1889 // The receiver is profiled on method entry so it's included in
1966 break;
1967 case Bytecodes::_invokehandle:
1968 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1969 break;
1970 default:
1971 break;
1972 }
1973 } else {
1974 if (bc_raw == Bytecodes::_invokehandle) {
1975 assert(!will_link, "should come here only for unlinked call");
1976 code = Bytecodes::_invokespecial;
1977 }
1978 }
1979
1980 if (code == Bytecodes::_invokespecial) {
1981 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1982 ciKlass* receiver_constraint = nullptr;
1983
1984 if (bc_raw == Bytecodes::_invokeinterface) {
1985 receiver_constraint = holder;
1986 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1987 receiver_constraint = calling_klass;
1988 }
1989
1990 if (receiver_constraint != nullptr) {
1991 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1992 Value receiver = state()->stack_at(index);
1993 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
1994 // go to uncommon_trap when checkcast fails
1995 c->set_invokespecial_receiver_check();
1996 state()->stack_at_put(index, append_split(c));
1997 }
1998 }
1999
2000 // Push the appendix argument (MethodType, CallSite, etc.), if there is one.
2001 bool patch_for_appendix = false;
2002 int patching_appendix_arg = 0;
2003 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2004 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2005 apush(arg);
2006 patch_for_appendix = true;
2219 }
2220 }
2221
2222 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2223 // push result
2224 append_split(result);
2225
2226 if (result_type != voidType) {
2227 push(result_type, round_fp(result));
2228 }
2229 if (profile_return() && result_type->is_object_kind()) {
2230 profile_return_type(result, target);
2231 }
2232 }
2233
2234
2235 void GraphBuilder::new_instance(int klass_index) {
2236 ValueStack* state_before = copy_state_exhandling();
2237 ciKlass* klass = stream()->get_klass();
2238 assert(klass->is_instance_klass(), "must be an instance klass");
2239 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2240 _memory->new_instance(new_instance);
2241 apush(append_split(new_instance));
2242 }
2243
2244
2245 void GraphBuilder::new_type_array() {
2246 ValueStack* state_before = copy_state_exhandling();
2247 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2248 }
2249
2250
2251 void GraphBuilder::new_object_array() {
2252 ciKlass* klass = stream()->get_klass();
2253 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2254 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2255 apush(append_split(n));
2256 }
2257
2258
2259 bool GraphBuilder::direct_compare(ciKlass* k) {
2260 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2261 ciInstanceKlass* ik = k->as_instance_klass();
2262 if (ik->is_final()) {
2263 return true;
2264 } else {
2297 ciKlass* klass = stream()->get_klass();
2298 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2299 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2300 ipush(append_split(i));
2301 i->set_direct_compare(direct_compare(klass));
2302
2303 if (is_profiling()) {
2304 // Note that we'd collect profile data in this method if we wanted it.
2305 compilation()->set_would_profile(true);
2306
2307 if (profile_checkcasts()) {
2308 i->set_profiled_method(method());
2309 i->set_profiled_bci(bci());
2310 i->set_should_profile(true);
2311 }
2312 }
2313 }
2314
2315
2316 void GraphBuilder::monitorenter(Value x, int bci) {
2317 // save state before locking in case of deoptimization after a NullPointerException
2318 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2319 compilation()->set_has_monitors(true);
2320 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2321 kill_all();
2322 }
2323
2324
2325 void GraphBuilder::monitorexit(Value x, int bci) {
2326 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2327 kill_all();
2328 }
2329
2330
2331 void GraphBuilder::new_multi_array(int dimensions) {
2332 ciKlass* klass = stream()->get_klass();
2333 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2334
2335 Values* dims = new Values(dimensions, dimensions, nullptr);
2336 // fill in all dimensions
2337 int i = dimensions;
2338 while (i-- > 0) dims->at_put(i, ipop());
2339 // create array
2340 NewArray* n = new NewMultiArray(klass, dims, state_before);
2448
2449 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2450 return append_with_bci(instr, bci());
2451 }
2452
2453
2454 void GraphBuilder::null_check(Value value) {
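// Elide the explicit check when the value is provably non-null: a freshly
// allocated object, or a loaded constant other than the null object.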
2455 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2456 return;
2457 } else {
2458 Constant* con = value->as_Constant();
2459 if (con) {
2460 ObjectType* c = con->type()->as_ObjectType();
2461 if (c && c->is_loaded()) {
2462 ObjectConstant* oc = c->as_ObjectConstant();
2463 if (!oc || !oc->value()->is_null_object()) {
2464 return;
2465 }
2466 }
2467 }
2468 }
2469 append(new NullCheck(value, copy_state_for_exception()));
2470 }
2471
2472
2473
2474 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2475 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2476 assert(instruction->exception_state() == nullptr
2477 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2478 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2479 "exception_state should be of exception kind");
2480 return new XHandlers();
2481 }
2482
2483 XHandlers* exception_handlers = new XHandlers();
2484 ScopeData* cur_scope_data = scope_data();
2485 ValueStack* cur_state = instruction->state_before();
2486 ValueStack* prev_state = nullptr;
2487 int scope_count = 0;
2488
2489 assert(cur_state != nullptr, "state_before must be set");
2490 do {
2491 int cur_bci = cur_state->bci();
2492 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2493 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2494
2495 // join with all potential exception handlers
2496 XHandlers* list = cur_scope_data->xhandlers();
2497 const int n = list->length();
2498 for (int i = 0; i < n; i++) {
2499 XHandler* h = list->handler_at(i);
2500 if (h->covers(cur_bci)) {
2501 // h is a potential exception handler => join it
2502 compilation()->set_has_exception_handlers(true);
2503
2504 BlockBegin* entry = h->entry_block();
2505 if (entry == block()) {
2506 // It's acceptable for an exception handler to cover itself,
2507 // but we don't handle that in the parser currently. It's
2508 // very rare, so we bail out instead of trying to handle it.
2509 BAILOUT_("exception handler covers itself", exception_handlers);
2510 }
2511 assert(entry->bci() == h->handler_bci(), "must match");
2512 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2513
3255 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3256
3257 assert(state->caller_state() == nullptr, "should be top scope");
3258 state->clear_locals();
3259 Goto* g = new Goto(target, false);
3260 append(g);
3261 _osr_entry->set_end(g);
3262 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3263
3264 scope_data()->set_stream(nullptr);
3265 }
3266
3267
3268 ValueStack* GraphBuilder::state_at_entry() {
3269 ValueStack* state = new ValueStack(scope(), nullptr);
3270
3271 // Set up locals for receiver
3272 int idx = 0;
3273 if (!method()->is_static()) {
3274 // we should always see the receiver
3275 state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
3276 idx = 1;
3277 }
3278
3279 // Set up locals for incoming arguments
3280 ciSignature* sig = method()->signature();
3281 for (int i = 0; i < sig->count(); i++) {
3282 ciType* type = sig->type_at(i);
3283 BasicType basic_type = type->basic_type();
3284 // don't allow T_ARRAY to propagate into local variable types
3285 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3286 ValueType* vt = as_ValueType(basic_type);
3287 state->store_local(idx, new Local(type, vt, idx, false));
3288 idx += type->size();
3289 }
3290
3291 // lock synchronized method
3292 if (method()->is_synchronized()) {
3293 state->lock(nullptr);
3294 }
3295
3296 return state;
3297 }
3298
3299
3300 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3301 : _scope_data(nullptr)
3302 , _compilation(compilation)
3303 , _memory(new MemoryBuffer())
3304 , _inline_bailout_msg(nullptr)
3305 , _instruction_count(0)
3306 , _osr_entry(nullptr)
3307 {
3308 int osr_bci = compilation->osr_bci();
3309
3310 // determine entry points and bci2block mapping
3311 BlockListBuilder blm(compilation, scope, osr_bci);
3312 CHECK_BAILOUT();
3313
3314 BlockList* bci2block = blm.bci2block();
3315 BlockBegin* start_block = bci2block->at(0);
3316
3317 push_root_scope(scope, bci2block, start_block);
3318
3319 // setup state for std entry
3320 _initial_state = state_at_entry();
3321 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3322
3323 // End nulls still exist here
3324
3325 // complete graph
3326 _vmap = new ValueMap();
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Canonicalizer.hpp"
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_GraphBuilder.hpp"
30 #include "c1/c1_InstructionPrinter.hpp"
31 #include "ci/ciCallSite.hpp"
32 #include "ci/ciField.hpp"
33 #include "ci/ciFlatArrayKlass.hpp"
34 #include "ci/ciInlineKlass.hpp"
35 #include "ci/ciKlass.hpp"
36 #include "ci/ciMemberName.hpp"
37 #include "ci/ciSymbols.hpp"
38 #include "ci/ciUtilities.inline.hpp"
39 #include "classfile/javaClasses.hpp"
40 #include "compiler/compilationPolicy.hpp"
41 #include "compiler/compileBroker.hpp"
42 #include "compiler/compilerEvent.hpp"
43 #include "interpreter/bytecode.hpp"
44 #include "jfr/jfrEvents.hpp"
45 #include "memory/resourceArea.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/vm_version.hpp"
49 #include "utilities/bitMap.inline.hpp"
50 #include "utilities/checkedCast.hpp"
51 #include "utilities/powerOfTwo.hpp"
52 #include "utilities/macros.hpp"
53 #if INCLUDE_JFR
54 #include "jfr/jfr.hpp"
1048 // they are using this local. We don't handle skipping over a
1049 // ret.
1050 for (ScopeData* cur_scope_data = scope_data()->parent();
1051 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1052 cur_scope_data = cur_scope_data->parent()) {
1053 if (cur_scope_data->jsr_return_address_local() == index) {
1054 BAILOUT("subroutine overwrites return address from previous subroutine");
1055 }
1056 }
1057 } else if (index == scope_data()->jsr_return_address_local()) {
1058 scope_data()->set_jsr_return_address_local(-1);
1059 }
1060 }
1061
1062 state->store_local(index, round_fp(x));
1063 }
1064
1065
1066 void GraphBuilder::load_indexed(BasicType type) {
1067 // In case of in-block code motion in range check elimination
1068 ValueStack* state_before = nullptr;
1069 int array_idx = state()->stack_size() - 2;
1070 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1071 // Save the entire state and re-execute on deopt when accessing flat arrays
1072 state_before = copy_state_before();
1073 state_before->set_should_reexecute(true);
1074 } else {
1075 state_before = copy_state_indexed_access();
1076 }
1077 compilation()->set_has_access_indexed(true);
1078 Value index = ipop();
1079 Value array = apop();
1080 Value length = nullptr;
1081 if (CSEArrayLength ||
1082 (array->as_Constant() != nullptr) ||
1083 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1084 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1085 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1086 length = append(new ArrayLength(array, state_before));
1087 }
1088
1089 bool need_membar = false;
1090 LoadIndexed* load_indexed = nullptr;
1091 Instruction* result = nullptr;
1092 if (array->is_loaded_flat_array()) {
1093 ciType* array_type = array->declared_type();
1094 ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1095
1096 bool can_delay_access = false;
1097 ciBytecodeStream s(method());
1098 s.force_bci(bci());
1099 s.next();
1100 if (s.cur_bc() == Bytecodes::_getfield) {
1101 bool will_link;
1102 ciField* next_field = s.get_field(will_link);
1103 bool next_needs_patching = !next_field->holder()->is_initialized() ||
1104 !next_field->will_link(method(), Bytecodes::_getfield) ||
1105 PatchALot;
1106 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1107 }
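// Peek at the next bytecode: a getfield on this flat element can be fused
// with the load, avoiding the allocation of an intermediate buffer instance.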
1108 if (can_delay_access) {
1109 // potentially optimizable array access, storing information for delayed decision
1110 LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1111 DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1112 li->set_delayed(dli);
1113 set_pending_load_indexed(dli);
1114 return; // Nothing else to do for now
1115 } else {
1116 if (elem_klass->is_empty()) {
1117 // No need to create a new instance, the default instance will be used instead
1118 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1119 apush(append(load_indexed));
1120 } else {
1121 NewInstance* new_instance = new NewInstance(elem_klass, state_before, false, true);
1122 _memory->new_instance(new_instance);
1123 apush(append_split(new_instance));
1124 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1125 load_indexed->set_vt(new_instance);
1126 // The LoadIndexed node will initialize this instance by copying from
1127 // the flat array element. Ensure these stores are visible before any
1128 // subsequent store that publishes this reference.
1129 need_membar = true;
1130 }
1131 }
1132 } else {
1133 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1134 if (profile_array_accesses() && is_reference_type(type)) {
1135 compilation()->set_would_profile(true);
1136 load_indexed->set_should_profile(true);
1137 load_indexed->set_profiled_method(method());
1138 load_indexed->set_profiled_bci(bci());
1139 }
1140 }
1141 result = append(load_indexed);
1142 if (need_membar) {
1143 append(new MemBar(lir_membar_storestore));
1144 }
1145 assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1146 if (!array->is_loaded_flat_array()) {
1147 push(as_ValueType(type), result);
1148 }
1149 }
1150
1151
1152 void GraphBuilder::store_indexed(BasicType type) {
1153 // In case of in-block code motion in range check elimination
1154 ValueStack* state_before = nullptr;
1155 int array_idx = state()->stack_size() - 3;
1156 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1157 // Save the entire state and re-execute on deopt when accessing flat arrays
1158 state_before = copy_state_before();
1159 state_before->set_should_reexecute(true);
1160 } else {
1161 state_before = copy_state_indexed_access();
1162 }
1163 compilation()->set_has_access_indexed(true);
1164 Value value = pop(as_ValueType(type));
1165 Value index = ipop();
1166 Value array = apop();
1167 Value length = nullptr;
1168 if (CSEArrayLength ||
1169 (array->as_Constant() != nullptr) ||
1170 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1171 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1172 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1173 length = append(new ArrayLength(array, state_before));
1174 }
1175 ciType* array_type = array->declared_type();
1176 bool check_boolean = false;
1177 if (array_type != nullptr) {
1178 if (array_type->is_loaded() &&
1179 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1180 assert(type == T_BYTE, "boolean store uses bastore");
1181 Value mask = append(new Constant(new IntConstant(1)));
1182 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1183 }
1184 } else if (type == T_BYTE) {
1185 check_boolean = true;
1186 }
1187
1188 StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1189 if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
1190 compilation()->set_would_profile(true);
1191 store_indexed->set_should_profile(true);
1192 store_indexed->set_profiled_method(method());
1193 store_indexed->set_profiled_bci(bci());
1194 }
1195 Instruction* result = append(store_indexed);
1196 assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1197 _memory->store_value(value);
1198 }
1199
1200 void GraphBuilder::stack_op(Bytecodes::Code code) {
1201 switch (code) {
1202 case Bytecodes::_pop:
1203 { state()->raw_pop();
1204 }
1205 break;
1206 case Bytecodes::_pop2:
1207 { state()->raw_pop();
1208 state()->raw_pop();
1209 }
1210 break;
1211 case Bytecodes::_dup:
1212 { Value w = state()->raw_pop();
1213 state()->raw_push(w);
1214 state()->raw_push(w);
1215 }
1216 break;
1217 case Bytecodes::_dup_x1:
1218 { Value w1 = state()->raw_pop();
1219 Value w2 = state()->raw_pop();
1220 state()->raw_push(w1);
1221 state()->raw_push(w2);
1222 state()->raw_push(w1);
1223 }
1224 break;
1225 case Bytecodes::_dup_x2:
1226 { Value w1 = state()->raw_pop();
1227 Value w2 = state()->raw_pop();
1228 Value w3 = state()->raw_pop();
1367
1368
1369 void GraphBuilder::_goto(int from_bci, int to_bci) {
1370 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1371 if (is_profiling()) {
1372 compilation()->set_would_profile(true);
1373 x->set_profiled_bci(bci());
1374 if (profile_branches()) {
1375 x->set_profiled_method(method());
1376 x->set_should_profile(true);
1377 }
1378 }
1379 append(x);
1380 }
1381
1382
1383 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1384 BlockBegin* tsux = block_at(stream()->get_dest());
1385 BlockBegin* fsux = block_at(stream()->next_bci());
1386 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1387
1388 bool subst_check = false;
1389 if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1390 ValueType* left_vt = x->type();
1391 ValueType* right_vt = y->type();
1392 if (left_vt->is_object()) {
1393 assert(right_vt->is_object(), "must be");
1394 ciKlass* left_klass = x->as_loaded_klass_or_null();
1395 ciKlass* right_klass = y->as_loaded_klass_or_null();
1396
1397 if (left_klass == nullptr || right_klass == nullptr) {
1398 // The klass is still unloaded, or came from a Phi node. Take the slow path.
1399 subst_check = true;
1400 } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1401 // Either operand may be a value object, but we're not sure. Take the slow path.
1402 subst_check = true;
1403 } else {
1404 // No need to do substitutability check
1405 }
1406 }
1407 }
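// With Valhalla, acmp on operands that may be value objects cannot be a plain
// pointer comparison; it may need the substitutability test instead.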
1408 if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1409 is_profiling() && profile_branches()) {
1410 compilation()->set_would_profile(true);
1411 append(new ProfileACmpTypes(method(), bci(), x, y));
1412 }
1413
1414 // In case of loop invariant code motion or predicate insertion
1415 // before the body of a loop, the state is needed
1416 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
1417
1418 assert(i->as_Goto() == nullptr ||
1419 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1420 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1421 "safepoint state of Goto returned by canonicalizer incorrect");
1422
1423 if (is_profiling()) {
1424 If* if_node = i->as_If();
1425 if (if_node != nullptr) {
1426 // Note that we'd collect profile data in this method if we wanted it.
1427 compilation()->set_would_profile(true);
1428 // At level 2 we need the proper bci to count backedges
1429 if_node->set_profiled_bci(bci());
1430 if (profile_branches()) {
1431 // Successors can be rotated by the canonicalizer, check for this case.
1432 if_node->set_profiled_method(method());
1433 if_node->set_should_profile(true);
1434 if (if_node->tsux() == fsux) {
1435 if_node->set_swapped(true);
1436 }
1647
1648 if (needs_check) {
1649 // Perform the registration of finalizable objects.
1650 ValueStack* state_before = copy_state_for_exception();
1651 load_local(objectType, 0);
1652 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1653 state()->pop_arguments(1),
1654 true, state_before, true));
1655 }
1656 }
1657
1658
1659 void GraphBuilder::method_return(Value x, bool ignore_return) {
1660 if (RegisterFinalizersAtInit &&
1661 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1662 call_register_finalizer();
1663 }
1664
1665 // The conditions for a memory barrier are described in Parse::do_exits().
1666 bool need_mem_bar = false;
1667 if (method()->is_object_constructor() &&
1668 (scope()->wrote_final() ||
1669 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1670 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1671 need_mem_bar = true;
1672 }
1673
1674 BasicType bt = method()->return_type()->basic_type();
1675 switch (bt) {
1676 case T_BYTE:
1677 {
1678 Value shift = append(new Constant(new IntConstant(24)));
1679 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1680 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1681 break;
1682 }
1683 case T_SHORT:
1684 {
1685 Value shift = append(new Constant(new IntConstant(16)));
1686 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1687 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1798 // Attach dimension info to stable arrays.
1799 if (FoldStableValues &&
1800 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1801 ciArray* array = field_value.as_object()->as_array();
1802 jint dimension = field->type()->as_array_klass()->dimension();
1803 value = new StableArrayConstant(array, dimension);
1804 }
1805
1806 switch (field_type) {
1807 case T_ARRAY:
1808 case T_OBJECT:
1809 if (field_value.as_object()->should_be_constant()) {
1810 return new Constant(value);
1811 }
1812 return nullptr; // Not a constant.
1813 default:
1814 return new Constant(value);
1815 }
1816 }
1817
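// Copies an inline-type payload field by field. Offsets are biased by
// first_field_offset() because the flat layout omits the object header.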
1818 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1819 for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
1820 ciField* inner_field = vk->nonstatic_field_at(i);
1821 assert(!inner_field->is_flat(), "the iteration over nested fields is handled by the loop itself");
1822 int off = inner_field->offset_in_bytes() - vk->first_field_offset();
1823 LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
1824 Value replacement = append(load);
1825 StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
1826 store->set_enclosing_field(enclosing_field);
1827 append(store);
1828 }
1829 }
1830
1831 void GraphBuilder::access_field(Bytecodes::Code code) {
1832 bool will_link;
1833 ciField* field = stream()->get_field(will_link);
1834 ciInstanceKlass* holder = field->holder();
1835 BasicType field_type = field->type()->basic_type();
1836 ValueType* type = as_ValueType(field_type);
1837
1838 // call will_link again to determine if the field is valid.
1839 const bool needs_patching = !holder->is_loaded() ||
1840 !field->will_link(method(), code) ||
1841 (!field->is_flat() && PatchALot);
1842
1843 ValueStack* state_before = nullptr;
1844 if (!holder->is_initialized() || needs_patching) {
1845 // save state before instruction for debug info when
1846 // deoptimization happens during patching
1847 state_before = copy_state_before();
1848 }
1849
1850 Value obj = nullptr;
1851 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1852 if (state_before != nullptr) {
1853 // build a patching constant
1854 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1855 } else {
1856 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1857 }
1858 }
1859
1860 if (field->is_final() && code == Bytecodes::_putfield) {
1861 scope()->set_wrote_final();
1862 }
1863
1864 if (code == Bytecodes::_putfield) {
1865 scope()->set_wrote_fields();
1866 if (field->is_volatile()) {
1867 scope()->set_wrote_volatile();
1868 }
1869 }
1870
1871 int offset = !needs_patching ? field->offset_in_bytes() : -1;
1872 switch (code) {
1873 case Bytecodes::_getstatic: {
1874 // check for compile-time constants, i.e., initialized static final fields
1875 Value constant = nullptr;
1876 if (field->is_static_constant() && !PatchALot) {
1877 ciConstant field_value = field->constant_value();
1878 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1879 "stable static w/ default value shouldn't be a constant");
1880 constant = make_constant(field_value, field);
1881 } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1882 field->type()->as_inline_klass()->is_empty()) {
1883 // Loading from a field of an empty inline type. Just return the default instance.
1884 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1885 }
1886 if (constant != nullptr) {
1887 push(type, append(constant));
1888 } else {
1889 if (state_before == nullptr) {
1890 state_before = copy_state_for_exception();
1891 }
1892 LoadField* load_field = new LoadField(append(obj), offset, field, true,
1893 state_before, needs_patching);
1894 push(type, append(load_field));
1895 }
1896 break;
1897 }
1898 case Bytecodes::_putstatic: {
1899 Value val = pop(type);
1900 if (state_before == nullptr) {
1901 state_before = copy_state_for_exception();
1902 }
1903 if (field_type == T_BOOLEAN) {
1904 Value mask = append(new Constant(new IntConstant(1)));
1905 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1906 }
1907 if (field->is_null_free()) {
1908 null_check(val);
1909 }
1910 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1911 // Storing to a field of an empty inline type. Ignore.
1912 break;
1913 }
1914 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1915 break;
1916 }
1917 case Bytecodes::_getfield: {
1918 // Check for compile-time constants, i.e., trusted final non-static fields.
1919 Value constant = nullptr;
1920 if (state_before == nullptr && field->is_flat()) {
1921 // Save the entire state and re-execute on deopt when accessing flat fields
1922 assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1923 state_before = copy_state_before();
1924 }
1925 if (!has_pending_field_access() && !has_pending_load_indexed()) {
1926 obj = apop();
1927 ObjectType* obj_type = obj->type()->as_ObjectType();
1928 if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
1929 && field->type()->as_inline_klass()->is_empty()) {
1930 // Loading from a field of an empty inline type. Just return the default instance.
1931 null_check(obj);
1932 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1933 } else if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
1934 ciObject* const_oop = obj_type->constant_value();
1935 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1936 ciConstant field_value = field->constant_value_of(const_oop);
1937 if (field_value.is_valid()) {
1938 if (field->is_null_free() && field_value.is_null_or_zero()) {
1939 // Non-flat inline type field. Replace null by the default value.
1940 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1941 } else {
1942 constant = make_constant(field_value, field);
1943 }
1944 // For CallSite objects add a dependency for invalidation of the optimization.
1945 if (field->is_call_site_target()) {
1946 ciCallSite* call_site = const_oop->as_call_site();
1947 if (!call_site->is_fully_initialized_constant_call_site()) {
1948 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1949 dependency_recorder()->assert_call_site_target_value(call_site, target);
1950 }
1951 }
1952 }
1953 }
1954 }
1955 }
1956 if (constant != nullptr) {
1957 push(type, append(constant));
1958 } else {
1959 if (state_before == nullptr) {
1960 state_before = copy_state_for_exception();
1961 }
1962 if (!field->is_flat()) {
1963 if (has_pending_field_access()) {
1964 assert(!needs_patching, "Can't patch delayed field access");
1965 obj = pending_field_access()->obj();
1966 offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
1967 field = pending_field_access()->holder()->get_field_by_offset(offset, false);
1968 assert(field != nullptr, "field not found");
1969 set_pending_field_access(nullptr);
1970 } else if (has_pending_load_indexed()) {
1971 assert(!needs_patching, "Can't patch delayed field access");
1972 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
1973 LoadIndexed* li = pending_load_indexed()->load_instr();
1974 li->set_type(type);
1975 push(type, append(li));
1976 set_pending_load_indexed(nullptr);
1977 break;
1978 }
1979 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1980 Value replacement = !needs_patching ? _memory->load(load) : load;
1981 if (replacement != load) {
1982 assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1983 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1984 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1985 switch (field_type) {
1986 case T_BOOLEAN:
1987 case T_BYTE:
1988 replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
1989 break;
1990 case T_CHAR:
1991 replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
1992 break;
1993 case T_SHORT:
1994 replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
1995 break;
1996 default:
1997 break;
1998 }
1999 push(type, replacement);
2000 } else {
2001 push(type, append(load));
2002 }
2003 } else { // field is flat
2004 // Look at the next bytecode to check if we can delay the field access
2005 bool can_delay_access = false;
2006 ciBytecodeStream s(method());
2007 s.force_bci(bci());
2008 s.next();
2009 if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
2010 ciField* next_field = s.get_field(will_link);
2011 bool next_needs_patching = !next_field->holder()->is_loaded() ||
2012 !next_field->will_link(method(), Bytecodes::_getfield) ||
2013 PatchALot;
2014 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
2015 }
2016 if (can_delay_access) {
2017 if (has_pending_load_indexed()) {
2018 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2019 } else if (has_pending_field_access()) {
2020 pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
2021 } else {
2022 null_check(obj);
2023 DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes());
2024 set_pending_field_access(dfa);
2025 }
2026 } else {
2027 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2028 scope()->set_wrote_final();
2029 scope()->set_wrote_fields();
2030 bool need_membar = false;
2031 if (inline_klass->is_initialized() && inline_klass->is_empty()) {
2032 apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
2033 if (has_pending_field_access()) {
2034 set_pending_field_access(nullptr);
2035 } else if (has_pending_load_indexed()) {
2036 set_pending_load_indexed(nullptr);
2037 }
2038 } else if (has_pending_load_indexed()) {
2039 assert(!needs_patching, "Can't patch delayed field access");
2040 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2041 NewInstance* vt = new NewInstance(inline_klass, pending_load_indexed()->state_before(), false, true);
2042 _memory->new_instance(vt);
2043 pending_load_indexed()->load_instr()->set_vt(vt);
2044 apush(append_split(vt));
2045 append(pending_load_indexed()->load_instr());
2046 set_pending_load_indexed(nullptr);
2047 need_membar = true;
2048 } else {
2049 NewInstance* new_instance = new NewInstance(inline_klass, state_before, false, true);
2050 _memory->new_instance(new_instance);
2051 apush(append_split(new_instance));
2052 assert(!needs_patching, "Can't patch flat inline type field access");
2053 if (has_pending_field_access()) {
2054 copy_inline_content(inline_klass, pending_field_access()->obj(),
2055 pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->first_field_offset(),
2056 new_instance, inline_klass->first_field_offset(), state_before);
2057 set_pending_field_access(nullptr);
2058 } else {
2059 copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->first_field_offset(), state_before);
2060 }
2061 need_membar = true;
2062 }
2063 if (need_membar) {
2064 // If we allocated a new instance ensure the stores to copy the
2065 // field contents are visible before any subsequent store that
2066 // publishes this reference.
2067 append(new MemBar(lir_membar_storestore));
2068 }
2069 }
2070 }
2071 }
2072 break;
2073 }
2074 case Bytecodes::_putfield: {
2075 Value val = pop(type);
2076 obj = apop();
2077 if (state_before == nullptr) {
2078 state_before = copy_state_for_exception();
2079 }
2080 if (field_type == T_BOOLEAN) {
2081 Value mask = append(new Constant(new IntConstant(1)));
2082 val = append(new LogicOp(Bytecodes::_iand, val, mask));
2083 }
2084 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2085 // Storing to a field of an empty inline type. Ignore.
2086 null_check(obj);
2087 null_check(val);
2088 } else if (!field->is_flat()) {
2089 if (field->is_null_free()) {
2090 null_check(val);
2091 }
2092 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2093 if (!needs_patching) store = _memory->store(store);
2094 if (store != nullptr) {
2095 append(store);
2096 }
2097 } else {
2098 assert(!needs_patching, "Can't patch flat inline type field access");
2099 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2100 copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
2101 }
2102 break;
2103 }
2104 default:
2105 ShouldNotReachHere();
2106 break;
2107 }
2108 }
2109
2110 Dependencies* GraphBuilder::dependency_recorder() const {
2111 assert(DeoptC1, "need debug information");
2112 return compilation()->dependency_recorder();
2113 }
2114
2115 // How many arguments do we want to profile?
2116 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2117 int n = 0;
2118 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2119 start = has_receiver ? 1 : 0;
2120 if (profile_arguments()) {
2121 ciProfileData* data = method()->method_data()->bci_to_data(bci());
2122 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2123 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2124 }
2125 }
2126 // If we are inlining, then we need to collect arguments to profile parameters for the target
2127 if (profile_parameters() && target != nullptr) {
2128 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
2129 // The receiver is profiled on method entry so it's included in
2206 break;
2207 case Bytecodes::_invokehandle:
2208 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2209 break;
2210 default:
2211 break;
2212 }
2213 } else {
2214 if (bc_raw == Bytecodes::_invokehandle) {
2215 assert(!will_link, "should come here only for unlinked call");
2216 code = Bytecodes::_invokespecial;
2217 }
2218 }
2219
2220 if (code == Bytecodes::_invokespecial) {
2221 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2222 ciKlass* receiver_constraint = nullptr;
2223
2224 if (bc_raw == Bytecodes::_invokeinterface) {
2225 receiver_constraint = holder;
2226 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2227 receiver_constraint = calling_klass;
2228 }
2229
2230 if (receiver_constraint != nullptr) {
2231 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2232 Value receiver = state()->stack_at(index);
2233 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2234 // go to uncommon_trap when checkcast fails
2235 c->set_invokespecial_receiver_check();
2236 state()->stack_at_put(index, append_split(c));
2237 }
2238 }
2239
2240 // Push the appendix argument (MethodType, CallSite, etc.), if there is one.
2241 bool patch_for_appendix = false;
2242 int patching_appendix_arg = 0;
2243 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2244 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2245 apush(arg);
2246 patch_for_appendix = true;
2459 }
2460 }
2461
2462 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2463 // push result
2464 append_split(result);
2465
2466 if (result_type != voidType) {
2467 push(result_type, round_fp(result));
2468 }
2469 if (profile_return() && result_type->is_object_kind()) {
2470 profile_return_type(result, target);
2471 }
2472 }
2473
2474
2475 void GraphBuilder::new_instance(int klass_index) {
2476 ValueStack* state_before = copy_state_exhandling();
2477 ciKlass* klass = stream()->get_klass();
2478 assert(klass->is_instance_klass(), "must be an instance klass");
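// An initialized, empty inline type has a single canonical default instance,
// so "new" folds to a constant instead of an allocation.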
2479 if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2480 klass->as_inline_klass()->is_initialized() && klass->as_inline_klass()->is_empty()) {
2481 ciInlineKlass* vk = klass->as_inline_klass();
2482 apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2483 } else {
2484 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass(), false);
2485 _memory->new_instance(new_instance);
2486 apush(append_split(new_instance));
2487 }
2488 }
2489
2490 void GraphBuilder::new_type_array() {
2491 ValueStack* state_before = copy_state_exhandling();
2492 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2493 }
2494
2495
2496 void GraphBuilder::new_object_array() {
2497 ciKlass* klass = stream()->get_klass();
2498 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2499 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2500 apush(append_split(n));
2501 }
2502
2503
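// A type check against a loaded final class never needs a subtype walk:
// a single klass pointer comparison suffices.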
2504 bool GraphBuilder::direct_compare(ciKlass* k) {
2505 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2506 ciInstanceKlass* ik = k->as_instance_klass();
2507 if (ik->is_final()) {
2508 return true;
2509 } else {
2542 ciKlass* klass = stream()->get_klass();
2543 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2544 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2545 ipush(append_split(i));
2546 i->set_direct_compare(direct_compare(klass));
2547
2548 if (is_profiling()) {
2549 // Note that we'd collect profile data in this method if we wanted it.
2550 compilation()->set_would_profile(true);
2551
2552 if (profile_checkcasts()) {
2553 i->set_profiled_method(method());
2554 i->set_profiled_bci(bci());
2555 i->set_should_profile(true);
2556 }
2557 }
2558 }
2559
2560
2561 void GraphBuilder::monitorenter(Value x, int bci) {
2562 bool maybe_inlinetype = false;
2563 if (bci == InvocationEntryBci) {
2564 // Called by GraphBuilder::inline_sync_entry.
2565 #ifdef ASSERT
2566 ciType* obj_type = x->declared_type();
2567 assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2568 #endif
2569 } else {
2570 // We are compiling a monitorenter bytecode
2571 if (EnableValhalla) {
2572 ciType* obj_type = x->declared_type();
2573 if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
2574 // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2575 // and throw IllegalMonitorStateException (obj_type is null for Phi nodes, so be conservative).
2576 maybe_inlinetype = true;
2577 }
2578 }
2579 }
2580
2581 // save state before locking in case of deoptimization after a NullPointerException
2582 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2583 compilation()->set_has_monitors(true);
2584 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
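// locking acts as a barrier to the optimizer: conservatively discard
// all memory values cached for local value numbering across the
// monitor operation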
2585 kill_all();
2586 }
2587
2588
2589 void GraphBuilder::monitorexit(Value x, int bci) {
2590 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2591 kill_all();
2592 }
2593
2594
2595 void GraphBuilder::new_multi_array(int dimensions) {
2596 ciKlass* klass = stream()->get_klass();
2597 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2598
2599 Values* dims = new Values(dimensions, dimensions, nullptr);
2600 // fill in all dimensions: the last dimension is on top of the stack, so pop in reverse order
2601 int i = dimensions;
2602 while (i-- > 0) dims->at_put(i, ipop());
2603 // create array
2604 NewArray* n = new NewMultiArray(klass, dims, state_before);
2712
2713 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2714 return append_with_bci(instr, bci());
2715 }
2716
2717
2718 void GraphBuilder::null_check(Value value) {
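// The explicit null check can be elided when the value is provably
// non-null: freshly allocated objects, loaded non-null constants, and
// values marked null-free never need one.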
2719 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2720 return;
2721 } else {
2722 Constant* con = value->as_Constant();
2723 if (con) {
2724 ObjectType* c = con->type()->as_ObjectType();
2725 if (c && c->is_loaded()) {
2726 ObjectConstant* oc = c->as_ObjectConstant();
2727 if (!oc || !oc->value()->is_null_object()) {
2728 return;
2729 }
2730 }
2731 }
2732 if (value->is_null_free()) return;
2733 }
2734 append(new NullCheck(value, copy_state_for_exception()));
2735 }
2736
2737
2738
2739 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2740 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2741 assert(instruction->exception_state() == nullptr
2742 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2743 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2744 "exception_state should be of exception kind");
2745 return new XHandlers();
2746 }
2747
2748 XHandlers* exception_handlers = new XHandlers();
2749 ScopeData* cur_scope_data = scope_data();
2750 ValueStack* cur_state = instruction->state_before();
2751 ValueStack* prev_state = nullptr;
2752 int scope_count = 0;
2753
2754 assert(cur_state != nullptr, "state_before must be set");
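// Walk the scope chain outwards from the instruction's scope, joining,
// scope by scope, every handler whose protected range covers the bci at
// which the instruction may throw.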
2755 do {
2756 int cur_bci = cur_state->bci();
2757 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2758 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2759 || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2760
2761
2762 // join with all potential exception handlers
2763 XHandlers* list = cur_scope_data->xhandlers();
2764 const int n = list->length();
2765 for (int i = 0; i < n; i++) {
2766 XHandler* h = list->handler_at(i);
2767 if (h->covers(cur_bci)) {
2768 // h is a potential exception handler => join it
2769 compilation()->set_has_exception_handlers(true);
2770
2771 BlockBegin* entry = h->entry_block();
2772 if (entry == block()) {
2773 // It's acceptable for an exception handler to cover itself,
2774 // but we don't handle that in the parser currently. It's
2775 // very rare, so we bail out instead of trying to handle it.
2776 BAILOUT_("exception handler covers itself", exception_handlers);
2777 }
2778 assert(entry->bci() == h->handler_bci(), "must match");
2779 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2780
3522 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3523
3524 assert(state->caller_state() == nullptr, "should be top scope");
3525 state->clear_locals();
3526 Goto* g = new Goto(target, false);
3527 append(g);
3528 _osr_entry->set_end(g);
3529 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3530
3531 scope_data()->set_stream(nullptr);
3532 }
3533
3534
3535 ValueStack* GraphBuilder::state_at_entry() {
3536 ValueStack* state = new ValueStack(scope(), nullptr);
3537
3538 // Set up locals for receiver
3539 int idx = 0;
3540 if (!method()->is_static()) {
3541 // we should always see the receiver
3542 state->store_local(idx, new Local(method()->holder(), objectType, idx,
3543 /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3544 idx = 1;
3545 }
3546
3547 // Set up locals for incoming arguments
3548 ciSignature* sig = method()->signature();
3549 for (int i = 0; i < sig->count(); i++) {
3550 ciType* type = sig->type_at(i);
3551 BasicType basic_type = type->basic_type();
3553 // don't allow T_ARRAY to propagate into the types of locals
3553 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3554 ValueType* vt = as_ValueType(basic_type);
3555 state->store_local(idx, new Local(type, vt, idx, false, false));
3556 idx += type->size(); // longs and doubles occupy two local slots
3557 }
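// For example, an instance method m(int, long, Object) lays out its
// entry locals as: slot 0 = receiver, slot 1 = the int, slots 2 and 3 =
// the long (two slots), slot 4 = the Object.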
3558
3559 // lock synchronized method
3560 if (method()->is_synchronized()) {
3561 state->lock(nullptr); // reserve the monitor slot; the actual lock object is set up at the synchronization entry
3562 }
3563
3564 return state;
3565 }
3566
3567
3568 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3569 : _scope_data(nullptr)
3570 , _compilation(compilation)
3571 , _memory(new MemoryBuffer())
3572 , _inline_bailout_msg(nullptr)
3573 , _instruction_count(0)
3574 , _osr_entry(nullptr)
3575 , _pending_field_access(nullptr)
3576 , _pending_load_indexed(nullptr)
3577 {
3578 int osr_bci = compilation->osr_bci(); // InvocationEntryBci (-1) unless this is an OSR compilation
3579
3580 // determine entry points and bci2block mapping
3581 BlockListBuilder blm(compilation, scope, osr_bci);
3582 CHECK_BAILOUT();
3583
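// bci2block maps each bytecode index that starts a basic block to its
// BlockBegin; the block at bci 0 is the standard entry.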
3584 BlockList* bci2block = blm.bci2block();
3585 BlockBegin* start_block = bci2block->at(0);
3586
3587 push_root_scope(scope, bci2block, start_block);
3588
3589 // setup state for std entry
3590 _initial_state = state_at_entry();
3591 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3592
3593 // Blocks whose end() is still null exist at this point
3594
3595 // complete graph
3596 _vmap = new ValueMap();