13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Canonicalizer.hpp"
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_GraphBuilder.hpp"
30 #include "c1/c1_InstructionPrinter.hpp"
31 #include "ci/ciCallSite.hpp"
32 #include "ci/ciField.hpp"
33 #include "ci/ciKlass.hpp"
34 #include "ci/ciMemberName.hpp"
35 #include "ci/ciSymbols.hpp"
36 #include "ci/ciUtilities.inline.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "compiler/compilationPolicy.hpp"
39 #include "compiler/compileBroker.hpp"
40 #include "compiler/compilerEvent.hpp"
41 #include "interpreter/bytecode.hpp"
42 #include "jfr/jfrEvents.hpp"
43 #include "memory/resourceArea.hpp"
44 #include "oops/oop.inline.hpp"
45 #include "runtime/sharedRuntime.hpp"
46 #include "runtime/vm_version.hpp"
47 #include "utilities/bitMap.inline.hpp"
48 #include "utilities/powerOfTwo.hpp"
49 #include "utilities/macros.hpp"
50 #if INCLUDE_JFR
51 #include "jfr/jfr.hpp"
52 #endif
707 }
708 #endif
709 assert(result->type()->tag() == load->type()->tag(), "wrong types");
710 return result;
711 }
712 }
713 return load;
714 }
715
716 // Record this newly allocated object
717 void new_instance(NewInstance* object) {
718 int index = _newobjects.length();
719 _newobjects.append(object);
720 if (_fields.at_grow(index, nullptr) == nullptr) {
721 _fields.at_put(index, new FieldBuffer());
722 } else {
723 _fields.at(index)->kill();
724 }
725 }
726
727 void store_value(Value value) {
728 int index = _newobjects.find(value);
729 if (index != -1) {
730 // stored a newly allocated object into another object.
731        // Assume we've lost track of it as a separate slice of memory.
732 // We could do better by keeping track of whether individual
733 // fields could alias each other.
734 _newobjects.remove_at(index);
735        // pull out the field info and append it at the end of the field info
736        // list so it can be reused later.
737 _fields.append(_fields.at(index));
738 _fields.remove_at(index);
739 }
740 }
741
742 void kill() {
743 _newobjects.trunc_to(0);
744 _objects.trunc_to(0);
745 _values.kill();
746 }
1006 int offset = java_lang_boxing_object::value_offset(type);
1007 ciField* value_field = box_klass->get_field_by_offset(offset, false /*is_static*/);
1008 x = new LoadField(append(x), offset, value_field, false /*is_static*/, patch_state, false /*needs_patching*/);
1009 t = as_ValueType(type);
1010 } else {
1011 assert(is_reference_type(type), "not a reference: %s", type2name(type));
1012 }
1013 }
1014
1015 push(t, append(x));
1016 } else {
1017 BAILOUT("could not resolve a constant");
1018 }
1019 }
1020
1021
1022 void GraphBuilder::load_local(ValueType* type, int index) {
1023 Value x = state()->local_at(index);
1024 assert(x != nullptr && !x->type()->is_illegal(), "access of illegal local variable");
1025 push(type, x);
1026 }
1027
1028
1029 void GraphBuilder::store_local(ValueType* type, int index) {
1030 Value x = pop(type);
1031 store_local(state(), x, index);
1032 }
1033
1034
1035 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
1036 if (parsing_jsr()) {
1037 // We need to do additional tracking of the location of the return
1038 // address for jsrs since we don't handle arbitrary jsr/ret
1039 // constructs. Here we are figuring out in which circumstances we
1040 // need to bail out.
1041 if (x->type()->is_address()) {
1042 scope_data()->set_jsr_return_address_local(index);
1043
1044 // Also check parent jsrs (if any) at this time to see whether
1045 // they are using this local. We don't handle skipping over a
1046 // ret.
1047 for (ScopeData* cur_scope_data = scope_data()->parent();
1048 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1049 cur_scope_data = cur_scope_data->parent()) {
1050 if (cur_scope_data->jsr_return_address_local() == index) {
1051 BAILOUT("subroutine overwrites return address from previous subroutine");
1052 }
1053 }
1054 } else if (index == scope_data()->jsr_return_address_local()) {
1055 scope_data()->set_jsr_return_address_local(-1);
1056 }
1057 }
1058
1059 state->store_local(index, round_fp(x));
1060 }
1061
1062
1063 void GraphBuilder::load_indexed(BasicType type) {
1064   // State is needed in case of in-block code motion during range check elimination
1065 ValueStack* state_before = copy_state_indexed_access();
1066 compilation()->set_has_access_indexed(true);
1067 Value index = ipop();
1068 Value array = apop();
1069 Value length = nullptr;
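       // Materialize an explicit ArrayLength when the length is a compile-time constant
       // (or CSEArrayLength is set) so the bounds check can fold or common the length.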
1070 if (CSEArrayLength ||
1071 (array->as_Constant() != nullptr) ||
1072 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1073 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1074 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1075 length = append(new ArrayLength(array, state_before));
1076 }
1077 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1078 }
1079
1080
1081 void GraphBuilder::store_indexed(BasicType type) {
1082   // State is needed in case of in-block code motion during range check elimination
1083 ValueStack* state_before = copy_state_indexed_access();
1084 compilation()->set_has_access_indexed(true);
1085 Value value = pop(as_ValueType(type));
1086 Value index = ipop();
1087 Value array = apop();
1088 Value length = nullptr;
1089 if (CSEArrayLength ||
1090 (array->as_Constant() != nullptr) ||
1091 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1092 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1093 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1094 length = append(new ArrayLength(array, state_before));
1095 }
1096 ciType* array_type = array->declared_type();
1097 bool check_boolean = false;
1098 if (array_type != nullptr) {
1099 if (array_type->is_loaded() &&
1100 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1101 assert(type == T_BYTE, "boolean store uses bastore");
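           // Mask the value to its lowest bit so a boolean element is always stored as 0 or 1.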
1102 Value mask = append(new Constant(new IntConstant(1)));
1103 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1104 }
1105 } else if (type == T_BYTE) {
1106 check_boolean = true;
1107 }
1108 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1109 append(result);
1110 _memory->store_value(value);
1111
1112 if (type == T_OBJECT && is_profiling()) {
1113 // Note that we'd collect profile data in this method if we wanted it.
1114 compilation()->set_would_profile(true);
1115
1116 if (profile_checkcasts()) {
1117 result->set_profiled_method(method());
1118 result->set_profiled_bci(bci());
1119 result->set_should_profile(true);
1120 }
1121 }
1122 }
1123
1124
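     // The stack manipulation bytecodes below operate on raw (untyped) stack slots;
     // two-slot values such as long and double occupy two raw slots, so the generic
     // pops and pushes handle both value categories uniformly.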
1125 void GraphBuilder::stack_op(Bytecodes::Code code) {
1126 switch (code) {
1127 case Bytecodes::_pop:
1128 { state()->raw_pop();
1129 }
1130 break;
1131 case Bytecodes::_pop2:
1132 { state()->raw_pop();
1133 state()->raw_pop();
1134 }
1135 break;
1136 case Bytecodes::_dup:
1137 { Value w = state()->raw_pop();
1138 state()->raw_push(w);
1139 state()->raw_push(w);
1140 }
1141 break;
1142 case Bytecodes::_dup_x1:
1143 { Value w1 = state()->raw_pop();
1144 Value w2 = state()->raw_pop();
1145 state()->raw_push(w1);
1146 state()->raw_push(w2);
1147 state()->raw_push(w1);
1148 }
1149 break;
1150 case Bytecodes::_dup_x2:
1151 { Value w1 = state()->raw_pop();
1152 Value w2 = state()->raw_pop();
1153 Value w3 = state()->raw_pop();
1154 state()->raw_push(w1);
1155 state()->raw_push(w3);
1156 state()->raw_push(w2);
1157 state()->raw_push(w1);
1158 }
1159 break;
1160 case Bytecodes::_dup2:
1161 { Value w1 = state()->raw_pop();
1162 Value w2 = state()->raw_pop();
1163 state()->raw_push(w2);
1164 state()->raw_push(w1);
1165 state()->raw_push(w2);
1166 state()->raw_push(w1);
1167 }
1168 break;
1169 case Bytecodes::_dup2_x1:
1170 { Value w1 = state()->raw_pop();
1171 Value w2 = state()->raw_pop();
1172 Value w3 = state()->raw_pop();
1173 state()->raw_push(w2);
1174 state()->raw_push(w1);
1175 state()->raw_push(w3);
1176 state()->raw_push(w2);
1177 state()->raw_push(w1);
1178 }
1179 break;
1180 case Bytecodes::_dup2_x2:
1181 { Value w1 = state()->raw_pop();
1182 Value w2 = state()->raw_pop();
1183 Value w3 = state()->raw_pop();
1184 Value w4 = state()->raw_pop();
1185 state()->raw_push(w2);
1186 state()->raw_push(w1);
1187 state()->raw_push(w4);
1188 state()->raw_push(w3);
1189 state()->raw_push(w2);
1190 state()->raw_push(w1);
1191 }
1192 break;
1193 case Bytecodes::_swap:
1194 { Value w1 = state()->raw_pop();
1195 Value w2 = state()->raw_pop();
1196 state()->raw_push(w1);
1197 state()->raw_push(w2);
1198 }
1199 break;
1200 default:
1201 ShouldNotReachHere();
1202 break;
1203 }
1204 }
1292
1293
1294 void GraphBuilder::_goto(int from_bci, int to_bci) {
1295 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1296 if (is_profiling()) {
1297 compilation()->set_would_profile(true);
1298 x->set_profiled_bci(bci());
1299 if (profile_branches()) {
1300 x->set_profiled_method(method());
1301 x->set_should_profile(true);
1302 }
1303 }
1304 append(x);
1305 }
1306
1307
1308 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1309 BlockBegin* tsux = block_at(stream()->get_dest());
1310 BlockBegin* fsux = block_at(stream()->next_bci());
1311 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1312 // In case of loop invariant code motion or predicate insertion
1313 // before the body of a loop the state is needed
1314 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
1315
1316 assert(i->as_Goto() == nullptr ||
1317 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1318 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1319 "safepoint state of Goto returned by canonicalizer incorrect");
1320
1321 if (is_profiling()) {
1322 If* if_node = i->as_If();
1323 if (if_node != nullptr) {
1324 // Note that we'd collect profile data in this method if we wanted it.
1325 compilation()->set_would_profile(true);
1326 // At level 2 we need the proper bci to count backedges
1327 if_node->set_profiled_bci(bci());
1328 if (profile_branches()) {
1329 // Successors can be rotated by the canonicalizer, check for this case.
1330 if_node->set_profiled_method(method());
1331 if_node->set_should_profile(true);
1332 if (if_node->tsux() == fsux) {
1333 if_node->set_swapped(true);
1334 }
1545
1546 if (needs_check) {
1547 // Perform the registration of finalizable objects.
1548 ValueStack* state_before = copy_state_for_exception();
1549 load_local(objectType, 0);
1550 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1551 state()->pop_arguments(1),
1552 true, state_before, true));
1553 }
1554 }
1555
1556
1557 void GraphBuilder::method_return(Value x, bool ignore_return) {
1558 if (RegisterFinalizersAtInit &&
1559 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1560 call_register_finalizer();
1561 }
1562
1563 // The conditions for a memory barrier are described in Parse::do_exits().
1564 bool need_mem_bar = false;
1565 if (method()->name() == ciSymbols::object_initializer_name() &&
1566 (scope()->wrote_final() ||
1567 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1568 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1569 need_mem_bar = true;
1570 }
1571
1572 BasicType bt = method()->return_type()->basic_type();
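       // Sub-int return values are narrowed to the declared return type;
       // byte and short are sign-extended with a shift-left/shift-right pair.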
1573 switch (bt) {
1574 case T_BYTE:
1575 {
1576 Value shift = append(new Constant(new IntConstant(24)));
1577 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1578 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1579 break;
1580 }
1581 case T_SHORT:
1582 {
1583 Value shift = append(new Constant(new IntConstant(16)));
1584 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1585 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1696 // Attach dimension info to stable arrays.
1697 if (FoldStableValues &&
1698 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1699 ciArray* array = field_value.as_object()->as_array();
1700 jint dimension = field->type()->as_array_klass()->dimension();
1701 value = new StableArrayConstant(array, dimension);
1702 }
1703
1704 switch (field_type) {
1705 case T_ARRAY:
1706 case T_OBJECT:
1707 if (field_value.as_object()->should_be_constant()) {
1708 return new Constant(value);
1709 }
1710 return nullptr; // Not a constant.
1711 default:
1712 return new Constant(value);
1713 }
1714 }
1715
1716 void GraphBuilder::access_field(Bytecodes::Code code) {
1717 bool will_link;
1718 ciField* field = stream()->get_field(will_link);
1719 ciInstanceKlass* holder = field->holder();
1720 BasicType field_type = field->type()->basic_type();
1721 ValueType* type = as_ValueType(field_type);
1722 // call will_link again to determine if the field is valid.
1723 const bool needs_patching = !holder->is_loaded() ||
1724 !field->will_link(method(), code) ||
1725 PatchALot;
1726
1727 ValueStack* state_before = nullptr;
1728 if (!holder->is_initialized() || needs_patching) {
1729 // save state before instruction for debug info when
1730 // deoptimization happens during patching
1731 state_before = copy_state_before();
1732 }
1733
1734 Value obj = nullptr;
1735 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1736 if (state_before != nullptr) {
1737 // build a patching constant
1738 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1739 } else {
1740 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1741 }
1742 }
1743
1744 if (field->is_final() && (code == Bytecodes::_putfield)) {
1745 scope()->set_wrote_final();
1746 }
1747
1748 if (code == Bytecodes::_putfield) {
1749 scope()->set_wrote_fields();
1750 if (field->is_volatile()) {
1751 scope()->set_wrote_volatile();
1752 }
1753 }
1754
1755 const int offset = !needs_patching ? field->offset_in_bytes() : -1;
1756 switch (code) {
1757 case Bytecodes::_getstatic: {
1758 // check for compile-time constants, i.e., initialized static final fields
1759 Value constant = nullptr;
1760 if (field->is_static_constant() && !PatchALot) {
1761 ciConstant field_value = field->constant_value();
1762 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1763 "stable static w/ default value shouldn't be a constant");
1764 constant = make_constant(field_value, field);
1765 }
1766 if (constant != nullptr) {
1767 push(type, append(constant));
1768 } else {
1769 if (state_before == nullptr) {
1770 state_before = copy_state_for_exception();
1771 }
1772 push(type, append(new LoadField(append(obj), offset, field, true,
1773 state_before, needs_patching)));
1774 }
1775 break;
1776 }
1777 case Bytecodes::_putstatic: {
1778 Value val = pop(type);
1779 if (state_before == nullptr) {
1780 state_before = copy_state_for_exception();
1781 }
1782 if (field->type()->basic_type() == T_BOOLEAN) {
1783 Value mask = append(new Constant(new IntConstant(1)));
1784 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1785 }
1786 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1787 break;
1788 }
1789 case Bytecodes::_getfield: {
1790 // Check for compile-time constants, i.e., trusted final non-static fields.
1791 Value constant = nullptr;
1792 obj = apop();
1793 ObjectType* obj_type = obj->type()->as_ObjectType();
1794 if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1795 ciObject* const_oop = obj_type->constant_value();
1796 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1797 ciConstant field_value = field->constant_value_of(const_oop);
1798 if (field_value.is_valid()) {
1799 constant = make_constant(field_value, field);
1800 // For CallSite objects add a dependency for invalidation of the optimization.
1801 if (field->is_call_site_target()) {
1802 ciCallSite* call_site = const_oop->as_call_site();
1803 if (!call_site->is_fully_initialized_constant_call_site()) {
1804 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1805 dependency_recorder()->assert_call_site_target_value(call_site, target);
1806 }
1807 }
1808 }
1809 }
1810 }
1811 if (constant != nullptr) {
1812 push(type, append(constant));
1813 } else {
1814 if (state_before == nullptr) {
1815 state_before = copy_state_for_exception();
1816 }
1817 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1818 Value replacement = !needs_patching ? _memory->load(load) : load;
1819 if (replacement != load) {
1820             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1821 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1822 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1823 BasicType bt = field->type()->basic_type();
1824 switch (bt) {
1825 case T_BOOLEAN:
1826 case T_BYTE:
1827 replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1828 break;
1829 case T_CHAR:
1830 replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1831 break;
1832 case T_SHORT:
1833 replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1834 break;
1835 default:
1836 break;
1837 }
1838 push(type, replacement);
1839 } else {
1840 push(type, append(load));
1841 }
1842 }
1843 break;
1844 }
1845 case Bytecodes::_putfield: {
1846 Value val = pop(type);
1847 obj = apop();
1848 if (state_before == nullptr) {
1849 state_before = copy_state_for_exception();
1850 }
1851 if (field->type()->basic_type() == T_BOOLEAN) {
1852 Value mask = append(new Constant(new IntConstant(1)));
1853 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1854 }
1855 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1856 if (!needs_patching) store = _memory->store(store);
1857 if (store != nullptr) {
1858 append(store);
1859 }
1860 break;
1861 }
1862 default:
1863 ShouldNotReachHere();
1864 break;
1865 }
1866 }
1867
1868
1869 Dependencies* GraphBuilder::dependency_recorder() const {
1870 assert(DeoptC1, "need debug information");
1871 return compilation()->dependency_recorder();
1872 }
1873
1874 // How many arguments do we want to profile?
1875 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1876 int n = 0;
1877 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1878 start = has_receiver ? 1 : 0;
1879 if (profile_arguments()) {
1880 ciProfileData* data = method()->method_data()->bci_to_data(bci());
1881 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1882 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1883 }
1884 }
1885 // If we are inlining then we need to collect arguments to profile parameters for the target
1886 if (profile_parameters() && target != nullptr) {
1887 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
1965 break;
1966 case Bytecodes::_invokehandle:
1967 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1968 break;
1969 default:
1970 break;
1971 }
1972 } else {
1973 if (bc_raw == Bytecodes::_invokehandle) {
1974 assert(!will_link, "should come here only for unlinked call");
1975 code = Bytecodes::_invokespecial;
1976 }
1977 }
1978
1979 if (code == Bytecodes::_invokespecial) {
1980 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1981 ciKlass* receiver_constraint = nullptr;
1982
1983 if (bc_raw == Bytecodes::_invokeinterface) {
1984 receiver_constraint = holder;
1985 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1986 receiver_constraint = calling_klass;
1987 }
1988
1989 if (receiver_constraint != nullptr) {
1990 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1991 Value receiver = state()->stack_at(index);
1992 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
1993 // go to uncommon_trap when checkcast fails
1994 c->set_invokespecial_receiver_check();
1995 state()->stack_at_put(index, append_split(c));
1996 }
1997 }
1998
1999 // Push appendix argument (MethodType, CallSite, etc.), if one.
2000 bool patch_for_appendix = false;
2001 int patching_appendix_arg = 0;
2002 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2003 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2004 apush(arg);
2005 patch_for_appendix = true;
2200 null_check(recv);
2201 }
2202
2203 if (is_profiling()) {
2204 // Note that we'd collect profile data in this method if we wanted it.
2205 compilation()->set_would_profile(true);
2206
2207 if (profile_calls()) {
2208       assert(cha_monomorphic_target == nullptr || exact_target == nullptr, "both cannot be set");
2209 ciKlass* target_klass = nullptr;
2210 if (cha_monomorphic_target != nullptr) {
2211 target_klass = cha_monomorphic_target->holder();
2212 } else if (exact_target != nullptr) {
2213 target_klass = exact_target->holder();
2214 }
2215 profile_call(target, recv, target_klass, collect_args_for_profiling(args, nullptr, false), false);
2216 }
2217 }
2218 }
2219
2220 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2221 // push result
2222 append_split(result);
2223
2224 if (result_type != voidType) {
2225 push(result_type, round_fp(result));
2226 }
2227 if (profile_return() && result_type->is_object_kind()) {
2228 profile_return_type(result, target);
2229 }
2230 }
2231
2232
2233 void GraphBuilder::new_instance(int klass_index) {
2234 ValueStack* state_before = copy_state_exhandling();
2235 ciKlass* klass = stream()->get_klass();
2236 assert(klass->is_instance_klass(), "must be an instance klass");
2237 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2238 _memory->new_instance(new_instance);
2239 apush(append_split(new_instance));
2240 }
2241
2242
2243 void GraphBuilder::new_type_array() {
2244 ValueStack* state_before = copy_state_exhandling();
2245 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2246 }
2247
2248
2249 void GraphBuilder::new_object_array() {
2250 ciKlass* klass = stream()->get_klass();
2251 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2252 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2253 apush(append_split(n));
2254 }
2255
2256
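    // A type check can use a direct klass pointer compare when the klass is a loaded,
    // final instance klass, or (with DeoptC1 and CHA) a non-interface leaf class, in
    // which case a leaf-type dependency is recorded so the code is invalidated if a
    // subclass is loaded later.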
2257 bool GraphBuilder::direct_compare(ciKlass* k) {
2258 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2259 ciInstanceKlass* ik = k->as_instance_klass();
2260 if (ik->is_final()) {
2261 return true;
2262 } else {
2263 if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2264 // test class is leaf class
2265 dependency_recorder()->assert_leaf_type(ik);
2266 return true;
2267 }
2268 }
2269 }
2270 return false;
2271 }
2272
2273
2274 void GraphBuilder::check_cast(int klass_index) {
2275 ciKlass* klass = stream()->get_klass();
2276 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2277 CheckCast* c = new CheckCast(klass, apop(), state_before);
2278 apush(append_split(c));
2279 c->set_direct_compare(direct_compare(klass));
2280
2281 if (is_profiling()) {
2282 // Note that we'd collect profile data in this method if we wanted it.
2283 compilation()->set_would_profile(true);
2284
2285 if (profile_checkcasts()) {
2286 c->set_profiled_method(method());
2287 c->set_profiled_bci(bci());
2288 c->set_should_profile(true);
2289 }
2290 }
2291 }
2292
2293
2294 void GraphBuilder::instance_of(int klass_index) {
2295 ciKlass* klass = stream()->get_klass();
2296 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2297 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2298 ipush(append_split(i));
2299 i->set_direct_compare(direct_compare(klass));
2300
2301 if (is_profiling()) {
2302 // Note that we'd collect profile data in this method if we wanted it.
2303 compilation()->set_would_profile(true);
2304
2305 if (profile_checkcasts()) {
2306 i->set_profiled_method(method());
2307 i->set_profiled_bci(bci());
2308 i->set_should_profile(true);
2309 }
2310 }
2311 }
2312
2313
2314 void GraphBuilder::monitorenter(Value x, int bci) {
2315 // save state before locking in case of deoptimization after a NullPointerException
2316 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2317 compilation()->set_has_monitors(true);
2318 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2319 kill_all();
2320 }
2321
2322
2323 void GraphBuilder::monitorexit(Value x, int bci) {
2324 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2325 kill_all();
2326 }
2327
2328
2329 void GraphBuilder::new_multi_array(int dimensions) {
2330 ciKlass* klass = stream()->get_klass();
2331 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2332
2333 Values* dims = new Values(dimensions, dimensions, nullptr);
2334 // fill in all dimensions
2335 int i = dimensions;
2336 while (i-- > 0) dims->at_put(i, ipop());
2337 // create array
2338 NewArray* n = new NewMultiArray(klass, dims, state_before);
2433 if (i1->can_trap()) {
2434 i1->set_exception_handlers(handle_exception(i1));
2435 assert(i1->exception_state() != nullptr || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2436 }
2437 return i1;
2438 }
2439
2440
2441 Instruction* GraphBuilder::append(Instruction* instr) {
2442 assert(instr->as_StateSplit() == nullptr || instr->as_BlockEnd() != nullptr, "wrong append used");
2443 return append_with_bci(instr, bci());
2444 }
2445
2446
2447 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2448 return append_with_bci(instr, bci());
2449 }
2450
2451
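    // The explicit null check is omitted when the value is known to be non-null:
    // a freshly allocated object or array, or a loaded non-null object constant.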
2452 void GraphBuilder::null_check(Value value) {
2453 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2454 return;
2455 } else {
2456 Constant* con = value->as_Constant();
2457 if (con) {
2458 ObjectType* c = con->type()->as_ObjectType();
2459 if (c && c->is_loaded()) {
2460 ObjectConstant* oc = c->as_ObjectConstant();
2461 if (!oc || !oc->value()->is_null_object()) {
2462 return;
2463 }
2464 }
2465 }
2466 }
2467 append(new NullCheck(value, copy_state_for_exception()));
2468 }
2469
2470
2471
2472 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2473 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2474 assert(instruction->exception_state() == nullptr
2475 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2476 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2477 "exception_state should be of exception kind");
2478 return new XHandlers();
2479 }
2480
2481 XHandlers* exception_handlers = new XHandlers();
2482 ScopeData* cur_scope_data = scope_data();
2483 ValueStack* cur_state = instruction->state_before();
2484 ValueStack* prev_state = nullptr;
2485 int scope_count = 0;
2486
2487 assert(cur_state != nullptr, "state_before must be set");
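     // Walk the handlers of the current scope (and of enclosing scopes, for inlined
     // methods) and join every handler whose range covers the current bci.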
2488 do {
2489 int cur_bci = cur_state->bci();
2490 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2491 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2492
2493 // join with all potential exception handlers
2494 XHandlers* list = cur_scope_data->xhandlers();
2495 const int n = list->length();
2496 for (int i = 0; i < n; i++) {
2497 XHandler* h = list->handler_at(i);
2498 if (h->covers(cur_bci)) {
2499 // h is a potential exception handler => join it
2500 compilation()->set_has_exception_handlers(true);
2501
2502 BlockBegin* entry = h->entry_block();
2503 if (entry == block()) {
2504 // It's acceptable for an exception handler to cover itself
2505 // but we don't handle that in the parser currently. It's
2506           // very rare, so we bail out instead of trying to handle it.
2507 BAILOUT_("exception handler covers itself", exception_handlers);
2508 }
2509 assert(entry->bci() == h->handler_bci(), "must match");
2510 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2511
2959 case Bytecodes::_invokevirtual : // fall through
2960 case Bytecodes::_invokespecial : // fall through
2961 case Bytecodes::_invokestatic : // fall through
2962 case Bytecodes::_invokedynamic : // fall through
2963 case Bytecodes::_invokeinterface: invoke(code); break;
2964 case Bytecodes::_new : new_instance(s.get_index_u2()); break;
2965 case Bytecodes::_newarray : new_type_array(); break;
2966 case Bytecodes::_anewarray : new_object_array(); break;
2967 case Bytecodes::_arraylength : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
2968 case Bytecodes::_athrow : throw_op(s.cur_bci()); break;
2969 case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break;
2970 case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break;
2971 case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break;
2972 case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break;
2973 case Bytecodes::_wide : ShouldNotReachHere(); break;
2974 case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
2975 case Bytecodes::_ifnull : if_null(objectType, If::eql); break;
2976 case Bytecodes::_ifnonnull : if_null(objectType, If::neq); break;
2977 case Bytecodes::_goto_w : _goto(s.cur_bci(), s.get_far_dest()); break;
2978 case Bytecodes::_jsr_w : jsr(s.get_far_dest()); break;
2979 case Bytecodes::_breakpoint : BAILOUT_("concurrent setting of breakpoint", nullptr);
2980 default : ShouldNotReachHere(); break;
2981 }
2982
2983 if (log != nullptr)
2984 log->clear_context(); // skip marker if nothing was printed
2985
2986       // save current bci to set up the Goto at the end
2987 prev_bci = s.cur_bci();
2988
2989 }
2990 CHECK_BAILOUT_(nullptr);
2991 // stop processing of this block (see try_inline_full)
2992 if (_skip_block) {
2993 _skip_block = false;
2994 assert(_last && _last->as_BlockEnd(), "");
2995 return _last->as_BlockEnd();
2996 }
2997   // check whether the last instruction, if there is one, is a BlockEnd instruction
2998 BlockEnd* end = last()->as_BlockEnd();
3247 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3248
3249 assert(state->caller_state() == nullptr, "should be top scope");
3250 state->clear_locals();
3251 Goto* g = new Goto(target, false);
3252 append(g);
3253 _osr_entry->set_end(g);
3254 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3255
3256 scope_data()->set_stream(nullptr);
3257 }
3258
3259
3260 ValueStack* GraphBuilder::state_at_entry() {
3261 ValueStack* state = new ValueStack(scope(), nullptr);
3262
3263 // Set up locals for receiver
3264 int idx = 0;
3265 if (!method()->is_static()) {
3266 // we should always see the receiver
3267 state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
3268 idx = 1;
3269 }
3270
3271 // Set up locals for incoming arguments
3272 ciSignature* sig = method()->signature();
3273 for (int i = 0; i < sig->count(); i++) {
3274 ciType* type = sig->type_at(i);
3275 BasicType basic_type = type->basic_type();
3276     // don't allow T_ARRAY to propagate into the types of locals
3277 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3278 ValueType* vt = as_ValueType(basic_type);
3279 state->store_local(idx, new Local(type, vt, idx, false));
3280 idx += type->size();
3281 }
3282
3283 // lock synchronized method
3284 if (method()->is_synchronized()) {
3285 state->lock(nullptr);
3286 }
3287
3288 return state;
3289 }
3290
3291
3292 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3293 : _scope_data(nullptr)
3294 , _compilation(compilation)
3295 , _memory(new MemoryBuffer())
3296 , _inline_bailout_msg(nullptr)
3297 , _instruction_count(0)
3298 , _osr_entry(nullptr)
3299 {
3300 int osr_bci = compilation->osr_bci();
3301
3302 // determine entry points and bci2block mapping
3303 BlockListBuilder blm(compilation, scope, osr_bci);
3304 CHECK_BAILOUT();
3305
3306 BlockList* bci2block = blm.bci2block();
3307 BlockBegin* start_block = bci2block->at(0);
3308
3309 push_root_scope(scope, bci2block, start_block);
3310
3311 // setup state for std entry
3312 _initial_state = state_at_entry();
3313 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3314
3315 // End nulls still exist here
3316
3317 // complete graph
3318 _vmap = new ValueMap();
|
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Canonicalizer.hpp"
28 #include "c1/c1_Compilation.hpp"
29 #include "c1/c1_GraphBuilder.hpp"
30 #include "c1/c1_InstructionPrinter.hpp"
31 #include "ci/ciCallSite.hpp"
32 #include "ci/ciField.hpp"
33 #include "ci/ciFlatArrayKlass.hpp"
34 #include "ci/ciInlineKlass.hpp"
35 #include "ci/ciKlass.hpp"
36 #include "ci/ciMemberName.hpp"
37 #include "ci/ciSymbols.hpp"
38 #include "ci/ciUtilities.inline.hpp"
39 #include "classfile/javaClasses.hpp"
40 #include "compiler/compilationPolicy.hpp"
41 #include "compiler/compileBroker.hpp"
42 #include "compiler/compilerEvent.hpp"
43 #include "interpreter/bytecode.hpp"
44 #include "jfr/jfrEvents.hpp"
45 #include "memory/resourceArea.hpp"
46 #include "oops/oop.inline.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/vm_version.hpp"
49 #include "utilities/bitMap.inline.hpp"
50 #include "utilities/powerOfTwo.hpp"
51 #include "utilities/macros.hpp"
52 #if INCLUDE_JFR
53 #include "jfr/jfr.hpp"
54 #endif
709 }
710 #endif
711 assert(result->type()->tag() == load->type()->tag(), "wrong types");
712 return result;
713 }
714 }
715 return load;
716 }
717
718 // Record this newly allocated object
719 void new_instance(NewInstance* object) {
720 int index = _newobjects.length();
721 _newobjects.append(object);
722 if (_fields.at_grow(index, nullptr) == nullptr) {
723 _fields.at_put(index, new FieldBuffer());
724 } else {
725 _fields.at(index)->kill();
726 }
727 }
728
729 // Record this newly allocated object
730 void new_instance(NewInlineTypeInstance* object) {
731 int index = _newobjects.length();
732 _newobjects.append(object);
733 if (_fields.at_grow(index, nullptr) == nullptr) {
734 _fields.at_put(index, new FieldBuffer());
735 } else {
736 _fields.at(index)->kill();
737 }
738 }
739
740 void store_value(Value value) {
741 int index = _newobjects.find(value);
742 if (index != -1) {
743 // stored a newly allocated object into another object.
744        // Assume we've lost track of it as a separate slice of memory.
745 // We could do better by keeping track of whether individual
746 // fields could alias each other.
747 _newobjects.remove_at(index);
748        // pull out the field info and append it at the end of the field info
749        // list so it can be reused later.
750 _fields.append(_fields.at(index));
751 _fields.remove_at(index);
752 }
753 }
754
755 void kill() {
756 _newobjects.trunc_to(0);
757 _objects.trunc_to(0);
758 _values.kill();
759 }
1019 int offset = java_lang_boxing_object::value_offset(type);
1020 ciField* value_field = box_klass->get_field_by_offset(offset, false /*is_static*/);
1021 x = new LoadField(append(x), offset, value_field, false /*is_static*/, patch_state, false /*needs_patching*/);
1022 t = as_ValueType(type);
1023 } else {
1024 assert(is_reference_type(type), "not a reference: %s", type2name(type));
1025 }
1026 }
1027
1028 push(t, append(x));
1029 } else {
1030 BAILOUT("could not resolve a constant");
1031 }
1032 }
1033
1034
1035 void GraphBuilder::load_local(ValueType* type, int index) {
1036 Value x = state()->local_at(index);
1037 assert(x != nullptr && !x->type()->is_illegal(), "access of illegal local variable");
1038 push(type, x);
1039 if (x->as_NewInlineTypeInstance() != nullptr && x->as_NewInlineTypeInstance()->in_larval_state()) {
1040 if (x->as_NewInlineTypeInstance()->on_stack_count() == 1) {
1041 x->as_NewInlineTypeInstance()->set_not_larva_anymore();
1042 } else {
1043 x->as_NewInlineTypeInstance()->increment_on_stack_count();
1044 }
1045 }
1046 }
1047
1048
1049 void GraphBuilder::store_local(ValueType* type, int index) {
1050 Value x = pop(type);
1051 store_local(state(), x, index);
1052 if (x->as_NewInlineTypeInstance() != nullptr) {
1053 x->as_NewInlineTypeInstance()->set_local_index(index);
1054 }
1055 }
1056
1057
1058 void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
1059 if (parsing_jsr()) {
1060 // We need to do additional tracking of the location of the return
1061 // address for jsrs since we don't handle arbitrary jsr/ret
1062 // constructs. Here we are figuring out in which circumstances we
1063 // need to bail out.
1064 if (x->type()->is_address()) {
1065 scope_data()->set_jsr_return_address_local(index);
1066
1067 // Also check parent jsrs (if any) at this time to see whether
1068 // they are using this local. We don't handle skipping over a
1069 // ret.
1070 for (ScopeData* cur_scope_data = scope_data()->parent();
1071 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1072 cur_scope_data = cur_scope_data->parent()) {
1073 if (cur_scope_data->jsr_return_address_local() == index) {
1074 BAILOUT("subroutine overwrites return address from previous subroutine");
1075 }
1076 }
1077 } else if (index == scope_data()->jsr_return_address_local()) {
1078 scope_data()->set_jsr_return_address_local(-1);
1079 }
1080 }
1081
1082 state->store_local(index, round_fp(x));
1083 if (x->as_NewInlineTypeInstance() != nullptr) {
1084 x->as_NewInlineTypeInstance()->set_local_index(index);
1085 }
1086 }
1087
1088
1089 void GraphBuilder::load_indexed(BasicType type) {
1090   // State is needed in case of in-block code motion during range check elimination
1091 ValueStack* state_before = nullptr;
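       // The array reference sits one slot below the index on the expression stack.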
1092 int array_idx = state()->stack_size() - 2;
1093 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1094 // Save the entire state and re-execute on deopt when accessing flat arrays
1095 state_before = copy_state_before();
1096 state_before->set_should_reexecute(true);
1097 } else {
1098 state_before = copy_state_indexed_access();
1099 }
1100 compilation()->set_has_access_indexed(true);
1101 Value index = ipop();
1102 Value array = apop();
1103 Value length = nullptr;
1104 if (CSEArrayLength ||
1105 (array->as_Constant() != nullptr) ||
1106 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1107 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1108 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1109 length = append(new ArrayLength(array, state_before));
1110 }
1111
1112 bool need_membar = false;
1113 LoadIndexed* load_indexed = nullptr;
1114 Instruction* result = nullptr;
1115 if (array->is_loaded_flat_array()) {
1116 ciType* array_type = array->declared_type();
1117 ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1118
1119 bool can_delay_access = false;
1120 ciBytecodeStream s(method());
1121 s.force_bci(bci());
1122 s.next();
1123 if (s.cur_bc() == Bytecodes::_getfield) {
1124 bool will_link;
1125 ciField* next_field = s.get_field(will_link);
1126 bool next_needs_patching = !next_field->holder()->is_initialized() ||
1127 !next_field->will_link(method(), Bytecodes::_getfield) ||
1128 PatchALot;
1129 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1130 }
1131 if (can_delay_access) {
1132 // potentially optimizable array access, storing information for delayed decision
1133 LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1134 DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1135 li->set_delayed(dli);
1136 set_pending_load_indexed(dli);
1137 return; // Nothing else to do for now
1138 } else {
1139 if (elem_klass->is_empty()) {
1140 // No need to create a new instance, the default instance will be used instead
1141 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1142 apush(append(load_indexed));
1143 } else {
1144 NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(elem_klass, state_before);
1145 _memory->new_instance(new_instance);
1146 apush(append_split(new_instance));
1147 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1148 load_indexed->set_vt(new_instance);
1149                 // The LoadIndexed node will initialize this instance by copying from
1150                 // the flat array element. Ensure these stores are visible before any
1151 // subsequent store that publishes this reference.
1152 need_membar = true;
1153 }
1154 }
1155 } else {
1156 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1157 if (profile_array_accesses() && is_reference_type(type)) {
1158 compilation()->set_would_profile(true);
1159 load_indexed->set_should_profile(true);
1160 load_indexed->set_profiled_method(method());
1161 load_indexed->set_profiled_bci(bci());
1162 }
1163 }
1164 result = append(load_indexed);
1165 if (need_membar) {
1166 append(new MemBar(lir_membar_storestore));
1167 }
1168 assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1169 if (!array->is_loaded_flat_array()) {
1170 push(as_ValueType(type), result);
1171 }
1172 }
1173
1174
1175 void GraphBuilder::store_indexed(BasicType type) {
1176   // State is needed in case of in-block code motion during range check elimination
1177 ValueStack* state_before = nullptr;
1178 int array_idx = state()->stack_size() - 3;
1179 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1180 // Save the entire state and re-execute on deopt when accessing flat arrays
1181 state_before = copy_state_before();
1182 state_before->set_should_reexecute(true);
1183 } else {
1184 state_before = copy_state_indexed_access();
1185 }
1186 compilation()->set_has_access_indexed(true);
1187 Value value = pop(as_ValueType(type));
1188 Value index = ipop();
1189 Value array = apop();
1190 Value length = nullptr;
1191 if (CSEArrayLength ||
1192 (array->as_Constant() != nullptr) ||
1193 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1194 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1195 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1196 length = append(new ArrayLength(array, state_before));
1197 }
1198 ciType* array_type = array->declared_type();
1199 bool check_boolean = false;
1200 if (array_type != nullptr) {
1201 if (array_type->is_loaded() &&
1202 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1203 assert(type == T_BYTE, "boolean store uses bastore");
1204 Value mask = append(new Constant(new IntConstant(1)));
1205 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1206 }
1207 } else if (type == T_BYTE) {
1208 check_boolean = true;
1209 }
1210
1211 StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1212 if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
1213 compilation()->set_would_profile(true);
1214 store_indexed->set_should_profile(true);
1215 store_indexed->set_profiled_method(method());
1216 store_indexed->set_profiled_bci(bci());
1217 }
1218 Instruction* result = append(store_indexed);
1219 assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1220 _memory->store_value(value);
1221 }
1222
1223 void GraphBuilder::stack_op(Bytecodes::Code code) {
1224 switch (code) {
1225 case Bytecodes::_pop:
1226 { Value w = state()->raw_pop();
1227 update_larva_stack_count(w);
1228 }
1229 break;
1230 case Bytecodes::_pop2:
1231 { Value w1 = state()->raw_pop();
1232 Value w2 = state()->raw_pop();
1233 update_larva_stack_count(w1);
1234 update_larva_stack_count(w2);
1235 }
1236 break;
1237 case Bytecodes::_dup:
1238 { Value w = state()->raw_pop();
1239 update_larval_state(w);
1240 state()->raw_push(w);
1241 state()->raw_push(w);
1242 }
1243 break;
1244 case Bytecodes::_dup_x1:
1245 { Value w1 = state()->raw_pop();
1246 Value w2 = state()->raw_pop();
1247 update_larval_state(w1);
1248 state()->raw_push(w1);
1249 state()->raw_push(w2);
1250 state()->raw_push(w1);
1251 }
1252 break;
1253 case Bytecodes::_dup_x2:
1254 { Value w1 = state()->raw_pop();
1255 Value w2 = state()->raw_pop();
1256 Value w3 = state()->raw_pop();
1257 // special handling for the dup_x2/pop sequence (see JDK-8251046)
1258 if (w1 != nullptr && w1->as_NewInlineTypeInstance() != nullptr) {
1259 ciBytecodeStream s(method());
1260 s.force_bci(bci());
1261 s.next();
1262 if (s.cur_bc() != Bytecodes::_pop) {
1263 w1->as_NewInlineTypeInstance()->set_not_larva_anymore();
1264 } else {
1265 w1->as_NewInlineTypeInstance()->increment_on_stack_count();
1266 }
1267 }
1268 state()->raw_push(w1);
1269 state()->raw_push(w3);
1270 state()->raw_push(w2);
1271 state()->raw_push(w1);
1272 }
1273 break;
1274 case Bytecodes::_dup2:
1275 { Value w1 = state()->raw_pop();
1276 Value w2 = state()->raw_pop();
1277 update_larval_state(w1);
1278 update_larval_state(w2);
1279 state()->raw_push(w2);
1280 state()->raw_push(w1);
1281 state()->raw_push(w2);
1282 state()->raw_push(w1);
1283 }
1284 break;
1285 case Bytecodes::_dup2_x1:
1286 { Value w1 = state()->raw_pop();
1287 Value w2 = state()->raw_pop();
1288 Value w3 = state()->raw_pop();
1289 update_larval_state(w1);
1290 update_larval_state(w2);
1291 state()->raw_push(w2);
1292 state()->raw_push(w1);
1293 state()->raw_push(w3);
1294 state()->raw_push(w2);
1295 state()->raw_push(w1);
1296 }
1297 break;
1298 case Bytecodes::_dup2_x2:
1299 { Value w1 = state()->raw_pop();
1300 Value w2 = state()->raw_pop();
1301 Value w3 = state()->raw_pop();
1302 Value w4 = state()->raw_pop();
1303 update_larval_state(w1);
1304 update_larval_state(w2);
1305 state()->raw_push(w2);
1306 state()->raw_push(w1);
1307 state()->raw_push(w4);
1308 state()->raw_push(w3);
1309 state()->raw_push(w2);
1310 state()->raw_push(w1);
1311 }
1312 break;
1313 case Bytecodes::_swap:
1314 { Value w1 = state()->raw_pop();
1315 Value w2 = state()->raw_pop();
1316 state()->raw_push(w1);
1317 state()->raw_push(w2);
1318 }
1319 break;
1320 default:
1321 ShouldNotReachHere();
1322 break;
1323 }
1324 }
1412
1413
1414 void GraphBuilder::_goto(int from_bci, int to_bci) {
1415 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1416 if (is_profiling()) {
1417 compilation()->set_would_profile(true);
1418 x->set_profiled_bci(bci());
1419 if (profile_branches()) {
1420 x->set_profiled_method(method());
1421 x->set_should_profile(true);
1422 }
1423 }
1424 append(x);
1425 }
1426
1427
1428 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1429 BlockBegin* tsux = block_at(stream()->get_dest());
1430 BlockBegin* fsux = block_at(stream()->next_bci());
1431 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1432
1433 bool subst_check = false;
1434 if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1435 ValueType* left_vt = x->type();
1436 ValueType* right_vt = y->type();
1437 if (left_vt->is_object()) {
1438 assert(right_vt->is_object(), "must be");
1439 ciKlass* left_klass = x->as_loaded_klass_or_null();
1440 ciKlass* right_klass = y->as_loaded_klass_or_null();
1441
1442 if (left_klass == nullptr || right_klass == nullptr) {
1443         // The klass is still unloaded, or came from a Phi node. Go to the slow case.
1444 subst_check = true;
1445 } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1446         // Either operand may be a value object, but we're not sure. Go to the slow case.
1447 subst_check = true;
1448 } else {
1449 // No need to do substitutability check
1450 }
1451 }
1452 }
1453 if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1454 is_profiling() && profile_branches()) {
1455 compilation()->set_would_profile(true);
1456 append(new ProfileACmpTypes(method(), bci(), x, y));
1457 }
1458
1459 // In case of loop invariant code motion or predicate insertion
1460 // before the body of a loop the state is needed
1461 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
1462
1463 assert(i->as_Goto() == nullptr ||
1464 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
1465 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
1466 "safepoint state of Goto returned by canonicalizer incorrect");
1467
1468 if (is_profiling()) {
1469 If* if_node = i->as_If();
1470 if (if_node != nullptr) {
1471 // Note that we'd collect profile data in this method if we wanted it.
1472 compilation()->set_would_profile(true);
1473 // At level 2 we need the proper bci to count backedges
1474 if_node->set_profiled_bci(bci());
1475 if (profile_branches()) {
1476 // Successors can be rotated by the canonicalizer, check for this case.
1477 if_node->set_profiled_method(method());
1478 if_node->set_should_profile(true);
1479 if (if_node->tsux() == fsux) {
1480 if_node->set_swapped(true);
1481 }
1692
1693 if (needs_check) {
1694 // Perform the registration of finalizable objects.
1695 ValueStack* state_before = copy_state_for_exception();
1696 load_local(objectType, 0);
1697 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1698 state()->pop_arguments(1),
1699 true, state_before, true));
1700 }
1701 }
1702
1703
1704 void GraphBuilder::method_return(Value x, bool ignore_return) {
1705 if (RegisterFinalizersAtInit &&
1706 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1707 call_register_finalizer();
1708 }
1709
1710 // The conditions for a memory barrier are described in Parse::do_exits().
1711 bool need_mem_bar = false;
1712 if ((method()->is_object_constructor() || method()->is_static_vnew_factory()) &&
1713 (scope()->wrote_final() ||
1714 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1715 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1716 need_mem_bar = true;
1717 }
1718
1719 BasicType bt = method()->return_type()->basic_type();
1720 switch (bt) {
1721 case T_BYTE:
1722 {
1723 Value shift = append(new Constant(new IntConstant(24)));
1724 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1725 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1726 break;
1727 }
1728 case T_SHORT:
1729 {
1730 Value shift = append(new Constant(new IntConstant(16)));
1731 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1732 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1843 // Attach dimension info to stable arrays.
1844 if (FoldStableValues &&
1845 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1846 ciArray* array = field_value.as_object()->as_array();
1847 jint dimension = field->type()->as_array_klass()->dimension();
1848 value = new StableArrayConstant(array, dimension);
1849 }
1850
1851 switch (field_type) {
1852 case T_ARRAY:
1853 case T_OBJECT:
1854 if (field_value.as_object()->should_be_constant()) {
1855 return new Constant(value);
1856 }
1857 return nullptr; // Not a constant.
1858 default:
1859 return new Constant(value);
1860 }
1861 }
1862
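  // Copy the payload of an inline type field by field from src (at src_off) to dest
  // (at dest_off); nested flat fields are not expected here since the klass's
  // nonstatic field list already exposes the leaf fields (see the assert below).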
1863 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1864 for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
1865 ciField* inner_field = vk->nonstatic_field_at(i);
1866 assert(!inner_field->is_flat(), "the iteration over nested fields is handled by the loop itself");
1867 int off = inner_field->offset_in_bytes() - vk->first_field_offset();
1868 LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
1869 Value replacement = append(load);
1870 StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
1871 store->set_enclosing_field(enclosing_field);
1872 append(store);
1873 }
1874 }
1875
1876 void GraphBuilder::access_field(Bytecodes::Code code) {
1877 bool will_link;
1878 ciField* field = stream()->get_field(will_link);
1879 ciInstanceKlass* holder = field->holder();
1880 BasicType field_type = field->type()->basic_type();
1881 ValueType* type = as_ValueType(field_type);
1882
1883 // call will_link again to determine if the field is valid.
1884 const bool needs_patching = !holder->is_loaded() ||
1885 !field->will_link(method(), code) ||
1886 (!field->is_flat() && PatchALot);
1887
1888 ValueStack* state_before = nullptr;
1889 if (!holder->is_initialized() || needs_patching) {
1890 // save state before instruction for debug info when
1891 // deoptimization happens during patching
1892 state_before = copy_state_before();
1893 }
1894
1895 Value obj = nullptr;
1896 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1897 if (state_before != nullptr) {
1898 // build a patching constant
1899 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1900 } else {
1901 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1902 }
1903 }
1904
1905 if (field->is_final() && code == Bytecodes::_putfield) {
1906 scope()->set_wrote_final();
1907 }
1908
1909 if (code == Bytecodes::_putfield) {
1910 scope()->set_wrote_fields();
1911 if (field->is_volatile()) {
1912 scope()->set_wrote_volatile();
1913 }
1914 }
1915
1916 int offset = !needs_patching ? field->offset_in_bytes() : -1;
1917 switch (code) {
1918 case Bytecodes::_getstatic: {
1919 // check for compile-time constants, i.e., initialized static final fields
1920 Value constant = nullptr;
1921 if (field->is_static_constant() && !PatchALot) {
1922 ciConstant field_value = field->constant_value();
1923 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1924 "stable static w/ default value shouldn't be a constant");
1925 constant = make_constant(field_value, field);
1926 } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
1927 field->type()->as_inline_klass()->is_empty()) {
1928 // Loading from a field of an empty inline type. Just return the default instance.
1929 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1930 }
1931 if (constant != nullptr) {
1932 push(type, append(constant));
1933 } else {
1934 if (state_before == nullptr) {
1935 state_before = copy_state_for_exception();
1936 }
1937 LoadField* load_field = new LoadField(append(obj), offset, field, true,
1938 state_before, needs_patching);
1939 push(type, append(load_field));
1940 }
1941 break;
1942 }
1943 case Bytecodes::_putstatic: {
1944 Value val = pop(type);
1945 if (state_before == nullptr) {
1946 state_before = copy_state_for_exception();
1947 }
1948 if (field_type == T_BOOLEAN) {
1949 Value mask = append(new Constant(new IntConstant(1)));
1950 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1951 }
1952 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
1953 // Storing to a field of an empty inline type. Ignore.
1954 break;
1955 }
1956 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1957 break;
1958 }
1959 case Bytecodes::_getfield: {
1960 // Check for compile-time constants, i.e., trusted final non-static fields.
1961 Value constant = nullptr;
1962 if (state_before == nullptr && field->is_flat()) {
1963 // Save the entire state and re-execute on deopt when accessing flat fields
1964 assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1965 state_before = copy_state_before();
1966 }
1967 if (!has_pending_field_access() && !has_pending_load_indexed()) {
1968 obj = apop();
1969 ObjectType* obj_type = obj->type()->as_ObjectType();
1970 if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
1971 && field->type()->as_inline_klass()->is_empty()) {
1972 // Loading from a field of an empty inline type. Just return the default instance.
1973 null_check(obj);
1974 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1975 } else if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
1976 ciObject* const_oop = obj_type->constant_value();
1977 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1978 ciConstant field_value = field->constant_value_of(const_oop);
1979 if (field_value.is_valid()) {
1980 if (field->is_null_free() && field_value.is_null_or_zero()) {
1981 // Non-flat inline type field. Replace null by the default value.
1982 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
1983 } else {
1984 constant = make_constant(field_value, field);
1985 }
1986 // For CallSite objects add a dependency for invalidation of the optimization.
1987 if (field->is_call_site_target()) {
1988 ciCallSite* call_site = const_oop->as_call_site();
1989 if (!call_site->is_fully_initialized_constant_call_site()) {
1990 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1991 dependency_recorder()->assert_call_site_target_value(call_site, target);
1992 }
1993 }
1994 }
1995 }
1996 }
1997 }
1998 if (constant != nullptr) {
1999 push(type, append(constant));
2000 } else {
2001 if (state_before == nullptr) {
2002 state_before = copy_state_for_exception();
2003 }
2004 if (!field->is_flat()) {
2005 if (has_pending_field_access()) {
2006 assert(!needs_patching, "Can't patch delayed field access");
2007 obj = pending_field_access()->obj();
2008 offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
2009 field = pending_field_access()->holder()->get_field_by_offset(offset, false);
2010 assert(field != nullptr, "field not found");
2011 set_pending_field_access(nullptr);
2012 } else if (has_pending_load_indexed()) {
2013 assert(!needs_patching, "Can't patch delayed field access");
2014 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2015 LoadIndexed* li = pending_load_indexed()->load_instr();
2016 li->set_type(type);
2017 push(type, append(li));
2018 set_pending_load_indexed(nullptr);
2019 break;
2020 }
2021 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
2022 Value replacement = !needs_patching ? _memory->load(load) : load;
2023 if (replacement != load) {
2024             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
2025 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
2026 // conversion. Emit an explicit conversion here to get the correct field value after the write.
2027 switch (field_type) {
2028 case T_BOOLEAN:
2029 case T_BYTE:
2030 replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
2031 break;
2032 case T_CHAR:
2033 replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
2034 break;
2035 case T_SHORT:
2036 replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
2037 break;
2038 default:
2039 break;
2040 }
2041 push(type, replacement);
2042 } else {
2043 push(type, append(load));
2044 }
2045 } else {
2046 // Look at the next bytecode to check if we can delay the field access
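              // Note: if the next bytecode is another getfield that needs no patching, the flat
              // field read can be folded into that access. Instead of materializing a buffered
              // instance now, a DelayedFieldAccess (or an update to a pending LoadIndexed) just
              // accumulates the offset, so only the innermost field is actually loaded.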
2047 bool can_delay_access = false;
2048 ciBytecodeStream s(method());
2049 s.force_bci(bci());
2050 s.next();
2051 if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
2052 ciField* next_field = s.get_field(will_link);
2053 bool next_needs_patching = !next_field->holder()->is_loaded() ||
2054 !next_field->will_link(method(), Bytecodes::_getfield) ||
2055 PatchALot;
2056 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
2057 }
2058 if (can_delay_access) {
2059 if (has_pending_load_indexed()) {
2060 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2061 } else if (has_pending_field_access()) {
2062 pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
2063 } else {
2064 null_check(obj);
2065 DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes());
2066 set_pending_field_access(dfa);
2067 }
2068 } else {
2069 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2070 scope()->set_wrote_final();
2071 scope()->set_wrote_fields();
2072 bool need_membar = false;
2073 if (inline_klass->is_initialized() && inline_klass->is_empty()) {
2074 apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
2075 if (has_pending_field_access()) {
2076 set_pending_field_access(nullptr);
2077 } else if (has_pending_load_indexed()) {
2078 set_pending_load_indexed(nullptr);
2079 }
2080 } else if (has_pending_load_indexed()) {
2081 assert(!needs_patching, "Can't patch delayed field access");
2082 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
2083 NewInlineTypeInstance* vt = new NewInlineTypeInstance(inline_klass, pending_load_indexed()->state_before());
2084 _memory->new_instance(vt);
2085 pending_load_indexed()->load_instr()->set_vt(vt);
2086 apush(append_split(vt));
2087 append(pending_load_indexed()->load_instr());
2088 set_pending_load_indexed(nullptr);
2089 need_membar = true;
2090 } else {
2091 NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(inline_klass, state_before);
2092 _memory->new_instance(new_instance);
2093 apush(append_split(new_instance));
2094 assert(!needs_patching, "Can't patch flat inline type field access");
2095 if (has_pending_field_access()) {
2096 copy_inline_content(inline_klass, pending_field_access()->obj(),
2097 pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->first_field_offset(),
2098 new_instance, inline_klass->first_field_offset(), state_before);
2099 set_pending_field_access(nullptr);
2100 } else {
2101 copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->first_field_offset(), state_before);
2102 }
2103 need_membar = true;
2104 }
2105 if (need_membar) {
2106               // If we allocated a new instance, ensure the stores to copy the
2107 // field contents are visible before any subsequent store that
2108 // publishes this reference.
2109 append(new MemBar(lir_membar_storestore));
2110 }
2111 }
2112 }
2113 }
2114 break;
2115 }
2116 case Bytecodes::_putfield: {
2117 Value val = pop(type);
2118 obj = apop();
2119 if (state_before == nullptr) {
2120 state_before = copy_state_for_exception();
2121 }
2122 if (field_type == T_BOOLEAN) {
2123 Value mask = append(new Constant(new IntConstant(1)));
2124 val = append(new LogicOp(Bytecodes::_iand, val, mask));
2125 }
2126 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
2127 // Storing to a field of an empty inline type. Ignore.
2128 null_check(obj);
2129 } else if (!field->is_flat()) {
2130 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2131 if (!needs_patching) store = _memory->store(store);
2132 if (store != nullptr) {
2133 append(store);
2134 }
2135 } else {
2136 assert(!needs_patching, "Can't patch flat inline type field access");
2137 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2138 copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
2139 }
2140 break;
2141 }
2142 default:
2143 ShouldNotReachHere();
2144 break;
2145 }
2146 }
2147
2148 // Baseline version of withfield: allocate a new instance every time
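// withfield conceptually yields a copy of obj with one field replaced: if obj is already a larval
// NewInlineTypeInstance it is reused, otherwise a new instance is allocated, the unmodified fields
// are copied over, and finally the new value is stored into the selected field.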
2149 void GraphBuilder::withfield(int field_index) {
2150 // Save the entire state and re-execute on deopt
2151 ValueStack* state_before = copy_state_before();
2152 state_before->set_should_reexecute(true);
2153
2154 bool will_link;
2155 ciField* field_modify = stream()->get_field(will_link);
2156 ciInstanceKlass* holder = field_modify->holder();
2157 BasicType field_type = field_modify->type()->basic_type();
2158 ValueType* type = as_ValueType(field_type);
2159 Value val = pop(type);
2160 Value obj = apop();
2161 null_check(obj);
2162
2163 if (!holder->is_loaded() || !holder->is_inlinetype() || !will_link) {
2164 apush(append_split(new Deoptimize(holder, state_before)));
2165 return;
2166 }
2167
2168 // call will_link again to determine if the field is valid.
2169 const bool needs_patching = !field_modify->will_link(method(), Bytecodes::_withfield) ||
2170 (!field_modify->is_flat() && PatchALot);
2171 const int offset_modify = !needs_patching ? field_modify->offset_in_bytes() : -1;
2172
2173 scope()->set_wrote_final();
2174 scope()->set_wrote_fields();
2175
2176 NewInlineTypeInstance* new_instance;
2177 if (obj->as_NewInlineTypeInstance() != nullptr && obj->as_NewInlineTypeInstance()->in_larval_state()) {
2178 new_instance = obj->as_NewInlineTypeInstance();
2179 apush(append_split(new_instance));
2180 } else {
2181 new_instance = new NewInlineTypeInstance(holder->as_inline_klass(), state_before);
2182 _memory->new_instance(new_instance);
2183 apush(append_split(new_instance));
2184
2185 // Initialize fields which are not modified
2186 for (int i = 0; i < holder->nof_nonstatic_fields(); i++) {
2187 ciField* field = holder->nonstatic_field_at(i);
2188 int offset = field->offset_in_bytes();
2189       // Don't use offset_modify here; it might be set to -1 if needs_patching is true
2190 if (offset != field_modify->offset_in_bytes()) {
2191 if (field->is_flat()) {
2192 ciInlineKlass* vk = field->type()->as_inline_klass();
2193 if (!vk->is_empty()) {
2194 copy_inline_content(vk, obj, offset, new_instance, vk->first_field_offset(), state_before, field);
2195 }
2196 } else {
2197 LoadField* load = new LoadField(obj, offset, field, false, state_before, false);
2198 Value replacement = append(load);
2199 StoreField* store = new StoreField(new_instance, offset, field, replacement, false, state_before, false);
2200 append(store);
2201 }
2202 }
2203 }
2204 }
2205
2206 // Field to modify
2207 if (field_type == T_BOOLEAN) {
2208 Value mask = append(new Constant(new IntConstant(1)));
2209 val = append(new LogicOp(Bytecodes::_iand, val, mask));
2210 }
2211 if (field_modify->is_flat()) {
2212 assert(!needs_patching, "Can't patch flat inline type field access");
2213 ciInlineKlass* vk = field_modify->type()->as_inline_klass();
2214 if (!vk->is_empty()) {
2215 copy_inline_content(vk, val, vk->first_field_offset(), new_instance, offset_modify, state_before, field_modify);
2216 }
2217 } else {
2218 StoreField* store = new StoreField(new_instance, offset_modify, field_modify, val, false, state_before, needs_patching);
2219 append(store);
2220 }
2221 }
2222
2223 Dependencies* GraphBuilder::dependency_recorder() const {
2224 assert(DeoptC1, "need debug information");
2225 return compilation()->dependency_recorder();
2226 }
2227
2228 // How many arguments do we want to profile?
2229 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2230 int n = 0;
2231 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2232 start = has_receiver ? 1 : 0;
2233 if (profile_arguments()) {
2234 ciProfileData* data = method()->method_data()->bci_to_data(bci());
2235 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2236 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2237 }
2238 }
2239   // If we are inlining, we need to collect arguments to profile parameters for the target
2240 if (profile_parameters() && target != nullptr) {
2241 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
2319 break;
2320 case Bytecodes::_invokehandle:
2321 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2322 break;
2323 default:
2324 break;
2325 }
2326 } else {
2327 if (bc_raw == Bytecodes::_invokehandle) {
2328 assert(!will_link, "should come here only for unlinked call");
2329 code = Bytecodes::_invokespecial;
2330 }
2331 }
2332
2333 if (code == Bytecodes::_invokespecial) {
2334 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2335 ciKlass* receiver_constraint = nullptr;
2336
2337 if (bc_raw == Bytecodes::_invokeinterface) {
2338 receiver_constraint = holder;
2339 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2340 receiver_constraint = calling_klass;
2341 }
2342
2343 if (receiver_constraint != nullptr) {
2344 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2345 Value receiver = state()->stack_at(index);
2346 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2347 // go to uncommon_trap when checkcast fails
2348 c->set_invokespecial_receiver_check();
2349 state()->stack_at_put(index, append_split(c));
2350 }
2351 }
2352
2353   // Push appendix argument (MethodType, CallSite, etc.), if one is present.
2354 bool patch_for_appendix = false;
2355 int patching_appendix_arg = 0;
2356 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2357 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2358 apush(arg);
2359 patch_for_appendix = true;
2554 null_check(recv);
2555 }
2556
2557 if (is_profiling()) {
2558 // Note that we'd collect profile data in this method if we wanted it.
2559 compilation()->set_would_profile(true);
2560
2561 if (profile_calls()) {
2562       assert(cha_monomorphic_target == nullptr || exact_target == nullptr, "both cannot be set");
2563 ciKlass* target_klass = nullptr;
2564 if (cha_monomorphic_target != nullptr) {
2565 target_klass = cha_monomorphic_target->holder();
2566 } else if (exact_target != nullptr) {
2567 target_klass = exact_target->holder();
2568 }
2569 profile_call(target, recv, target_klass, collect_args_for_profiling(args, nullptr, false), false);
2570 }
2571 }
2572 }
2573
2574 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before,
2575 declared_signature->returns_null_free_inline_type());
2576 // push result
2577 append_split(result);
2578
2579 if (result_type != voidType) {
2580 push(result_type, round_fp(result));
2581 }
2582 if (profile_return() && result_type->is_object_kind()) {
2583 profile_return_type(result, target);
2584 }
2585 }
2586
2587
2588 void GraphBuilder::new_instance(int klass_index) {
2589 ValueStack* state_before = copy_state_exhandling();
2590 ciKlass* klass = stream()->get_klass();
2591 assert(klass->is_instance_klass(), "must be an instance klass");
2592 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2593 _memory->new_instance(new_instance);
2594 apush(append_split(new_instance));
2595 }
2596
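// Note: implements aconst_init. Pushes the default (all-fields-default) instance of the inline
// klass when it is resolved and initialized; otherwise a Deoptimize node is emitted.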
2597 void GraphBuilder::default_value(int klass_index) {
2598 bool will_link;
2599 ciKlass* klass = stream()->get_klass(will_link);
2600 if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
2601 klass->as_inline_klass()->is_initialized()) {
2602 ciInlineKlass* vk = klass->as_inline_klass();
2603 apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
2604 } else {
2605 apush(append_split(new Deoptimize(klass, copy_state_before())));
2606 }
2607 }
2608
2609 void GraphBuilder::new_type_array() {
2610 ValueStack* state_before = copy_state_exhandling();
2611 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
2612 }
2613
2614
2615 void GraphBuilder::new_object_array() {
2616 ciKlass* klass = stream()->get_klass();
2617 bool null_free = stream()->has_Q_signature();
2618 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2619 NewArray* n = new NewObjectArray(klass, ipop(), state_before, null_free);
2620 apush(append_split(n));
2621 }
2622
2623
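// Note: returns true if a type check against k can be implemented as a single klass-pointer
// compare: either k is a final class, or CHA shows it is currently a leaf type (in which case a
// dependency is recorded so the compiled code is invalidated if a subclass is loaded later).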
2624 bool GraphBuilder::direct_compare(ciKlass* k) {
2625 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2626 ciInstanceKlass* ik = k->as_instance_klass();
2627 if (ik->is_final()) {
2628 return true;
2629 } else {
2630 if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
2631 // test class is leaf class
2632 dependency_recorder()->assert_leaf_type(ik);
2633 return true;
2634 }
2635 }
2636 }
2637 return false;
2638 }
2639
2640
2641 void GraphBuilder::check_cast(int klass_index) {
2642 ciKlass* klass = stream()->get_klass();
2643 bool null_free = stream()->has_Q_signature();
2644 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
2645 CheckCast* c = new CheckCast(klass, apop(), state_before, null_free);
2646 apush(append_split(c));
2647 c->set_direct_compare(direct_compare(klass));
2648
2649 if (is_profiling()) {
2650 // Note that we'd collect profile data in this method if we wanted it.
2651 compilation()->set_would_profile(true);
2652
2653 if (profile_checkcasts()) {
2654 c->set_profiled_method(method());
2655 c->set_profiled_bci(bci());
2656 c->set_should_profile(true);
2657 }
2658 }
2659 }
2660
2661
2662 void GraphBuilder::instance_of(int klass_index) {
2663 ciKlass* klass = stream()->get_klass();
2664 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2665 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2666 ipush(append_split(i));
2667 i->set_direct_compare(direct_compare(klass));
2668
2669 if (is_profiling()) {
2670 // Note that we'd collect profile data in this method if we wanted it.
2671 compilation()->set_would_profile(true);
2672
2673 if (profile_checkcasts()) {
2674 i->set_profiled_method(method());
2675 i->set_profiled_bci(bci());
2676 i->set_should_profile(true);
2677 }
2678 }
2679 }
2680
2681
2682 void GraphBuilder::monitorenter(Value x, int bci) {
2683 bool maybe_inlinetype = false;
2684 if (bci == InvocationEntryBci) {
2685 // Called by GraphBuilder::inline_sync_entry.
2686 #ifdef ASSERT
2687 ciType* obj_type = x->declared_type();
2688 assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2689 #endif
2690 } else {
2691 // We are compiling a monitorenter bytecode
2692 if (EnableValhalla) {
2693 ciType* obj_type = x->declared_type();
2694 if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
2695 // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2696 // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
2697 maybe_inlinetype = true;
2698 }
2699 }
2700 }
2701
2702 // save state before locking in case of deoptimization after a NullPointerException
2703 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2704 compilation()->set_has_monitors(true);
2705 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
2706 kill_all();
2707 }
2708
2709
2710 void GraphBuilder::monitorexit(Value x, int bci) {
2711 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2712 kill_all();
2713 }
2714
2715
2716 void GraphBuilder::new_multi_array(int dimensions) {
2717 ciKlass* klass = stream()->get_klass();
2718 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2719
2720 Values* dims = new Values(dimensions, dimensions, nullptr);
2721 // fill in all dimensions
2722 int i = dimensions;
2723 while (i-- > 0) dims->at_put(i, ipop());
2724 // create array
2725 NewArray* n = new NewMultiArray(klass, dims, state_before);
2820 if (i1->can_trap()) {
2821 i1->set_exception_handlers(handle_exception(i1));
2822 assert(i1->exception_state() != nullptr || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
2823 }
2824 return i1;
2825 }
2826
2827
2828 Instruction* GraphBuilder::append(Instruction* instr) {
2829 assert(instr->as_StateSplit() == nullptr || instr->as_BlockEnd() != nullptr, "wrong append used");
2830 return append_with_bci(instr, bci());
2831 }
2832
2833
2834 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2835 return append_with_bci(instr, bci());
2836 }
2837
2838
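// Note: emits an explicit NullCheck only when the value may actually be null. Freshly allocated
// objects, non-null object constants and values already known to be null-free are skipped.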
2839 void GraphBuilder::null_check(Value value) {
2840 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr || value->as_NewInlineTypeInstance() != nullptr) {
2841 return;
2842 } else {
2843 Constant* con = value->as_Constant();
2844 if (con) {
2845 ObjectType* c = con->type()->as_ObjectType();
2846 if (c && c->is_loaded()) {
2847 ObjectConstant* oc = c->as_ObjectConstant();
2848 if (!oc || !oc->value()->is_null_object()) {
2849 return;
2850 }
2851 }
2852 }
2853 if (value->is_null_free()) return;
2854 }
2855 append(new NullCheck(value, copy_state_for_exception()));
2856 }
2857
2858
2859
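// Note: collects the exception handlers that may cover the given instruction by walking the
// handler lists of the enclosing scopes and joining every handler whose range covers the
// instruction's bci.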
2860 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2861 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2862 assert(instruction->exception_state() == nullptr
2863 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2864 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2865 "exception_state should be of exception kind");
2866 return new XHandlers();
2867 }
2868
2869 XHandlers* exception_handlers = new XHandlers();
2870 ScopeData* cur_scope_data = scope_data();
2871 ValueStack* cur_state = instruction->state_before();
2872 ValueStack* prev_state = nullptr;
2873 int scope_count = 0;
2874
2875 assert(cur_state != nullptr, "state_before must be set");
2876 do {
2877 int cur_bci = cur_state->bci();
2878 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2879 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2880 || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2881
2882
2883 // join with all potential exception handlers
2884 XHandlers* list = cur_scope_data->xhandlers();
2885 const int n = list->length();
2886 for (int i = 0; i < n; i++) {
2887 XHandler* h = list->handler_at(i);
2888 if (h->covers(cur_bci)) {
2889 // h is a potential exception handler => join it
2890 compilation()->set_has_exception_handlers(true);
2891
2892 BlockBegin* entry = h->entry_block();
2893 if (entry == block()) {
2894 // It's acceptable for an exception handler to cover itself
2895 // but we don't handle that in the parser currently. It's
2896         // very rare, so we bail out instead of trying to handle it.
2897 BAILOUT_("exception handler covers itself", exception_handlers);
2898 }
2899 assert(entry->bci() == h->handler_bci(), "must match");
2900 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2901
3349 case Bytecodes::_invokevirtual : // fall through
3350 case Bytecodes::_invokespecial : // fall through
3351 case Bytecodes::_invokestatic : // fall through
3352 case Bytecodes::_invokedynamic : // fall through
3353 case Bytecodes::_invokeinterface: invoke(code); break;
3354 case Bytecodes::_new : new_instance(s.get_index_u2()); break;
3355 case Bytecodes::_newarray : new_type_array(); break;
3356 case Bytecodes::_anewarray : new_object_array(); break;
3357 case Bytecodes::_arraylength : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
3358 case Bytecodes::_athrow : throw_op(s.cur_bci()); break;
3359 case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break;
3360 case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break;
3361 case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break;
3362 case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break;
3363 case Bytecodes::_wide : ShouldNotReachHere(); break;
3364 case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
3365 case Bytecodes::_ifnull : if_null(objectType, If::eql); break;
3366 case Bytecodes::_ifnonnull : if_null(objectType, If::neq); break;
3367 case Bytecodes::_goto_w : _goto(s.cur_bci(), s.get_far_dest()); break;
3368 case Bytecodes::_jsr_w : jsr(s.get_far_dest()); break;
3369 case Bytecodes::_aconst_init : default_value(s.get_index_u2()); break;
3370 case Bytecodes::_withfield : withfield(s.get_index_u2()); break;
3371 case Bytecodes::_breakpoint : BAILOUT_("concurrent setting of breakpoint", nullptr);
3372 default : ShouldNotReachHere(); break;
3373 }
3374
3375 if (log != nullptr)
3376 log->clear_context(); // skip marker if nothing was printed
3377
3378     // save current bci to set up the Goto at the end
3379 prev_bci = s.cur_bci();
3380
3381 }
3382 CHECK_BAILOUT_(nullptr);
3383 // stop processing of this block (see try_inline_full)
3384 if (_skip_block) {
3385 _skip_block = false;
3386 assert(_last && _last->as_BlockEnd(), "");
3387 return _last->as_BlockEnd();
3388 }
3389   // if any instructions were appended, check whether the last one is a BlockEnd instruction
3390 BlockEnd* end = last()->as_BlockEnd();
3639 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3640
3641 assert(state->caller_state() == nullptr, "should be top scope");
3642 state->clear_locals();
3643 Goto* g = new Goto(target, false);
3644 append(g);
3645 _osr_entry->set_end(g);
3646 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3647
3648 scope_data()->set_stream(nullptr);
3649 }
3650
3651
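// Note: builds the ValueStack at method entry. Local 0 holds the receiver for non-static methods,
// the remaining locals are filled from the signature (reference types are mapped to T_OBJECT),
// and synchronized methods reserve a lock slot.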
3652 ValueStack* GraphBuilder::state_at_entry() {
3653 ValueStack* state = new ValueStack(scope(), nullptr);
3654
3655 // Set up locals for receiver
3656 int idx = 0;
3657 if (!method()->is_static()) {
3658 // we should always see the receiver
3659 state->store_local(idx, new Local(method()->holder(), objectType, idx,
3660 /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3661 idx = 1;
3662 }
3663
3664 // Set up locals for incoming arguments
3665 ciSignature* sig = method()->signature();
3666 for (int i = 0; i < sig->count(); i++) {
3667 ciType* type = sig->type_at(i);
3668 BasicType basic_type = type->basic_type();
3669     // don't allow T_ARRAY to propagate into the types of locals
3670 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3671 ValueType* vt = as_ValueType(basic_type);
3672 state->store_local(idx, new Local(type, vt, idx, false, sig->is_null_free_at(i)));
3673 idx += type->size();
3674 }
3675
3676 // lock synchronized method
3677 if (method()->is_synchronized()) {
3678 state->lock(nullptr);
3679 }
3680
3681 return state;
3682 }
3683
3684
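// Note: the constructor drives the initial IR setup. A BlockListBuilder computes the entry points
// and the bci-to-block mapping, the root scope is pushed, and the state at entry is merged into
// the start block.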
3685 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3686 : _scope_data(nullptr)
3687 , _compilation(compilation)
3688 , _memory(new MemoryBuffer())
3689 , _inline_bailout_msg(nullptr)
3690 , _instruction_count(0)
3691 , _osr_entry(nullptr)
3692 , _pending_field_access(nullptr)
3693 , _pending_load_indexed(nullptr)
3694 {
3695 int osr_bci = compilation->osr_bci();
3696
3697 // determine entry points and bci2block mapping
3698 BlockListBuilder blm(compilation, scope, osr_bci);
3699 CHECK_BAILOUT();
3700
3701 BlockList* bci2block = blm.bci2block();
3702 BlockBegin* start_block = bci2block->at(0);
3703
3704 push_root_scope(scope, bci2block, start_block);
3705
3706   // set up state for std entry
3707 _initial_state = state_at_entry();
3708 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3709
3710 // End nulls still exist here
3711
3712 // complete graph
3713 _vmap = new ValueMap();