873 class LIR_Op;
874 class LIR_Op0;
875 class LIR_OpLabel;
876 class LIR_Op1;
877 class LIR_OpBranch;
878 class LIR_OpConvert;
879 class LIR_OpAllocObj;
880 class LIR_OpReturn;
881 class LIR_Op2;
882 class LIR_OpDelay;
883 class LIR_Op3;
884 class LIR_OpAllocArray;
885 class LIR_Op4;
886 class LIR_OpCall;
887 class LIR_OpJavaCall;
888 class LIR_OpRTCall;
889 class LIR_OpArrayCopy;
890 class LIR_OpUpdateCRC32;
891 class LIR_OpLock;
892 class LIR_OpTypeCheck;
893 class LIR_OpCompareAndSwap;
894 class LIR_OpLoadKlass;
895 class LIR_OpProfileCall;
896 class LIR_OpProfileType;
897 #ifdef ASSERT
898 class LIR_OpAssert;
899 #endif
900
901 // LIR operation codes
902 enum LIR_Code {
903 lir_none
904 , begin_op0
905 , lir_label
906 , lir_nop
907 , lir_std_entry
908 , lir_osr_entry
909 , lir_breakpoint
910 , lir_rtcall
911 , lir_membar
912 , lir_membar_acquire
913 , lir_membar_release
914 , lir_membar_loadload
915 , lir_membar_storestore
916 , lir_membar_loadstore
917 , lir_membar_storeload
918 , lir_get_thread
919 , lir_on_spin_wait
920 , end_op0
921 , begin_op1
922 , lir_push
923 , lir_pop
924 , lir_null_check
925 , lir_return
926 , lir_leal
927 , lir_move
928 , lir_convert
929 , lir_alloc_object
930 , lir_monaddr
931 , lir_sqrt
932 , lir_abs
933 , lir_neg
934 , lir_f2hf
935 , lir_hf2f
936 , lir_safepoint
937 , lir_unwind
938 , lir_load_klass
939 , end_op1
976 , lir_dynamic_call
977 , end_opJavaCall
978 , begin_opArrayCopy
979 , lir_arraycopy
980 , end_opArrayCopy
981 , begin_opUpdateCRC32
982 , lir_updatecrc32
983 , end_opUpdateCRC32
984 , begin_opLock
985 , lir_lock
986 , lir_unlock
987 , end_opLock
988 , begin_delay_slot
989 , lir_delay_slot
990 , end_delay_slot
991 , begin_opTypeCheck
992 , lir_instanceof
993 , lir_checkcast
994 , lir_store_check
995 , end_opTypeCheck
996 , begin_opCompareAndSwap
997 , lir_cas_long
998 , lir_cas_obj
999 , lir_cas_int
1000 , end_opCompareAndSwap
1001 , begin_opMDOProfile
1002 , lir_profile_call
1003 , lir_profile_type
1004 , end_opMDOProfile
1005 , begin_opAssert
1006 , lir_assert
1007 , end_opAssert
1008 #if INCLUDE_ZGC
1009 , begin_opXLoadBarrierTest
1010 , lir_xloadbarrier_test
1011 , end_opXLoadBarrierTest
1012 #endif
1013 };
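
// Illustrative sketch: the begin_*/end_* sentinels bracket the codes handled by
// one LIR_Op subclass so constructors can range-check their code (see the
// is_in_range asserts further below). Assuming is_in_range() tests exclusively
// (start < code && code < end), a classifier looks roughly like:
//
//   static bool is_op1_code(LIR_Code code) {
//     return code > begin_op1 && code < end_op1;   // e.g. lir_move, lir_return, lir_convert
//   }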
1014
1015
1016 enum LIR_Condition {
1017 lir_cond_equal
1018 , lir_cond_notEqual
1019 , lir_cond_less
1020 , lir_cond_lessEqual
1021 , lir_cond_greaterEqual
1022 , lir_cond_greater
1023 , lir_cond_belowEqual
1123 virtual bool is_patching() { return false; }
1124 virtual LIR_OpCall* as_OpCall() { return nullptr; }
1125 virtual LIR_OpJavaCall* as_OpJavaCall() { return nullptr; }
1126 virtual LIR_OpLabel* as_OpLabel() { return nullptr; }
1127 virtual LIR_OpDelay* as_OpDelay() { return nullptr; }
1128 virtual LIR_OpLock* as_OpLock() { return nullptr; }
1129 virtual LIR_OpAllocArray* as_OpAllocArray() { return nullptr; }
1130 virtual LIR_OpAllocObj* as_OpAllocObj() { return nullptr; }
1131 virtual LIR_OpBranch* as_OpBranch() { return nullptr; }
1132 virtual LIR_OpReturn* as_OpReturn() { return nullptr; }
1133 virtual LIR_OpRTCall* as_OpRTCall() { return nullptr; }
1134 virtual LIR_OpConvert* as_OpConvert() { return nullptr; }
1135 virtual LIR_Op0* as_Op0() { return nullptr; }
1136 virtual LIR_Op1* as_Op1() { return nullptr; }
1137 virtual LIR_Op2* as_Op2() { return nullptr; }
1138 virtual LIR_Op3* as_Op3() { return nullptr; }
1139 virtual LIR_Op4* as_Op4() { return nullptr; }
1140 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return nullptr; }
1141 virtual LIR_OpUpdateCRC32* as_OpUpdateCRC32() { return nullptr; }
1142 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return nullptr; }
1143 virtual LIR_OpCompareAndSwap* as_OpCompareAndSwap() { return nullptr; }
1144 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return nullptr; }
1145 virtual LIR_OpProfileCall* as_OpProfileCall() { return nullptr; }
1146 virtual LIR_OpProfileType* as_OpProfileType() { return nullptr; }
1147 #ifdef ASSERT
1148 virtual LIR_OpAssert* as_OpAssert() { return nullptr; }
1149 #endif
1150
1151 virtual void verify() const {}
1152 };
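
// Usage sketch (illustrative): the as_*() virtuals above provide RTTI-free
// downcasting; callers test the result for null instead of using dynamic_cast,
// e.g. when walking a block's op list:
//
//   void process(LIR_Op* op) {
//     if (LIR_OpBranch* branch = op->as_OpBranch()) {
//       // branch-specific handling
//     } else if (LIR_OpJavaCall* call = op->as_OpJavaCall()) {
//       // call-specific handling
//     }
//   }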
1153
1154 // for calls
1155 class LIR_OpCall: public LIR_Op {
1156 friend class LIR_OpVisitState;
1157
1158 protected:
1159 address _addr;
1160 LIR_OprList* _arguments;
1161 protected:
1162 LIR_OpCall(LIR_Code code, address addr, LIR_Opr result,
1163 LIR_OprList* arguments, CodeEmitInfo* info = nullptr)
1164 : LIR_Op(code, result, info)
1165 , _addr(addr)
1166 , _arguments(arguments) {}
1199 LIR_OprList* arguments, CodeEmitInfo* info)
1200 : LIR_OpCall(code, (address)vtable_offset, result, arguments, info)
1201 , _method(method)
1202 , _receiver(receiver)
1203 , _method_handle_invoke_SP_save_opr(LIR_OprFact::illegalOpr)
1204 { assert(is_in_range(code, begin_opJavaCall, end_opJavaCall), "code check"); }
1205
1206 LIR_Opr receiver() const { return _receiver; }
1207 ciMethod* method() const { return _method; }
1208
1209 // JSR 292 support.
1210 bool is_invokedynamic() const { return code() == lir_dynamic_call; }
1211 bool is_method_handle_invoke() const {
1212 return method()->is_compiled_lambda_form() || // Java-generated lambda form
1213 method()->is_method_handle_intrinsic(); // JVM-generated MH intrinsic
1214 }
1215
1216 virtual void emit_code(LIR_Assembler* masm);
1217 virtual LIR_OpJavaCall* as_OpJavaCall() { return this; }
1218 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1219 };
1220
1221 // --------------------------------------------------
1222 // LIR_OpLabel
1223 // --------------------------------------------------
1224 // Location where a branch can continue
1225 class LIR_OpLabel: public LIR_Op {
1226 friend class LIR_OpVisitState;
1227
1228 private:
1229 Label* _label;
1230 public:
1231 LIR_OpLabel(Label* lbl)
1232 : LIR_Op(lir_label, LIR_OprFact::illegalOpr, nullptr)
1233 , _label(lbl) {}
1234 Label* label() const { return _label; }
1235
1236 virtual void emit_code(LIR_Assembler* masm);
1237 virtual LIR_OpLabel* as_OpLabel() { return this; }
1238 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1250 LIR_Opr _dst_pos;
1251 LIR_Opr _length;
1252 LIR_Opr _tmp;
1253 ciArrayKlass* _expected_type;
1254 int _flags;
1255
1256 public:
1257 enum Flags {
1258 src_null_check = 1 << 0,
1259 dst_null_check = 1 << 1,
1260 src_pos_positive_check = 1 << 2,
1261 dst_pos_positive_check = 1 << 3,
1262 length_positive_check = 1 << 4,
1263 src_range_check = 1 << 5,
1264 dst_range_check = 1 << 6,
1265 type_check = 1 << 7,
1266 overlapping = 1 << 8,
1267 unaligned = 1 << 9,
1268 src_objarray = 1 << 10,
1269 dst_objarray = 1 << 11,
1270 all_flags = (1 << 12) - 1
1271 };
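
// Flag-composition sketch (illustrative; the condition names are stand-ins for
// whatever the generator has proven): the LIR generator typically starts from
// all_flags and clears checks it can show are unnecessary, while
// get_initial_copy_flags() below seeds the conservative unaligned|overlapping
// assumption. Roughly:
//
//   int flags = LIR_OpArrayCopy::all_flags;
//   if (src_known_not_null) flags &= ~LIR_OpArrayCopy::src_null_check;
//   if (dst_known_not_null) flags &= ~LIR_OpArrayCopy::dst_null_check;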
1272
1273 LIR_OpArrayCopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp,
1274 ciArrayKlass* expected_type, int flags, CodeEmitInfo* info);
1275
1276 LIR_Opr src() const { return _src; }
1277 LIR_Opr src_pos() const { return _src_pos; }
1278 LIR_Opr dst() const { return _dst; }
1279 LIR_Opr dst_pos() const { return _dst_pos; }
1280 LIR_Opr length() const { return _length; }
1281 LIR_Opr tmp() const { return _tmp; }
1282 int flags() const { return _flags; }
1283 ciArrayKlass* expected_type() const { return _expected_type; }
1284 ArrayCopyStub* stub() const { return _stub; }
1285 static int get_initial_copy_flags() { return LIR_OpArrayCopy::unaligned |
1286 LIR_OpArrayCopy::overlapping; }
1287
1288 virtual void emit_code(LIR_Assembler* masm);
1289 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return this; }
1290 void print_instr(outputStream* out) const PRODUCT_RETURN;
1510
1511
1512 // LIR_OpTypeCheck
1513 class LIR_OpTypeCheck: public LIR_Op {
1514 friend class LIR_OpVisitState;
1515
1516 private:
1517 LIR_Opr _object;
1518 LIR_Opr _array;
1519 ciKlass* _klass;
1520 LIR_Opr _tmp1;
1521 LIR_Opr _tmp2;
1522 LIR_Opr _tmp3;
1523 CodeEmitInfo* _info_for_patch;
1524 CodeEmitInfo* _info_for_exception;
1525 CodeStub* _stub;
1526 ciMethod* _profiled_method;
1527 int _profiled_bci;
1528 bool _should_profile;
1529 bool _fast_check;
1530
1531 public:
1532 LIR_OpTypeCheck(LIR_Code code, LIR_Opr result, LIR_Opr object, ciKlass* klass,
1533 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
1534 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub);
1535 LIR_OpTypeCheck(LIR_Code code, LIR_Opr object, LIR_Opr array,
1536 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception);
1537
1538 LIR_Opr object() const { return _object; }
1539 LIR_Opr array() const { assert(code() == lir_store_check, "not valid"); return _array; }
1540 LIR_Opr tmp1() const { return _tmp1; }
1541 LIR_Opr tmp2() const { return _tmp2; }
1542 LIR_Opr tmp3() const { return _tmp3; }
1543 ciKlass* klass() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _klass; }
1544 bool fast_check() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _fast_check; }
1545 CodeEmitInfo* info_for_patch() const { return _info_for_patch; }
1546 CodeEmitInfo* info_for_exception() const { return _info_for_exception; }
1547 CodeStub* stub() const { return _stub; }
1548
1549 // MethodData* profiling
1550 void set_profiled_method(ciMethod *method) { _profiled_method = method; }
1551 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1552 void set_should_profile(bool b) { _should_profile = b; }
1553 ciMethod* profiled_method() const { return _profiled_method; }
1554 int profiled_bci() const { return _profiled_bci; }
1555 bool should_profile() const { return _should_profile; }
1556
1557 virtual bool is_patching() { return _info_for_patch != nullptr; }
1558 virtual void emit_code(LIR_Assembler* masm);
1559 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return this; }
1560 void print_instr(outputStream* out) const PRODUCT_RETURN;
1561 };
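
// Usage note (sketch): one LIR_OpTypeCheck class backs three codes, so only a
// subset of its fields is meaningful per code and the asserts above enforce
// that, e.g.
//
//   if (op->code() == lir_store_check) { op->array(); }                    // valid
//   if (op->code() == lir_checkcast)   { op->klass(); op->fast_check(); }  // valid
//   // calling array() on a lir_checkcast op would trip its assert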
1562
1563 // LIR_Op2
1564 class LIR_Op2: public LIR_Op {
1565 friend class LIR_OpVisitState;
1566
1567 protected:
1568 LIR_Opr _opr1;
1569 LIR_Opr _opr2;
1570 LIR_Opr _tmp1;
1571 LIR_Opr _tmp2;
1572 LIR_Opr _tmp3;
1573 LIR_Opr _tmp4;
1574 LIR_Opr _tmp5;
1575 LIR_Condition _condition;
1576 BasicType _type;
1577
1578 void verify() const;
1579
1580 public:
1581 LIR_Op2(LIR_Code code, LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, CodeEmitInfo* info = nullptr, BasicType type = T_ILLEGAL)
1582 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1701 void negate_cond();
1702
1703 virtual void emit_code(LIR_Assembler* masm);
1704 virtual LIR_OpBranch* as_OpBranch() { return this; }
1705 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1706 };
1707
1708 class LIR_OpAllocArray : public LIR_Op {
1709 friend class LIR_OpVisitState;
1710
1711 private:
1712 LIR_Opr _klass;
1713 LIR_Opr _len;
1714 LIR_Opr _tmp1;
1715 LIR_Opr _tmp2;
1716 LIR_Opr _tmp3;
1717 LIR_Opr _tmp4;
1718 CodeStub* _stub;
1719 BasicType _type;
1720 bool _zero_array;
1721
1722 public:
1723 LIR_OpAllocArray(LIR_Opr klass, LIR_Opr len, LIR_Opr result, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, BasicType type, CodeStub* stub, bool zero_array)
1724 : LIR_Op(lir_alloc_array, result, nullptr)
1725 , _klass(klass)
1726 , _len(len)
1727 , _tmp1(t1)
1728 , _tmp2(t2)
1729 , _tmp3(t3)
1730 , _tmp4(t4)
1731 , _stub(stub)
1732 , _type(type)
1733 , _zero_array(zero_array) {}
1734
1735 LIR_Opr klass() const { return _klass; }
1736 LIR_Opr len() const { return _len; }
1737 LIR_Opr obj() const { return result_opr(); }
1738 LIR_Opr tmp1() const { return _tmp1; }
1739 LIR_Opr tmp2() const { return _tmp2; }
1740 LIR_Opr tmp3() const { return _tmp3; }
1741 LIR_Opr tmp4() const { return _tmp4; }
1742 BasicType type() const { return _type; }
1743 CodeStub* stub() const { return _stub; }
1744 bool zero_array() const { return _zero_array; }
1745
1746 virtual void emit_code(LIR_Assembler* masm);
1747   virtual LIR_OpAllocArray* as_OpAllocArray() { return this; }
1748 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1749 };
1750
1751
1752 class LIR_Op3: public LIR_Op {
1753 friend class LIR_OpVisitState;
1754
1755 private:
1756 LIR_Opr _opr1;
1757 LIR_Opr _opr2;
1758 LIR_Opr _opr3;
1759 public:
1760 LIR_Op3(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr opr3, LIR_Opr result, CodeEmitInfo* info = nullptr)
1761 : LIR_Op(code, result, info)
1762 , _opr1(opr1)
1763 , _opr2(opr2)
1764 , _opr3(opr3) { assert(is_in_range(code, begin_op3, end_op3), "code check"); }
1831
1832 //--------------------------------
1833 class LabelObj: public CompilationResourceObj {
1834 private:
1835 Label _label;
1836 public:
1837 LabelObj() {}
1838 Label* label() { return &_label; }
1839 };
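
// Usage sketch (illustrative; branch_destination() is assumed here, it is not
// shown in this excerpt): a LabelObj supplies a forward-branch target inside
// one block's LIR, bound later by emitting a LIR_OpLabel, e.g.
//
//   LabelObj* L = new LabelObj();
//   lir->branch(lir_cond_equal, L->label());   // forward branch (see LIR_List below)
//   // ... ops for the not-taken path ...
//   lir->branch_destination(L->label());       // binds the label via a LIR_OpLabel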
1840
1841
1842 class LIR_OpLock: public LIR_Op {
1843 friend class LIR_OpVisitState;
1844
1845 private:
1846 LIR_Opr _hdr;
1847 LIR_Opr _obj;
1848 LIR_Opr _lock;
1849 LIR_Opr _scratch;
1850 CodeStub* _stub;
1851 public:
1852 LIR_OpLock(LIR_Code code, LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info)
1853 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1854 , _hdr(hdr)
1855 , _obj(obj)
1856 , _lock(lock)
1857 , _scratch(scratch)
1858 , _stub(stub) {}
1859
1860 LIR_Opr hdr_opr() const { return _hdr; }
1861 LIR_Opr obj_opr() const { return _obj; }
1862 LIR_Opr lock_opr() const { return _lock; }
1863 LIR_Opr scratch_opr() const { return _scratch; }
1864 CodeStub* stub() const { return _stub; }
1865
1866 virtual void emit_code(LIR_Assembler* masm);
1867 virtual LIR_OpLock* as_OpLock() { return this; }
1868 void print_instr(outputStream* out) const PRODUCT_RETURN;
1869 };
1870
1871 class LIR_OpLoadKlass: public LIR_Op {
1872 friend class LIR_OpVisitState;
1873
1874 private:
1875 LIR_Opr _obj;
1876 public:
1877 LIR_OpLoadKlass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info)
1878 : LIR_Op(lir_load_klass, result, info)
1879 , _obj(obj)
1880 {}
1881
1882 LIR_Opr obj() const { return _obj; }
1883
1884 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return this; }
2029 , _obj(obj)
2030 , _tmp(tmp)
2031 , _exact_klass(exact_klass)
2032 , _current_klass(current_klass)
2033 , _not_null(not_null)
2034 , _no_conflict(no_conflict) { }
2035
2036 LIR_Opr mdp() const { return _mdp; }
2037 LIR_Opr obj() const { return _obj; }
2038 LIR_Opr tmp() const { return _tmp; }
2039 ciKlass* exact_klass() const { return _exact_klass; }
2040 intptr_t current_klass() const { return _current_klass; }
2041 bool not_null() const { return _not_null; }
2042 bool no_conflict() const { return _no_conflict; }
2043
2044 virtual void emit_code(LIR_Assembler* masm);
2045 virtual LIR_OpProfileType* as_OpProfileType() { return this; }
2046 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
2047 };
2048
2049 class LIR_InsertionBuffer;
2050
2051 //--------------------------------LIR_List---------------------------------------------------
2052 // Maintains a list of LIR instructions (one instance of LIR_List per basic block)
2053 // The LIR instructions are appended by the LIR_List class itself.
2054 //
2055 // Notes:
2056 //  - all offsets are (should be) in bytes
2057 // - local positions are specified with an offset, with offset 0 being local 0
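//
// Example (sketch; addr/reg/info below are placeholder operands): client code
// appends ops through the typed helpers declared below rather than building
// LIR_Ops by hand, e.g.
//
//   LIR_List* lir = ...;              // one list per BlockBegin
//   lir->load(addr, reg, info);       // see load() below
//   lir->shift_left(reg, 1, reg);     // see shift_left() below
//   lir->store(reg, addr, info);      // see store() below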
2058
2059 class LIR_List: public CompilationResourceObj {
2060 private:
2061 LIR_OpList _operations;
2062
2063 Compilation* _compilation;
2064 #ifndef PRODUCT
2065 BlockBegin* _block;
2066 #endif
2067 #ifdef ASSERT
2068 const char * _file;
2253 void div (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_div, left, right, res, tmp)); }
2254 void rem (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = nullptr) { append(new LIR_Op2(lir_rem, left, right, res, info)); }
2255
2256 void volatile_load_mem_reg(LIR_Address* address, LIR_Opr dst, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2257 void volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2258
2259 void load(LIR_Address* addr, LIR_Opr src, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2260
2261 void store_mem_int(jint v, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2262 void store_mem_oop(jobject o, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2263 void store(LIR_Opr src, LIR_Address* addr, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2264 void volatile_store_mem_reg(LIR_Opr src, LIR_Address* address, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2265 void volatile_store_unsafe_reg(LIR_Opr src, LIR_Opr base, LIR_Opr offset, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2266
2267 void idiv(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2268 void idiv(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2269 void irem(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2270 void irem(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2271
2272 void allocate_object(LIR_Opr dst, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, int header_size, int object_size, LIR_Opr klass, bool init_check, CodeStub* stub);
2273 void allocate_array(LIR_Opr dst, LIR_Opr len, LIR_Opr t1,LIR_Opr t2, LIR_Opr t3,LIR_Opr t4, BasicType type, LIR_Opr klass, CodeStub* stub, bool zero_array = true);
2274
2275 // jump is an unconditional branch
2276 void jump(BlockBegin* block) {
2277 append(new LIR_OpBranch(lir_cond_always, block));
2278 }
2279 void jump(CodeStub* stub) {
2280 append(new LIR_OpBranch(lir_cond_always, stub));
2281 }
2282 void branch(LIR_Condition cond, Label* lbl) {
2283 append(new LIR_OpBranch(cond, lbl));
2284 }
2285 // Should not be used for fp comparisons
2286 void branch(LIR_Condition cond, BlockBegin* block) {
2287 append(new LIR_OpBranch(cond, block));
2288 }
2289 // Should not be used for fp comparisons
2290 void branch(LIR_Condition cond, CodeStub* stub) {
2291 append(new LIR_OpBranch(cond, stub));
2292 }
2293 // Should only be used for fp comparisons
2300 void unsigned_shift_right(LIR_Opr value, LIR_Opr count, LIR_Opr dst, LIR_Opr tmp);
2301
2302 void shift_left(LIR_Opr value, int count, LIR_Opr dst) { shift_left(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2303 void shift_right(LIR_Opr value, int count, LIR_Opr dst) { shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2304 void unsigned_shift_right(LIR_Opr value, int count, LIR_Opr dst) { unsigned_shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2305
2306 void lcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_cmp_l2i, left, right, dst)); }
2307 void fcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst, bool is_unordered_less);
2308
2309 void call_runtime_leaf(address routine, LIR_Opr tmp, LIR_Opr result, LIR_OprList* arguments) {
2310 append(new LIR_OpRTCall(routine, tmp, result, arguments));
2311 }
2312
2313 void call_runtime(address routine, LIR_Opr tmp, LIR_Opr result,
2314 LIR_OprList* arguments, CodeEmitInfo* info) {
2315 append(new LIR_OpRTCall(routine, tmp, result, arguments, info));
2316 }
2317
2318 void load_stack_address_monitor(int monitor_ix, LIR_Opr dst) { append(new LIR_Op1(lir_monaddr, LIR_OprFact::intConst(monitor_ix), dst)); }
2319 void unlock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub);
2320 void lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info);
2321
2322 void breakpoint() { append(new LIR_Op0(lir_breakpoint)); }
2323
2324 void arraycopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp, ciArrayKlass* expected_type, int flags, CodeEmitInfo* info) { append(new LIR_OpArrayCopy(src, src_pos, dst, dst_pos, length, tmp, expected_type, flags, info)); }
2325
2326 void update_crc32(LIR_Opr crc, LIR_Opr val, LIR_Opr res) { append(new LIR_OpUpdateCRC32(crc, val, res)); }
2327
2328 void instanceof(LIR_Opr result, LIR_Opr object, ciKlass* klass, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check, CodeEmitInfo* info_for_patch, ciMethod* profiled_method, int profiled_bci);
2329 void store_check(LIR_Opr object, LIR_Opr array, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception, ciMethod* profiled_method, int profiled_bci);
2330
2331 void checkcast (LIR_Opr result, LIR_Opr object, ciKlass* klass,
2332 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
2333 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub,
2334 ciMethod* profiled_method, int profiled_bci);
2335 // MethodData* profiling
2336 void profile_call(ciMethod* method, int bci, ciMethod* callee, LIR_Opr mdo, LIR_Opr recv, LIR_Opr t1, ciKlass* cha_klass) {
2337 append(new LIR_OpProfileCall(method, bci, callee, mdo, recv, t1, cha_klass));
2338 }
2339 void profile_type(LIR_Address* mdp, LIR_Opr obj, ciKlass* exact_klass, intptr_t current_klass, LIR_Opr tmp, bool not_null, bool no_conflict) {
2340 append(new LIR_OpProfileType(LIR_OprFact::address(mdp), obj, exact_klass, current_klass, tmp, not_null, no_conflict));
2341 }
2342
2343 void xadd(LIR_Opr src, LIR_Opr add, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xadd, src, add, res, tmp)); }
2344 void xchg(LIR_Opr src, LIR_Opr set, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xchg, src, set, res, tmp)); }
2345
2346 void load_klass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info) { append(new LIR_OpLoadKlass(obj, result, info)); }
2347
2348 #ifdef ASSERT
2349 void lir_assert(LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, const char* msg, bool halt) { append(new LIR_OpAssert(condition, opr1, opr2, msg, halt)); }
2350 #endif
2351 };
2352
2353 void print_LIR(BlockList* blocks);
2354
2355 class LIR_InsertionBuffer : public CompilationResourceObj {
2356 private:
2357 LIR_List* _lir; // the lir list where ops of this buffer should be inserted later (null when uninitialized)
2358
2359 // list of insertion points. index and count are stored alternately:
2360 // _index_and_count[i * 2]: the index into lir list where "count" ops should be inserted
2361 // _index_and_count[i * 2 + 1]: the number of ops to be inserted at index
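//
// Worked example (illustrative, values made up): after recording two ops to be
// inserted before LIR index 3 and one op before index 7, the buffer holds
//
//   _index_and_count: [ 3, 2,  7, 1 ]    // (index, count) pairs
//   ops list:         [ opA, opB, opC ]  // in insertion order
//
// so pair 0 means "insert 2 ops (opA and opB) before index 3 of _lir".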
|
873 class LIR_Op;
874 class LIR_Op0;
875 class LIR_OpLabel;
876 class LIR_Op1;
877 class LIR_OpBranch;
878 class LIR_OpConvert;
879 class LIR_OpAllocObj;
880 class LIR_OpReturn;
881 class LIR_Op2;
882 class LIR_OpDelay;
883 class LIR_Op3;
884 class LIR_OpAllocArray;
885 class LIR_Op4;
886 class LIR_OpCall;
887 class LIR_OpJavaCall;
888 class LIR_OpRTCall;
889 class LIR_OpArrayCopy;
890 class LIR_OpUpdateCRC32;
891 class LIR_OpLock;
892 class LIR_OpTypeCheck;
893 class LIR_OpFlattenedArrayCheck;
894 class LIR_OpNullFreeArrayCheck;
895 class LIR_OpSubstitutabilityCheck;
896 class LIR_OpCompareAndSwap;
897 class LIR_OpLoadKlass;
898 class LIR_OpProfileCall;
899 class LIR_OpProfileType;
900 class LIR_OpProfileInlineType;
901 #ifdef ASSERT
902 class LIR_OpAssert;
903 #endif
904
905 // LIR operation codes
906 enum LIR_Code {
907 lir_none
908 , begin_op0
909 , lir_label
910 , lir_nop
911 , lir_std_entry
912 , lir_osr_entry
913 , lir_breakpoint
914 , lir_rtcall
915 , lir_membar
916 , lir_membar_acquire
917 , lir_membar_release
918 , lir_membar_loadload
919 , lir_membar_storestore
920 , lir_membar_loadstore
921 , lir_membar_storeload
922 , lir_get_thread
923 , lir_on_spin_wait
924 , lir_check_orig_pc
925 , end_op0
926 , begin_op1
927 , lir_push
928 , lir_pop
929 , lir_null_check
930 , lir_return
931 , lir_leal
932 , lir_move
933 , lir_convert
934 , lir_alloc_object
935 , lir_monaddr
936 , lir_sqrt
937 , lir_abs
938 , lir_neg
939 , lir_f2hf
940 , lir_hf2f
941 , lir_safepoint
942 , lir_unwind
943 , lir_load_klass
944 , end_op1
981 , lir_dynamic_call
982 , end_opJavaCall
983 , begin_opArrayCopy
984 , lir_arraycopy
985 , end_opArrayCopy
986 , begin_opUpdateCRC32
987 , lir_updatecrc32
988 , end_opUpdateCRC32
989 , begin_opLock
990 , lir_lock
991 , lir_unlock
992 , end_opLock
993 , begin_delay_slot
994 , lir_delay_slot
995 , end_delay_slot
996 , begin_opTypeCheck
997 , lir_instanceof
998 , lir_checkcast
999 , lir_store_check
1000 , end_opTypeCheck
1001 , begin_opFlattenedArrayCheck
1002 , lir_flat_array_check
1003 , end_opFlattenedArrayCheck
1004 , begin_opNullFreeArrayCheck
1005 , lir_null_free_array_check
1006 , end_opNullFreeArrayCheck
1007 , begin_opSubstitutabilityCheck
1008 , lir_substitutability_check
1009 , end_opSubstitutabilityCheck
1010 , begin_opCompareAndSwap
1011 , lir_cas_long
1012 , lir_cas_obj
1013 , lir_cas_int
1014 , end_opCompareAndSwap
1015 , begin_opMDOProfile
1016 , lir_profile_call
1017 , lir_profile_type
1018 , lir_profile_inline_type
1019 , end_opMDOProfile
1020 , begin_opAssert
1021 , lir_assert
1022 , end_opAssert
1023 #if INCLUDE_ZGC
1024 , begin_opXLoadBarrierTest
1025 , lir_xloadbarrier_test
1026 , end_opXLoadBarrierTest
1027 #endif
1028 };
1029
1030
1031 enum LIR_Condition {
1032 lir_cond_equal
1033 , lir_cond_notEqual
1034 , lir_cond_less
1035 , lir_cond_lessEqual
1036 , lir_cond_greaterEqual
1037 , lir_cond_greater
1038 , lir_cond_belowEqual
1138 virtual bool is_patching() { return false; }
1139 virtual LIR_OpCall* as_OpCall() { return nullptr; }
1140 virtual LIR_OpJavaCall* as_OpJavaCall() { return nullptr; }
1141 virtual LIR_OpLabel* as_OpLabel() { return nullptr; }
1142 virtual LIR_OpDelay* as_OpDelay() { return nullptr; }
1143 virtual LIR_OpLock* as_OpLock() { return nullptr; }
1144 virtual LIR_OpAllocArray* as_OpAllocArray() { return nullptr; }
1145 virtual LIR_OpAllocObj* as_OpAllocObj() { return nullptr; }
1146 virtual LIR_OpBranch* as_OpBranch() { return nullptr; }
1147 virtual LIR_OpReturn* as_OpReturn() { return nullptr; }
1148 virtual LIR_OpRTCall* as_OpRTCall() { return nullptr; }
1149 virtual LIR_OpConvert* as_OpConvert() { return nullptr; }
1150 virtual LIR_Op0* as_Op0() { return nullptr; }
1151 virtual LIR_Op1* as_Op1() { return nullptr; }
1152 virtual LIR_Op2* as_Op2() { return nullptr; }
1153 virtual LIR_Op3* as_Op3() { return nullptr; }
1154 virtual LIR_Op4* as_Op4() { return nullptr; }
1155 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return nullptr; }
1156 virtual LIR_OpUpdateCRC32* as_OpUpdateCRC32() { return nullptr; }
1157 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return nullptr; }
1158 virtual LIR_OpFlattenedArrayCheck* as_OpFlattenedArrayCheck() { return nullptr; }
1159 virtual LIR_OpNullFreeArrayCheck* as_OpNullFreeArrayCheck() { return nullptr; }
1160 virtual LIR_OpSubstitutabilityCheck* as_OpSubstitutabilityCheck() { return nullptr; }
1161 virtual LIR_OpCompareAndSwap* as_OpCompareAndSwap() { return nullptr; }
1162 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return nullptr; }
1163 virtual LIR_OpProfileCall* as_OpProfileCall() { return nullptr; }
1164 virtual LIR_OpProfileType* as_OpProfileType() { return nullptr; }
1165 virtual LIR_OpProfileInlineType* as_OpProfileInlineType() { return nullptr; }
1166 #ifdef ASSERT
1167 virtual LIR_OpAssert* as_OpAssert() { return nullptr; }
1168 #endif
1169
1170 virtual void verify() const {}
1171 };
1172
1173 // for calls
1174 class LIR_OpCall: public LIR_Op {
1175 friend class LIR_OpVisitState;
1176
1177 protected:
1178 address _addr;
1179 LIR_OprList* _arguments;
1180 protected:
1181 LIR_OpCall(LIR_Code code, address addr, LIR_Opr result,
1182 LIR_OprList* arguments, CodeEmitInfo* info = nullptr)
1183 : LIR_Op(code, result, info)
1184 , _addr(addr)
1185 , _arguments(arguments) {}
1218 LIR_OprList* arguments, CodeEmitInfo* info)
1219 : LIR_OpCall(code, (address)vtable_offset, result, arguments, info)
1220 , _method(method)
1221 , _receiver(receiver)
1222 , _method_handle_invoke_SP_save_opr(LIR_OprFact::illegalOpr)
1223 { assert(is_in_range(code, begin_opJavaCall, end_opJavaCall), "code check"); }
1224
1225 LIR_Opr receiver() const { return _receiver; }
1226 ciMethod* method() const { return _method; }
1227
1228 // JSR 292 support.
1229 bool is_invokedynamic() const { return code() == lir_dynamic_call; }
1230 bool is_method_handle_invoke() const {
1231 return method()->is_compiled_lambda_form() || // Java-generated lambda form
1232 method()->is_method_handle_intrinsic(); // JVM-generated MH intrinsic
1233 }
1234
1235 virtual void emit_code(LIR_Assembler* masm);
1236 virtual LIR_OpJavaCall* as_OpJavaCall() { return this; }
1237 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1238
1239 bool maybe_return_as_fields(ciInlineKlass** vk = nullptr) const;
1240 };
1241
1242 // --------------------------------------------------
1243 // LIR_OpLabel
1244 // --------------------------------------------------
1245 // Location where a branch can continue
1246 class LIR_OpLabel: public LIR_Op {
1247 friend class LIR_OpVisitState;
1248
1249 private:
1250 Label* _label;
1251 public:
1252 LIR_OpLabel(Label* lbl)
1253 : LIR_Op(lir_label, LIR_OprFact::illegalOpr, nullptr)
1254 , _label(lbl) {}
1255 Label* label() const { return _label; }
1256
1257 virtual void emit_code(LIR_Assembler* masm);
1258 virtual LIR_OpLabel* as_OpLabel() { return this; }
1259 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1271 LIR_Opr _dst_pos;
1272 LIR_Opr _length;
1273 LIR_Opr _tmp;
1274 ciArrayKlass* _expected_type;
1275 int _flags;
1276
1277 public:
1278 enum Flags {
1279 src_null_check = 1 << 0,
1280 dst_null_check = 1 << 1,
1281 src_pos_positive_check = 1 << 2,
1282 dst_pos_positive_check = 1 << 3,
1283 length_positive_check = 1 << 4,
1284 src_range_check = 1 << 5,
1285 dst_range_check = 1 << 6,
1286 type_check = 1 << 7,
1287 overlapping = 1 << 8,
1288 unaligned = 1 << 9,
1289 src_objarray = 1 << 10,
1290 dst_objarray = 1 << 11,
1291 always_slow_path = 1 << 12,
1292 src_inlinetype_check = 1 << 13,
1293 dst_inlinetype_check = 1 << 14,
1294 all_flags = (1 << 15) - 1
1295 };
1296
1297 LIR_OpArrayCopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp,
1298 ciArrayKlass* expected_type, int flags, CodeEmitInfo* info);
1299
1300 LIR_Opr src() const { return _src; }
1301 LIR_Opr src_pos() const { return _src_pos; }
1302 LIR_Opr dst() const { return _dst; }
1303 LIR_Opr dst_pos() const { return _dst_pos; }
1304 LIR_Opr length() const { return _length; }
1305 LIR_Opr tmp() const { return _tmp; }
1306 int flags() const { return _flags; }
1307 ciArrayKlass* expected_type() const { return _expected_type; }
1308 ArrayCopyStub* stub() const { return _stub; }
1309 static int get_initial_copy_flags() { return LIR_OpArrayCopy::unaligned |
1310 LIR_OpArrayCopy::overlapping; }
1311
1312 virtual void emit_code(LIR_Assembler* masm);
1313 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return this; }
1314 void print_instr(outputStream* out) const PRODUCT_RETURN;
1534
1535
1536 // LIR_OpTypeCheck
1537 class LIR_OpTypeCheck: public LIR_Op {
1538 friend class LIR_OpVisitState;
1539
1540 private:
1541 LIR_Opr _object;
1542 LIR_Opr _array;
1543 ciKlass* _klass;
1544 LIR_Opr _tmp1;
1545 LIR_Opr _tmp2;
1546 LIR_Opr _tmp3;
1547 CodeEmitInfo* _info_for_patch;
1548 CodeEmitInfo* _info_for_exception;
1549 CodeStub* _stub;
1550 ciMethod* _profiled_method;
1551 int _profiled_bci;
1552 bool _should_profile;
1553 bool _fast_check;
1554 bool _need_null_check;
1555
1556 public:
1557 LIR_OpTypeCheck(LIR_Code code, LIR_Opr result, LIR_Opr object, ciKlass* klass,
1558 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
1559 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub, bool need_null_check = true);
1560 LIR_OpTypeCheck(LIR_Code code, LIR_Opr object, LIR_Opr array,
1561 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception);
1562
1563 LIR_Opr object() const { return _object; }
1564 LIR_Opr array() const { assert(code() == lir_store_check, "not valid"); return _array; }
1565 LIR_Opr tmp1() const { return _tmp1; }
1566 LIR_Opr tmp2() const { return _tmp2; }
1567 LIR_Opr tmp3() const { return _tmp3; }
1568 ciKlass* klass() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _klass; }
1569 bool fast_check() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _fast_check; }
1570 CodeEmitInfo* info_for_patch() const { return _info_for_patch; }
1571 CodeEmitInfo* info_for_exception() const { return _info_for_exception; }
1572 CodeStub* stub() const { return _stub; }
1573
1574 // MethodData* profiling
1575 void set_profiled_method(ciMethod *method) { _profiled_method = method; }
1576 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1577 void set_should_profile(bool b) { _should_profile = b; }
1578 ciMethod* profiled_method() const { return _profiled_method; }
1579 int profiled_bci() const { return _profiled_bci; }
1580 bool should_profile() const { return _should_profile; }
1581 bool need_null_check() const { return _need_null_check; }
1582 virtual bool is_patching() { return _info_for_patch != nullptr; }
1583 virtual void emit_code(LIR_Assembler* masm);
1584 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return this; }
1585 void print_instr(outputStream* out) const PRODUCT_RETURN;
1586 };
1587
1588 // LIR_OpFlattenedArrayCheck
1589 class LIR_OpFlattenedArrayCheck: public LIR_Op {
1590 friend class LIR_OpVisitState;
1591
1592 private:
1593 LIR_Opr _array;
1594 LIR_Opr _value;
1595 LIR_Opr _tmp;
1596 CodeStub* _stub;
1597 public:
1598 LIR_OpFlattenedArrayCheck(LIR_Opr array, LIR_Opr value, LIR_Opr tmp, CodeStub* stub);
1599 LIR_Opr array() const { return _array; }
1600 LIR_Opr value() const { return _value; }
1601 LIR_Opr tmp() const { return _tmp; }
1602 CodeStub* stub() const { return _stub; }
1603
1604 virtual void emit_code(LIR_Assembler* masm);
1605 virtual LIR_OpFlattenedArrayCheck* as_OpFlattenedArrayCheck() { return this; }
1606 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1607 };
1608
1609 // LIR_OpNullFreeArrayCheck
1610 class LIR_OpNullFreeArrayCheck: public LIR_Op {
1611 friend class LIR_OpVisitState;
1612
1613 private:
1614 LIR_Opr _array;
1615 LIR_Opr _tmp;
1616 public:
1617 LIR_OpNullFreeArrayCheck(LIR_Opr array, LIR_Opr tmp);
1618 LIR_Opr array() const { return _array; }
1619 LIR_Opr tmp() const { return _tmp; }
1620
1621 virtual void emit_code(LIR_Assembler* masm);
1622 virtual LIR_OpNullFreeArrayCheck* as_OpNullFreeArrayCheck() { return this; }
1623 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1624 };
1625
1626 class LIR_OpSubstitutabilityCheck: public LIR_Op {
1627 friend class LIR_OpVisitState;
1628
1629 private:
1630 LIR_Opr _left;
1631 LIR_Opr _right;
1632 LIR_Opr _equal_result;
1633 LIR_Opr _not_equal_result;
1634 LIR_Opr _tmp1;
1635 LIR_Opr _tmp2;
1636 ciKlass* _left_klass;
1637 ciKlass* _right_klass;
1638 LIR_Opr _left_klass_op;
1639 LIR_Opr _right_klass_op;
1640 CodeStub* _stub;
1641 public:
1642 LIR_OpSubstitutabilityCheck(LIR_Opr result, LIR_Opr left, LIR_Opr right, LIR_Opr equal_result, LIR_Opr not_equal_result,
1643 LIR_Opr tmp1, LIR_Opr tmp2,
1644 ciKlass* left_klass, ciKlass* right_klass, LIR_Opr left_klass_op, LIR_Opr right_klass_op,
1645 CodeEmitInfo* info, CodeStub* stub);
1646
1647 LIR_Opr left() const { return _left; }
1648 LIR_Opr right() const { return _right; }
1649 LIR_Opr equal_result() const { return _equal_result; }
1650 LIR_Opr not_equal_result() const { return _not_equal_result; }
1651 LIR_Opr tmp1() const { return _tmp1; }
1652 LIR_Opr tmp2() const { return _tmp2; }
1653 ciKlass* left_klass() const { return _left_klass; }
1654 ciKlass* right_klass() const { return _right_klass; }
1655 LIR_Opr left_klass_op() const { return _left_klass_op; }
1656 LIR_Opr right_klass_op() const { return _right_klass_op; }
1657 CodeStub* stub() const { return _stub; }
1658
1659 virtual void emit_code(LIR_Assembler* masm);
1660 virtual LIR_OpSubstitutabilityCheck* as_OpSubstitutabilityCheck() { return this; }
1661 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1662 };
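
// Sketch (illustrative): like the other check ops above, these three
// inline-type checks are appended through LIR_List helpers (check_flat_array,
// check_null_free_array, substitutability_check; declared further below), which
// presumably just wrap construction, roughly:
//
//   void check_flat_array(LIR_Opr array, LIR_Opr value, LIR_Opr tmp, CodeStub* stub) {
//     append(new LIR_OpFlattenedArrayCheck(array, value, tmp, stub));
//   }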
1663
1664 // LIR_Op2
1665 class LIR_Op2: public LIR_Op {
1666 friend class LIR_OpVisitState;
1667
1668 protected:
1669 LIR_Opr _opr1;
1670 LIR_Opr _opr2;
1671 LIR_Opr _tmp1;
1672 LIR_Opr _tmp2;
1673 LIR_Opr _tmp3;
1674 LIR_Opr _tmp4;
1675 LIR_Opr _tmp5;
1676 LIR_Condition _condition;
1677 BasicType _type;
1678
1679 void verify() const;
1680
1681 public:
1682 LIR_Op2(LIR_Code code, LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, CodeEmitInfo* info = nullptr, BasicType type = T_ILLEGAL)
1683 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1802 void negate_cond();
1803
1804 virtual void emit_code(LIR_Assembler* masm);
1805 virtual LIR_OpBranch* as_OpBranch() { return this; }
1806 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1807 };
1808
1809 class LIR_OpAllocArray : public LIR_Op {
1810 friend class LIR_OpVisitState;
1811
1812 private:
1813 LIR_Opr _klass;
1814 LIR_Opr _len;
1815 LIR_Opr _tmp1;
1816 LIR_Opr _tmp2;
1817 LIR_Opr _tmp3;
1818 LIR_Opr _tmp4;
1819 CodeStub* _stub;
1820 BasicType _type;
1821 bool _zero_array;
1822 bool _always_slow_path;
1823
1824 public:
1825 LIR_OpAllocArray(LIR_Opr klass, LIR_Opr len, LIR_Opr result, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, BasicType type, CodeStub* stub, bool zero_array, bool always_slow_path)
1826 : LIR_Op(lir_alloc_array, result, nullptr)
1827 , _klass(klass)
1828 , _len(len)
1829 , _tmp1(t1)
1830 , _tmp2(t2)
1831 , _tmp3(t3)
1832 , _tmp4(t4)
1833 , _stub(stub)
1834 , _type(type)
1835 , _zero_array(zero_array)
1836 , _always_slow_path(always_slow_path) {}
1837
1838 LIR_Opr klass() const { return _klass; }
1839 LIR_Opr len() const { return _len; }
1840 LIR_Opr obj() const { return result_opr(); }
1841 LIR_Opr tmp1() const { return _tmp1; }
1842 LIR_Opr tmp2() const { return _tmp2; }
1843 LIR_Opr tmp3() const { return _tmp3; }
1844 LIR_Opr tmp4() const { return _tmp4; }
1845 BasicType type() const { return _type; }
1846 CodeStub* stub() const { return _stub; }
1847 bool zero_array() const { return _zero_array; }
1848 bool always_slow_path() const { return _always_slow_path; }
1849
1850 virtual void emit_code(LIR_Assembler* masm);
1851   virtual LIR_OpAllocArray* as_OpAllocArray() { return this; }
1852 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1853 };
1854
1855
1856 class LIR_Op3: public LIR_Op {
1857 friend class LIR_OpVisitState;
1858
1859 private:
1860 LIR_Opr _opr1;
1861 LIR_Opr _opr2;
1862 LIR_Opr _opr3;
1863 public:
1864 LIR_Op3(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr opr3, LIR_Opr result, CodeEmitInfo* info = nullptr)
1865 : LIR_Op(code, result, info)
1866 , _opr1(opr1)
1867 , _opr2(opr2)
1868 , _opr3(opr3) { assert(is_in_range(code, begin_op3, end_op3), "code check"); }
1935
1936 //--------------------------------
1937 class LabelObj: public CompilationResourceObj {
1938 private:
1939 Label _label;
1940 public:
1941 LabelObj() {}
1942 Label* label() { return &_label; }
1943 };
1944
1945
1946 class LIR_OpLock: public LIR_Op {
1947 friend class LIR_OpVisitState;
1948
1949 private:
1950 LIR_Opr _hdr;
1951 LIR_Opr _obj;
1952 LIR_Opr _lock;
1953 LIR_Opr _scratch;
1954 CodeStub* _stub;
1955 CodeStub* _throw_ie_stub;
1956 public:
1957 LIR_OpLock(LIR_Code code, LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_ie_stub=nullptr)
1958 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1959 , _hdr(hdr)
1960 , _obj(obj)
1961 , _lock(lock)
1962 , _scratch(scratch)
1963 , _stub(stub)
1964 , _throw_ie_stub(throw_ie_stub) {}
1965
1966 LIR_Opr hdr_opr() const { return _hdr; }
1967 LIR_Opr obj_opr() const { return _obj; }
1968 LIR_Opr lock_opr() const { return _lock; }
1969 LIR_Opr scratch_opr() const { return _scratch; }
1970 CodeStub* stub() const { return _stub; }
1971 CodeStub* throw_ie_stub() const { return _throw_ie_stub; }
1972
1973 virtual void emit_code(LIR_Assembler* masm);
1974 virtual LIR_OpLock* as_OpLock() { return this; }
1975 void print_instr(outputStream* out) const PRODUCT_RETURN;
1976 };
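
// Usage note (sketch; operand names mirror the constructor parameters, slow_stub
// is a stand-in): _throw_ie_stub is an optional extra slow-path stub that
// defaults to null, so pre-existing callers of lock_object() are unchanged:
//
//   lir->lock_object(hdr, obj, lock, scratch, slow_stub, info);                  // as before
//   lir->lock_object(hdr, obj, lock, scratch, slow_stub, info, throw_ie_stub);   // with the extra stub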
1977
1978 class LIR_OpLoadKlass: public LIR_Op {
1979 friend class LIR_OpVisitState;
1980
1981 private:
1982 LIR_Opr _obj;
1983 public:
1984 LIR_OpLoadKlass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info)
1985 : LIR_Op(lir_load_klass, result, info)
1986 , _obj(obj)
1987 {}
1988
1989 LIR_Opr obj() const { return _obj; }
1990
1991 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return this; }
2136 , _obj(obj)
2137 , _tmp(tmp)
2138 , _exact_klass(exact_klass)
2139 , _current_klass(current_klass)
2140 , _not_null(not_null)
2141 , _no_conflict(no_conflict) { }
2142
2143 LIR_Opr mdp() const { return _mdp; }
2144 LIR_Opr obj() const { return _obj; }
2145 LIR_Opr tmp() const { return _tmp; }
2146 ciKlass* exact_klass() const { return _exact_klass; }
2147 intptr_t current_klass() const { return _current_klass; }
2148 bool not_null() const { return _not_null; }
2149 bool no_conflict() const { return _no_conflict; }
2150
2151 virtual void emit_code(LIR_Assembler* masm);
2152 virtual LIR_OpProfileType* as_OpProfileType() { return this; }
2153 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
2154 };
2155
2156 // LIR_OpProfileInlineType
2157 class LIR_OpProfileInlineType : public LIR_Op {
2158 friend class LIR_OpVisitState;
2159
2160 private:
2161 LIR_Opr _mdp;
2162 LIR_Opr _obj;
2163 int _flag;
2164 LIR_Opr _tmp;
2165 bool _not_null; // true if we know statically that _obj cannot be null
2166
2167 public:
2168 // Destroys recv
2169 LIR_OpProfileInlineType(LIR_Opr mdp, LIR_Opr obj, int flag, LIR_Opr tmp, bool not_null)
2170 : LIR_Op(lir_profile_inline_type, LIR_OprFact::illegalOpr, nullptr) // no result, no info
2171 , _mdp(mdp)
2172 , _obj(obj)
2173 , _flag(flag)
2174 , _tmp(tmp)
2175 , _not_null(not_null) { }
2176
2177 LIR_Opr mdp() const { return _mdp; }
2178 LIR_Opr obj() const { return _obj; }
2179 int flag() const { return _flag; }
2180 LIR_Opr tmp() const { return _tmp; }
2181 bool not_null() const { return _not_null; }
2182
2183 virtual void emit_code(LIR_Assembler* masm);
2184 virtual LIR_OpProfileInlineType* as_OpProfileInlineType() { return this; }
2185 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
2186 };
2187
2188 class LIR_InsertionBuffer;
2189
2190 //--------------------------------LIR_List---------------------------------------------------
2191 // Maintains a list of LIR instructions (one instance of LIR_List per basic block)
2192 // The LIR instructions are appended by the LIR_List class itself.
2193 //
2194 // Notes:
2195 //  - all offsets are (should be) in bytes
2196 // - local positions are specified with an offset, with offset 0 being local 0
2197
2198 class LIR_List: public CompilationResourceObj {
2199 private:
2200 LIR_OpList _operations;
2201
2202 Compilation* _compilation;
2203 #ifndef PRODUCT
2204 BlockBegin* _block;
2205 #endif
2206 #ifdef ASSERT
2207 const char * _file;
2392 void div (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_div, left, right, res, tmp)); }
2393 void rem (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = nullptr) { append(new LIR_Op2(lir_rem, left, right, res, info)); }
2394
2395 void volatile_load_mem_reg(LIR_Address* address, LIR_Opr dst, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2396 void volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2397
2398 void load(LIR_Address* addr, LIR_Opr src, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2399
2400 void store_mem_int(jint v, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2401 void store_mem_oop(jobject o, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2402 void store(LIR_Opr src, LIR_Address* addr, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2403 void volatile_store_mem_reg(LIR_Opr src, LIR_Address* address, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2404 void volatile_store_unsafe_reg(LIR_Opr src, LIR_Opr base, LIR_Opr offset, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2405
2406 void idiv(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2407 void idiv(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2408 void irem(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2409 void irem(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2410
2411 void allocate_object(LIR_Opr dst, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, int header_size, int object_size, LIR_Opr klass, bool init_check, CodeStub* stub);
2412 void allocate_array(LIR_Opr dst, LIR_Opr len, LIR_Opr t1,LIR_Opr t2, LIR_Opr t3,LIR_Opr t4, BasicType type, LIR_Opr klass, CodeStub* stub, bool zero_array = true, bool always_slow_path = false);
2413
2414 // jump is an unconditional branch
2415 void jump(BlockBegin* block) {
2416 append(new LIR_OpBranch(lir_cond_always, block));
2417 }
2418 void jump(CodeStub* stub) {
2419 append(new LIR_OpBranch(lir_cond_always, stub));
2420 }
2421 void branch(LIR_Condition cond, Label* lbl) {
2422 append(new LIR_OpBranch(cond, lbl));
2423 }
2424 // Should not be used for fp comparisons
2425 void branch(LIR_Condition cond, BlockBegin* block) {
2426 append(new LIR_OpBranch(cond, block));
2427 }
2428 // Should not be used for fp comparisons
2429 void branch(LIR_Condition cond, CodeStub* stub) {
2430 append(new LIR_OpBranch(cond, stub));
2431 }
2432 // Should only be used for fp comparisons
2439 void unsigned_shift_right(LIR_Opr value, LIR_Opr count, LIR_Opr dst, LIR_Opr tmp);
2440
2441 void shift_left(LIR_Opr value, int count, LIR_Opr dst) { shift_left(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2442 void shift_right(LIR_Opr value, int count, LIR_Opr dst) { shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2443 void unsigned_shift_right(LIR_Opr value, int count, LIR_Opr dst) { unsigned_shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2444
2445 void lcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_cmp_l2i, left, right, dst)); }
2446 void fcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst, bool is_unordered_less);
2447
2448 void call_runtime_leaf(address routine, LIR_Opr tmp, LIR_Opr result, LIR_OprList* arguments) {
2449 append(new LIR_OpRTCall(routine, tmp, result, arguments));
2450 }
2451
2452 void call_runtime(address routine, LIR_Opr tmp, LIR_Opr result,
2453 LIR_OprList* arguments, CodeEmitInfo* info) {
2454 append(new LIR_OpRTCall(routine, tmp, result, arguments, info));
2455 }
2456
2457 void load_stack_address_monitor(int monitor_ix, LIR_Opr dst) { append(new LIR_Op1(lir_monaddr, LIR_OprFact::intConst(monitor_ix), dst)); }
2458 void unlock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub);
2459 void lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_ie_stub=nullptr);
2460
2461 void breakpoint() { append(new LIR_Op0(lir_breakpoint)); }
2462
2463 void arraycopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp, ciArrayKlass* expected_type, int flags, CodeEmitInfo* info) { append(new LIR_OpArrayCopy(src, src_pos, dst, dst_pos, length, tmp, expected_type, flags, info)); }
2464
2465 void update_crc32(LIR_Opr crc, LIR_Opr val, LIR_Opr res) { append(new LIR_OpUpdateCRC32(crc, val, res)); }
2466
2467 void instanceof(LIR_Opr result, LIR_Opr object, ciKlass* klass, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check, CodeEmitInfo* info_for_patch, ciMethod* profiled_method, int profiled_bci);
2468 void store_check(LIR_Opr object, LIR_Opr array, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception, ciMethod* profiled_method, int profiled_bci);
2469 void check_flat_array(LIR_Opr array, LIR_Opr value, LIR_Opr tmp, CodeStub* stub);
2470 void check_null_free_array(LIR_Opr array, LIR_Opr tmp);
2471 void substitutability_check(LIR_Opr result, LIR_Opr left, LIR_Opr right, LIR_Opr equal_result, LIR_Opr not_equal_result,
2472 LIR_Opr tmp1, LIR_Opr tmp2,
2473 ciKlass* left_klass, ciKlass* right_klass, LIR_Opr left_klass_op, LIR_Opr right_klass_op,
2474 CodeEmitInfo* info, CodeStub* stub);
2475
2476 void checkcast (LIR_Opr result, LIR_Opr object, ciKlass* klass,
2477 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
2478 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub,
2479 ciMethod* profiled_method, int profiled_bci, bool is_null_free);
2480 // MethodData* profiling
2481 void profile_call(ciMethod* method, int bci, ciMethod* callee, LIR_Opr mdo, LIR_Opr recv, LIR_Opr t1, ciKlass* cha_klass) {
2482 append(new LIR_OpProfileCall(method, bci, callee, mdo, recv, t1, cha_klass));
2483 }
2484 void profile_type(LIR_Address* mdp, LIR_Opr obj, ciKlass* exact_klass, intptr_t current_klass, LIR_Opr tmp, bool not_null, bool no_conflict) {
2485 append(new LIR_OpProfileType(LIR_OprFact::address(mdp), obj, exact_klass, current_klass, tmp, not_null, no_conflict));
2486 }
2487 void profile_inline_type(LIR_Address* mdp, LIR_Opr obj, int flag, LIR_Opr tmp, bool not_null) {
2488 append(new LIR_OpProfileInlineType(LIR_OprFact::address(mdp), obj, flag, tmp, not_null));
2489 }
2490
2491 void xadd(LIR_Opr src, LIR_Opr add, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xadd, src, add, res, tmp)); }
2492 void xchg(LIR_Opr src, LIR_Opr set, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xchg, src, set, res, tmp)); }
2493
2494 void load_klass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info) { append(new LIR_OpLoadKlass(obj, result, info)); }
2495
2496 #ifdef ASSERT
2497 void lir_assert(LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, const char* msg, bool halt) { append(new LIR_OpAssert(condition, opr1, opr2, msg, halt)); }
2498 #endif
2499 };
2500
2501 void print_LIR(BlockList* blocks);
2502
2503 class LIR_InsertionBuffer : public CompilationResourceObj {
2504 private:
2505 LIR_List* _lir; // the lir list where ops of this buffer should be inserted later (null when uninitialized)
2506
2507 // list of insertion points. index and count are stored alternately:
2508 // _index_and_count[i * 2]: the index into lir list where "count" ops should be inserted
2509 // _index_and_count[i * 2 + 1]: the number of ops to be inserted at index