880 class LIR_Op0;
881 class LIR_OpLabel;
882 class LIR_Op1;
883 class LIR_OpBranch;
884 class LIR_OpConvert;
885 class LIR_OpAllocObj;
886 class LIR_OpReturn;
887 class LIR_OpRoundFP;
888 class LIR_Op2;
889 class LIR_OpDelay;
890 class LIR_Op3;
891 class LIR_OpAllocArray;
892 class LIR_Op4;
893 class LIR_OpCall;
894 class LIR_OpJavaCall;
895 class LIR_OpRTCall;
896 class LIR_OpArrayCopy;
897 class LIR_OpUpdateCRC32;
898 class LIR_OpLock;
899 class LIR_OpTypeCheck;
900 class LIR_OpCompareAndSwap;
901 class LIR_OpLoadKlass;
902 class LIR_OpProfileCall;
903 class LIR_OpProfileType;
904 #ifdef ASSERT
905 class LIR_OpAssert;
906 #endif
907
908 // LIR operation codes
909 enum LIR_Code {
910 lir_none
911 , begin_op0
912 , lir_label
913 , lir_nop
914 , lir_std_entry
915 , lir_osr_entry
916 , lir_fpop_raw
917 , lir_breakpoint
918 , lir_rtcall
919 , lir_membar
920 , lir_membar_acquire
921 , lir_membar_release
922 , lir_membar_loadload
923 , lir_membar_storestore
924 , lir_membar_loadstore
925 , lir_membar_storeload
926 , lir_get_thread
927 , lir_on_spin_wait
928 , end_op0
929 , begin_op1
930 , lir_fxch
931 , lir_fld
932 , lir_push
933 , lir_pop
934 , lir_null_check
935 , lir_return
936 , lir_leal
937 , lir_move
938 , lir_convert
939 , lir_alloc_object
940 , lir_monaddr
941 , lir_roundfp
942 , lir_safepoint
943 , lir_unwind
944 , lir_load_klass
945 , end_op1
946 , begin_op2
947 , lir_branch
989 , lir_dynamic_call
990 , end_opJavaCall
991 , begin_opArrayCopy
992 , lir_arraycopy
993 , end_opArrayCopy
994 , begin_opUpdateCRC32
995 , lir_updatecrc32
996 , end_opUpdateCRC32
997 , begin_opLock
998 , lir_lock
999 , lir_unlock
1000 , end_opLock
1001 , begin_delay_slot
1002 , lir_delay_slot
1003 , end_delay_slot
1004 , begin_opTypeCheck
1005 , lir_instanceof
1006 , lir_checkcast
1007 , lir_store_check
1008 , end_opTypeCheck
1009 , begin_opCompareAndSwap
1010 , lir_cas_long
1011 , lir_cas_obj
1012 , lir_cas_int
1013 , end_opCompareAndSwap
1014 , begin_opMDOProfile
1015 , lir_profile_call
1016 , lir_profile_type
1017 , end_opMDOProfile
1018 , begin_opAssert
1019 , lir_assert
1020 , end_opAssert
1021 #ifdef INCLUDE_ZGC
1022 , begin_opXLoadBarrierTest
1023 , lir_xloadbarrier_test
1024 , end_opXLoadBarrierTest
1025 #endif
1026 };
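// The begin_*/end_* entries bracket each family of opcodes so that membership in a
// family reduces to an open-interval range check; the constructors further down
// assert exactly this via an is_in_range helper. A minimal sketch of that test
// (illustrative only, the helper name below is made up):
//
//   static bool code_in_family(LIR_Code code, LIR_Code begin, LIR_Code end) {
//     return begin < code && code < end;   // e.g. begin_op1 < lir_move < end_op1
//   }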
1027
1028
1029 enum LIR_Condition {
1030 lir_cond_equal
1031 , lir_cond_notEqual
1032 , lir_cond_less
1033 , lir_cond_lessEqual
1034 , lir_cond_greaterEqual
1035 , lir_cond_greater
1036 , lir_cond_belowEqual
1145 virtual LIR_OpCall* as_OpCall() { return nullptr; }
1146 virtual LIR_OpJavaCall* as_OpJavaCall() { return nullptr; }
1147 virtual LIR_OpLabel* as_OpLabel() { return nullptr; }
1148 virtual LIR_OpDelay* as_OpDelay() { return nullptr; }
1149 virtual LIR_OpLock* as_OpLock() { return nullptr; }
1150 virtual LIR_OpAllocArray* as_OpAllocArray() { return nullptr; }
1151 virtual LIR_OpAllocObj* as_OpAllocObj() { return nullptr; }
1152 virtual LIR_OpRoundFP* as_OpRoundFP() { return nullptr; }
1153 virtual LIR_OpBranch* as_OpBranch() { return nullptr; }
1154 virtual LIR_OpReturn* as_OpReturn() { return nullptr; }
1155 virtual LIR_OpRTCall* as_OpRTCall() { return nullptr; }
1156 virtual LIR_OpConvert* as_OpConvert() { return nullptr; }
1157 virtual LIR_Op0* as_Op0() { return nullptr; }
1158 virtual LIR_Op1* as_Op1() { return nullptr; }
1159 virtual LIR_Op2* as_Op2() { return nullptr; }
1160 virtual LIR_Op3* as_Op3() { return nullptr; }
1161 virtual LIR_Op4* as_Op4() { return nullptr; }
1162 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return nullptr; }
1163 virtual LIR_OpUpdateCRC32* as_OpUpdateCRC32() { return nullptr; }
1164 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return nullptr; }
1165 virtual LIR_OpCompareAndSwap* as_OpCompareAndSwap() { return nullptr; }
1166 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return nullptr; }
1167 virtual LIR_OpProfileCall* as_OpProfileCall() { return nullptr; }
1168 virtual LIR_OpProfileType* as_OpProfileType() { return nullptr; }
1169 #ifdef ASSERT
1170 virtual LIR_OpAssert* as_OpAssert() { return nullptr; }
1171 #endif
1172
1173 virtual void verify() const {}
1174 };
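// The as_OpXxx() methods above implement a checked downcast: the base class answers
// nullptr and each concrete subclass overrides only its own accessor to return this.
// Illustrative use (sketch; op stands for an arbitrary LIR_Op*):
//
//   if (LIR_OpBranch* branch = op->as_OpBranch()) {
//     // op is known to be a branch here; a nullptr result means it was some other op
//   }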
1175
1176 // Base class for call operations
1177 class LIR_OpCall: public LIR_Op {
1178 friend class LIR_OpVisitState;
1179
1180 protected:
1181 address _addr;
1182 LIR_OprList* _arguments;
1183 protected:
1184 LIR_OpCall(LIR_Code code, address addr, LIR_Opr result,
1185 LIR_OprList* arguments, CodeEmitInfo* info = nullptr)
1186 : LIR_Op(code, result, info)
1187 , _addr(addr)
1188 , _arguments(arguments) {}
1221 LIR_OprList* arguments, CodeEmitInfo* info)
1222 : LIR_OpCall(code, (address)vtable_offset, result, arguments, info)
1223 , _method(method)
1224 , _receiver(receiver)
1225 , _method_handle_invoke_SP_save_opr(LIR_OprFact::illegalOpr)
1226 { assert(is_in_range(code, begin_opJavaCall, end_opJavaCall), "code check"); }
1227
1228 LIR_Opr receiver() const { return _receiver; }
1229 ciMethod* method() const { return _method; }
1230
1231 // JSR 292 support.
1232 bool is_invokedynamic() const { return code() == lir_dynamic_call; }
1233 bool is_method_handle_invoke() const {
1234 return method()->is_compiled_lambda_form() || // Java-generated lambda form
1235 method()->is_method_handle_intrinsic(); // JVM-generated MH intrinsic
1236 }
1237
1238 virtual void emit_code(LIR_Assembler* masm);
1239 virtual LIR_OpJavaCall* as_OpJavaCall() { return this; }
1240 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1241 };
1242
1243 // --------------------------------------------------
1244 // LIR_OpLabel
1245 // --------------------------------------------------
1246 // Location where a branch can continue
1247 class LIR_OpLabel: public LIR_Op {
1248 friend class LIR_OpVisitState;
1249
1250 private:
1251 Label* _label;
1252 public:
1253 LIR_OpLabel(Label* lbl)
1254 : LIR_Op(lir_label, LIR_OprFact::illegalOpr, nullptr)
1255 , _label(lbl) {}
1256 Label* label() const { return _label; }
1257
1258 virtual void emit_code(LIR_Assembler* masm);
1259 virtual LIR_OpLabel* as_OpLabel() { return this; }
1260 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1272 LIR_Opr _dst_pos;
1273 LIR_Opr _length;
1274 LIR_Opr _tmp;
1275 ciArrayKlass* _expected_type;
1276 int _flags;
1277
1278 public:
1279 enum Flags {
1280 src_null_check = 1 << 0,
1281 dst_null_check = 1 << 1,
1282 src_pos_positive_check = 1 << 2,
1283 dst_pos_positive_check = 1 << 3,
1284 length_positive_check = 1 << 4,
1285 src_range_check = 1 << 5,
1286 dst_range_check = 1 << 6,
1287 type_check = 1 << 7,
1288 overlapping = 1 << 8,
1289 unaligned = 1 << 9,
1290 src_objarray = 1 << 10,
1291 dst_objarray = 1 << 11,
1292 all_flags = (1 << 12) - 1
1293 };
1294
1295 LIR_OpArrayCopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp,
1296 ciArrayKlass* expected_type, int flags, CodeEmitInfo* info);
1297
1298 LIR_Opr src() const { return _src; }
1299 LIR_Opr src_pos() const { return _src_pos; }
1300 LIR_Opr dst() const { return _dst; }
1301 LIR_Opr dst_pos() const { return _dst_pos; }
1302 LIR_Opr length() const { return _length; }
1303 LIR_Opr tmp() const { return _tmp; }
1304 int flags() const { return _flags; }
1305 ciArrayKlass* expected_type() const { return _expected_type; }
1306 ArrayCopyStub* stub() const { return _stub; }
1307
1308 virtual void emit_code(LIR_Assembler* masm);
1309 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return this; }
1310 void print_instr(outputStream* out) const PRODUCT_RETURN;
1311 };
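// The Flags above form a bitmask: the code emitting an arraycopy ORs together every
// check that still has to happen at runtime, and the assembler tests individual bits.
// Illustrative only (the names are the enum members declared above):
//
//   int flags = LIR_OpArrayCopy::src_null_check | LIR_OpArrayCopy::dst_null_check;
//   if ((flags & LIR_OpArrayCopy::type_check) == 0) {
//     // element types were proven compatible at compile time; skip the runtime check
//   }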
1312
1535 };
1536
1537 // LIR_OpTypeCheck
1538 class LIR_OpTypeCheck: public LIR_Op {
1539 friend class LIR_OpVisitState;
1540
1541 private:
1542 LIR_Opr _object;
1543 LIR_Opr _array;
1544 ciKlass* _klass;
1545 LIR_Opr _tmp1;
1546 LIR_Opr _tmp2;
1547 LIR_Opr _tmp3;
1548 CodeEmitInfo* _info_for_patch;
1549 CodeEmitInfo* _info_for_exception;
1550 CodeStub* _stub;
1551 ciMethod* _profiled_method;
1552 int _profiled_bci;
1553 bool _should_profile;
1554 bool _fast_check;
1555
1556 public:
1557 LIR_OpTypeCheck(LIR_Code code, LIR_Opr result, LIR_Opr object, ciKlass* klass,
1558 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
1559 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub);
1560 LIR_OpTypeCheck(LIR_Code code, LIR_Opr object, LIR_Opr array,
1561 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception);
1562
1563 LIR_Opr object() const { return _object; }
1564 LIR_Opr array() const { assert(code() == lir_store_check, "not valid"); return _array; }
1565 LIR_Opr tmp1() const { return _tmp1; }
1566 LIR_Opr tmp2() const { return _tmp2; }
1567 LIR_Opr tmp3() const { return _tmp3; }
1568 ciKlass* klass() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _klass; }
1569 bool fast_check() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _fast_check; }
1570 CodeEmitInfo* info_for_patch() const { return _info_for_patch; }
1571 CodeEmitInfo* info_for_exception() const { return _info_for_exception; }
1572 CodeStub* stub() const { return _stub; }
1573
1574 // MethodData* profiling
1575 void set_profiled_method(ciMethod *method) { _profiled_method = method; }
1576 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1577 void set_should_profile(bool b) { _should_profile = b; }
1578 ciMethod* profiled_method() const { return _profiled_method; }
1579 int profiled_bci() const { return _profiled_bci; }
1580 bool should_profile() const { return _should_profile; }
1581
1582 virtual bool is_patching() { return _info_for_patch != nullptr; }
1583 virtual void emit_code(LIR_Assembler* masm);
1584 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return this; }
1585 void print_instr(outputStream* out) const PRODUCT_RETURN;
1586 };
1587
1588 // LIR_Op2
1589 class LIR_Op2: public LIR_Op {
1590 friend class LIR_OpVisitState;
1591
1592 int _fpu_stack_size; // for sin/cos implementation on Intel
1593
1594 protected:
1595 LIR_Opr _opr1;
1596 LIR_Opr _opr2;
1597 LIR_Opr _tmp1;
1598 LIR_Opr _tmp2;
1599 LIR_Opr _tmp3;
1600 LIR_Opr _tmp4;
1601 LIR_Opr _tmp5;
1602 LIR_Condition _condition;
1603 BasicType _type;
1604
1605 void verify() const;
1606
1607 public:
1735 void negate_cond();
1736
1737 virtual void emit_code(LIR_Assembler* masm);
1738 virtual LIR_OpBranch* as_OpBranch() { return this; }
1739 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1740 };
1741
1742 class LIR_OpAllocArray : public LIR_Op {
1743 friend class LIR_OpVisitState;
1744
1745 private:
1746 LIR_Opr _klass;
1747 LIR_Opr _len;
1748 LIR_Opr _tmp1;
1749 LIR_Opr _tmp2;
1750 LIR_Opr _tmp3;
1751 LIR_Opr _tmp4;
1752 CodeStub* _stub;
1753 BasicType _type;
1754 bool _zero_array;
1755
1756 public:
1757 LIR_OpAllocArray(LIR_Opr klass, LIR_Opr len, LIR_Opr result, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, BasicType type, CodeStub* stub, bool zero_array)
1758 : LIR_Op(lir_alloc_array, result, nullptr)
1759 , _klass(klass)
1760 , _len(len)
1761 , _tmp1(t1)
1762 , _tmp2(t2)
1763 , _tmp3(t3)
1764 , _tmp4(t4)
1765 , _stub(stub)
1766 , _type(type)
1767 , _zero_array(zero_array) {}
1768
1769 LIR_Opr klass() const { return _klass; }
1770 LIR_Opr len() const { return _len; }
1771 LIR_Opr obj() const { return result_opr(); }
1772 LIR_Opr tmp1() const { return _tmp1; }
1773 LIR_Opr tmp2() const { return _tmp2; }
1774 LIR_Opr tmp3() const { return _tmp3; }
1775 LIR_Opr tmp4() const { return _tmp4; }
1776 BasicType type() const { return _type; }
1777 CodeStub* stub() const { return _stub; }
1778 bool zero_array() const { return _zero_array; }
1779
1780 virtual void emit_code(LIR_Assembler* masm);
1781 virtual LIR_OpAllocArray* as_OpAllocArray() { return this; }
1782 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1783 };
1784
1785
1786 class LIR_Op3: public LIR_Op {
1787 friend class LIR_OpVisitState;
1788
1789 private:
1790 LIR_Opr _opr1;
1791 LIR_Opr _opr2;
1792 LIR_Opr _opr3;
1793 public:
1794 LIR_Op3(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr opr3, LIR_Opr result, CodeEmitInfo* info = nullptr)
1795 : LIR_Op(code, result, info)
1796 , _opr1(opr1)
1797 , _opr2(opr2)
1798 , _opr3(opr3) { assert(is_in_range(code, begin_op3, end_op3), "code check"); }
1865
1866 //--------------------------------
1867 class LabelObj: public CompilationResourceObj {
1868 private:
1869 Label _label;
1870 public:
1871 LabelObj() {}
1872 Label* label() { return &_label; }
1873 };
1874
1875
1876 class LIR_OpLock: public LIR_Op {
1877 friend class LIR_OpVisitState;
1878
1879 private:
1880 LIR_Opr _hdr;
1881 LIR_Opr _obj;
1882 LIR_Opr _lock;
1883 LIR_Opr _scratch;
1884 CodeStub* _stub;
1885 public:
1886 LIR_OpLock(LIR_Code code, LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info)
1887 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1888 , _hdr(hdr)
1889 , _obj(obj)
1890 , _lock(lock)
1891 , _scratch(scratch)
1892 , _stub(stub) {}
1893
1894 LIR_Opr hdr_opr() const { return _hdr; }
1895 LIR_Opr obj_opr() const { return _obj; }
1896 LIR_Opr lock_opr() const { return _lock; }
1897 LIR_Opr scratch_opr() const { return _scratch; }
1898 CodeStub* stub() const { return _stub; }
1899
1900 virtual void emit_code(LIR_Assembler* masm);
1901 virtual LIR_OpLock* as_OpLock() { return this; }
1902 void print_instr(outputStream* out) const PRODUCT_RETURN;
1903 };
1904
1905 class LIR_OpLoadKlass: public LIR_Op {
1906 friend class LIR_OpVisitState;
1907
1908 private:
1909 LIR_Opr _obj;
1910 public:
1911 LIR_OpLoadKlass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info)
1912 : LIR_Op(lir_load_klass, result, info)
1913 , _obj(obj)
1914 {}
1915
1916 LIR_Opr obj() const { return _obj; }
1917
1918 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return this; }
2063 , _obj(obj)
2064 , _tmp(tmp)
2065 , _exact_klass(exact_klass)
2066 , _current_klass(current_klass)
2067 , _not_null(not_null)
2068 , _no_conflict(no_conflict) { }
2069
2070 LIR_Opr mdp() const { return _mdp; }
2071 LIR_Opr obj() const { return _obj; }
2072 LIR_Opr tmp() const { return _tmp; }
2073 ciKlass* exact_klass() const { return _exact_klass; }
2074 intptr_t current_klass() const { return _current_klass; }
2075 bool not_null() const { return _not_null; }
2076 bool no_conflict() const { return _no_conflict; }
2077
2078 virtual void emit_code(LIR_Assembler* masm);
2079 virtual LIR_OpProfileType* as_OpProfileType() { return this; }
2080 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
2081 };
2082
2083 class LIR_InsertionBuffer;
2084
2085 //--------------------------------LIR_List---------------------------------------------------
2086 // Maintains a list of LIR instructions (one instance of LIR_List per basic block)
2087 // LIR instructions are appended through the helper methods that LIR_List itself provides.
2088 //
2089 // Notes:
2090 // - all offsets are (and should be) in bytes
2091 // - local positions are specified with an offset, with offset 0 being local 0
2092
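// Illustrative sketch of how a block's list is filled (operand and block names are
// made up; only the helper methods declared below are taken from this class):
//
//   LIR_List* lir = ...;                      // the list owned by the current block
//   lir->load(field_addr, value_opr, info);   // read memory at field_addr into value_opr
//   lir->store(value_opr, other_addr, info);  // write value_opr back to memory
//   lir->jump(continuation_block);            // unconditional branch ending the block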
2093 class LIR_List: public CompilationResourceObj {
2094 private:
2095 LIR_OpList _operations;
2096
2097 Compilation* _compilation;
2098 #ifndef PRODUCT
2099 BlockBegin* _block;
2100 #endif
2101 #ifdef ASSERT
2102 const char * _file;
2290 void div (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_div, left, right, res, tmp)); }
2291 void rem (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = nullptr) { append(new LIR_Op2(lir_rem, left, right, res, info)); }
2292
2293 void volatile_load_mem_reg(LIR_Address* address, LIR_Opr dst, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2294 void volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2295
2296 void load(LIR_Address* addr, LIR_Opr src, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2297
2298 void store_mem_int(jint v, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2299 void store_mem_oop(jobject o, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2300 void store(LIR_Opr src, LIR_Address* addr, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2301 void volatile_store_mem_reg(LIR_Opr src, LIR_Address* address, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2302 void volatile_store_unsafe_reg(LIR_Opr src, LIR_Opr base, LIR_Opr offset, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2303
2304 void idiv(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2305 void idiv(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2306 void irem(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2307 void irem(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2308
2309 void allocate_object(LIR_Opr dst, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, int header_size, int object_size, LIR_Opr klass, bool init_check, CodeStub* stub);
2310 void allocate_array(LIR_Opr dst, LIR_Opr len, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, BasicType type, LIR_Opr klass, CodeStub* stub, bool zero_array = true);
2311
2312 // jump is an unconditional branch
2313 void jump(BlockBegin* block) {
2314 append(new LIR_OpBranch(lir_cond_always, block));
2315 }
2316 void jump(CodeStub* stub) {
2317 append(new LIR_OpBranch(lir_cond_always, stub));
2318 }
2319 void branch(LIR_Condition cond, Label* lbl) {
2320 append(new LIR_OpBranch(cond, lbl));
2321 }
2322 // Should not be used for fp comparisons
2323 void branch(LIR_Condition cond, BlockBegin* block) {
2324 append(new LIR_OpBranch(cond, block));
2325 }
2326 // Should not be used for fp comparisons
2327 void branch(LIR_Condition cond, CodeStub* stub) {
2328 append(new LIR_OpBranch(cond, stub));
2329 }
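// A local forward branch can also target a LabelObj instead of a block or stub.
// Sketch (assumes the condition flags were already set by a preceding compare):
//
//   LabelObj* L_done = new LabelObj();
//   branch(lir_cond_equal, L_done->label());     // taken when the compare found equality
//   ...                                          // ops executed only on the fall-through path
//   append(new LIR_OpLabel(L_done->label()));    // bind the label here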
2330 // Should only be used for fp comparisons
2337 void unsigned_shift_right(LIR_Opr value, LIR_Opr count, LIR_Opr dst, LIR_Opr tmp);
2338
2339 void shift_left(LIR_Opr value, int count, LIR_Opr dst) { shift_left(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2340 void shift_right(LIR_Opr value, int count, LIR_Opr dst) { shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2341 void unsigned_shift_right(LIR_Opr value, int count, LIR_Opr dst) { unsigned_shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2342
2343 void lcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_cmp_l2i, left, right, dst)); }
2344 void fcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst, bool is_unordered_less);
2345
2346 void call_runtime_leaf(address routine, LIR_Opr tmp, LIR_Opr result, LIR_OprList* arguments) {
2347 append(new LIR_OpRTCall(routine, tmp, result, arguments));
2348 }
2349
2350 void call_runtime(address routine, LIR_Opr tmp, LIR_Opr result,
2351 LIR_OprList* arguments, CodeEmitInfo* info) {
2352 append(new LIR_OpRTCall(routine, tmp, result, arguments, info));
2353 }
2354
2355 void load_stack_address_monitor(int monitor_ix, LIR_Opr dst) { append(new LIR_Op1(lir_monaddr, LIR_OprFact::intConst(monitor_ix), dst)); }
2356 void unlock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub);
2357 void lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info);
2358
2359 void breakpoint() { append(new LIR_Op0(lir_breakpoint)); }
2360
2361 void arraycopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp, ciArrayKlass* expected_type, int flags, CodeEmitInfo* info) { append(new LIR_OpArrayCopy(src, src_pos, dst, dst_pos, length, tmp, expected_type, flags, info)); }
2362
2363 void update_crc32(LIR_Opr crc, LIR_Opr val, LIR_Opr res) { append(new LIR_OpUpdateCRC32(crc, val, res)); }
2364
2365 void instanceof(LIR_Opr result, LIR_Opr object, ciKlass* klass, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check, CodeEmitInfo* info_for_patch, ciMethod* profiled_method, int profiled_bci);
2366 void store_check(LIR_Opr object, LIR_Opr array, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception, ciMethod* profiled_method, int profiled_bci);
2367
2368 void checkcast (LIR_Opr result, LIR_Opr object, ciKlass* klass,
2369 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
2370 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub,
2371 ciMethod* profiled_method, int profiled_bci);
2372 // MethodData* profiling
2373 void profile_call(ciMethod* method, int bci, ciMethod* callee, LIR_Opr mdo, LIR_Opr recv, LIR_Opr t1, ciKlass* cha_klass) {
2374 append(new LIR_OpProfileCall(method, bci, callee, mdo, recv, t1, cha_klass));
2375 }
2376 void profile_type(LIR_Address* mdp, LIR_Opr obj, ciKlass* exact_klass, intptr_t current_klass, LIR_Opr tmp, bool not_null, bool no_conflict) {
2377 append(new LIR_OpProfileType(LIR_OprFact::address(mdp), obj, exact_klass, current_klass, tmp, not_null, no_conflict));
2378 }
2379
2380 void xadd(LIR_Opr src, LIR_Opr add, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xadd, src, add, res, tmp)); }
2381 void xchg(LIR_Opr src, LIR_Opr set, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xchg, src, set, res, tmp)); }
2382
2383 void load_klass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info) { append(new LIR_OpLoadKlass(obj, result, info)); }
2384
2385 #ifdef ASSERT
2386 void lir_assert(LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, const char* msg, bool halt) { append(new LIR_OpAssert(condition, opr1, opr2, msg, halt)); }
2387 #endif
2388 };
2389
2390 void print_LIR(BlockList* blocks);
2391
2392 class LIR_InsertionBuffer : public CompilationResourceObj {
2393 private:
2394 LIR_List* _lir; // the lir list where ops of this buffer should be inserted later (null when uninitialized)
2395
2396 // list of insertion points. index and count are stored alternately:
2397 // _index_and_count[i * 2]: the index into lir list where "count" ops should be inserted
2398 // _index_and_count[i * 2 + 1]: the number of ops to be inserted at index
|
880 class LIR_Op0;
881 class LIR_OpLabel;
882 class LIR_Op1;
883 class LIR_OpBranch;
884 class LIR_OpConvert;
885 class LIR_OpAllocObj;
886 class LIR_OpReturn;
887 class LIR_OpRoundFP;
888 class LIR_Op2;
889 class LIR_OpDelay;
890 class LIR_Op3;
891 class LIR_OpAllocArray;
892 class LIR_Op4;
893 class LIR_OpCall;
894 class LIR_OpJavaCall;
895 class LIR_OpRTCall;
896 class LIR_OpArrayCopy;
897 class LIR_OpUpdateCRC32;
898 class LIR_OpLock;
899 class LIR_OpTypeCheck;
900 class LIR_OpFlattenedArrayCheck;
901 class LIR_OpNullFreeArrayCheck;
902 class LIR_OpSubstitutabilityCheck;
903 class LIR_OpCompareAndSwap;
904 class LIR_OpLoadKlass;
905 class LIR_OpProfileCall;
906 class LIR_OpProfileType;
907 class LIR_OpProfileInlineType;
908 #ifdef ASSERT
909 class LIR_OpAssert;
910 #endif
911
912 // LIR operation codes
913 enum LIR_Code {
914 lir_none
915 , begin_op0
916 , lir_label
917 , lir_nop
918 , lir_std_entry
919 , lir_osr_entry
920 , lir_fpop_raw
921 , lir_breakpoint
922 , lir_rtcall
923 , lir_membar
924 , lir_membar_acquire
925 , lir_membar_release
926 , lir_membar_loadload
927 , lir_membar_storestore
928 , lir_membar_loadstore
929 , lir_membar_storeload
930 , lir_get_thread
931 , lir_on_spin_wait
932 , lir_check_orig_pc
933 , end_op0
934 , begin_op1
935 , lir_fxch
936 , lir_fld
937 , lir_push
938 , lir_pop
939 , lir_null_check
940 , lir_return
941 , lir_leal
942 , lir_move
943 , lir_convert
944 , lir_alloc_object
945 , lir_monaddr
946 , lir_roundfp
947 , lir_safepoint
948 , lir_unwind
949 , lir_load_klass
950 , end_op1
951 , begin_op2
952 , lir_branch
994 , lir_dynamic_call
995 , end_opJavaCall
996 , begin_opArrayCopy
997 , lir_arraycopy
998 , end_opArrayCopy
999 , begin_opUpdateCRC32
1000 , lir_updatecrc32
1001 , end_opUpdateCRC32
1002 , begin_opLock
1003 , lir_lock
1004 , lir_unlock
1005 , end_opLock
1006 , begin_delay_slot
1007 , lir_delay_slot
1008 , end_delay_slot
1009 , begin_opTypeCheck
1010 , lir_instanceof
1011 , lir_checkcast
1012 , lir_store_check
1013 , end_opTypeCheck
1014 , begin_opFlattenedArrayCheck
1015 , lir_flat_array_check
1016 , end_opFlattenedArrayCheck
1017 , begin_opNullFreeArrayCheck
1018 , lir_null_free_array_check
1019 , end_opNullFreeArrayCheck
1020 , begin_opSubstitutabilityCheck
1021 , lir_substitutability_check
1022 , end_opSubstitutabilityCheck
1023 , begin_opCompareAndSwap
1024 , lir_cas_long
1025 , lir_cas_obj
1026 , lir_cas_int
1027 , end_opCompareAndSwap
1028 , begin_opMDOProfile
1029 , lir_profile_call
1030 , lir_profile_type
1031 , lir_profile_inline_type
1032 , end_opMDOProfile
1033 , begin_opAssert
1034 , lir_assert
1035 , end_opAssert
1036 #ifdef INCLUDE_ZGC
1037 , begin_opXLoadBarrierTest
1038 , lir_xloadbarrier_test
1039 , end_opXLoadBarrierTest
1040 #endif
1041 };
1042
1043
1044 enum LIR_Condition {
1045 lir_cond_equal
1046 , lir_cond_notEqual
1047 , lir_cond_less
1048 , lir_cond_lessEqual
1049 , lir_cond_greaterEqual
1050 , lir_cond_greater
1051 , lir_cond_belowEqual
1160 virtual LIR_OpCall* as_OpCall() { return nullptr; }
1161 virtual LIR_OpJavaCall* as_OpJavaCall() { return nullptr; }
1162 virtual LIR_OpLabel* as_OpLabel() { return nullptr; }
1163 virtual LIR_OpDelay* as_OpDelay() { return nullptr; }
1164 virtual LIR_OpLock* as_OpLock() { return nullptr; }
1165 virtual LIR_OpAllocArray* as_OpAllocArray() { return nullptr; }
1166 virtual LIR_OpAllocObj* as_OpAllocObj() { return nullptr; }
1167 virtual LIR_OpRoundFP* as_OpRoundFP() { return nullptr; }
1168 virtual LIR_OpBranch* as_OpBranch() { return nullptr; }
1169 virtual LIR_OpReturn* as_OpReturn() { return nullptr; }
1170 virtual LIR_OpRTCall* as_OpRTCall() { return nullptr; }
1171 virtual LIR_OpConvert* as_OpConvert() { return nullptr; }
1172 virtual LIR_Op0* as_Op0() { return nullptr; }
1173 virtual LIR_Op1* as_Op1() { return nullptr; }
1174 virtual LIR_Op2* as_Op2() { return nullptr; }
1175 virtual LIR_Op3* as_Op3() { return nullptr; }
1176 virtual LIR_Op4* as_Op4() { return nullptr; }
1177 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return nullptr; }
1178 virtual LIR_OpUpdateCRC32* as_OpUpdateCRC32() { return nullptr; }
1179 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return nullptr; }
1180 virtual LIR_OpFlattenedArrayCheck* as_OpFlattenedArrayCheck() { return nullptr; }
1181 virtual LIR_OpNullFreeArrayCheck* as_OpNullFreeArrayCheck() { return nullptr; }
1182 virtual LIR_OpSubstitutabilityCheck* as_OpSubstitutabilityCheck() { return nullptr; }
1183 virtual LIR_OpCompareAndSwap* as_OpCompareAndSwap() { return nullptr; }
1184 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return nullptr; }
1185 virtual LIR_OpProfileCall* as_OpProfileCall() { return nullptr; }
1186 virtual LIR_OpProfileType* as_OpProfileType() { return nullptr; }
1187 virtual LIR_OpProfileInlineType* as_OpProfileInlineType() { return nullptr; }
1188 #ifdef ASSERT
1189 virtual LIR_OpAssert* as_OpAssert() { return nullptr; }
1190 #endif
1191
1192 virtual void verify() const {}
1193 };
1194
1195 // Base class for call operations
1196 class LIR_OpCall: public LIR_Op {
1197 friend class LIR_OpVisitState;
1198
1199 protected:
1200 address _addr;
1201 LIR_OprList* _arguments;
1202 protected:
1203 LIR_OpCall(LIR_Code code, address addr, LIR_Opr result,
1204 LIR_OprList* arguments, CodeEmitInfo* info = nullptr)
1205 : LIR_Op(code, result, info)
1206 , _addr(addr)
1207 , _arguments(arguments) {}
1240 LIR_OprList* arguments, CodeEmitInfo* info)
1241 : LIR_OpCall(code, (address)vtable_offset, result, arguments, info)
1242 , _method(method)
1243 , _receiver(receiver)
1244 , _method_handle_invoke_SP_save_opr(LIR_OprFact::illegalOpr)
1245 { assert(is_in_range(code, begin_opJavaCall, end_opJavaCall), "code check"); }
1246
1247 LIR_Opr receiver() const { return _receiver; }
1248 ciMethod* method() const { return _method; }
1249
1250 // JSR 292 support.
1251 bool is_invokedynamic() const { return code() == lir_dynamic_call; }
1252 bool is_method_handle_invoke() const {
1253 return method()->is_compiled_lambda_form() || // Java-generated lambda form
1254 method()->is_method_handle_intrinsic(); // JVM-generated MH intrinsic
1255 }
1256
1257 virtual void emit_code(LIR_Assembler* masm);
1258 virtual LIR_OpJavaCall* as_OpJavaCall() { return this; }
1259 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1260
1261 bool maybe_return_as_fields(ciInlineKlass** vk = nullptr) const;
1262 };
1263
1264 // --------------------------------------------------
1265 // LIR_OpLabel
1266 // --------------------------------------------------
1267 // Location where a branch can continue
1268 class LIR_OpLabel: public LIR_Op {
1269 friend class LIR_OpVisitState;
1270
1271 private:
1272 Label* _label;
1273 public:
1274 LIR_OpLabel(Label* lbl)
1275 : LIR_Op(lir_label, LIR_OprFact::illegalOpr, nullptr)
1276 , _label(lbl) {}
1277 Label* label() const { return _label; }
1278
1279 virtual void emit_code(LIR_Assembler* masm);
1280 virtual LIR_OpLabel* as_OpLabel() { return this; }
1281 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1293 LIR_Opr _dst_pos;
1294 LIR_Opr _length;
1295 LIR_Opr _tmp;
1296 ciArrayKlass* _expected_type;
1297 int _flags;
1298
1299 public:
1300 enum Flags {
1301 src_null_check = 1 << 0,
1302 dst_null_check = 1 << 1,
1303 src_pos_positive_check = 1 << 2,
1304 dst_pos_positive_check = 1 << 3,
1305 length_positive_check = 1 << 4,
1306 src_range_check = 1 << 5,
1307 dst_range_check = 1 << 6,
1308 type_check = 1 << 7,
1309 overlapping = 1 << 8,
1310 unaligned = 1 << 9,
1311 src_objarray = 1 << 10,
1312 dst_objarray = 1 << 11,
1313 always_slow_path = 1 << 12,
1314 src_inlinetype_check = 1 << 13,
1315 dst_inlinetype_check = 1 << 14,
1316 all_flags = (1 << 15) - 1
1317 };
1318
1319 LIR_OpArrayCopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp,
1320 ciArrayKlass* expected_type, int flags, CodeEmitInfo* info);
1321
1322 LIR_Opr src() const { return _src; }
1323 LIR_Opr src_pos() const { return _src_pos; }
1324 LIR_Opr dst() const { return _dst; }
1325 LIR_Opr dst_pos() const { return _dst_pos; }
1326 LIR_Opr length() const { return _length; }
1327 LIR_Opr tmp() const { return _tmp; }
1328 int flags() const { return _flags; }
1329 ciArrayKlass* expected_type() const { return _expected_type; }
1330 ArrayCopyStub* stub() const { return _stub; }
1331
1332 virtual void emit_code(LIR_Assembler* masm);
1333 virtual LIR_OpArrayCopy* as_OpArrayCopy() { return this; }
1334 void print_instr(outputStream* out) const PRODUCT_RETURN;
1335 };
1336
1559 };
1560
1561 // LIR_OpTypeCheck
1562 class LIR_OpTypeCheck: public LIR_Op {
1563 friend class LIR_OpVisitState;
1564
1565 private:
1566 LIR_Opr _object;
1567 LIR_Opr _array;
1568 ciKlass* _klass;
1569 LIR_Opr _tmp1;
1570 LIR_Opr _tmp2;
1571 LIR_Opr _tmp3;
1572 CodeEmitInfo* _info_for_patch;
1573 CodeEmitInfo* _info_for_exception;
1574 CodeStub* _stub;
1575 ciMethod* _profiled_method;
1576 int _profiled_bci;
1577 bool _should_profile;
1578 bool _fast_check;
1579 bool _need_null_check;
1580
1581 public:
1582 LIR_OpTypeCheck(LIR_Code code, LIR_Opr result, LIR_Opr object, ciKlass* klass,
1583 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
1584 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub, bool need_null_check = true);
1585 LIR_OpTypeCheck(LIR_Code code, LIR_Opr object, LIR_Opr array,
1586 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception);
1587
1588 LIR_Opr object() const { return _object; }
1589 LIR_Opr array() const { assert(code() == lir_store_check, "not valid"); return _array; }
1590 LIR_Opr tmp1() const { return _tmp1; }
1591 LIR_Opr tmp2() const { return _tmp2; }
1592 LIR_Opr tmp3() const { return _tmp3; }
1593 ciKlass* klass() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _klass; }
1594 bool fast_check() const { assert(code() == lir_instanceof || code() == lir_checkcast, "not valid"); return _fast_check; }
1595 CodeEmitInfo* info_for_patch() const { return _info_for_patch; }
1596 CodeEmitInfo* info_for_exception() const { return _info_for_exception; }
1597 CodeStub* stub() const { return _stub; }
1598
1599 // MethodData* profiling
1600 void set_profiled_method(ciMethod *method) { _profiled_method = method; }
1601 void set_profiled_bci(int bci) { _profiled_bci = bci; }
1602 void set_should_profile(bool b) { _should_profile = b; }
1603 ciMethod* profiled_method() const { return _profiled_method; }
1604 int profiled_bci() const { return _profiled_bci; }
1605 bool should_profile() const { return _should_profile; }
1606 bool need_null_check() const { return _need_null_check; }
1607 virtual bool is_patching() { return _info_for_patch != nullptr; }
1608 virtual void emit_code(LIR_Assembler* masm);
1609 virtual LIR_OpTypeCheck* as_OpTypeCheck() { return this; }
1610 void print_instr(outputStream* out) const PRODUCT_RETURN;
1611 };
1612
1613 // LIR_OpFlattenedArrayCheck
1614 class LIR_OpFlattenedArrayCheck: public LIR_Op {
1615 friend class LIR_OpVisitState;
1616
1617 private:
1618 LIR_Opr _array;
1619 LIR_Opr _value;
1620 LIR_Opr _tmp;
1621 CodeStub* _stub;
1622 public:
1623 LIR_OpFlattenedArrayCheck(LIR_Opr array, LIR_Opr value, LIR_Opr tmp, CodeStub* stub);
1624 LIR_Opr array() const { return _array; }
1625 LIR_Opr value() const { return _value; }
1626 LIR_Opr tmp() const { return _tmp; }
1627 CodeStub* stub() const { return _stub; }
1628
1629 virtual void emit_code(LIR_Assembler* masm);
1630 virtual LIR_OpFlattenedArrayCheck* as_OpFlattenedArrayCheck() { return this; }
1631 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1632 };
1633
1634 // LIR_OpNullFreeArrayCheck
1635 class LIR_OpNullFreeArrayCheck: public LIR_Op {
1636 friend class LIR_OpVisitState;
1637
1638 private:
1639 LIR_Opr _array;
1640 LIR_Opr _tmp;
1641 public:
1642 LIR_OpNullFreeArrayCheck(LIR_Opr array, LIR_Opr tmp);
1643 LIR_Opr array() const { return _array; }
1644 LIR_Opr tmp() const { return _tmp; }
1645
1646 virtual void emit_code(LIR_Assembler* masm);
1647 virtual LIR_OpNullFreeArrayCheck* as_OpNullFreeArrayCheck() { return this; }
1648 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1649 };
1650
1651 class LIR_OpSubstitutabilityCheck: public LIR_Op {
1652 friend class LIR_OpVisitState;
1653
1654 private:
1655 LIR_Opr _left;
1656 LIR_Opr _right;
1657 LIR_Opr _equal_result;
1658 LIR_Opr _not_equal_result;
1659 LIR_Opr _tmp1;
1660 LIR_Opr _tmp2;
1661 ciKlass* _left_klass;
1662 ciKlass* _right_klass;
1663 LIR_Opr _left_klass_op;
1664 LIR_Opr _right_klass_op;
1665 CodeStub* _stub;
1666 public:
1667 LIR_OpSubstitutabilityCheck(LIR_Opr result, LIR_Opr left, LIR_Opr right, LIR_Opr equal_result, LIR_Opr not_equal_result,
1668 LIR_Opr tmp1, LIR_Opr tmp2,
1669 ciKlass* left_klass, ciKlass* right_klass, LIR_Opr left_klass_op, LIR_Opr right_klass_op,
1670 CodeEmitInfo* info, CodeStub* stub);
1671
1672 LIR_Opr left() const { return _left; }
1673 LIR_Opr right() const { return _right; }
1674 LIR_Opr equal_result() const { return _equal_result; }
1675 LIR_Opr not_equal_result() const { return _not_equal_result; }
1676 LIR_Opr tmp1() const { return _tmp1; }
1677 LIR_Opr tmp2() const { return _tmp2; }
1678 ciKlass* left_klass() const { return _left_klass; }
1679 ciKlass* right_klass() const { return _right_klass; }
1680 LIR_Opr left_klass_op() const { return _left_klass_op; }
1681 LIR_Opr right_klass_op() const { return _right_klass_op; }
1682 CodeStub* stub() const { return _stub; }
1683
1684 virtual void emit_code(LIR_Assembler* masm);
1685 virtual LIR_OpSubstitutabilityCheck* as_OpSubstitutabilityCheck() { return this; }
1686 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1687 };
1688
1689 // LIR_Op2
1690 class LIR_Op2: public LIR_Op {
1691 friend class LIR_OpVisitState;
1692
1693 int _fpu_stack_size; // for sin/cos implementation on Intel
1694
1695 protected:
1696 LIR_Opr _opr1;
1697 LIR_Opr _opr2;
1698 LIR_Opr _tmp1;
1699 LIR_Opr _tmp2;
1700 LIR_Opr _tmp3;
1701 LIR_Opr _tmp4;
1702 LIR_Opr _tmp5;
1703 LIR_Condition _condition;
1704 BasicType _type;
1705
1706 void verify() const;
1707
1708 public:
1836 void negate_cond();
1837
1838 virtual void emit_code(LIR_Assembler* masm);
1839 virtual LIR_OpBranch* as_OpBranch() { return this; }
1840 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1841 };
1842
1843 class LIR_OpAllocArray : public LIR_Op {
1844 friend class LIR_OpVisitState;
1845
1846 private:
1847 LIR_Opr _klass;
1848 LIR_Opr _len;
1849 LIR_Opr _tmp1;
1850 LIR_Opr _tmp2;
1851 LIR_Opr _tmp3;
1852 LIR_Opr _tmp4;
1853 CodeStub* _stub;
1854 BasicType _type;
1855 bool _zero_array;
1856 bool _is_null_free;
1857
1858 public:
1859 LIR_OpAllocArray(LIR_Opr klass, LIR_Opr len, LIR_Opr result, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, BasicType type, CodeStub* stub, bool zero_array, bool is_null_free)
1860 : LIR_Op(lir_alloc_array, result, nullptr)
1861 , _klass(klass)
1862 , _len(len)
1863 , _tmp1(t1)
1864 , _tmp2(t2)
1865 , _tmp3(t3)
1866 , _tmp4(t4)
1867 , _stub(stub)
1868 , _type(type)
1869 , _zero_array(zero_array)
1870 , _is_null_free(is_null_free) {}
1871
1872 LIR_Opr klass() const { return _klass; }
1873 LIR_Opr len() const { return _len; }
1874 LIR_Opr obj() const { return result_opr(); }
1875 LIR_Opr tmp1() const { return _tmp1; }
1876 LIR_Opr tmp2() const { return _tmp2; }
1877 LIR_Opr tmp3() const { return _tmp3; }
1878 LIR_Opr tmp4() const { return _tmp4; }
1879 BasicType type() const { return _type; }
1880 CodeStub* stub() const { return _stub; }
1881 bool zero_array() const { return _zero_array; }
1882 bool is_null_free() const { return _is_null_free; }
1883
1884 virtual void emit_code(LIR_Assembler* masm);
1885 virtual LIR_OpAllocArray* as_OpAllocArray() { return this; }
1886 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
1887 };
1888
1889
1890 class LIR_Op3: public LIR_Op {
1891 friend class LIR_OpVisitState;
1892
1893 private:
1894 LIR_Opr _opr1;
1895 LIR_Opr _opr2;
1896 LIR_Opr _opr3;
1897 public:
1898 LIR_Op3(LIR_Code code, LIR_Opr opr1, LIR_Opr opr2, LIR_Opr opr3, LIR_Opr result, CodeEmitInfo* info = nullptr)
1899 : LIR_Op(code, result, info)
1900 , _opr1(opr1)
1901 , _opr2(opr2)
1902 , _opr3(opr3) { assert(is_in_range(code, begin_op3, end_op3), "code check"); }
1969
1970 //--------------------------------
1971 class LabelObj: public CompilationResourceObj {
1972 private:
1973 Label _label;
1974 public:
1975 LabelObj() {}
1976 Label* label() { return &_label; }
1977 };
1978
1979
1980 class LIR_OpLock: public LIR_Op {
1981 friend class LIR_OpVisitState;
1982
1983 private:
1984 LIR_Opr _hdr;
1985 LIR_Opr _obj;
1986 LIR_Opr _lock;
1987 LIR_Opr _scratch;
1988 CodeStub* _stub;
1989 CodeStub* _throw_ie_stub;
1990 public:
1991 LIR_OpLock(LIR_Code code, LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_ie_stub = nullptr)
1992 : LIR_Op(code, LIR_OprFact::illegalOpr, info)
1993 , _hdr(hdr)
1994 , _obj(obj)
1995 , _lock(lock)
1996 , _scratch(scratch)
1997 , _stub(stub)
1998 , _throw_ie_stub(throw_ie_stub) {}
1999
2000 LIR_Opr hdr_opr() const { return _hdr; }
2001 LIR_Opr obj_opr() const { return _obj; }
2002 LIR_Opr lock_opr() const { return _lock; }
2003 LIR_Opr scratch_opr() const { return _scratch; }
2004 CodeStub* stub() const { return _stub; }
2005 CodeStub* throw_ie_stub() const { return _throw_ie_stub; }
2006
2007 virtual void emit_code(LIR_Assembler* masm);
2008 virtual LIR_OpLock* as_OpLock() { return this; }
2009 void print_instr(outputStream* out) const PRODUCT_RETURN;
2010 };
2011
2012 class LIR_OpLoadKlass: public LIR_Op {
2013 friend class LIR_OpVisitState;
2014
2015 private:
2016 LIR_Opr _obj;
2017 public:
2018 LIR_OpLoadKlass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info)
2019 : LIR_Op(lir_load_klass, result, info)
2020 , _obj(obj)
2021 {}
2022
2023 LIR_Opr obj() const { return _obj; }
2024
2025 virtual LIR_OpLoadKlass* as_OpLoadKlass() { return this; }
2170 , _obj(obj)
2171 , _tmp(tmp)
2172 , _exact_klass(exact_klass)
2173 , _current_klass(current_klass)
2174 , _not_null(not_null)
2175 , _no_conflict(no_conflict) { }
2176
2177 LIR_Opr mdp() const { return _mdp; }
2178 LIR_Opr obj() const { return _obj; }
2179 LIR_Opr tmp() const { return _tmp; }
2180 ciKlass* exact_klass() const { return _exact_klass; }
2181 intptr_t current_klass() const { return _current_klass; }
2182 bool not_null() const { return _not_null; }
2183 bool no_conflict() const { return _no_conflict; }
2184
2185 virtual void emit_code(LIR_Assembler* masm);
2186 virtual LIR_OpProfileType* as_OpProfileType() { return this; }
2187 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
2188 };
2189
2190 // LIR_OpProfileInlineType
2191 class LIR_OpProfileInlineType : public LIR_Op {
2192 friend class LIR_OpVisitState;
2193
2194 private:
2195 LIR_Opr _mdp;
2196 LIR_Opr _obj;
2197 int _flag;
2198 LIR_Opr _tmp;
2199 bool _not_null; // true if we know statically that _obj cannot be null
2200
2201 public:
2202 // Destroys recv
2203 LIR_OpProfileInlineType(LIR_Opr mdp, LIR_Opr obj, int flag, LIR_Opr tmp, bool not_null)
2204 : LIR_Op(lir_profile_inline_type, LIR_OprFact::illegalOpr, nullptr) // no result, no info
2205 , _mdp(mdp)
2206 , _obj(obj)
2207 , _flag(flag)
2208 , _tmp(tmp)
2209 , _not_null(not_null) { }
2210
2211 LIR_Opr mdp() const { return _mdp; }
2212 LIR_Opr obj() const { return _obj; }
2213 int flag() const { return _flag; }
2214 LIR_Opr tmp() const { return _tmp; }
2215 bool not_null() const { return _not_null; }
2216
2217 virtual void emit_code(LIR_Assembler* masm);
2218 virtual LIR_OpProfileInlineType* as_OpProfileInlineType() { return this; }
2219 virtual void print_instr(outputStream* out) const PRODUCT_RETURN;
2220 };
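// Appended through LIR_List::profile_inline_type() declared further down. Sketch with
// made-up operands ('flag' is assumed to be the MethodData flag constant to record):
//
//   lir->profile_inline_type(mdp_addr, obj_opr, flag, tmp_opr, /*not_null*/ false);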
2221
2222 class LIR_InsertionBuffer;
2223
2224 //--------------------------------LIR_List---------------------------------------------------
2225 // Maintains a list of LIR instructions (one instance of LIR_List per basic block)
2226 // LIR instructions are appended through the helper methods that LIR_List itself provides.
2227 //
2228 // Notes:
2229 // - all offsets are (and should be) in bytes
2230 // - local positions are specified with an offset, with offset 0 being local 0
2231
2232 class LIR_List: public CompilationResourceObj {
2233 private:
2234 LIR_OpList _operations;
2235
2236 Compilation* _compilation;
2237 #ifndef PRODUCT
2238 BlockBegin* _block;
2239 #endif
2240 #ifdef ASSERT
2241 const char * _file;
2429 void div (LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_div, left, right, res, tmp)); }
2430 void rem (LIR_Opr left, LIR_Opr right, LIR_Opr res, CodeEmitInfo* info = nullptr) { append(new LIR_Op2(lir_rem, left, right, res, info)); }
2431
2432 void volatile_load_mem_reg(LIR_Address* address, LIR_Opr dst, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2433 void volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2434
2435 void load(LIR_Address* addr, LIR_Opr src, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2436
2437 void store_mem_int(jint v, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2438 void store_mem_oop(jobject o, LIR_Opr base, int offset_in_bytes, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2439 void store(LIR_Opr src, LIR_Address* addr, CodeEmitInfo* info = nullptr, LIR_PatchCode patch_code = lir_patch_none);
2440 void volatile_store_mem_reg(LIR_Opr src, LIR_Address* address, CodeEmitInfo* info, LIR_PatchCode patch_code = lir_patch_none);
2441 void volatile_store_unsafe_reg(LIR_Opr src, LIR_Opr base, LIR_Opr offset, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code);
2442
2443 void idiv(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2444 void idiv(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2445 void irem(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2446 void irem(LIR_Opr left, int right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info);
2447
2448 void allocate_object(LIR_Opr dst, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, int header_size, int object_size, LIR_Opr klass, bool init_check, CodeStub* stub);
2449 void allocate_array(LIR_Opr dst, LIR_Opr len, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, BasicType type, LIR_Opr klass, CodeStub* stub, bool zero_array = true, bool is_null_free = false);
2450
2451 // jump is an unconditional branch
2452 void jump(BlockBegin* block) {
2453 append(new LIR_OpBranch(lir_cond_always, block));
2454 }
2455 void jump(CodeStub* stub) {
2456 append(new LIR_OpBranch(lir_cond_always, stub));
2457 }
2458 void branch(LIR_Condition cond, Label* lbl) {
2459 append(new LIR_OpBranch(cond, lbl));
2460 }
2461 // Should not be used for fp comparisons
2462 void branch(LIR_Condition cond, BlockBegin* block) {
2463 append(new LIR_OpBranch(cond, block));
2464 }
2465 // Should not be used for fp comparisons
2466 void branch(LIR_Condition cond, CodeStub* stub) {
2467 append(new LIR_OpBranch(cond, stub));
2468 }
2469 // Should only be used for fp comparisons
2476 void unsigned_shift_right(LIR_Opr value, LIR_Opr count, LIR_Opr dst, LIR_Opr tmp);
2477
2478 void shift_left(LIR_Opr value, int count, LIR_Opr dst) { shift_left(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2479 void shift_right(LIR_Opr value, int count, LIR_Opr dst) { shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2480 void unsigned_shift_right(LIR_Opr value, int count, LIR_Opr dst) { unsigned_shift_right(value, LIR_OprFact::intConst(count), dst, LIR_OprFact::illegalOpr); }
2481
2482 void lcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_cmp_l2i, left, right, dst)); }
2483 void fcmp2int(LIR_Opr left, LIR_Opr right, LIR_Opr dst, bool is_unordered_less);
2484
2485 void call_runtime_leaf(address routine, LIR_Opr tmp, LIR_Opr result, LIR_OprList* arguments) {
2486 append(new LIR_OpRTCall(routine, tmp, result, arguments));
2487 }
2488
2489 void call_runtime(address routine, LIR_Opr tmp, LIR_Opr result,
2490 LIR_OprList* arguments, CodeEmitInfo* info) {
2491 append(new LIR_OpRTCall(routine, tmp, result, arguments, info));
2492 }
2493
2494 void load_stack_address_monitor(int monitor_ix, LIR_Opr dst) { append(new LIR_Op1(lir_monaddr, LIR_OprFact::intConst(monitor_ix), dst)); }
2495 void unlock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub);
2496 void lock_object(LIR_Opr hdr, LIR_Opr obj, LIR_Opr lock, LIR_Opr scratch, CodeStub* stub, CodeEmitInfo* info, CodeStub* throw_ie_stub = nullptr);
2497
2498 void breakpoint() { append(new LIR_Op0(lir_breakpoint)); }
2499
2500 void arraycopy(LIR_Opr src, LIR_Opr src_pos, LIR_Opr dst, LIR_Opr dst_pos, LIR_Opr length, LIR_Opr tmp, ciArrayKlass* expected_type, int flags, CodeEmitInfo* info) { append(new LIR_OpArrayCopy(src, src_pos, dst, dst_pos, length, tmp, expected_type, flags, info)); }
2501
2502 void update_crc32(LIR_Opr crc, LIR_Opr val, LIR_Opr res) { append(new LIR_OpUpdateCRC32(crc, val, res)); }
2503
2504 void instanceof(LIR_Opr result, LIR_Opr object, ciKlass* klass, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check, CodeEmitInfo* info_for_patch, ciMethod* profiled_method, int profiled_bci);
2505 void store_check(LIR_Opr object, LIR_Opr array, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, CodeEmitInfo* info_for_exception, ciMethod* profiled_method, int profiled_bci);
2506 void check_flat_array(LIR_Opr array, LIR_Opr value, LIR_Opr tmp, CodeStub* stub);
2507 void check_null_free_array(LIR_Opr array, LIR_Opr tmp);
2508 void substitutability_check(LIR_Opr result, LIR_Opr left, LIR_Opr right, LIR_Opr equal_result, LIR_Opr not_equal_result,
2509 LIR_Opr tmp1, LIR_Opr tmp2,
2510 ciKlass* left_klass, ciKlass* right_klass, LIR_Opr left_klass_op, LIR_Opr right_klass_op,
2511 CodeEmitInfo* info, CodeStub* stub);
2512
2513 void checkcast (LIR_Opr result, LIR_Opr object, ciKlass* klass,
2514 LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3, bool fast_check,
2515 CodeEmitInfo* info_for_exception, CodeEmitInfo* info_for_patch, CodeStub* stub,
2516 ciMethod* profiled_method, int profiled_bci, bool is_null_free);
2517 // MethodData* profiling
2518 void profile_call(ciMethod* method, int bci, ciMethod* callee, LIR_Opr mdo, LIR_Opr recv, LIR_Opr t1, ciKlass* cha_klass) {
2519 append(new LIR_OpProfileCall(method, bci, callee, mdo, recv, t1, cha_klass));
2520 }
2521 void profile_type(LIR_Address* mdp, LIR_Opr obj, ciKlass* exact_klass, intptr_t current_klass, LIR_Opr tmp, bool not_null, bool no_conflict) {
2522 append(new LIR_OpProfileType(LIR_OprFact::address(mdp), obj, exact_klass, current_klass, tmp, not_null, no_conflict));
2523 }
2524 void profile_inline_type(LIR_Address* mdp, LIR_Opr obj, int flag, LIR_Opr tmp, bool not_null) {
2525 append(new LIR_OpProfileInlineType(LIR_OprFact::address(mdp), obj, flag, tmp, not_null));
2526 }
2527
2528 void xadd(LIR_Opr src, LIR_Opr add, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xadd, src, add, res, tmp)); }
2529 void xchg(LIR_Opr src, LIR_Opr set, LIR_Opr res, LIR_Opr tmp) { append(new LIR_Op2(lir_xchg, src, set, res, tmp)); }
2530
2531 void load_klass(LIR_Opr obj, LIR_Opr result, CodeEmitInfo* info) { append(new LIR_OpLoadKlass(obj, result, info)); }
2532
2533 #ifdef ASSERT
2534 void lir_assert(LIR_Condition condition, LIR_Opr opr1, LIR_Opr opr2, const char* msg, bool halt) { append(new LIR_OpAssert(condition, opr1, opr2, msg, halt)); }
2535 #endif
2536 };
2537
2538 void print_LIR(BlockList* blocks);
2539
2540 class LIR_InsertionBuffer : public CompilationResourceObj {
2541 private:
2542 LIR_List* _lir; // the lir list where ops of this buffer should be inserted later (null when uninitialized)
2543
2544 // list of insertion points. index and count are stored alternately:
2545 // _index_and_count[i * 2]: the index into lir list where "count" ops should be inserted
2546 // _index_and_count[i * 2 + 1]: the number of ops to be inserted at index
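//
// Worked example of that encoding: appending two ops at lir-list index 3 and one op at
// index 7 (via an append(index, op) style interface, assumed here) leaves
//
//   _index_and_count = { 3, 2, 7, 1 }   // alternating (index, count) pairs
//
// while the ops themselves are assumed to be kept separately in insertion order.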