26 #include "asm/macroAssembler.inline.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "compiler/compilerDefinitions.inline.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "gc/shared/tlab_globals.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/objArrayKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/frame.inline.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/powerOfTwo.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
53
54 // Address computation: local variables
55
56 static inline Address iaddress(int n) {
57 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
58 }
59
60 static inline Address laddress(int n) {
61 return iaddress(n + 1);
62 }
63
64 static inline Address faddress(int n) {
65 return iaddress(n);
156 Address src,
157 Register dst,
158 DecoratorSet decorators) {
159 __ load_heap_oop(dst, src, r10, r11, decorators);
160 }
161
162 Address TemplateTable::at_bcp(int offset) {
163 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
164 return Address(rbcp, offset);
165 }
166
167 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
168 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
169 int byte_no)
170 {
171 assert_different_registers(bc_reg, temp_reg);
172 if (!RewriteBytecodes) return;
173 Label L_patch_done;
174
175 switch (bc) {
176 case Bytecodes::_fast_aputfield:
177 case Bytecodes::_fast_bputfield:
178 case Bytecodes::_fast_zputfield:
179 case Bytecodes::_fast_cputfield:
180 case Bytecodes::_fast_dputfield:
181 case Bytecodes::_fast_fputfield:
182 case Bytecodes::_fast_iputfield:
183 case Bytecodes::_fast_lputfield:
184 case Bytecodes::_fast_sputfield:
185 {
186 // We skip bytecode quickening for putfield instructions when
187 // the put_code written to the constant pool cache is zero.
188 // This is required so that every execution of this instruction
189 // calls out to InterpreterRuntime::resolve_get_put to do
190 // additional, required work.
191 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
192 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
193 __ load_field_entry(temp_reg, bc_reg);
194 if (byte_no == f1_byte) {
195 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
740 locals_index_wide(r1);
741 __ ldr(r0, aaddress(r1));
742 }
743
744 void TemplateTable::index_check(Register array, Register index)
745 {
746 // destroys r1, rscratch1
747 // sign extend index for use by indexed load
748 // __ movl2ptr(index, index);
749 // check index
750 Register length = rscratch1;
751 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
752 __ cmpw(index, length);
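// Unsigned compare: a negative index wraps to a huge unsigned value, so the single
// LO test below rejects both negative and too-large indices.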
753 if (index != r1) {
754 // ??? convention: move aberrant index into r1 for exception message
755 assert(r1 != array, "different registers");
756 __ mov(r1, index);
757 }
758 Label ok;
759 __ br(Assembler::LO, ok);
760 // ??? convention: move array into r3 for exception message
761 __ mov(r3, array);
762 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
763 __ br(rscratch1);
764 __ bind(ok);
765 }
766
767 void TemplateTable::iaload()
768 {
769 transition(itos, itos);
770 __ mov(r1, r0);
771 __ pop_ptr(r0);
772 // r0: array
773 // r1: index
774 index_check(r0, r1); // leaves index in r1, kills rscratch1
775 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
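// The header offset was pre-scaled to int-element units above, so the single scaled
// addressing mode below computes base + header + index*4.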
776 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
777 }
778
779 void TemplateTable::laload()
780 {
781 transition(itos, ltos);
782 __ mov(r1, r0);
783 __ pop_ptr(r0);
803 void TemplateTable::daload()
804 {
805 transition(itos, dtos);
806 __ mov(r1, r0);
807 __ pop_ptr(r0);
808 // r0: array
809 // r1: index
810 index_check(r0, r1); // leaves index in r1, kills rscratch1
811 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
812 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
813 }
814
815 void TemplateTable::aaload()
816 {
817 transition(itos, atos);
818 __ mov(r1, r0);
819 __ pop_ptr(r0);
820 // r0: array
821 // r1: index
822 index_check(r0, r1); // leaves index in r1, kills rscratch1
823 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
824 do_oop_load(_masm,
825 Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
826 r0,
827 IS_ARRAY);
828 }
829
830 void TemplateTable::baload()
831 {
832 transition(itos, itos);
833 __ mov(r1, r0);
834 __ pop_ptr(r0);
835 // r0: array
836 // r1: index
837 index_check(r0, r1); // leaves index in r1, kills rscratch1
838 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
839 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
840 }
841
842 void TemplateTable::caload()
843 {
844 transition(itos, itos);
845 __ mov(r1, r0);
846 __ pop_ptr(r0);
847 // r0: array
1094 // r1: index
1095 // r3: array
1096 index_check(r3, r1); // prefer index in r1
1097 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1098 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1099 }
1100
1101 void TemplateTable::dastore() {
1102 transition(dtos, vtos);
1103 __ pop_i(r1);
1104 __ pop_ptr(r3);
1105 // v0: value
1106 // r1: index
1107 // r3: array
1108 index_check(r3, r1); // prefer index in r1
1109 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1110 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1111 }
1112
1113 void TemplateTable::aastore() {
1114 Label is_null, ok_is_subtype, done;
1115 transition(vtos, vtos);
1116 // stack: ..., array, index, value
1117 __ ldr(r0, at_tos()); // value
1118 __ ldr(r2, at_tos_p1()); // index
1119 __ ldr(r3, at_tos_p2()); // array
1120
1121 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1122
1123 index_check(r3, r2); // kills r1
1124 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1125
1126 // do array store check - check for null value first
1127 __ cbz(r0, is_null);
1128
1129 // Move subklass into r1
1130 __ load_klass(r1, r0);
1131 // Move superklass into r0
1132 __ load_klass(r0, r3);
1133 __ ldr(r0, Address(r0,
1134 ObjArrayKlass::element_klass_offset()));
1135       // Compress array + index*oopSize + header into a single register. Frees r2.
1136
1137 // Generate subtype check. Blows r2, r5
1138 // Superklass in r0. Subklass in r1.
1139 __ gen_subtype_check(r1, ok_is_subtype);
1140
1141 // Come here on failure
1142 // object is at TOS
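// Illustrative Java-level case that lands here:
//   Object[] a = new String[1]; a[0] = Integer.valueOf(0); // ArrayStoreException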
1143 __ b(Interpreter::_throw_ArrayStoreException_entry);
1144
1145 // Come here on success
1146 __ bind(ok_is_subtype);
1147
1148 // Get the value we will store
1149 __ ldr(r0, at_tos());
1150 // Now store using the appropriate barrier
1151 // Clobbers: r10, r11, r3
1152 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1153 __ b(done);
1154
1155 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1156 __ bind(is_null);
1157 __ profile_null_seen(r2);
1158
1159 // Store a null
1160 // Clobbers: r10, r11, r3
1161 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1162
1163 // Pop stack arguments
1164 __ bind(done);
1165 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1166 }
1167
1168 void TemplateTable::bastore()
1169 {
1170 transition(itos, vtos);
1171 __ pop_i(r1);
1172 __ pop_ptr(r3);
1173 // r0: value
1174 // r1: index
1175 // r3: array
1176 index_check(r3, r1); // prefer index in r1
1177
1178 // Need to check whether array is boolean or byte
1179 // since both types share the bastore bytecode.
1180 __ load_klass(r2, r3);
1181 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
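// The layout helper's element-type bits distinguish boolean[] from byte[];
// boolean stores must be masked down to 0/1.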
1948 __ br(j_not(cc), not_taken);
1949 branch(false, false);
1950 __ bind(not_taken);
1951 __ profile_not_taken_branch(r0);
1952 }
1953
1954 void TemplateTable::if_nullcmp(Condition cc)
1955 {
1956 transition(atos, vtos);
1957 // assume branch is more often taken than not (loops use backward branches)
1958 Label not_taken;
1959 if (cc == equal)
1960 __ cbnz(r0, not_taken);
1961 else
1962 __ cbz(r0, not_taken);
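// The test is inverted: branch to not_taken when the condition fails, and fall
// through into the taken path.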
1963 branch(false, false);
1964 __ bind(not_taken);
1965 __ profile_not_taken_branch(r0);
1966 }
1967
1968 void TemplateTable::if_acmp(Condition cc)
1969 {
1970 transition(atos, vtos);
1971 // assume branch is more often taken than not (loops use backward branches)
1972 Label not_taken;
1973 __ pop_ptr(r1);
1974 __ cmpoop(r1, r0);
1975 __ br(j_not(cc), not_taken);
1976 branch(false, false);
1977 __ bind(not_taken);
1978 __ profile_not_taken_branch(r0);
1979 }
1980
1981 void TemplateTable::ret() {
1982 transition(vtos, vtos);
1983 locals_index(r1);
1984 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1985 __ profile_ret(r1, r2);
1986 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1987 __ lea(rbcp, Address(rbcp, r1));
1988 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1989 __ dispatch_next(vtos, 0, /*generate_poll*/true);
1990 }
1991
1992 void TemplateTable::wide_ret() {
1993 transition(vtos, vtos);
1994 locals_index_wide(r1);
1995 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1996 __ profile_ret(r1, r2);
1997 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1998 __ lea(rbcp, Address(rbcp, r1));
1999 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2000 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2194 assert(_desc->calls_vm(),
2195 "inconsistent calls_vm information"); // call in remove_activation
2196
2197 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2198 assert(state == vtos, "only valid state");
2199
2200 __ ldr(c_rarg1, aaddress(0));
2201 __ load_klass(r3, c_rarg1);
2202 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2203 Label skip_register_finalizer;
2204 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2205
2206 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2207
2208 __ bind(skip_register_finalizer);
2209 }
2210
2211 // Issue a StoreStore barrier after all stores but before return
2212   // from any constructor for any class with a final field. We don't
2213   // know whether this return is from such a constructor, so we always do so.
2214 if (_desc->bytecode() == Bytecodes::_return)
2215 __ membar(MacroAssembler::StoreStore);
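// This is the final-field "freeze" at constructor exit that the Java memory model requires.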
2216
2217 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2218 Label no_safepoint;
2219 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2220 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2221 __ push(state);
2222 __ push_cont_fastpath(rthread);
2223 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2224 __ pop_cont_fastpath(rthread);
2225 __ pop(state);
2226 __ bind(no_safepoint);
2227 }
2228
2229 // Narrow result if state is itos but result type is smaller.
2230 // Need to narrow in the return bytecode rather than in generate_return_entry
2231 // since compiled code callers expect the result to already be narrowed.
2232 if (state == itos) {
2233 __ narrow(r0);
2234 }
2586 }
2587 // c_rarg1: object pointer or null
2588 // c_rarg2: cache entry pointer
2589 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2590 InterpreterRuntime::post_field_access),
2591 c_rarg1, c_rarg2);
2592 __ load_field_entry(cache, index);
2593 __ bind(L1);
2594 }
2595 }
2596
2597 void TemplateTable::pop_and_check_object(Register r)
2598 {
2599 __ pop_ptr(r);
2600 __ null_check(r); // for field access must check obj.
2601 __ verify_oop(r);
2602 }
2603
2604 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2605 {
2606 const Register cache = r4;
2607 const Register obj = r4;
2608 const Register index = r3;
2609 const Register tos_state = r3;
2610 const Register off = r19;
2611 const Register flags = r6;
2612 const Register bc = r4; // uses same reg as obj, so don't mix them
2613
2614 resolve_cache_and_index_for_field(byte_no, cache, index);
2615 jvmti_post_field_access(cache, index, is_static, false);
2616 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2617
2618 if (!is_static) {
2619 // obj is on the stack
2620 pop_and_check_object(obj);
2621 }
2622
2623 // 8179954: We need to make sure that the code generated for
2624 // volatile accesses forms a sequentially-consistent set of
2625 // operations when combined with STLR and LDAR. Without a leading
2626 // membar it's possible for a simple Dekker test to fail if loads
2627 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2628 // the stores in one method and we interpret the loads in another.
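// (C2 emits LDAR/STLR for volatile accesses while the interpreter uses LDR;DMB;
// the leading full barrier below makes the two schemes compose safely.)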
2629   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
2630 Label notVolatile;
2631 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2632 __ membar(MacroAssembler::AnyAny);
2633 __ bind(notVolatile);
2634 }
2635
2654 __ b(Done);
2655
2656 __ bind(notByte);
2657 __ cmp(tos_state, (u1)ztos);
2658 __ br(Assembler::NE, notBool);
2659
2660 // ztos (same code as btos)
2661 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2662 __ push(ztos);
2663 // Rewrite bytecode to be faster
2664 if (rc == may_rewrite) {
2665 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2666 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2667 }
2668 __ b(Done);
2669
2670 __ bind(notBool);
2671 __ cmp(tos_state, (u1)atos);
2672 __ br(Assembler::NE, notObj);
2673 // atos
2674 do_oop_load(_masm, field, r0, IN_HEAP);
2675 __ push(atos);
2676 if (rc == may_rewrite) {
2677 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2678 }
2679 __ b(Done);
2680
2681 __ bind(notObj);
2682 __ cmp(tos_state, (u1)itos);
2683 __ br(Assembler::NE, notInt);
2684 // itos
2685 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2686 __ push(itos);
2687 // Rewrite bytecode to be faster
2688 if (rc == may_rewrite) {
2689 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2690 }
2691 __ b(Done);
2692
2693 __ bind(notInt);
2694 __ cmp(tos_state, (u1)ctos);
2695 __ br(Assembler::NE, notChar);
2696 // ctos
2697 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2698 __ push(ctos);
2699 // Rewrite bytecode to be faster
2820 // c_rarg1: object pointer set up above (null if static)
2821 // c_rarg2: cache entry pointer
2822 // c_rarg3: jvalue object on the stack
2823 __ call_VM(noreg,
2824 CAST_FROM_FN_PTR(address,
2825 InterpreterRuntime::post_field_modification),
2826 c_rarg1, c_rarg2, c_rarg3);
2827 __ load_field_entry(cache, index);
2828 __ bind(L1);
2829 }
2830 }
2831
2832 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2833 transition(vtos, vtos);
2834
2835 const Register cache = r2;
2836 const Register index = r3;
2837 const Register tos_state = r3;
2838 const Register obj = r2;
2839 const Register off = r19;
2840 const Register flags = r0;
2841 const Register bc = r4;
2842
2843 resolve_cache_and_index_for_field(byte_no, cache, index);
2844 jvmti_post_field_mod(cache, index, is_static);
2845 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2846
2847 Label Done;
2848 __ mov(r5, flags);
2849
2850 {
2851 Label notVolatile;
2852 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2853 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
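// Leading half of the volatile-store fence; the matching trailing
// StoreLoad|StoreStore barrier is emitted after Done.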
2854 __ bind(notVolatile);
2855 }
2856
2857 // field address
2858 const Address field(obj, off);
2859
2860 Label notByte, notBool, notInt, notShort, notChar,
2861 notLong, notFloat, notObj, notDouble;
2862
2863 assert(btos == 0, "change code, btos != 0");
2864 __ cbnz(tos_state, notByte);
2865
2866 // Don't rewrite putstatic, only putfield
2867 if (is_static) rc = may_not_rewrite;
2868
2869 // btos
2870 {
2871 __ pop(btos);
2872 if (!is_static) pop_and_check_object(obj);
2881 __ cmp(tos_state, (u1)ztos);
2882 __ br(Assembler::NE, notBool);
2883
2884 // ztos
2885 {
2886 __ pop(ztos);
2887 if (!is_static) pop_and_check_object(obj);
2888 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
2889 if (rc == may_rewrite) {
2890 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2891 }
2892 __ b(Done);
2893 }
2894
2895 __ bind(notBool);
2896 __ cmp(tos_state, (u1)atos);
2897 __ br(Assembler::NE, notObj);
2898
2899 // atos
2900 {
2901 __ pop(atos);
2902 if (!is_static) pop_and_check_object(obj);
2903 // Store into the field
2904 // Clobbers: r10, r11, r3
2905 do_oop_store(_masm, field, r0, IN_HEAP);
2906 if (rc == may_rewrite) {
2907 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2908 }
2909 __ b(Done);
2910 }
2911
2912 __ bind(notObj);
2913 __ cmp(tos_state, (u1)itos);
2914 __ br(Assembler::NE, notInt);
2915
2916 // itos
2917 {
2918 __ pop(itos);
2919 if (!is_static) pop_and_check_object(obj);
2920 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
2921 if (rc == may_rewrite) {
2922 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2923 }
2924 __ b(Done);
2925 }
2926
2927 __ bind(notInt);
2928 __ cmp(tos_state, (u1)ctos);
2929 __ br(Assembler::NE, notChar);
2994 {
2995 __ pop(dtos);
2996 if (!is_static) pop_and_check_object(obj);
2997 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
2998 if (rc == may_rewrite) {
2999 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
3000 }
3001 }
3002
3003 #ifdef ASSERT
3004 __ b(Done);
3005
3006 __ bind(notDouble);
3007 __ stop("Bad state");
3008 #endif
3009
3010 __ bind(Done);
3011
3012 {
3013 Label notVolatile;
3014 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3015 __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3016 __ bind(notVolatile);
3017 }
3018 }
3019
3020 void TemplateTable::putfield(int byte_no)
3021 {
3022 putfield_or_static(byte_no, false);
3023 }
3024
3025 void TemplateTable::nofast_putfield(int byte_no) {
3026 putfield_or_static(byte_no, false, may_not_rewrite);
3027 }
3028
3029 void TemplateTable::putstatic(int byte_no) {
3030 putfield_or_static(byte_no, true);
3031 }
3032
3033 void TemplateTable::jvmti_post_fast_field_mod() {
3034 if (JvmtiExport::can_post_field_modification()) {
3035 // Check to see if a field modification watch has been set before
3036 // we take the time to call into the VM.
3037 Label L2;
3038 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3039 __ ldrw(c_rarg3, Address(rscratch1));
3040 __ cbzw(c_rarg3, L2);
3041 __ pop_ptr(r19); // copy the object pointer from tos
3042 __ verify_oop(r19);
3043 __ push_ptr(r19); // put the object pointer back on tos
3044 // Save tos values before call_VM() clobbers them. Since we have
3045 // to do it for every data type, we use the saved values as the
3046 // jvalue object.
3047 switch (bytecode()) { // load values into the jvalue object
3048 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3049 case Bytecodes::_fast_bputfield: // fall through
3050 case Bytecodes::_fast_zputfield: // fall through
3051 case Bytecodes::_fast_sputfield: // fall through
3052 case Bytecodes::_fast_cputfield: // fall through
3053 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3054 case Bytecodes::_fast_dputfield: __ push_d(); break;
3055 case Bytecodes::_fast_fputfield: __ push_f(); break;
3056 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3057
3058 default:
3059 ShouldNotReachHere();
3060 }
3061 __ mov(c_rarg3, esp); // points to jvalue on the stack
3062 // access constant pool cache entry
3063 __ load_field_entry(c_rarg2, r0);
3064 __ verify_oop(r19);
3065 // r19: object pointer copied above
3066 // c_rarg2: cache entry pointer
3067 // c_rarg3: jvalue object on the stack
3068 __ call_VM(noreg,
3069 CAST_FROM_FN_PTR(address,
3070 InterpreterRuntime::post_field_modification),
3071 r19, c_rarg2, c_rarg3);
3072
3073 switch (bytecode()) { // restore tos values
3074 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3075 case Bytecodes::_fast_bputfield: // fall through
3076 case Bytecodes::_fast_zputfield: // fall through
3077 case Bytecodes::_fast_sputfield: // fall through
3078 case Bytecodes::_fast_cputfield: // fall through
3079 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3080 case Bytecodes::_fast_dputfield: __ pop_d(); break;
3081 case Bytecodes::_fast_fputfield: __ pop_f(); break;
3082 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3083 default: break;
3084 }
3085 __ bind(L2);
3086 }
3087 }
3088
3089 void TemplateTable::fast_storefield(TosState state)
3090 {
3091 transition(state, vtos);
3092
3093 ByteSize base = ConstantPoolCache::base_offset();
3101 load_resolved_field_entry(r2, r2, noreg, r1, r5);
3102 __ verify_field_offset(r1);
3103
3104 {
3105 Label notVolatile;
3106 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3107 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3108 __ bind(notVolatile);
3109 }
3110
3111 Label notVolatile;
3112
3113 // Get object from stack
3114 pop_and_check_object(r2);
3115
3116 // field address
3117 const Address field(r2, r1);
3118
3119 // access field
3120 switch (bytecode()) {
3121 case Bytecodes::_fast_aputfield:
3122 // Clobbers: r10, r11, r3
3123 do_oop_store(_masm, field, r0, IN_HEAP);
3124 break;
3125 case Bytecodes::_fast_lputfield:
3126 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3127 break;
3128 case Bytecodes::_fast_iputfield:
3129 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3130 break;
3131 case Bytecodes::_fast_zputfield:
3132 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3133 break;
3134 case Bytecodes::_fast_bputfield:
3135 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3136 break;
3137 case Bytecodes::_fast_sputfield:
3138 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3139 break;
3140 case Bytecodes::_fast_cputfield:
3196 // r0: object
3197 __ verify_oop(r0);
3198 __ null_check(r0);
3199 const Address field(r0, r1);
3200
3201 // 8179954: We need to make sure that the code generated for
3202 // volatile accesses forms a sequentially-consistent set of
3203 // operations when combined with STLR and LDAR. Without a leading
3204 // membar it's possible for a simple Dekker test to fail if loads
3205 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3206 // the stores in one method and we interpret the loads in another.
3207 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3208 Label notVolatile;
3209 __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3210 __ membar(MacroAssembler::AnyAny);
3211 __ bind(notVolatile);
3212 }
3213
3214 // access field
3215 switch (bytecode()) {
3216 case Bytecodes::_fast_agetfield:
3217 do_oop_load(_masm, field, r0, IN_HEAP);
3218 __ verify_oop(r0);
3219 break;
3220 case Bytecodes::_fast_lgetfield:
3221 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3222 break;
3223 case Bytecodes::_fast_igetfield:
3224 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3225 break;
3226 case Bytecodes::_fast_bgetfield:
3227 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3228 break;
3229 case Bytecodes::_fast_sgetfield:
3230 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3231 break;
3232 case Bytecodes::_fast_cgetfield:
3233 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3234 break;
3235 case Bytecodes::_fast_fgetfield:
3616 Label initialize_header;
3617
3618 __ get_cpool_and_tags(r4, r0);
3619 // Make sure the class we're about to instantiate has been resolved.
3620   // This is done before loading InstanceKlass to be consistent with the order
3621   // in which the constant pool is updated (see ConstantPool::klass_at_put)
3622 const int tags_offset = Array<u1>::base_offset_in_bytes();
3623 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3624 __ lea(rscratch1, Address(rscratch1, tags_offset));
3625 __ ldarb(rscratch1, rscratch1);
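// Acquire load of the tag byte: pairs with the releasing tag update during class
// resolution, so the resolved klass is visible before the tag reads as Class.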
3626 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3627 __ br(Assembler::NE, slow_case);
3628
3629 // get InstanceKlass
3630 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3631
3632 // make sure klass is initialized
3633 assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3634 __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3635
3636 // get instance_size in InstanceKlass (scaled to a count of bytes)
3637 __ ldrw(r3,
3638 Address(r4,
3639 Klass::layout_helper_offset()));
3640 // test to see if it is malformed in some way
3641 __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3642
3643 // Allocate the instance:
3644 // If TLAB is enabled:
3645 // Try to allocate in the TLAB.
3646 // If fails, go to the slow path.
3647 // Initialize the allocation.
3648 // Exit.
3649 //
3650 // Go to slow path.
3651
3652 if (UseTLAB) {
3653 __ tlab_allocate(r0, r3, 0, noreg, r1, slow_case);
3654
3655 if (ZeroTLAB) {
3656 // the fields have been already cleared
3657 __ b(initialize_header);
3658 }
3659
3660 // The object is initialized before the header. If the object size is
3661 // zero, go directly to the header initialization.
3662 int header_size = oopDesc::header_size() * HeapWordSize;
3663 assert(is_aligned(header_size, BytesPerLong), "oop header size must be 8-byte-aligned");
3664 __ sub(r3, r3, header_size);
3665 __ cbz(r3, initialize_header);
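// r3 now holds the payload size in bytes, always a multiple of BytesPerLong,
// so the zeroing loop below can stride by 8.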
3666
3667 // Initialize object fields
3668 {
3669 __ add(r2, r0, header_size);
3670 Label loop;
3671 __ bind(loop);
3672 __ str(zr, Address(__ post(r2, BytesPerLong)));
3673 __ sub(r3, r3, BytesPerLong);
3674 __ cbnz(r3, loop);
3675 }
3676
3677 // initialize object header only.
3678 __ bind(initialize_header);
3679 if (UseCompactObjectHeaders) {
3680 __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3681 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
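// With compact headers the prototype mark word already encodes the compressed
// klass pointer, so no separate klass store is needed.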
3682 } else {
3683 __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3684 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3685 __ store_klass_gap(r0, zr); // zero klass gap for compressed oops
3686 __ store_klass(r0, r4); // store klass last
3687 }
3688
3689 if (DTraceAllocProbes) {
3690 // Trigger dtrace event for fastpath
3691 __ push(atos); // save the return value
3692 __ call_VM_leaf(
3693 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3694 __ pop(atos); // restore the return value
3695
3696 }
3697 __ b(done);
3698 }
3699
3700 // slow case
3701 __ bind(slow_case);
3702 __ get_constant_pool(c_rarg1);
3703 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3704 __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3705 __ verify_oop(r0);
3706
3707 // continue
3708 __ bind(done);
3709 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3710 __ membar(Assembler::StoreStore);
3711 }
3712
3713 void TemplateTable::newarray() {
3714 transition(itos, atos);
3715 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3716 __ mov(c_rarg2, r0);
3717 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3718 c_rarg1, c_rarg2);
3762 __ bind(quicked);
3763 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3764 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3765
3766 __ bind(resolved);
3767 __ load_klass(r19, r3);
3768
3769 // Generate subtype check. Blows r2, r5. Object in r3.
3770 // Superklass in r0. Subklass in r19.
3771 __ gen_subtype_check(r19, ok_is_subtype);
3772
3773 // Come here on failure
3774 __ push(r3);
3775 // object is at TOS
3776 __ b(Interpreter::_throw_ClassCastException_entry);
3777
3778 // Come here on success
3779 __ bind(ok_is_subtype);
3780   __ mov(r0, r3); // Restore object from r3
3781
3782 // Collect counts on whether this test sees nulls a lot or not.
3783 if (ProfileInterpreter) {
3784 __ b(done);
3785 __ bind(is_null);
3786 __ profile_null_seen(r2);
3787 } else {
3788 __ bind(is_null); // same as 'done'
3789 }
3790 __ bind(done);
3791 }
3792
3793 void TemplateTable::instanceof() {
3794 transition(atos, itos);
3795 Label done, is_null, ok_is_subtype, quicked, resolved;
3796 __ cbz(r0, is_null);
3797
3798 // Get cpool & tags index
3799 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3800 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3801 // See if bytecode has already been quicked
3802 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3803 __ lea(r1, Address(rscratch1, r19));
3804 __ ldarb(r1, r1);
3805 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3806 __ br(Assembler::EQ, quicked);
3807
3808 __ push(atos); // save receiver for result, and for GC
3809 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3887 // in the assembly code structure as well
3888 //
3889 // Stack layout:
3890 //
3891 // [expressions ] <--- esp = expression stack top
3892 // ..
3893 // [expressions ]
3894 // [monitor entry] <--- monitor block top = expression stack bot
3895 // ..
3896 // [monitor entry]
3897 // [frame data ] <--- monitor block bot
3898 // ...
3899 // [saved rfp ] <--- rfp
3900 void TemplateTable::monitorenter()
3901 {
3902 transition(atos, vtos);
3903
3904 // check for null object
3905 __ null_check(r0);
3906
3907 const Address monitor_block_top(
3908 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3909 const Address monitor_block_bot(
3910 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3911 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3912
3913 Label allocated;
3914
3915 // initialize entry pointer
3916 __ mov(c_rarg1, zr); // points to free slot or null
3917
3918 // find a free slot in the monitor block (result in c_rarg1)
3919 {
3920 Label entry, loop, exit;
3921 __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
3922 __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
3923 // c_rarg3 points to current entry, starting with top-most entry
3924
3925 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3926
3988 // c_rarg1: points to monitor entry
3989 __ bind(allocated);
3990
3991   // Increment bcp to point to the next bytecode, so exception
3992   // handling for asynchronous exceptions works correctly.
3993 // The object has already been popped from the stack, so the
3994 // expression stack looks correct.
3995 __ increment(rbcp);
3996
3997 // store object
3998 __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
3999 __ lock_object(c_rarg1);
4000
4001 // check to make sure this monitor doesn't cause stack overflow after locking
4002 __ save_bcp(); // in case of exception
4003 __ generate_stack_overflow_check(0);
4004
4005 // The bcp has already been incremented. Just need to dispatch to
4006 // next instruction.
4007 __ dispatch_next(vtos);
4008 }
4009
4010
4011 void TemplateTable::monitorexit()
4012 {
4013 transition(atos, vtos);
4014
4015 // check for null object
4016 __ null_check(r0);
4017
4018 const Address monitor_block_top(
4019 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4020 const Address monitor_block_bot(
4021 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4022 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4023
4024 Label found;
4025
4026 // find matching slot
4027 {
4028 Label entry, loop;
4029 __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4030 __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4031 // c_rarg1 points to current entry, starting with top-most entry
4032
4033 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4034 // of monitor block
4035 __ b(entry);
4036
4037 __ bind(loop);
26 #include "asm/macroAssembler.inline.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "compiler/compilerDefinitions.inline.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "gc/shared/tlab_globals.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/objArrayKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/arguments.hpp"
47 #include "runtime/frame.inline.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/stubRoutines.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "utilities/powerOfTwo.hpp"
52
53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
54
55 // Address computation: local variables
56
57 static inline Address iaddress(int n) {
58 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
59 }
60
61 static inline Address laddress(int n) {
62 return iaddress(n + 1);
63 }
64
65 static inline Address faddress(int n) {
66 return iaddress(n);
157 Address src,
158 Register dst,
159 DecoratorSet decorators) {
160 __ load_heap_oop(dst, src, r10, r11, decorators);
161 }
162
163 Address TemplateTable::at_bcp(int offset) {
164 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
165 return Address(rbcp, offset);
166 }
167
168 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
169 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
170 int byte_no)
171 {
172 assert_different_registers(bc_reg, temp_reg);
173 if (!RewriteBytecodes) return;
174 Label L_patch_done;
175
176 switch (bc) {
177 case Bytecodes::_fast_vputfield:
178 case Bytecodes::_fast_aputfield:
179 case Bytecodes::_fast_bputfield:
180 case Bytecodes::_fast_zputfield:
181 case Bytecodes::_fast_cputfield:
182 case Bytecodes::_fast_dputfield:
183 case Bytecodes::_fast_fputfield:
184 case Bytecodes::_fast_iputfield:
185 case Bytecodes::_fast_lputfield:
186 case Bytecodes::_fast_sputfield:
187 {
188 // We skip bytecode quickening for putfield instructions when
189 // the put_code written to the constant pool cache is zero.
190 // This is required so that every execution of this instruction
191 // calls out to InterpreterRuntime::resolve_get_put to do
192 // additional, required work.
193 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
194 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
195 __ load_field_entry(temp_reg, bc_reg);
196 if (byte_no == f1_byte) {
197 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
742 locals_index_wide(r1);
743 __ ldr(r0, aaddress(r1));
744 }
745
746 void TemplateTable::index_check(Register array, Register index)
747 {
748 // destroys r1, rscratch1
749 // sign extend index for use by indexed load
750 // __ movl2ptr(index, index);
751 // check index
752 Register length = rscratch1;
753 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
754 __ cmpw(index, length);
755 if (index != r1) {
756 // ??? convention: move aberrant index into r1 for exception message
757 assert(r1 != array, "different registers");
758 __ mov(r1, index);
759 }
760 Label ok;
761 __ br(Assembler::LO, ok);
762 // ??? convention: move array into r3 for exception message
763 __ mov(r3, array);
764 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
765 __ br(rscratch1);
766 __ bind(ok);
767 }
768
769 void TemplateTable::iaload()
770 {
771 transition(itos, itos);
772 __ mov(r1, r0);
773 __ pop_ptr(r0);
774 // r0: array
775 // r1: index
776 index_check(r0, r1); // leaves index in r1, kills rscratch1
777 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
778 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
779 }
780
781 void TemplateTable::laload()
782 {
783 transition(itos, ltos);
784 __ mov(r1, r0);
785 __ pop_ptr(r0);
805 void TemplateTable::daload()
806 {
807 transition(itos, dtos);
808 __ mov(r1, r0);
809 __ pop_ptr(r0);
810 // r0: array
811 // r1: index
812 index_check(r0, r1); // leaves index in r1, kills rscratch1
813 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
814 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
815 }
816
817 void TemplateTable::aaload()
818 {
819 transition(itos, atos);
820 __ mov(r1, r0);
821 __ pop_ptr(r0);
822 // r0: array
823 // r1: index
824 index_check(r0, r1); // leaves index in r1, kills rscratch1
825 __ profile_array_type<ArrayLoadData>(r2, r0, r4);
826 if (UseArrayFlattening) {
827 Label is_flat_array, done;
828
829 __ test_flat_array_oop(r0, rscratch1 /*temp*/, is_flat_array);
830 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
831 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
832
833 __ b(done);
834 __ bind(is_flat_array);
835 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), r0, r1);
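// The runtime buffers the flat element into a fresh heap instance and returns
// the reference in r0.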
836     // Ensure the stores that copy the inline field contents are visible
837 // before any subsequent store that publishes this reference.
838 __ membar(Assembler::StoreStore);
839 __ bind(done);
840 } else {
841 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
842 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
843 }
844 __ profile_element_type(r2, r0, r4);
845 }
846
847 void TemplateTable::baload()
848 {
849 transition(itos, itos);
850 __ mov(r1, r0);
851 __ pop_ptr(r0);
852 // r0: array
853 // r1: index
854 index_check(r0, r1); // leaves index in r1, kills rscratch1
855 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
856 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
857 }
858
859 void TemplateTable::caload()
860 {
861 transition(itos, itos);
862 __ mov(r1, r0);
863 __ pop_ptr(r0);
864 // r0: array
1111 // r1: index
1112 // r3: array
1113 index_check(r3, r1); // prefer index in r1
1114 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1115 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1116 }
1117
1118 void TemplateTable::dastore() {
1119 transition(dtos, vtos);
1120 __ pop_i(r1);
1121 __ pop_ptr(r3);
1122 // v0: value
1123 // r1: index
1124 // r3: array
1125 index_check(r3, r1); // prefer index in r1
1126 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1127 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1128 }
1129
1130 void TemplateTable::aastore() {
1131 Label is_null, is_flat_array, ok_is_subtype, done;
1132 transition(vtos, vtos);
1133 // stack: ..., array, index, value
1134 __ ldr(r0, at_tos()); // value
1135 __ ldr(r2, at_tos_p1()); // index
1136 __ ldr(r3, at_tos_p2()); // array
1137
1138 index_check(r3, r2); // kills r1
1139
1140 __ profile_array_type<ArrayStoreData>(r4, r3, r5);
1141 __ profile_multiple_element_types(r4, r0, r5, r6);
1142
1143 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1144 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1145 // Be careful not to clobber r4 below
1146
1147 // do array store check - check for null value first
1148 __ cbz(r0, is_null);
1149
1150 // Move array class to r5
1151 __ load_klass(r5, r3);
1152
1153 if (UseArrayFlattening) {
1154 __ ldrw(r6, Address(r5, Klass::layout_helper_offset()));
1155 __ test_flat_array_layout(r6, is_flat_array);
1156 }
1157
1158 // Move subklass into r1
1159 __ load_klass(r1, r0);
1160
1161 // Move array element superklass into r0
1162 __ ldr(r0, Address(r5, ObjArrayKlass::element_klass_offset()));
1163   // Compress array + index*oopSize + header into a single register. Frees r2.
1164
1165 // Generate subtype check. Blows r2, r5
1166 // Superklass in r0. Subklass in r1.
1167
1168 // is "r1 <: r0" ? (value subclass <: array element superclass)
1169 __ gen_subtype_check(r1, ok_is_subtype, false);
1170
1171 // Come here on failure
1172 // object is at TOS
1173 __ b(Interpreter::_throw_ArrayStoreException_entry);
1174
1175 // Come here on success
1176 __ bind(ok_is_subtype);
1177
1178 // Get the value we will store
1179 __ ldr(r0, at_tos());
1180 // Now store using the appropriate barrier
1181 // Clobbers: r10, r11, r3
1182 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1183 __ b(done);
1184
1185 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1186 __ bind(is_null);
1187 if (Arguments::is_valhalla_enabled()) {
1188 Label is_null_into_value_array_npe, store_null;
1189
1190 if (UseArrayFlattening) {
1191 __ test_flat_array_oop(r3, rscratch1, is_flat_array);
1192 }
1193
1194 // No way to store null in a null-free array
1195 __ test_null_free_array_oop(r3, rscratch1, is_null_into_value_array_npe);
1196 __ b(store_null);
1197
1198 __ bind(is_null_into_value_array_npe);
1199 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1200
1201 __ bind(store_null);
1202 }
1203
1204 // Store a null
1205 // Clobbers: r10, r11, r3
1206 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1207 __ b(done);
1208
1209 if (UseArrayFlattening) {
1210 Label is_type_ok;
1211 __ bind(is_flat_array); // Store non-null value to flat
1212
1213 __ ldr(r0, at_tos()); // value
1214 __ ldr(r3, at_tos_p1()); // index
1215 __ ldr(r2, at_tos_p2()); // array
1216 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), r0, r2, r3);
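// The runtime performs the element type check and copies the value's fields
// into the flat element.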
1217 }
1218
1219 // Pop stack arguments
1220 __ bind(done);
1221 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1222 }
1223
1224 void TemplateTable::bastore()
1225 {
1226 transition(itos, vtos);
1227 __ pop_i(r1);
1228 __ pop_ptr(r3);
1229 // r0: value
1230 // r1: index
1231 // r3: array
1232 index_check(r3, r1); // prefer index in r1
1233
1234 // Need to check whether array is boolean or byte
1235 // since both types share the bastore bytecode.
1236 __ load_klass(r2, r3);
1237 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2004 __ br(j_not(cc), not_taken);
2005 branch(false, false);
2006 __ bind(not_taken);
2007 __ profile_not_taken_branch(r0);
2008 }
2009
2010 void TemplateTable::if_nullcmp(Condition cc)
2011 {
2012 transition(atos, vtos);
2013 // assume branch is more often taken than not (loops use backward branches)
2014 Label not_taken;
2015 if (cc == equal)
2016 __ cbnz(r0, not_taken);
2017 else
2018 __ cbz(r0, not_taken);
2019 branch(false, false);
2020 __ bind(not_taken);
2021 __ profile_not_taken_branch(r0);
2022 }
2023
2024 void TemplateTable::if_acmp(Condition cc) {
2025 transition(atos, vtos);
2026 // assume branch is more often taken than not (loops use backward branches)
2027 Label taken, not_taken;
2028 __ pop_ptr(r1);
2029
2030 __ profile_acmp(r2, r1, r0, r4);
2031
2032 Register is_inline_type_mask = rscratch1;
2033 __ mov(is_inline_type_mask, markWord::inline_type_pattern);
2034
2035 if (Arguments::is_valhalla_enabled()) {
2036 __ cmp(r1, r0);
2037 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2038
2039 // might be substitutable, test if either r0 or r1 is null
2040 __ andr(r2, r0, r1);
2041 __ cbz(r2, (cc == equal) ? not_taken : taken);
2042
2043 // and both are values ?
2044 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2045 __ andr(r2, r2, is_inline_type_mask);
2046 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2047 __ andr(r4, r4, is_inline_type_mask);
2048 __ andr(r2, r2, r4);
2049 __ cmp(r2, is_inline_type_mask);
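// r2 equals the mask only if both mark words carry the full inline_type_pattern,
// i.e. both operands are value objects.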
2050 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2051
2052 // same value klass ?
2053 __ load_metadata(r2, r1);
2054 __ load_metadata(r4, r0);
2055 __ cmp(r2, r4);
2056 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2057
2058 // Know both are the same type, let's test for substitutability...
2059 if (cc == equal) {
2060 invoke_is_substitutable(r0, r1, taken, not_taken);
2061 } else {
2062 invoke_is_substitutable(r0, r1, not_taken, taken);
2063 }
2064 __ stop("Not reachable");
2065 }
2066
2067 __ cmpoop(r1, r0);
2068 __ br(j_not(cc), not_taken);
2069 __ bind(taken);
2070 branch(false, false);
2071 __ bind(not_taken);
2072 __ profile_not_taken_branch(r0, true);
2073 }
2074
2075 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2076 Label& is_subst, Label& not_subst) {
2077
2078 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2079   // On return, r0 holds the is_substitutable answer; branch to the matching outcome.
2080 __ cbz(r0, not_subst);
2081 __ b(is_subst);
2082 }
2083
2084
2085 void TemplateTable::ret() {
2086 transition(vtos, vtos);
2087 locals_index(r1);
2088 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2089 __ profile_ret(r1, r2);
2090 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2091 __ lea(rbcp, Address(rbcp, r1));
2092 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2093 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2094 }
2095
2096 void TemplateTable::wide_ret() {
2097 transition(vtos, vtos);
2098 locals_index_wide(r1);
2099 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2100 __ profile_ret(r1, r2);
2101 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2102 __ lea(rbcp, Address(rbcp, r1));
2103 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2104 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2298 assert(_desc->calls_vm(),
2299 "inconsistent calls_vm information"); // call in remove_activation
2300
2301 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2302 assert(state == vtos, "only valid state");
2303
2304 __ ldr(c_rarg1, aaddress(0));
2305 __ load_klass(r3, c_rarg1);
2306 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2307 Label skip_register_finalizer;
2308 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2309
2310 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2311
2312 __ bind(skip_register_finalizer);
2313 }
2314
2315 // Issue a StoreStore barrier after all stores but before return
2317   // from any constructor for any class with a final field. We don't
2318   // know whether this return is from such a constructor, so we always do so.
2318 if (_desc->bytecode() == Bytecodes::_return
2319 || _desc->bytecode() == Bytecodes::_return_register_finalizer)
2320 __ membar(MacroAssembler::StoreStore);
2321
2322 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2323 Label no_safepoint;
2324 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2325 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2326 __ push(state);
2327 __ push_cont_fastpath(rthread);
2328 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2329 __ pop_cont_fastpath(rthread);
2330 __ pop(state);
2331 __ bind(no_safepoint);
2332 }
2333
2334 // Narrow result if state is itos but result type is smaller.
2335 // Need to narrow in the return bytecode rather than in generate_return_entry
2336 // since compiled code callers expect the result to already be narrowed.
2337 if (state == itos) {
2338 __ narrow(r0);
2339 }
2691 }
2692 // c_rarg1: object pointer or null
2693 // c_rarg2: cache entry pointer
2694 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2695 InterpreterRuntime::post_field_access),
2696 c_rarg1, c_rarg2);
2697 __ load_field_entry(cache, index);
2698 __ bind(L1);
2699 }
2700 }
2701
2702 void TemplateTable::pop_and_check_object(Register r)
2703 {
2704 __ pop_ptr(r);
2705 __ null_check(r); // for field access must check obj.
2706 __ verify_oop(r);
2707 }
2708
2709 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2710 {
2711 const Register cache = r2;
2712 const Register obj = r4;
2713 const Register index = r3;
2714 const Register tos_state = r3;
2715 const Register off = r19;
2716 const Register flags = r6;
2717 const Register bc = r4; // uses same reg as obj, so don't mix them
2718
2719 resolve_cache_and_index_for_field(byte_no, cache, index);
2720 jvmti_post_field_access(cache, index, is_static, false);
2721
2722 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2723
2724 if (!is_static) {
2725 // obj is on the stack
2726 pop_and_check_object(obj);
2727 }
2728
2729 // 8179954: We need to make sure that the code generated for
2730 // volatile accesses forms a sequentially-consistent set of
2731 // operations when combined with STLR and LDAR. Without a leading
2732 // membar it's possible for a simple Dekker test to fail if loads
2733 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2734 // the stores in one method and we interpret the loads in another.
2735   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
2736 Label notVolatile;
2737 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2738 __ membar(MacroAssembler::AnyAny);
2739 __ bind(notVolatile);
2740 }
2741
2760 __ b(Done);
2761
2762 __ bind(notByte);
2763 __ cmp(tos_state, (u1)ztos);
2764 __ br(Assembler::NE, notBool);
2765
2766 // ztos (same code as btos)
2767 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2768 __ push(ztos);
2769 // Rewrite bytecode to be faster
2770 if (rc == may_rewrite) {
2771 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2772 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2773 }
2774 __ b(Done);
2775
2776 __ bind(notBool);
2777 __ cmp(tos_state, (u1)atos);
2778 __ br(Assembler::NE, notObj);
2779 // atos
2780 if (!Arguments::is_valhalla_enabled()) {
2781 do_oop_load(_masm, field, r0, IN_HEAP);
2782 __ push(atos);
2783 if (rc == may_rewrite) {
2784 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2785 }
2786 __ b(Done);
2787 } else { // Valhalla
2788 if (is_static) {
2789 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2790 __ push(atos);
2791 __ b(Done);
2792 } else {
2793 Label is_flat;
2794 __ test_field_is_flat(flags, noreg /* temp */, is_flat);
2795 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2796 __ push(atos);
2797 if (rc == may_rewrite) {
2798 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2799 }
2800 __ b(Done);
2801 __ bind(is_flat);
2802 // field is flat (null-free or nullable with a null-marker)
2803 __ mov(r0, obj);
2804 __ read_flat_field(cache, r0);
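// read_flat_field buffers the flat field into a heap instance (or yields null
// via the null marker, for nullable layouts) and leaves the reference in r0.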
2805 __ verify_oop(r0);
2806 __ push(atos);
2807 if (rc == may_rewrite) {
2808 patch_bytecode(Bytecodes::_fast_vgetfield, bc, r1);
2809 }
2810 __ b(Done);
2811 }
2812 }
2813
2814 __ bind(notObj);
2815 __ cmp(tos_state, (u1)itos);
2816 __ br(Assembler::NE, notInt);
2817 // itos
2818 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2819 __ push(itos);
2820 // Rewrite bytecode to be faster
2821 if (rc == may_rewrite) {
2822 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2823 }
2824 __ b(Done);
2825
2826 __ bind(notInt);
2827 __ cmp(tos_state, (u1)ctos);
2828 __ br(Assembler::NE, notChar);
2829 // ctos
2830 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2831 __ push(ctos);
2832 // Rewrite bytecode to be faster
2953 // c_rarg1: object pointer set up above (null if static)
2954 // c_rarg2: cache entry pointer
2955 // c_rarg3: jvalue object on the stack
2956 __ call_VM(noreg,
2957 CAST_FROM_FN_PTR(address,
2958 InterpreterRuntime::post_field_modification),
2959 c_rarg1, c_rarg2, c_rarg3);
2960 __ load_field_entry(cache, index);
2961 __ bind(L1);
2962 }
2963 }
2964
2965 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2966 transition(vtos, vtos);
2967
2968 const Register cache = r2;
2969 const Register index = r3;
2970 const Register tos_state = r3;
2971 const Register obj = r2;
2972 const Register off = r19;
2973 const Register flags = r6;
2974 const Register bc = r4;
2975
2976 resolve_cache_and_index_for_field(byte_no, cache, index);
2977 jvmti_post_field_mod(cache, index, is_static);
2978 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2979
2980 Label Done;
2981 {
2982 Label notVolatile;
2983 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2984 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2985 __ bind(notVolatile);
2986 }
2987
2988 // field address
2989 const Address field(obj, off);
2990
2991 Label notByte, notBool, notInt, notShort, notChar,
2992 notLong, notFloat, notObj, notDouble;
2993
2994 assert(btos == 0, "change code, btos != 0");
2995 __ cbnz(tos_state, notByte);
2996
2997 // Don't rewrite putstatic, only putfield
2998 if (is_static) rc = may_not_rewrite;
2999
3000 // btos
3001 {
3002 __ pop(btos);
3003 if (!is_static) pop_and_check_object(obj);
3012 __ cmp(tos_state, (u1)ztos);
3013 __ br(Assembler::NE, notBool);
3014
3015 // ztos
3016 {
3017 __ pop(ztos);
3018 if (!is_static) pop_and_check_object(obj);
3019 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3020 if (rc == may_rewrite) {
3021 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
3022 }
3023 __ b(Done);
3024 }
3025
3026 __ bind(notBool);
3027 __ cmp(tos_state, (u1)atos);
3028 __ br(Assembler::NE, notObj);
3029
3030 // atos
3031 {
3032 if (!Arguments::is_valhalla_enabled()) {
3033 __ pop(atos);
3034 if (!is_static) pop_and_check_object(obj);
3035 // Store into the field
3036 // Clobbers: r10, r11, r3
3037 do_oop_store(_masm, field, r0, IN_HEAP);
3038 if (rc == may_rewrite) {
3039 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
3040 }
3041 __ b(Done);
3042 } else { // Valhalla
3043 __ pop(atos);
3044 if (is_static) {
3045 Label is_nullable;
3046 __ test_field_is_not_null_free_inline_type(flags, noreg /* temp */, is_nullable);
3047 __ null_check(r0); // FIXME JDK-8341120
3048 __ bind(is_nullable);
3049 do_oop_store(_masm, field, r0, IN_HEAP);
3050 __ b(Done);
3051 } else {
3052 Label null_free_reference, is_flat, rewrite_inline;
3053 __ test_field_is_flat(flags, noreg /* temp */, is_flat);
3054 __ test_field_is_null_free_inline_type(flags, noreg /* temp */, null_free_reference);
3055 pop_and_check_object(obj);
3056 // Store into the field
3057 // Clobbers: r10, r11, r3
3058 do_oop_store(_masm, field, r0, IN_HEAP);
3059 if (rc == may_rewrite) {
3060 patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
3061 }
3062 __ b(Done);
3063 // Implementation of the inline type semantic
3064 __ bind(null_free_reference);
3065 __ null_check(r0); // FIXME JDK-8341120
3066 pop_and_check_object(obj);
3067 // Store into the field
3068 // Clobbers: r10, r11, r3
3069 do_oop_store(_masm, field, r0, IN_HEAP);
3070 __ b(rewrite_inline);
3071 __ bind(is_flat);
3072 pop_and_check_object(r7);
3073 __ write_flat_field(cache, off, index, flags, r7);
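// write_flat_field copies the value's fields (and the null marker, for nullable
// layouts) into the flat field of the object in r7.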
3074 __ bind(rewrite_inline);
3075 if (rc == may_rewrite) {
3076 patch_bytecode(Bytecodes::_fast_vputfield, bc, r19, true, byte_no);
3077 }
3078 __ b(Done);
3079 }
3080 } // Valhalla
3081 }
3082
3083 __ bind(notObj);
3084 __ cmp(tos_state, (u1)itos);
3085 __ br(Assembler::NE, notInt);
3086
3087 // itos
3088 {
3089 __ pop(itos);
3090 if (!is_static) pop_and_check_object(obj);
3091 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3092 if (rc == may_rewrite) {
3093 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
3094 }
3095 __ b(Done);
3096 }
3097
3098 __ bind(notInt);
3099 __ cmp(tos_state, (u1)ctos);
3100 __ br(Assembler::NE, notChar);

  // ctos
  {
    __ pop(ctos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notChar);
  __ cmp(tos_state, (u1)stos);
  __ br(Assembler::NE, notShort);

  // stos
  {
    __ pop(stos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notShort);
  __ cmp(tos_state, (u1)ltos);
  __ br(Assembler::NE, notLong);

  // ltos
  {
    __ pop(ltos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notLong);
  __ cmp(tos_state, (u1)ftos);
  __ br(Assembler::NE, notFloat);

  // ftos
  {
    __ pop(ftos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notFloat);
#ifdef ASSERT
  __ cmp(tos_state, (u1)dtos);
  __ br(Assembler::NE, notDouble);
#endif

  // dtos
  {
    __ pop(dtos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
    }
  }

#ifdef ASSERT
  __ b(Done);

  __ bind(notDouble);
  __ stop("Bad state");
#endif

  __ bind(Done);

  {
    Label notVolatile;
    __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
    __ bind(notVolatile);
  }
}

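// Illustrative note on the entry points below (hypothetical Java): for
//   class P { int x; void set(int v) { x = v; } }   // javac emits putfield
// the first execution resolves the field and, when rewriting is allowed,
// quickens the bytecode to its _fast_iputfield form; nofast_putfield is the
// variant used where quickening must stay disabled, and putstatic is never
// rewritten (see may_not_rewrite above).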
void TemplateTable::putfield(int byte_no)
{
  putfield_or_static(byte_no, false);
}

void TemplateTable::nofast_putfield(int byte_no) {
  putfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}

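// Expected state on entry, as relied on below (not a general contract): the
// value being stored is still cached in the tos register(s) (r0 or v0) and
// the object reference sits on the expression stack, so it can be peeked at
// with a pop_ptr/push_ptr pair without disturbing the value.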
void TemplateTable::jvmti_post_fast_field_mod() {
  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ ldrw(c_rarg3, Address(rscratch1));
    __ cbzw(c_rarg3, L2);
    __ pop_ptr(r19);  // copy the object pointer from tos
    __ verify_oop(r19);
    __ push_ptr(r19); // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {          // load values into the jvalue object
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(r0); break;
    case Bytecodes::_fast_dputfield: __ push_d(); break;
    case Bytecodes::_fast_fputfield: __ push_f(); break;
    case Bytecodes::_fast_lputfield: __ push_l(r0); break;

    default:
      ShouldNotReachHere();
    }
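    // Note (illustrative): the sub-int kinds above share push_i because the
    // saved slots act as a jvalue union; on little-endian AArch64 the
    // low-order bytes of the pushed int slot overlay the narrower members.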
    __ mov(c_rarg3, esp);  // points to jvalue on the stack
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, r0);
    __ verify_oop(r19);
    // r19: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               r19, c_rarg2, c_rarg3);

    switch (bytecode()) {          // restore tos values
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
    case Bytecodes::_fast_dputfield: __ pop_d(); break;
    case Bytecodes::_fast_fputfield: __ pop_f(); break;
    case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
    default: break;
    }
    __ bind(L2);
  }
}

void TemplateTable::fast_storefield(TosState state)
{
  transition(state, vtos);

  ByteSize base = ConstantPoolCache::base_offset();

  jvmti_post_fast_field_mod();

  // access constant pool cache
  __ load_field_entry(r2, r1);
  load_resolved_field_entry(r2, r2, noreg, r1, r5);
  __ verify_field_offset(r1);

  {
    Label notVolatile;
    __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
    __ bind(notVolatile);
  }

  Label notVolatile;

  // Get object from stack
  pop_and_check_object(r2);

  // field address
  const Address field(r2, r1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vputfield:
    {
      Label is_flat, done;
      __ test_field_is_flat(r5, noreg /* temp */, is_flat);
      __ null_check(r0);
      do_oop_store(_masm, field, r0, IN_HEAP);
      __ b(done);
      __ bind(is_flat);
      __ load_field_entry(r4, r5);
      // Re-shuffle registers to match the VM call calling convention
      __ mov(r19, r1);
      __ mov(r7, r2);
      __ write_flat_field(r4, r19, r6, r8, r7);
      __ bind(done);
    }
    break;
  case Bytecodes::_fast_aputfield:
    // Clobbers: r10, r11, r3
    do_oop_store(_masm, field, r0, IN_HEAP);
    break;
  case Bytecodes::_fast_lputfield:
    __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_iputfield:
    __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_zputfield:
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_bputfield:
    __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_sputfield:
    __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_cputfield:
  // r0: object
  __ verify_oop(r0);
  __ null_check(r0);
  const Address field(r0, r1);

  // 8179954: We need to make sure that the code generated for
  // volatile accesses forms a sequentially-consistent set of
  // operations when combined with STLR and LDAR. Without a leading
  // membar it's possible for a simple Dekker test to fail if loads
  // use LDR;DMB but stores use STLR. This can happen if C2 compiles
  // the stores in one method and we interpret the loads in another.
  if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
    Label notVolatile;
    __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::AnyAny);
    __ bind(notVolatile);
  }
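  // Illustrative sketch of the Dekker failure described above (two volatile
  // fields x and y, both initially 0):
  //   T1 (C2 code):     stlr x, 1; then load y
  //   T2 (interpreter): stlr y, 1; then load x
  // With STLR stores but plain LDR;DMB loads, nothing orders each thread's
  // store before its own subsequent load, so both threads can read 0; the
  // leading full barrier (AnyAny) restores that ordering.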

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vgetfield:
    {
      // field is flat
      __ read_flat_field(r2, r0);
      __ verify_oop(r0);
    }
    break;
  case Bytecodes::_fast_agetfield:
    do_oop_load(_masm, field, r0, IN_HEAP);
    __ verify_oop(r0);
    break;
  case Bytecodes::_fast_lgetfield:
    __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_igetfield:
    __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_bgetfield:
    __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_sgetfield:
    __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_cgetfield:
    __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_fgetfield:
  Label initialize_header;

  __ get_cpool_and_tags(r4, r0);
  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading InstanceKlass to be consistent with the
  // order in which the constant pool is updated (see ConstantPool::klass_at_put).
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
  __ lea(rscratch1, Address(rscratch1, tags_offset));
  __ ldarb(rscratch1, rscratch1);
  __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
  __ br(Assembler::NE, slow_case);

  // get InstanceKlass
  __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);

  // make sure klass is initialized
  assert(VM_Version::supports_fast_class_init_checks(),
         "Optimization requires support for fast class initialization checks");
  __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
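  // Illustrative note: the barrier sends the first `new A()` down the slow
  // path while A is still being initialized; only once A reaches the
  // fully-initialized state does allocation take the inline fast path.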

  __ allocate_instance(r4, r0, r3, r1, true, slow_case);
  __ b(done);

  // slow case
  __ bind(slow_case);
  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(r0);

  // continue
  __ bind(done);
  // Must prevent reordering of stores for object initialization
  // with stores that publish the new object.
  __ membar(Assembler::StoreStore);
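  // Illustrative note: without this barrier a racy publication such as
  //   shared = new A();   // reader thread loads `shared` without sync
  // could let the reader see the reference before the header and field
  // initialization stores become visible.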
}

void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  __ mov(c_rarg2, r0);
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);
  __ bind(quicked);
  __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
  __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass

  __ bind(resolved);
  __ load_klass(r19, r3);

  // Generate subtype check. Blows r2, r5. Object in r3.
  // Superklass in r0. Subklass in r19.
  __ gen_subtype_check(r19, ok_is_subtype);

  // Come here on failure
  __ push(r3);
  // object is at TOS
  __ b(Interpreter::_throw_ClassCastException_entry);

  // Come here on success
  __ bind(ok_is_subtype);
  __ mov(r0, r3); // Restore object from r3

  __ b(done);
  __ bind(is_null);

  // Collect counts on whether this test sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ profile_null_seen(r2);
  }

  __ bind(done);
}

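// Illustrative note: checkcast and instanceof differ only on null and on
// failure. For `(String) o`, a null o passes checkcast unchanged (hence the
// null-seen profiling above), while `o instanceof String` simply yields 0
// for null; a failed checkcast pushes the object back and throws
// ClassCastException.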
void TemplateTable::instanceof() {
  transition(atos, itos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ cbz(r0, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
  __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
  // See if bytecode has already been quickened
  __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
  __ lea(r1, Address(rscratch1, r19));
  __ ldarb(r1, r1);
  __ cmp(r1, (u1)JVM_CONSTANT_Class);
  __ br(Assembler::EQ, quicked);

  __ push(atos); // save receiver for result, and for GC
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
// in the assembly code structure as well
//
// Stack layout:
//
// [expressions  ] <--- esp               = expression stack top
// ..
// [expressions  ]
// [monitor entry] <--- monitor block top = expression stack bot
// ..
// [monitor entry]
// [frame data   ] <--- monitor block bot
// ...
// [saved rfp    ] <--- rfp
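// Illustrative note: monitor_block_top/bot below are kept as stack-word
// offsets relative to rfp (hence the "derelativize" steps); an empty monitor
// block has top == bot, and each entry occupies
// frame::interpreter_frame_monitor_size_in_bytes().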
void TemplateTable::monitorenter()
{
  transition(atos, vtos);

  // check for null object
  __ null_check(r0);

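  // Value objects (inline types) have no identity and cannot be locked on;
  // divert such receivers to throw_identity_exception below.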
  Label is_inline_type;
  __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
  __ test_markword_is_inline_type(rscratch1, is_inline_type);

  const Address monitor_block_top(
        rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Label allocated;

  // initialize entry pointer
  __ mov(c_rarg1, zr); // points to free slot or null

  // find a free slot in the monitor block (result in c_rarg1)
  {
    Label entry, loop, exit;
    __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
    __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
    // c_rarg3 points to current entry, starting with top-most entry

    __ lea(c_rarg2, monitor_block_bot); // points to word before bottom

  // c_rarg1: points to monitor entry
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception
  // handling for async. exceptions works correctly.
  // The object has already been popped from the stack, so the
  // expression stack looks correct.
  __ increment(rbcp);

  // store object
  __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
  __ lock_object(c_rarg1);

  // check to make sure this monitor doesn't cause stack overflow after locking
  __ save_bcp(); // in case of exception
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to
  // next instruction.
  __ dispatch_next(vtos);

  __ bind(is_inline_type);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_identity_exception), r0);
  __ should_not_reach_here();
}


void TemplateTable::monitorexit()
{
  transition(atos, vtos);

  // check for null object
  __ null_check(r0);

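  // As in monitorenter, value objects cannot carry a monitor: if the mark
  // word matches the inline type pattern, throw
  // IllegalMonitorStateException.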
  const int is_inline_type_mask = markWord::inline_type_pattern;
  Label has_identity;
  __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
  __ mov(rscratch2, is_inline_type_mask);
  __ andr(rscratch1, rscratch1, rscratch2);
  __ cmp(rscratch1, rscratch2);
  __ br(Assembler::NE, has_identity);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();
  __ bind(has_identity);

  const Address monitor_block_top(
        rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Label found;

  // find matching slot
  {
    Label entry, loop;
    __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
    __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
    // c_rarg1 points to current entry, starting with top-most entry

    __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
                                        // of monitor block
    __ b(entry);

    __ bind(loop);