26 #include "asm/macroAssembler.inline.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "compiler/compilerDefinitions.inline.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "gc/shared/tlab_globals.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/objArrayKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/frame.inline.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/powerOfTwo.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
53
54 // Address computation: local variables
55
56 static inline Address iaddress(int n) {
57 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
58 }
59
60 static inline Address laddress(int n) {
61 return iaddress(n + 1);
62 }
63
64 static inline Address faddress(int n) {
65 return iaddress(n);
152 Address src,
153 Register dst,
154 DecoratorSet decorators) {
155 __ load_heap_oop(dst, src, r10, r11, decorators);
156 }
157
158 Address TemplateTable::at_bcp(int offset) {
159 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
160 return Address(rbcp, offset);
161 }
162
163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
164 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
165 int byte_no)
166 {
167 assert_different_registers(bc_reg, temp_reg);
168 if (!RewriteBytecodes) return;
169 Label L_patch_done;
170
171 switch (bc) {
172 case Bytecodes::_fast_aputfield:
173 case Bytecodes::_fast_bputfield:
174 case Bytecodes::_fast_zputfield:
175 case Bytecodes::_fast_cputfield:
176 case Bytecodes::_fast_dputfield:
177 case Bytecodes::_fast_fputfield:
178 case Bytecodes::_fast_iputfield:
179 case Bytecodes::_fast_lputfield:
180 case Bytecodes::_fast_sputfield:
181 {
182 // We skip bytecode quickening for putfield instructions when
183 // the put_code written to the constant pool cache is zero.
184 // This is required so that every execution of this instruction
185 // calls out to InterpreterRuntime::resolve_get_put to do
186 // additional, required work.
187 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
188 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
189 __ load_field_entry(temp_reg, bc_reg);
190 if (byte_no == f1_byte) {
191 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
736 locals_index_wide(r1);
737 __ ldr(r0, aaddress(r1));
738 }
739
740 void TemplateTable::index_check(Register array, Register index)
741 {
742 // destroys r1, rscratch1
743 // sign extend index for use by indexed load
744 // __ movl2ptr(index, index);
745 // check index
746 Register length = rscratch1;
747 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
748 __ cmpw(index, length);
749 if (index != r1) {
750 // ??? convention: move aberrant index into r1 for exception message
751 assert(r1 != array, "different registers");
752 __ mov(r1, index);
753 }
754 Label ok;
755 __ br(Assembler::LO, ok);
756 // ??? convention: move array into r3 for exception message
757 __ mov(r3, array);
758 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
759 __ br(rscratch1);
760 __ bind(ok);
761 }
762
763 void TemplateTable::iaload()
764 {
765 transition(itos, itos);
766 __ mov(r1, r0);
767 __ pop_ptr(r0);
768 // r0: array
769 // r1: index
770 index_check(r0, r1); // leaves index in r1, kills rscratch1
771 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
772 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
773 }
774
775 void TemplateTable::laload()
776 {
777 transition(itos, ltos);
778 __ mov(r1, r0);
779 __ pop_ptr(r0);
799 void TemplateTable::daload()
800 {
801 transition(itos, dtos);
802 __ mov(r1, r0);
803 __ pop_ptr(r0);
804 // r0: array
805 // r1: index
806 index_check(r0, r1); // leaves index in r1, kills rscratch1
807 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
808 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
809 }
810
811 void TemplateTable::aaload()
812 {
813 transition(itos, atos);
814 __ mov(r1, r0);
815 __ pop_ptr(r0);
816 // r0: array
817 // r1: index
818 index_check(r0, r1); // leaves index in r1, kills rscratch1
819 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
820 do_oop_load(_masm,
821 Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
822 r0,
823 IS_ARRAY);
824 }
825
826 void TemplateTable::baload()
827 {
828 transition(itos, itos);
829 __ mov(r1, r0);
830 __ pop_ptr(r0);
831 // r0: array
832 // r1: index
833 index_check(r0, r1); // leaves index in r1, kills rscratch1
834 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
835 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
836 }
837
838 void TemplateTable::caload()
839 {
840 transition(itos, itos);
841 __ mov(r1, r0);
842 __ pop_ptr(r0);
843 // r0: array
1090 // r1: index
1091 // r3: array
1092 index_check(r3, r1); // prefer index in r1
1093 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1094 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1095 }
1096
1097 void TemplateTable::dastore() {
1098 transition(dtos, vtos);
1099 __ pop_i(r1);
1100 __ pop_ptr(r3);
1101 // v0: value
1102 // r1: index
1103 // r3: array
1104 index_check(r3, r1); // prefer index in r1
1105 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1106 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1107 }
1108
1109 void TemplateTable::aastore() {
1110 Label is_null, ok_is_subtype, done;
1111 transition(vtos, vtos);
1112 // stack: ..., array, index, value
1113 __ ldr(r0, at_tos()); // value
1114 __ ldr(r2, at_tos_p1()); // index
1115 __ ldr(r3, at_tos_p2()); // array
1116
1117 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1118
1119 index_check(r3, r2); // kills r1
1120 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1121
1122 // do array store check - check for null value first
1123 __ cbz(r0, is_null);
1124
1125 // Move subklass into r1
1126 __ load_klass(r1, r0);
1127 // Move superklass into r0
1128 __ load_klass(r0, r3);
1129 __ ldr(r0, Address(r0,
1130 ObjArrayKlass::element_klass_offset()));
1131 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1132
1133 // Generate subtype check. Blows r2, r5
1134 // Superklass in r0. Subklass in r1.
1135 __ gen_subtype_check(r1, ok_is_subtype);
1136
1137 // Come here on failure
1138 // object is at TOS
1139 __ b(Interpreter::_throw_ArrayStoreException_entry);
1140
1141 // Come here on success
1142 __ bind(ok_is_subtype);
1143
1144 // Get the value we will store
1145 __ ldr(r0, at_tos());
1146 // Now store using the appropriate barrier
1147 // Clobbers: r10, r11, r3
1148 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1149 __ b(done);
1150
1151 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1152 __ bind(is_null);
1153 __ profile_null_seen(r2);
1154
1155 // Store a null
1156 // Clobbers: r10, r11, r3
1157 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1158
1159 // Pop stack arguments
1160 __ bind(done);
1161 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1162 }
1163
1164 void TemplateTable::bastore()
1165 {
1166 transition(itos, vtos);
1167 __ pop_i(r1);
1168 __ pop_ptr(r3);
1169 // r0: value
1170 // r1: index
1171 // r3: array
1172 index_check(r3, r1); // prefer index in r1
1173
1174 // Need to check whether array is boolean or byte
1175 // since both types share the bastore bytecode.
1176 __ load_klass(r2, r3);
1177 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
1944 __ br(j_not(cc), not_taken);
1945 branch(false, false);
1946 __ bind(not_taken);
1947 __ profile_not_taken_branch(r0);
1948 }
1949
1950 void TemplateTable::if_nullcmp(Condition cc)
1951 {
1952 transition(atos, vtos);
1953 // assume branch is more often taken than not (loops use backward branches)
1954 Label not_taken;
1955 if (cc == equal)
1956 __ cbnz(r0, not_taken);
1957 else
1958 __ cbz(r0, not_taken);
1959 branch(false, false);
1960 __ bind(not_taken);
1961 __ profile_not_taken_branch(r0);
1962 }
1963
1964 void TemplateTable::if_acmp(Condition cc)
1965 {
1966 transition(atos, vtos);
1967 // assume branch is more often taken than not (loops use backward branches)
1968 Label not_taken;
1969 __ pop_ptr(r1);
1970 __ cmpoop(r1, r0);
1971 __ br(j_not(cc), not_taken);
1972 branch(false, false);
1973 __ bind(not_taken);
1974 __ profile_not_taken_branch(r0);
1975 }
1976
1977 void TemplateTable::ret() {
1978 transition(vtos, vtos);
1979 locals_index(r1);
1980 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1981 __ profile_ret(r1, r2);
1982 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1983 __ lea(rbcp, Address(rbcp, r1));
1984 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1985 __ dispatch_next(vtos, 0, /*generate_poll*/true);
1986 }
1987
1988 void TemplateTable::wide_ret() {
1989 transition(vtos, vtos);
1990 locals_index_wide(r1);
1991 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1992 __ profile_ret(r1, r2);
1993 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1994 __ lea(rbcp, Address(rbcp, r1));
1995 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1996 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2190 assert(_desc->calls_vm(),
2191 "inconsistent calls_vm information"); // call in remove_activation
2192
2193 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2194 assert(state == vtos, "only valid state");
2195
2196 __ ldr(c_rarg1, aaddress(0));
2197 __ load_klass(r3, c_rarg1);
2198 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2199 Label skip_register_finalizer;
2200 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2201
2202 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2203
2204 __ bind(skip_register_finalizer);
2205 }
2206
2207 // Issue a StoreStore barrier after all stores but before return
2208 // from any constructor for any class with a final field. We don't
2209 // know if this is a finalizer, so we always do so.
2210 if (_desc->bytecode() == Bytecodes::_return)
2211 __ membar(MacroAssembler::StoreStore);
2212
2213 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2214 Label no_safepoint;
2215 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2216 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2217 __ push(state);
2218 __ push_cont_fastpath(rthread);
2219 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2220 __ pop_cont_fastpath(rthread);
2221 __ pop(state);
2222 __ bind(no_safepoint);
2223 }
2224
2225 // Narrow result if state is itos but result type is smaller.
2226 // Need to narrow in the return bytecode rather than in generate_return_entry
2227 // since compiled code callers expect the result to already be narrowed.
2228 if (state == itos) {
2229 __ narrow(r0);
2230 }
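// narrow() truncates or sign-extends r0 to the method's declared return
// type (e.g. masking to 0/1 for boolean), so callers always receive a
// properly narrowed value.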
2582 }
2583 // c_rarg1: object pointer or null
2584 // c_rarg2: cache entry pointer
2585 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2586 InterpreterRuntime::post_field_access),
2587 c_rarg1, c_rarg2);
2588 __ load_field_entry(cache, index);
2589 __ bind(L1);
2590 }
2591 }
2592
2593 void TemplateTable::pop_and_check_object(Register r)
2594 {
2595 __ pop_ptr(r);
2596 __ null_check(r); // for field access must check obj.
2597 __ verify_oop(r);
2598 }
2599
2600 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2601 {
2602 const Register cache = r4;
2603 const Register obj = r4;
2604 const Register index = r3;
2605 const Register tos_state = r3;
2606 const Register off = r19;
2607 const Register flags = r6;
2608 const Register bc = r4; // uses same reg as obj, so don't mix them
2609
2610 resolve_cache_and_index_for_field(byte_no, cache, index);
2611 jvmti_post_field_access(cache, index, is_static, false);
2612 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2613
2614 if (!is_static) {
2615 // obj is on the stack
2616 pop_and_check_object(obj);
2617 }
2618
2619 // 8179954: We need to make sure that the code generated for
2620 // volatile accesses forms a sequentially-consistent set of
2621 // operations when combined with STLR and LDAR. Without a leading
2622 // membar it's possible for a simple Dekker test to fail if loads
2623 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2624 // the stores in one method and we interpret the loads in another.
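// Illustrative sketch (not part of this file): in a Dekker-style handshake
// a thread performs "volatile store A; volatile load B". A C2-compiled
// volatile store is an STLR; if the load is interpreted as a plain LDR
// followed by a DMB, the LDR may be hoisted above the earlier STLR (a
// store-release does not order later loads), so both threads can read
// stale values. The leading AnyAny barrier below restores the required
// ordering; an LDAR would not need it, since an LDAR stays ordered after
// a preceding STLR.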
2625 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2626 Label notVolatile;
2627 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2628 __ membar(MacroAssembler::AnyAny);
2629 __ bind(notVolatile);
2630 }
2631
2650 __ b(Done);
2651
2652 __ bind(notByte);
2653 __ cmp(tos_state, (u1)ztos);
2654 __ br(Assembler::NE, notBool);
2655
2656 // ztos (same code as btos)
2657 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2658 __ push(ztos);
2659 // Rewrite bytecode to be faster
2660 if (rc == may_rewrite) {
2661 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2662 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2663 }
2664 __ b(Done);
2665
2666 __ bind(notBool);
2667 __ cmp(tos_state, (u1)atos);
2668 __ br(Assembler::NE, notObj);
2669 // atos
2670 do_oop_load(_masm, field, r0, IN_HEAP);
2671 __ push(atos);
2672 if (rc == may_rewrite) {
2673 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2674 }
2675 __ b(Done);
2676
2677 __ bind(notObj);
2678 __ cmp(tos_state, (u1)itos);
2679 __ br(Assembler::NE, notInt);
2680 // itos
2681 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2682 __ push(itos);
2683 // Rewrite bytecode to be faster
2684 if (rc == may_rewrite) {
2685 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2686 }
2687 __ b(Done);
2688
2689 __ bind(notInt);
2690 __ cmp(tos_state, (u1)ctos);
2691 __ br(Assembler::NE, notChar);
2692 // ctos
2693 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2694 __ push(ctos);
2695 // Rewrite bytecode to be faster
2816 // c_rarg1: object pointer set up above (null if static)
2817 // c_rarg2: cache entry pointer
2818 // c_rarg3: jvalue object on the stack
2819 __ call_VM(noreg,
2820 CAST_FROM_FN_PTR(address,
2821 InterpreterRuntime::post_field_modification),
2822 c_rarg1, c_rarg2, c_rarg3);
2823 __ load_field_entry(cache, index);
2824 __ bind(L1);
2825 }
2826 }
2827
2828 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2829 transition(vtos, vtos);
2830
2831 const Register cache = r2;
2832 const Register index = r3;
2833 const Register tos_state = r3;
2834 const Register obj = r2;
2835 const Register off = r19;
2836 const Register flags = r0;
2837 const Register bc = r4;
2838
2839 resolve_cache_and_index_for_field(byte_no, cache, index);
2840 jvmti_post_field_mod(cache, index, is_static);
2841 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2842
2843 Label Done;
2844 __ mov(r5, flags);
2845
2846 {
2847 Label notVolatile;
2848 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2849 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2850 __ bind(notVolatile);
2851 }
2852
2853 // field address
2854 const Address field(obj, off);
2855
2856 Label notByte, notBool, notInt, notShort, notChar,
2857 notLong, notFloat, notObj, notDouble;
2858
2859 assert(btos == 0, "change code, btos != 0");
2860 __ cbnz(tos_state, notByte);
2861
2862 // Don't rewrite putstatic, only putfield
2863 if (is_static) rc = may_not_rewrite;
2864
2865 // btos
2866 {
2867 __ pop(btos);
2868 if (!is_static) pop_and_check_object(obj);
2877 __ cmp(tos_state, (u1)ztos);
2878 __ br(Assembler::NE, notBool);
2879
2880 // ztos
2881 {
2882 __ pop(ztos);
2883 if (!is_static) pop_and_check_object(obj);
2884 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
2885 if (rc == may_rewrite) {
2886 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2887 }
2888 __ b(Done);
2889 }
2890
2891 __ bind(notBool);
2892 __ cmp(tos_state, (u1)atos);
2893 __ br(Assembler::NE, notObj);
2894
2895 // atos
2896 {
2897 __ pop(atos);
2898 if (!is_static) pop_and_check_object(obj);
2899 // Store into the field
2900 // Clobbers: r10, r11, r3
2901 do_oop_store(_masm, field, r0, IN_HEAP);
2902 if (rc == may_rewrite) {
2903 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2904 }
2905 __ b(Done);
2906 }
2907
2908 __ bind(notObj);
2909 __ cmp(tos_state, (u1)itos);
2910 __ br(Assembler::NE, notInt);
2911
2912 // itos
2913 {
2914 __ pop(itos);
2915 if (!is_static) pop_and_check_object(obj);
2916 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
2917 if (rc == may_rewrite) {
2918 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2919 }
2920 __ b(Done);
2921 }
2922
2923 __ bind(notInt);
2924 __ cmp(tos_state, (u1)ctos);
2925 __ br(Assembler::NE, notChar);
2990 {
2991 __ pop(dtos);
2992 if (!is_static) pop_and_check_object(obj);
2993 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
2994 if (rc == may_rewrite) {
2995 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2996 }
2997 }
2998
2999 #ifdef ASSERT
3000 __ b(Done);
3001
3002 __ bind(notDouble);
3003 __ stop("Bad state");
3004 #endif
3005
3006 __ bind(Done);
3007
3008 {
3009 Label notVolatile;
3010 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3011 __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3012 __ bind(notVolatile);
3013 }
3014 }
3015
3016 void TemplateTable::putfield(int byte_no)
3017 {
3018 putfield_or_static(byte_no, false);
3019 }
3020
3021 void TemplateTable::nofast_putfield(int byte_no) {
3022 putfield_or_static(byte_no, false, may_not_rewrite);
3023 }
3024
3025 void TemplateTable::putstatic(int byte_no) {
3026 putfield_or_static(byte_no, true);
3027 }
3028
3029 void TemplateTable::jvmti_post_fast_field_mod() {
3030 if (JvmtiExport::can_post_field_modification()) {
3031 // Check to see if a field modification watch has been set before
3032 // we take the time to call into the VM.
3033 Label L2;
3034 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3035 __ ldrw(c_rarg3, Address(rscratch1));
3036 __ cbzw(c_rarg3, L2);
3037 __ pop_ptr(r19); // copy the object pointer from tos
3038 __ verify_oop(r19);
3039 __ push_ptr(r19); // put the object pointer back on tos
3040 // Save tos values before call_VM() clobbers them. Since we have
3041 // to do it for every data type, we use the saved values as the
3042 // jvalue object.
3043 switch (bytecode()) { // load values into the jvalue object
3044 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3045 case Bytecodes::_fast_bputfield: // fall through
3046 case Bytecodes::_fast_zputfield: // fall through
3047 case Bytecodes::_fast_sputfield: // fall through
3048 case Bytecodes::_fast_cputfield: // fall through
3049 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3050 case Bytecodes::_fast_dputfield: __ push_d(); break;
3051 case Bytecodes::_fast_fputfield: __ push_f(); break;
3052 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3053
3054 default:
3055 ShouldNotReachHere();
3056 }
3057 __ mov(c_rarg3, esp); // points to jvalue on the stack
3058 // access constant pool cache entry
3059 __ load_field_entry(c_rarg2, r0);
3060 __ verify_oop(r19);
3061 // r19: object pointer copied above
3062 // c_rarg2: cache entry pointer
3063 // c_rarg3: jvalue object on the stack
3064 __ call_VM(noreg,
3065 CAST_FROM_FN_PTR(address,
3066 InterpreterRuntime::post_field_modification),
3067 r19, c_rarg2, c_rarg3);
3068
3069 switch (bytecode()) { // restore tos values
3070 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3071 case Bytecodes::_fast_bputfield: // fall through
3072 case Bytecodes::_fast_zputfield: // fall through
3073 case Bytecodes::_fast_sputfield: // fall through
3074 case Bytecodes::_fast_cputfield: // fall through
3075 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3076 case Bytecodes::_fast_dputfield: __ pop_d(); break;
3077 case Bytecodes::_fast_fputfield: __ pop_f(); break;
3078 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3079 default: break;
3080 }
3081 __ bind(L2);
3082 }
3083 }
3084
3085 void TemplateTable::fast_storefield(TosState state)
3086 {
3087 transition(state, vtos);
3088
3089 ByteSize base = ConstantPoolCache::base_offset();
3097 load_resolved_field_entry(r2, r2, noreg, r1, r5);
3098 __ verify_field_offset(r1);
3099
3100 {
3101 Label notVolatile;
3102 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3103 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3104 __ bind(notVolatile);
3105 }
3106
3107 Label notVolatile;
3108
3109 // Get object from stack
3110 pop_and_check_object(r2);
3111
3112 // field address
3113 const Address field(r2, r1);
3114
3115 // access field
3116 switch (bytecode()) {
3117 case Bytecodes::_fast_aputfield:
3118 // Clobbers: r10, r11, r3
3119 do_oop_store(_masm, field, r0, IN_HEAP);
3120 break;
3121 case Bytecodes::_fast_lputfield:
3122 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3123 break;
3124 case Bytecodes::_fast_iputfield:
3125 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3126 break;
3127 case Bytecodes::_fast_zputfield:
3128 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3129 break;
3130 case Bytecodes::_fast_bputfield:
3131 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3132 break;
3133 case Bytecodes::_fast_sputfield:
3134 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3135 break;
3136 case Bytecodes::_fast_cputfield:
3192 // r0: object
3193 __ verify_oop(r0);
3194 __ null_check(r0);
3195 const Address field(r0, r1);
3196
3197 // 8179954: We need to make sure that the code generated for
3198 // volatile accesses forms a sequentially-consistent set of
3199 // operations when combined with STLR and LDAR. Without a leading
3200 // membar it's possible for a simple Dekker test to fail if loads
3201 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3202 // the stores in one method and we interpret the loads in another.
3203 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3204 Label notVolatile;
3205 __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3206 __ membar(MacroAssembler::AnyAny);
3207 __ bind(notVolatile);
3208 }
3209
3210 // access field
3211 switch (bytecode()) {
3212 case Bytecodes::_fast_agetfield:
3213 do_oop_load(_masm, field, r0, IN_HEAP);
3214 __ verify_oop(r0);
3215 break;
3216 case Bytecodes::_fast_lgetfield:
3217 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3218 break;
3219 case Bytecodes::_fast_igetfield:
3220 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3221 break;
3222 case Bytecodes::_fast_bgetfield:
3223 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3224 break;
3225 case Bytecodes::_fast_sgetfield:
3226 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3227 break;
3228 case Bytecodes::_fast_cgetfield:
3229 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3230 break;
3231 case Bytecodes::_fast_fgetfield:
3611 Label initialize_header;
3612
3613 __ get_cpool_and_tags(r4, r0);
3614 // Make sure the class we're about to instantiate has been resolved.
3615 // This is done before loading InstanceKlass to be consistent with the order
3616 // in which the constant pool is updated (see ConstantPool::klass_at_put)
3617 const int tags_offset = Array<u1>::base_offset_in_bytes();
3618 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3619 __ lea(rscratch1, Address(rscratch1, tags_offset));
3620 __ ldarb(rscratch1, rscratch1);
3621 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3622 __ br(Assembler::NE, slow_case);
3623
3624 // get InstanceKlass
3625 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3626
3627 // make sure klass is initialized
3628 assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3629 __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3630
3631 // get instance_size in InstanceKlass (scaled to a count of bytes)
3632 __ ldrw(r3,
3633 Address(r4,
3634 Klass::layout_helper_offset()));
3635 // test to see if it is malformed in some way
3636 __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3637
3638 // Allocate the instance:
3639 // If TLAB is enabled:
3640 // Try to allocate in the TLAB.
3641 // If fails, go to the slow path.
3642 // Initialize the allocation.
3643 // Exit.
3644 //
3645 // Go to slow path.
3646
3647 if (UseTLAB) {
3648 __ tlab_allocate(r0, r3, 0, noreg, r1, slow_case);
3649
3650 if (ZeroTLAB) {
3651 // the fields have been already cleared
3652 __ b(initialize_header);
3653 }
3654
3655 // The object body is initialized before the header. If the body size (instance
3656 // size minus the header) is zero, go directly to the header initialization.
3657 int header_size = oopDesc::header_size() * HeapWordSize;
3658 assert(is_aligned(header_size, BytesPerLong), "oop header size must be 8-byte-aligned");
3659 __ sub(r3, r3, header_size);
3660 __ cbz(r3, initialize_header);
3661
3662 // Initialize object fields
3663 {
3664 __ add(r2, r0, header_size);
3665 Label loop;
3666 __ bind(loop);
3667 __ str(zr, Address(__ post(r2, BytesPerLong)));
3668 __ sub(r3, r3, BytesPerLong);
3669 __ cbnz(r3, loop);
3670 }
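// The loop above clears the object body one 8-byte word at a time: r2 walks
// the body with a post-incremented store of zr while r3 counts the remaining
// bytes down to zero.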
3671
3672 // initialize object header only.
3673 __ bind(initialize_header);
3674 if (UseCompactObjectHeaders) {
3675 __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3676 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3677 } else {
3678 __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3679 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3680 __ store_klass_gap(r0, zr); // zero klass gap for compressed oops
3681 __ store_klass(r0, r4); // store klass last
3682 }
3683
3684 if (DTraceAllocProbes) {
3685 // Trigger dtrace event for fastpath
3686 __ push(atos); // save the return value
3687 __ call_VM_leaf(
3688 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3689 __ pop(atos); // restore the return value
3690
3691 }
3692 __ b(done);
3693 }
3694
3695 // slow case
3696 __ bind(slow_case);
3697 __ get_constant_pool(c_rarg1);
3698 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3699 __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3700 __ verify_oop(r0);
3701
3702 // continue
3703 __ bind(done);
3704 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3705 __ membar(Assembler::StoreStore);
3706 }
3707
3708 void TemplateTable::newarray() {
3709 transition(itos, atos);
3710 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3711 __ mov(c_rarg2, r0);
3712 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3713 c_rarg1, c_rarg2);
3757 __ bind(quicked);
3758 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3759 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3760
3761 __ bind(resolved);
3762 __ load_klass(r19, r3);
3763
3764 // Generate subtype check. Blows r2, r5. Object in r3.
3765 // Superklass in r0. Subklass in r19.
3766 __ gen_subtype_check(r19, ok_is_subtype);
3767
3768 // Come here on failure
3769 __ push(r3);
3770 // object is at TOS
3771 __ b(Interpreter::_throw_ClassCastException_entry);
3772
3773 // Come here on success
3774 __ bind(ok_is_subtype);
3775 __ mov(r0, r3); // Restore object from r3 into r0
3776
3777 // Collect counts on whether this test sees nulls a lot or not.
3778 if (ProfileInterpreter) {
3779 __ b(done);
3780 __ bind(is_null);
3781 __ profile_null_seen(r2);
3782 } else {
3783 __ bind(is_null); // same as 'done'
3784 }
3785 __ bind(done);
3786 }
3787
3788 void TemplateTable::instanceof() {
3789 transition(atos, itos);
3790 Label done, is_null, ok_is_subtype, quicked, resolved;
3791 __ cbz(r0, is_null);
3792
3793 // Get cpool & tags index
3794 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3795 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3796 // See if bytecode has already been quicked
3797 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3798 __ lea(r1, Address(rscratch1, r19));
3799 __ ldarb(r1, r1);
3800 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3801 __ br(Assembler::EQ, quicked);
3802
3803 __ push(atos); // save receiver for result, and for GC
3804 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3882 // in the assembly code structure as well
3883 //
3884 // Stack layout:
3885 //
3886 // [expressions ] <--- esp = expression stack top
3887 // ..
3888 // [expressions ]
3889 // [monitor entry] <--- monitor block top = expression stack bot
3890 // ..
3891 // [monitor entry]
3892 // [frame data ] <--- monitor block bot
3893 // ...
3894 // [saved rfp ] <--- rfp
3895 void TemplateTable::monitorenter()
3896 {
3897 transition(atos, vtos);
3898
3899 // check for null object
3900 __ null_check(r0);
3901
3902 const Address monitor_block_top(
3903 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3904 const Address monitor_block_bot(
3905 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3906 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3907
3908 Label allocated;
3909
3910 // initialize entry pointer
3911 __ mov(c_rarg1, zr); // points to free slot or null
3912
3913 // find a free slot in the monitor block (result in c_rarg1)
3914 {
3915 Label entry, loop, exit;
3916 __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
3917 __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
3918 // c_rarg3 points to current entry, starting with top-most entry
3919
3920 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3921
3983 // c_rarg1: points to monitor entry
3984 __ bind(allocated);
3985
3986 // Increment bcp to point to the next bytecode, so exception
3987 // handling for asynchronous exceptions works correctly.
3988 // The object has already been popped from the stack, so the
3989 // expression stack looks correct.
3990 __ increment(rbcp);
3991
3992 // store object
3993 __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
3994 __ lock_object(c_rarg1);
3995
3996 // check to make sure this monitor doesn't cause stack overflow after locking
3997 __ save_bcp(); // in case of exception
3998 __ generate_stack_overflow_check(0);
3999
4000 // The bcp has already been incremented. Just need to dispatch to
4001 // next instruction.
4002 __ dispatch_next(vtos);
4003 }
4004
4005
4006 void TemplateTable::monitorexit()
4007 {
4008 transition(atos, vtos);
4009
4010 // check for null object
4011 __ null_check(r0);
4012
4013 const Address monitor_block_top(
4014 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4015 const Address monitor_block_bot(
4016 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4017 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4018
4019 Label found;
4020
4021 // find matching slot
4022 {
4023 Label entry, loop;
4024 __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4025 __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4026 // c_rarg1 points to current entry, starting with top-most entry
4027
4028 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4029 // of monitor block
4030 __ b(entry);
4031
4032 __ bind(loop);
26 #include "asm/macroAssembler.inline.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "compiler/compilerDefinitions.inline.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "gc/shared/tlab_globals.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/objArrayKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/arguments.hpp"
47 #include "runtime/frame.inline.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/stubRoutines.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "utilities/powerOfTwo.hpp"
52
53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
54
55 // Address computation: local variables
56
57 static inline Address iaddress(int n) {
58 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
59 }
60
61 static inline Address laddress(int n) {
62 return iaddress(n + 1);
63 }
64
65 static inline Address faddress(int n) {
66 return iaddress(n);
153 Address src,
154 Register dst,
155 DecoratorSet decorators) {
156 __ load_heap_oop(dst, src, r10, r11, decorators);
157 }
158
159 Address TemplateTable::at_bcp(int offset) {
160 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
161 return Address(rbcp, offset);
162 }
163
164 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
165 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
166 int byte_no)
167 {
168 assert_different_registers(bc_reg, temp_reg);
169 if (!RewriteBytecodes) return;
170 Label L_patch_done;
171
172 switch (bc) {
173 case Bytecodes::_fast_vputfield:
174 case Bytecodes::_fast_aputfield:
175 case Bytecodes::_fast_bputfield:
176 case Bytecodes::_fast_zputfield:
177 case Bytecodes::_fast_cputfield:
178 case Bytecodes::_fast_dputfield:
179 case Bytecodes::_fast_fputfield:
180 case Bytecodes::_fast_iputfield:
181 case Bytecodes::_fast_lputfield:
182 case Bytecodes::_fast_sputfield:
183 {
184 // We skip bytecode quickening for putfield instructions when
185 // the put_code written to the constant pool cache is zero.
186 // This is required so that every execution of this instruction
187 // calls out to InterpreterRuntime::resolve_get_put to do
188 // additional, required work.
189 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
190 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
191 __ load_field_entry(temp_reg, bc_reg);
192 if (byte_no == f1_byte) {
193 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
738 locals_index_wide(r1);
739 __ ldr(r0, aaddress(r1));
740 }
741
742 void TemplateTable::index_check(Register array, Register index)
743 {
744 // destroys r1, rscratch1
745 // sign extend index for use by indexed load
746 // __ movl2ptr(index, index);
747 // check index
748 Register length = rscratch1;
749 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
750 __ cmpw(index, length);
751 if (index != r1) {
752 // ??? convention: move aberrant index into r1 for exception message
753 assert(r1 != array, "different registers");
754 __ mov(r1, index);
755 }
756 Label ok;
757 __ br(Assembler::LO, ok);
758 // ??? convention: move array into r3 for exception message
759 __ mov(r3, array);
760 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
761 __ br(rscratch1);
762 __ bind(ok);
763 }
764
765 void TemplateTable::iaload()
766 {
767 transition(itos, itos);
768 __ mov(r1, r0);
769 __ pop_ptr(r0);
770 // r0: array
771 // r1: index
772 index_check(r0, r1); // leaves index in r1, kills rscratch1
773 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
774 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
775 }
776
777 void TemplateTable::laload()
778 {
779 transition(itos, ltos);
780 __ mov(r1, r0);
781 __ pop_ptr(r0);
801 void TemplateTable::daload()
802 {
803 transition(itos, dtos);
804 __ mov(r1, r0);
805 __ pop_ptr(r0);
806 // r0: array
807 // r1: index
808 index_check(r0, r1); // leaves index in r1, kills rscratch1
809 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
810 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
811 }
812
813 void TemplateTable::aaload()
814 {
815 transition(itos, atos);
816 __ mov(r1, r0);
817 __ pop_ptr(r0);
818 // r0: array
819 // r1: index
820 index_check(r0, r1); // leaves index in r1, kills rscratch1
821 __ profile_array_type<ArrayLoadData>(r2, r0, r4);
822 if (UseArrayFlattening) {
823 Label is_flat_array, done;
824
825 __ test_flat_array_oop(r0, rscratch1 /*temp*/, is_flat_array);
826 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
827 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
828
829 __ b(done);
830 __ bind(is_flat_array);
831 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), r0, r1);
832 // Ensure the stores to copy the inline field contents are visible
833 // before any subsequent store that publishes this reference.
834 __ membar(Assembler::StoreStore);
835 __ bind(done);
836 } else {
837 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
838 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
839 }
840 __ profile_element_type(r2, r0, r4);
841 }
842
843 void TemplateTable::baload()
844 {
845 transition(itos, itos);
846 __ mov(r1, r0);
847 __ pop_ptr(r0);
848 // r0: array
849 // r1: index
850 index_check(r0, r1); // leaves index in r1, kills rscratch1
851 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
852 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
853 }
854
855 void TemplateTable::caload()
856 {
857 transition(itos, itos);
858 __ mov(r1, r0);
859 __ pop_ptr(r0);
860 // r0: array
1107 // r1: index
1108 // r3: array
1109 index_check(r3, r1); // prefer index in r1
1110 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1111 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1112 }
1113
1114 void TemplateTable::dastore() {
1115 transition(dtos, vtos);
1116 __ pop_i(r1);
1117 __ pop_ptr(r3);
1118 // v0: value
1119 // r1: index
1120 // r3: array
1121 index_check(r3, r1); // prefer index in r1
1122 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1123 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125
1126 void TemplateTable::aastore() {
1127 Label is_null, is_flat_array, ok_is_subtype, done;
1128 transition(vtos, vtos);
1129 // stack: ..., array, index, value
1130 __ ldr(r0, at_tos()); // value
1131 __ ldr(r2, at_tos_p1()); // index
1132 __ ldr(r3, at_tos_p2()); // array
1133
1134 index_check(r3, r2); // kills r1
1135
1136 __ profile_array_type<ArrayStoreData>(r4, r3, r5);
1137 __ profile_multiple_element_types(r4, r0, r5, r6);
1138
1139 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1140 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1141 // Be careful not to clobber r4 below
1142
1143 // do array store check - check for null value first
1144 __ cbz(r0, is_null);
1145
1146 // Move array class to r5
1147 __ load_klass(r5, r3);
1148
1149 if (UseArrayFlattening) {
1150 __ ldrw(r6, Address(r5, Klass::layout_helper_offset()));
1151 __ test_flat_array_layout(r6, is_flat_array);
1152 }
1153
1154 // Move subklass into r1
1155 __ load_klass(r1, r0);
1156
1157 // Move array element superklass into r0
1158 __ ldr(r0, Address(r5, ObjArrayKlass::element_klass_offset()));
1159 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1160
1161 // Generate subtype check. Blows r2, r5
1162 // Superklass in r0. Subklass in r1.
1163
1164 // is "r1 <: r0" ? (value subclass <: array element superclass)
1165 __ gen_subtype_check(r1, ok_is_subtype, false);
1166
1167 // Come here on failure
1168 // object is at TOS
1169 __ b(Interpreter::_throw_ArrayStoreException_entry);
1170
1171 // Come here on success
1172 __ bind(ok_is_subtype);
1173
1174 // Get the value we will store
1175 __ ldr(r0, at_tos());
1176 // Now store using the appropriate barrier
1177 // Clobbers: r10, r11, r3
1178 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1179 __ b(done);
1180
1181 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1182 __ bind(is_null);
1183 if (Arguments::is_valhalla_enabled()) {
1184 Label is_null_into_value_array_npe, store_null;
1185
1186 if (UseArrayFlattening) {
1187 __ test_flat_array_oop(r3, rscratch1, is_flat_array);
1188 }
1189
1190 // No way to store null in a null-free array
1191 __ test_null_free_array_oop(r3, rscratch1, is_null_into_value_array_npe);
1192 __ b(store_null);
1193
1194 __ bind(is_null_into_value_array_npe);
1195 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1196
1197 __ bind(store_null);
1198 }
1199
1200 // Store a null
1201 // Clobbers: r10, r11, r3
1202 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1203 __ b(done);
1204
1205 if (UseArrayFlattening) {
1206 Label is_type_ok;
1207 __ bind(is_flat_array); // Store non-null value to flat
1208
1209 __ ldr(r0, at_tos()); // value
1210 __ ldr(r3, at_tos_p1()); // index
1211 __ ldr(r2, at_tos_p2()); // array
1212 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), r0, r2, r3);
1213 }
1214
1215 // Pop stack arguments
1216 __ bind(done);
1217 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1218 }
1219
1220 void TemplateTable::bastore()
1221 {
1222 transition(itos, vtos);
1223 __ pop_i(r1);
1224 __ pop_ptr(r3);
1225 // r0: value
1226 // r1: index
1227 // r3: array
1228 index_check(r3, r1); // prefer index in r1
1229
1230 // Need to check whether array is boolean or byte
1231 // since both types share the bastore bytecode.
1232 __ load_klass(r2, r3);
1233 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2000 __ br(j_not(cc), not_taken);
2001 branch(false, false);
2002 __ bind(not_taken);
2003 __ profile_not_taken_branch(r0);
2004 }
2005
2006 void TemplateTable::if_nullcmp(Condition cc)
2007 {
2008 transition(atos, vtos);
2009 // assume branch is more often taken than not (loops use backward branches)
2010 Label not_taken;
2011 if (cc == equal)
2012 __ cbnz(r0, not_taken);
2013 else
2014 __ cbz(r0, not_taken);
2015 branch(false, false);
2016 __ bind(not_taken);
2017 __ profile_not_taken_branch(r0);
2018 }
2019
2020 void TemplateTable::if_acmp(Condition cc) {
2021 transition(atos, vtos);
2022 // assume branch is more often taken than not (loops use backward branches)
2023 Label taken, not_taken;
2024 __ pop_ptr(r1);
2025
2026 __ profile_acmp(r2, r1, r0, r4);
2027
2028 Register is_inline_type_mask = rscratch1;
2029 __ mov(is_inline_type_mask, markWord::inline_type_pattern);
2030
2031 if (Arguments::is_valhalla_enabled()) {
2032 // The substitutability test is only necessary if r1 and r0 are not the same...
2033 __ cmp(r1, r0);
2034 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2035
2036 // ... neither are null...
2037 __ cbz(r1, (cc == equal) ? not_taken : taken);
2038 __ cbz(r0, (cc == equal) ? not_taken : taken);
2039
2040 // ...and both are values...
2041 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2042 __ andr(r2, r2, is_inline_type_mask);
2043 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2044 __ andr(r4, r4, is_inline_type_mask);
2045 __ andr(r2, r2, r4);
2046 __ cmp(r2, is_inline_type_mask);
2047 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2048
2049 // ...with the same value klass
2050 __ load_metadata(r2, r1);
2051 __ load_metadata(r4, r0);
2052 __ cmp(r2, r4);
2053 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2054
2055 // Know both are the same type, let's test for substitutability...
2056 if (cc == equal) {
2057 invoke_is_substitutable(r0, r1, taken, not_taken);
2058 } else {
2059 invoke_is_substitutable(r0, r1, not_taken, taken);
2060 }
2061 __ stop("Not reachable");
2062 }
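// Every path through the Valhalla block above branches to an outcome (or
// calls the substitutability runtime), so the plain reference comparison
// below is only reached when Valhalla is disabled.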
2063
2064 __ cmpoop(r1, r0);
2065 __ br(j_not(cc), not_taken);
2066 __ bind(taken);
2067 branch(false, false);
2068 __ bind(not_taken);
2069 __ profile_not_taken_branch(r0, true);
2070 }
2071
2072 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2073 Label& is_subst, Label& not_subst) {
2074
2075 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2076 // r0 holds the result; branch to the corresponding outcome
2077 __ cbz(r0, not_subst);
2078 __ b(is_subst);
2079 }
2080
2081
2082 void TemplateTable::ret() {
2083 transition(vtos, vtos);
2084 locals_index(r1);
2085 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2086 __ profile_ret(r1, r2);
2087 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2088 __ lea(rbcp, Address(rbcp, r1));
2089 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2090 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2091 }
2092
2093 void TemplateTable::wide_ret() {
2094 transition(vtos, vtos);
2095 locals_index_wide(r1);
2096 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2097 __ profile_ret(r1, r2);
2098 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2099 __ lea(rbcp, Address(rbcp, r1));
2100 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2101 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2295 assert(_desc->calls_vm(),
2296 "inconsistent calls_vm information"); // call in remove_activation
2297
2298 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2299 assert(state == vtos, "only valid state");
2300
2301 __ ldr(c_rarg1, aaddress(0));
2302 __ load_klass(r3, c_rarg1);
2303 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2304 Label skip_register_finalizer;
2305 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2306
2307 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2308
2309 __ bind(skip_register_finalizer);
2310 }
2311
2312 // Issue a StoreStore barrier after all stores but before return
2313 // from any constructor for any class with a final field. We don't
2314 // know if this return is from such a constructor, so we always do so.
2315 if (_desc->bytecode() == Bytecodes::_return
2316 || _desc->bytecode() == Bytecodes::_return_register_finalizer)
2317 __ membar(MacroAssembler::StoreStore);
2318
2319 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2320 Label no_safepoint;
2321 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2322 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2323 __ push(state);
2324 __ push_cont_fastpath(rthread);
2325 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2326 __ pop_cont_fastpath(rthread);
2327 __ pop(state);
2328 __ bind(no_safepoint);
2329 }
2330
2331 // Narrow result if state is itos but result type is smaller.
2332 // Need to narrow in the return bytecode rather than in generate_return_entry
2333 // since compiled code callers expect the result to already be narrowed.
2334 if (state == itos) {
2335 __ narrow(r0);
2336 }
2688 }
2689 // c_rarg1: object pointer or null
2690 // c_rarg2: cache entry pointer
2691 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2692 InterpreterRuntime::post_field_access),
2693 c_rarg1, c_rarg2);
2694 __ load_field_entry(cache, index);
2695 __ bind(L1);
2696 }
2697 }
2698
2699 void TemplateTable::pop_and_check_object(Register r)
2700 {
2701 __ pop_ptr(r);
2702 __ null_check(r); // for field access must check obj.
2703 __ verify_oop(r);
2704 }
2705
2706 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2707 {
2708 const Register cache = r2;
2709 const Register obj = r4;
2710 const Register index = r3;
2711 const Register tos_state = r3;
2712 const Register off = r19;
2713 const Register flags = r6;
2714 const Register bc = r4; // uses same reg as obj, so don't mix them
2715
2716 resolve_cache_and_index_for_field(byte_no, cache, index);
2717 jvmti_post_field_access(cache, index, is_static, false);
2718
2719 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2720
2721 if (!is_static) {
2722 // obj is on the stack
2723 pop_and_check_object(obj);
2724 }
2725
2726 // 8179954: We need to make sure that the code generated for
2727 // volatile accesses forms a sequentially-consistent set of
2728 // operations when combined with STLR and LDAR. Without a leading
2729 // membar it's possible for a simple Dekker test to fail if loads
2730 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2731 // the stores in one method and we interpret the loads in another.
2732 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2733 Label notVolatile;
2734 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2735 __ membar(MacroAssembler::AnyAny);
2736 __ bind(notVolatile);
2737 }
2738
2757 __ b(Done);
2758
2759 __ bind(notByte);
2760 __ cmp(tos_state, (u1)ztos);
2761 __ br(Assembler::NE, notBool);
2762
2763 // ztos (same code as btos)
2764 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2765 __ push(ztos);
2766 // Rewrite bytecode to be faster
2767 if (rc == may_rewrite) {
2768 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2769 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2770 }
2771 __ b(Done);
2772
2773 __ bind(notBool);
2774 __ cmp(tos_state, (u1)atos);
2775 __ br(Assembler::NE, notObj);
2776 // atos
2777 if (!Arguments::is_valhalla_enabled()) {
2778 do_oop_load(_masm, field, r0, IN_HEAP);
2779 __ push(atos);
2780 if (rc == may_rewrite) {
2781 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2782 }
2783 __ b(Done);
2784 } else { // Valhalla
2785 if (is_static) {
2786 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2787 __ push(atos);
2788 __ b(Done);
2789 } else {
2790 Label is_flat;
2791 __ test_field_is_flat(flags, noreg /* temp */, is_flat);
2792 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2793 __ push(atos);
2794 if (rc == may_rewrite) {
2795 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2796 }
2797 __ b(Done);
2798 __ bind(is_flat);
2799 // field is flat (null-free or nullable with a null-marker)
2800 __ mov(r0, obj);
2801 __ read_flat_field(cache, r0);
2802 __ verify_oop(r0);
2803 __ push(atos);
2804 if (rc == may_rewrite) {
2805 patch_bytecode(Bytecodes::_fast_vgetfield, bc, r1);
2806 }
2807 __ b(Done);
2808 }
2809 }
2810
2811 __ bind(notObj);
2812 __ cmp(tos_state, (u1)itos);
2813 __ br(Assembler::NE, notInt);
2814 // itos
2815 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2816 __ push(itos);
2817 // Rewrite bytecode to be faster
2818 if (rc == may_rewrite) {
2819 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2820 }
2821 __ b(Done);
2822
2823 __ bind(notInt);
2824 __ cmp(tos_state, (u1)ctos);
2825 __ br(Assembler::NE, notChar);
2826 // ctos
2827 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2828 __ push(ctos);
2829 // Rewrite bytecode to be faster
2950 // c_rarg1: object pointer set up above (null if static)
2951 // c_rarg2: cache entry pointer
2952 // c_rarg3: jvalue object on the stack
2953 __ call_VM(noreg,
2954 CAST_FROM_FN_PTR(address,
2955 InterpreterRuntime::post_field_modification),
2956 c_rarg1, c_rarg2, c_rarg3);
2957 __ load_field_entry(cache, index);
2958 __ bind(L1);
2959 }
2960 }
2961
2962 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2963 transition(vtos, vtos);
2964
2965 const Register cache = r2;
2966 const Register index = r3;
2967 const Register tos_state = r3;
2968 const Register obj = r2;
2969 const Register off = r19;
2970 const Register flags = r6;
2971 const Register bc = r4;
2972
2973 resolve_cache_and_index_for_field(byte_no, cache, index);
2974 jvmti_post_field_mod(cache, index, is_static);
2975 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2976
2977 Label Done;
2978 {
2979 Label notVolatile;
2980 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2981 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2982 __ bind(notVolatile);
2983 }
2984
2985 // field address
2986 const Address field(obj, off);
2987
2988 Label notByte, notBool, notInt, notShort, notChar,
2989 notLong, notFloat, notObj, notDouble;
2990
2991 assert(btos == 0, "change code, btos != 0");
2992 __ cbnz(tos_state, notByte);
2993
2994 // Don't rewrite putstatic, only putfield
2995 if (is_static) rc = may_not_rewrite;
2996
2997 // btos
2998 {
2999 __ pop(btos);
3000 if (!is_static) pop_and_check_object(obj);
3009 __ cmp(tos_state, (u1)ztos);
3010 __ br(Assembler::NE, notBool);
3011
3012 // ztos
3013 {
3014 __ pop(ztos);
3015 if (!is_static) pop_and_check_object(obj);
3016 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3017 if (rc == may_rewrite) {
3018 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
3019 }
3020 __ b(Done);
3021 }
3022
3023 __ bind(notBool);
3024 __ cmp(tos_state, (u1)atos);
3025 __ br(Assembler::NE, notObj);
3026
3027 // atos
3028 {
3029 if (!Arguments::is_valhalla_enabled()) {
3030 __ pop(atos);
3031 if (!is_static) pop_and_check_object(obj);
3032 // Store into the field
3033 // Clobbers: r10, r11, r3
3034 do_oop_store(_masm, field, r0, IN_HEAP);
3035 if (rc == may_rewrite) {
3036 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
3037 }
3038 __ b(Done);
3039 } else { // Valhalla
3040 __ pop(atos);
3041 if (is_static) {
3042 Label is_nullable;
3043 __ test_field_is_not_null_free_inline_type(flags, noreg /* temp */, is_nullable);
3044 __ null_check(r0); // FIXME JDK-8341120
3045 __ bind(is_nullable);
3046 do_oop_store(_masm, field, r0, IN_HEAP);
3047 __ b(Done);
3048 } else {
3049 Label null_free_reference, is_flat, rewrite_inline;
3050 __ test_field_is_flat(flags, noreg /* temp */, is_flat);
3051 __ test_field_is_null_free_inline_type(flags, noreg /* temp */, null_free_reference);
3052 pop_and_check_object(obj);
3053 // Store into the field
3054 // Clobbers: r10, r11, r3
3055 do_oop_store(_masm, field, r0, IN_HEAP);
3056 if (rc == may_rewrite) {
3057 patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
3058 }
3059 __ b(Done);
3060 // Implementation of the inline type semantics
3061 __ bind(null_free_reference);
3062 __ null_check(r0); // FIXME JDK-8341120
3063 pop_and_check_object(obj);
3064 // Store into the field
3065 // Clobbers: r10, r11, r3
3066 do_oop_store(_masm, field, r0, IN_HEAP);
3067 __ b(rewrite_inline);
3068 __ bind(is_flat);
3069 pop_and_check_object(r7);
3070 __ write_flat_field(cache, off, index, flags, r7);
3071 __ bind(rewrite_inline);
3072 if (rc == may_rewrite) {
3073 patch_bytecode(Bytecodes::_fast_vputfield, bc, r19, true, byte_no);
3074 }
3075 __ b(Done);
3076 }
3077 } // Valhalla
3078 }
3079
3080 __ bind(notObj);
3081 __ cmp(tos_state, (u1)itos);
3082 __ br(Assembler::NE, notInt);
3083
3084 // itos
3085 {
3086 __ pop(itos);
3087 if (!is_static) pop_and_check_object(obj);
3088 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3089 if (rc == may_rewrite) {
3090 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
3091 }
3092 __ b(Done);
3093 }
3094
3095 __ bind(notInt);
3096 __ cmp(tos_state, (u1)ctos);
3097 __ br(Assembler::NE, notChar);
3162 {
3163 __ pop(dtos);
3164 if (!is_static) pop_and_check_object(obj);
3165 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
3166 if (rc == may_rewrite) {
3167 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
3168 }
3169 }
3170
3171 #ifdef ASSERT
3172 __ b(Done);
3173
3174 __ bind(notDouble);
3175 __ stop("Bad state");
3176 #endif
3177
3178 __ bind(Done);
3179
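// For a volatile store, the trailing StoreLoad|StoreStore barrier keeps the
// store ordered before subsequent volatile accesses; it pairs with the
// leading StoreStore|LoadStore barrier emitted before the store.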
3180 {
3181 Label notVolatile;
3182 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3183 __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3184 __ bind(notVolatile);
3185 }
3186 }
3187
3188 void TemplateTable::putfield(int byte_no)
3189 {
3190 putfield_or_static(byte_no, false);
3191 }
3192
3193 void TemplateTable::nofast_putfield(int byte_no) {
3194 putfield_or_static(byte_no, false, may_not_rewrite);
3195 }
3196
3197 void TemplateTable::putstatic(int byte_no) {
3198 putfield_or_static(byte_no, true);
3199 }
3200
3201 void TemplateTable::jvmti_post_fast_field_mod() {
3202 if (JvmtiExport::can_post_field_modification()) {
3203 // Check to see if a field modification watch has been set before
3204 // we take the time to call into the VM.
3205 Label L2;
3206 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3207 __ ldrw(c_rarg3, Address(rscratch1));
3208 __ cbzw(c_rarg3, L2);
3209 __ pop_ptr(r19); // copy the object pointer from tos
3210 __ verify_oop(r19);
3211 __ push_ptr(r19); // put the object pointer back on tos
3212 // Save tos values before call_VM() clobbers them. Since we have
3213 // to do it for every data type, we use the saved values as the
3214 // jvalue object.
3215 switch (bytecode()) { // load values into the jvalue object
3216 case Bytecodes::_fast_vputfield: // fall through
3217 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3218 case Bytecodes::_fast_bputfield: // fall through
3219 case Bytecodes::_fast_zputfield: // fall through
3220 case Bytecodes::_fast_sputfield: // fall through
3221 case Bytecodes::_fast_cputfield: // fall through
3222 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3223 case Bytecodes::_fast_dputfield: __ push_d(); break;
3224 case Bytecodes::_fast_fputfield: __ push_f(); break;
3225 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3226
3227 default:
3228 ShouldNotReachHere();
3229 }
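// (push_d()/push_f() take no register operand in the switch above; the value
//  being saved is presumably already in the default FP register those
//  helpers use, v0.)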
3230 __ mov(c_rarg3, esp); // points to jvalue on the stack
3231 // access constant pool cache entry
3232 __ load_field_entry(c_rarg2, r0);
3233 __ verify_oop(r19);
3234 // r19: object pointer copied above
3235 // c_rarg2: cache entry pointer
3236 // c_rarg3: jvalue object on the stack
3237 __ call_VM(noreg,
3238 CAST_FROM_FN_PTR(address,
3239 InterpreterRuntime::post_field_modification),
3240 r19, c_rarg2, c_rarg3);
3241
3242 switch (bytecode()) { // restore tos values
3243 case Bytecodes::_fast_vputfield: // fall through
3244 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3245 case Bytecodes::_fast_bputfield: // fall through
3246 case Bytecodes::_fast_zputfield: // fall through
3247 case Bytecodes::_fast_sputfield: // fall through
3248 case Bytecodes::_fast_cputfield: // fall through
3249 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3250 case Bytecodes::_fast_dputfield: __ pop_d(); break;
3251 case Bytecodes::_fast_fputfield: __ pop_f(); break;
3252 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3253 default: break;
3254 }
3255 __ bind(L2);
3256 }
3257 }
3258
3259 void TemplateTable::fast_storefield(TosState state)
3260 {
3261 transition(state, vtos);
3262
3263 ByteSize base = ConstantPoolCache::base_offset();
3271 load_resolved_field_entry(r2, r2, noreg, r1, r5);
3272 __ verify_field_offset(r1);
3273
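// A volatile store needs a leading StoreStore|LoadStore barrier so that
// earlier accesses cannot be reordered past the field write.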
3274 {
3275 Label notVolatile;
3276 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3277 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3278 __ bind(notVolatile);
3279 }
3280
3281 Label notVolatile;
3282
3283 // Get object from stack
3284 pop_and_check_object(r2);
3285
3286 // field address
3287 const Address field(r2, r1);
3288
3289 // access field
3290 switch (bytecode()) {
3291 case Bytecodes::_fast_vputfield:
3292 {
3293 Label is_flat, done;
3294 __ test_field_is_flat(r5, noreg /* temp */, is_flat);
3295 __ null_check(r0);
3296 do_oop_store(_masm, field, r0, IN_HEAP);
3297 __ b(done);
3298 __ bind(is_flat);
3299 __ load_field_entry(r4, r5);
3300 // Re-shuffle registers to match the VM call's calling convention
3301 __ mov(r19, r1);
3302 __ mov(r7, r2);
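// r4 = field entry, r19 = field offset, r7 = object holder; the remaining
// arguments presumably follow the write_flat_field call used in
// putfield_or_static above.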
3303 __ write_flat_field(r4, r19, r6, r8, r7);
3304 __ bind(done);
3305 }
3306 break;
3307 case Bytecodes::_fast_aputfield:
3308 // Clobbers: r10, r11, r3
3309 do_oop_store(_masm, field, r0, IN_HEAP);
3310 break;
3311 case Bytecodes::_fast_lputfield:
3312 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3313 break;
3314 case Bytecodes::_fast_iputfield:
3315 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3316 break;
3317 case Bytecodes::_fast_zputfield:
3318 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3319 break;
3320 case Bytecodes::_fast_bputfield:
3321 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3322 break;
3323 case Bytecodes::_fast_sputfield:
3324 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3325 break;
3326 case Bytecodes::_fast_cputfield:
3382 // r0: object
3383 __ verify_oop(r0);
3384 __ null_check(r0);
3385 const Address field(r0, r1);
3386
3387 // 8179954: We need to make sure that the code generated for
3388 // volatile accesses forms a sequentially-consistent set of
3389 // operations when combined with STLR and LDAR. Without a leading
3390 // membar it's possible for a simple Dekker test to fail if loads
3391 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3392 // the stores in one method and we interpret the loads in another.
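// (Hence the guard below: if neither C2 nor JVMCI can generate STLR-based
//  volatile stores, the extra leading barrier is unnecessary.)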
3393 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3394 Label notVolatile;
3395 __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3396 __ membar(MacroAssembler::AnyAny);
3397 __ bind(notVolatile);
3398 }
3399
3400 // access field
3401 switch (bytecode()) {
3402 case Bytecodes::_fast_vgetfield:
3403 {
3404 // field is flat
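// A flat field holds no oop to load directly; read_flat_field materializes
// the value object, leaving the resulting oop in r0 (verified below).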
3405 __ read_flat_field(r2, r0);
3406 __ verify_oop(r0);
3407 }
3408 break;
3409 case Bytecodes::_fast_agetfield:
3410 do_oop_load(_masm, field, r0, IN_HEAP);
3411 __ verify_oop(r0);
3412 break;
3413 case Bytecodes::_fast_lgetfield:
3414 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3415 break;
3416 case Bytecodes::_fast_igetfield:
3417 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3418 break;
3419 case Bytecodes::_fast_bgetfield:
3420 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3421 break;
3422 case Bytecodes::_fast_sgetfield:
3423 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3424 break;
3425 case Bytecodes::_fast_cgetfield:
3426 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3427 break;
3428 case Bytecodes::_fast_fgetfield:
3808 Label initialize_header;
3809
3810 __ get_cpool_and_tags(r4, r0);
3811 // Make sure the class we're about to instantiate has been resolved.
3812 // This is done before loading InstanceKlass, to be consistent with the order
3813 // in which the constant pool is updated (see ConstantPool::klass_at_put)
3814 const int tags_offset = Array<u1>::base_offset_in_bytes();
3815 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3816 __ lea(rscratch1, Address(rscratch1, tags_offset));
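// Load-acquire the tag byte: once the entry is observed as resolved, the
// klass published by ConstantPool::klass_at_put is guaranteed to be visible.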
3817 __ ldarb(rscratch1, rscratch1);
3818 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3819 __ br(Assembler::NE, slow_case);
3820
3821 // get InstanceKlass
3822 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3823
3824 // make sure klass is initialized
3825 assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3826 __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3827
3828 __ allocate_instance(r4, r0, r3, r1, true, slow_case);
3829 __ b(done);
3830
3831 // slow case
3832 __ bind(slow_case);
3833 __ get_constant_pool(c_rarg1);
3834 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3835 __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3836 __ verify_oop(r0);
3837
3838 // continue
3839 __ bind(done);
3840 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3841 __ membar(Assembler::StoreStore);
3842 }
3843
3844 void TemplateTable::newarray() {
3845 transition(itos, atos);
3846 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3847 __ mov(c_rarg2, r0);
3848 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3849 c_rarg1, c_rarg2);
3893 __ bind(quicked);
3894 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3895 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3896
3897 __ bind(resolved);
3898 __ load_klass(r19, r3);
3899
3900 // Generate subtype check. Blows r2, r5. Object in r3.
3901 // Superklass in r0. Subklass in r19.
3902 __ gen_subtype_check(r19, ok_is_subtype);
3903
3904 // Come here on failure
3905 __ push(r3);
3906 // object is at TOS
3907 __ b(Interpreter::_throw_ClassCastException_entry);
3908
3909 // Come here on success
3910 __ bind(ok_is_subtype);
3911 __ mov(r0, r3); // Restore the object from r3 into r0
3912
3913 __ b(done);
3914 __ bind(is_null);
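// A null reference always passes checkcast; no subtype check is needed.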
3915
3916 // Collect counts on whether this test sees nulls a lot or not.
3917 if (ProfileInterpreter) {
3918 __ profile_null_seen(r2);
3919 }
3920
3921 __ bind(done);
3922 }
3923
3924 void TemplateTable::instanceof() {
3925 transition(atos, itos);
3926 Label done, is_null, ok_is_subtype, quicked, resolved;
3927 __ cbz(r0, is_null);
3928
3929 // Get cpool & tags index
3930 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3931 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3932 // See if the bytecode has already been quickened
3933 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3934 __ lea(r1, Address(rscratch1, r19));
3935 __ ldarb(r1, r1);
3936 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3937 __ br(Assembler::EQ, quicked);
3938
3939 __ push(atos); // save receiver for result, and for GC
3940 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4018 // in the assembly code structure as well
4019 //
4020 // Stack layout:
4021 //
4022 // [expressions ] <--- esp = expression stack top
4023 // ..
4024 // [expressions ]
4025 // [monitor entry] <--- monitor block top = expression stack bot
4026 // ..
4027 // [monitor entry]
4028 // [frame data ] <--- monitor block bot
4029 // ...
4030 // [saved rfp ] <--- rfp
4031 void TemplateTable::monitorenter()
4032 {
4033 transition(atos, vtos);
4034
4035 // check for null object
4036 __ null_check(r0);
4037
4038 Label is_inline_type;
4039 __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4040 __ test_markword_is_inline_type(rscratch1, is_inline_type);
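// Value objects have no identity and therefore no monitor; attempting to
// synchronize on one throws an identity exception (see the throw path below).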
4041
4042 const Address monitor_block_top(
4043 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4044 const Address monitor_block_bot(
4045 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4046 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4047
4048 Label allocated;
4049
4050 // initialize entry pointer
4051 __ mov(c_rarg1, zr); // points to free slot or null
4052
4053 // find a free slot in the monitor block (result in c_rarg1)
4054 {
4055 Label entry, loop, exit;
4056 __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
4057 __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
4058 // c_rarg3 points to current entry, starting with top-most entry
4059
4060 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4061 // of monitor block
4123 // c_rarg1: points to monitor entry
4124 __ bind(allocated);
4125
4126 // Increment bcp to point to the next bytecode, so exception
4127 // handling for async. exceptions works correctly.
4128 // The object has already been popped from the stack, so the
4129 // expression stack looks correct.
4130 __ increment(rbcp);
4131
4132 // store object
4133 __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
4134 __ lock_object(c_rarg1);
4135
4136 // check to make sure this monitor doesn't cause stack overflow after locking
4137 __ save_bcp(); // in case of exception
4138 __ generate_stack_overflow_check(0);
4139
4140 // The bcp has already been incremented. Just need to dispatch to
4141 // next instruction.
4142 __ dispatch_next(vtos);
4143
4144 __ bind(is_inline_type);
4145 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4146 InterpreterRuntime::throw_identity_exception), r0);
4147 __ should_not_reach_here();
4148 }
4149
4150
4151 void TemplateTable::monitorexit()
4152 {
4153 transition(atos, vtos);
4154
4155 // check for null object
4156 __ null_check(r0);
4157
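// A value object can never have been locked, so monitorexit on one is an
// illegal monitor state rather than an identity error.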
4158 const int is_inline_type_mask = markWord::inline_type_pattern;
4159 Label has_identity;
4160 __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4161 __ mov(rscratch2, is_inline_type_mask);
4162 __ andr(rscratch1, rscratch1, rscratch2);
4163 __ cmp(rscratch1, rscratch2);
4164 __ br(Assembler::NE, has_identity);
4165 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4166 InterpreterRuntime::throw_illegal_monitor_state_exception));
4167 __ should_not_reach_here();
4168 __ bind(has_identity);
4169
4170 const Address monitor_block_top(
4171 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4172 const Address monitor_block_bot(
4173 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4174 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4175
4176 Label found;
4177
4178 // find matching slot
4179 {
4180 Label entry, loop;
4181 __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4182 __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4183 // c_rarg1 points to current entry, starting with top-most entry
4184
4185 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4186 // of monitor block
4187 __ b(entry);
4188
4189 __ bind(loop);
|