26 #include "asm/macroAssembler.inline.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "compiler/compilerDefinitions.inline.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "gc/shared/tlab_globals.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/objArrayKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/frame.inline.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/powerOfTwo.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
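// Routing every '__'-emitted instruction through Disassembler::hook tags it
// with the C++ source file/line that generated it, so disassembly output
// (e.g. with -XX:+PrintInterpreter) can be annotated with its origin.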
53
54 // Address computation: local variables
55
56 static inline Address iaddress(int n) {
57 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
58 }
59
60 static inline Address laddress(int n) {
61 return iaddress(n + 1);
62 }
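// Longs (and doubles) occupy two interpreter stack slots; a two-slot local is
// addressed through its higher-numbered slot, hence the n + 1.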
63
64 static inline Address faddress(int n) {
65 return iaddress(n);
156 Address src,
157 Register dst,
158 DecoratorSet decorators) {
159 __ load_heap_oop(dst, src, r10, r11, decorators);
160 }
161
162 Address TemplateTable::at_bcp(int offset) {
163 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
164 return Address(rbcp, offset);
165 }
166
167 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
168 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
169 int byte_no)
170 {
171 assert_different_registers(bc_reg, temp_reg);
172 if (!RewriteBytecodes) return;
173 Label L_patch_done;
174
175 switch (bc) {
176 case Bytecodes::_fast_aputfield:
177 case Bytecodes::_fast_bputfield:
178 case Bytecodes::_fast_zputfield:
179 case Bytecodes::_fast_cputfield:
180 case Bytecodes::_fast_dputfield:
181 case Bytecodes::_fast_fputfield:
182 case Bytecodes::_fast_iputfield:
183 case Bytecodes::_fast_lputfield:
184 case Bytecodes::_fast_sputfield:
185 {
186 // We skip bytecode quickening for putfield instructions when
187 // the put_code written to the constant pool cache is zero.
188 // This is required so that every execution of this instruction
189 // calls out to InterpreterRuntime::resolve_get_put to do
190 // additional, required work.
191 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
192 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
193 __ load_field_entry(temp_reg, bc_reg);
194 if (byte_no == f1_byte) {
195 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
740 locals_index_wide(r1);
741 __ ldr(r0, aaddress(r1));
742 }
743
744 void TemplateTable::index_check(Register array, Register index)
745 {
746 // destroys r1, rscratch1
747 // sign extend index for use by indexed load
748 // __ movl2ptr(index, index);
749 // check index
750 Register length = rscratch1;
751 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
752 __ cmpw(index, length);
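  // A single unsigned comparison performs both bounds checks: a negative index
  // wraps to a large unsigned value, so the LO branch below fails it as well.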
753 if (index != r1) {
754 // ??? convention: move aberrant index into r1 for exception message
755 assert(r1 != array, "different registers");
756 __ mov(r1, index);
757 }
758 Label ok;
759 __ br(Assembler::LO, ok);
760 // ??? convention: move array into r3 for exception message
761 __ mov(r3, array);
762 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
763 __ br(rscratch1);
764 __ bind(ok);
765 }
766
767 void TemplateTable::iaload()
768 {
769 transition(itos, itos);
770 __ mov(r1, r0);
771 __ pop_ptr(r0);
772 // r0: array
773 // r1: index
774 index_check(r0, r1); // leaves index in r1, kills rscratch1
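  // Fold the array base offset (scaled to int elements) into the index, so the
  // scaled-index load below computes array + base_offset + index*4 directly.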
775 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
776 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
777 }
778
779 void TemplateTable::laload()
780 {
781 transition(itos, ltos);
782 __ mov(r1, r0);
783 __ pop_ptr(r0);
803 void TemplateTable::daload()
804 {
805 transition(itos, dtos);
806 __ mov(r1, r0);
807 __ pop_ptr(r0);
808 // r0: array
809 // r1: index
810 index_check(r0, r1); // leaves index in r1, kills rscratch1
811 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
812 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
813 }
814
815 void TemplateTable::aaload()
816 {
817 transition(itos, atos);
818 __ mov(r1, r0);
819 __ pop_ptr(r0);
820 // r0: array
821 // r1: index
822 index_check(r0, r1); // leaves index in r1, kills rscratch1
823 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
824 do_oop_load(_masm,
825 Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
826 r0,
827 IS_ARRAY);
828 }
829
830 void TemplateTable::baload()
831 {
832 transition(itos, itos);
833 __ mov(r1, r0);
834 __ pop_ptr(r0);
835 // r0: array
836 // r1: index
837 index_check(r0, r1); // leaves index in r1, kills rscratch1
838 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
839 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
840 }
841
842 void TemplateTable::caload()
843 {
844 transition(itos, itos);
845 __ mov(r1, r0);
846 __ pop_ptr(r0);
847 // r0: array
1094 // r1: index
1095 // r3: array
1096 index_check(r3, r1); // prefer index in r1
1097 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1098 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1099 }
1100
1101 void TemplateTable::dastore() {
1102 transition(dtos, vtos);
1103 __ pop_i(r1);
1104 __ pop_ptr(r3);
1105 // v0: value
1106 // r1: index
1107 // r3: array
1108 index_check(r3, r1); // prefer index in r1
1109 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1110 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1111 }
1112
1113 void TemplateTable::aastore() {
1114 Label is_null, ok_is_subtype, done;
1115 transition(vtos, vtos);
1116 // stack: ..., array, index, value
1117 __ ldr(r0, at_tos()); // value
1118 __ ldr(r2, at_tos_p1()); // index
1119 __ ldr(r3, at_tos_p2()); // array
1120
1121 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1122
1123 index_check(r3, r2); // kills r1
1124 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1125
1126 // do array store check - check for null value first
1127 __ cbz(r0, is_null);
1128
1129 // Move subklass into r1
1130 __ load_klass(r1, r0);
1131 // Move superklass into r0
1132 __ load_klass(r0, r3);
1133 __ ldr(r0, Address(r0,
1134 ObjArrayKlass::element_klass_offset()));
1134 // The element address (array + base offset + index*oopSize) was already
1135 // formed via r4 in element_address above; r2 is now free.
1136
1137 // Generate subtype check. Blows r2, r5
1138 // Superklass in r0. Subklass in r1.
1139 __ gen_subtype_check(r1, ok_is_subtype);
1140
1141 // Come here on failure
1142 // object is at TOS
1143 __ b(Interpreter::_throw_ArrayStoreException_entry);
1144
1145 // Come here on success
1146 __ bind(ok_is_subtype);
1147
1148 // Get the value we will store
1149 __ ldr(r0, at_tos());
1150 // Now store using the appropriate barrier
1151 // Clobbers: r10, r11, r3
1152 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1153 __ b(done);
1154
1155 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1156 __ bind(is_null);
1157 __ profile_null_seen(r2);
1158
1159 // Store a null
1160 // Clobbers: r10, r11, r3
1161 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1162
1163 // Pop stack arguments
1164 __ bind(done);
1165 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1166 }
1167
1168 void TemplateTable::bastore()
1169 {
1170 transition(itos, vtos);
1171 __ pop_i(r1);
1172 __ pop_ptr(r3);
1173 // r0: value
1174 // r1: index
1175 // r3: array
1176 index_check(r3, r1); // prefer index in r1
1177
1178 // Need to check whether array is boolean or byte
1179 // since both types share the bastore bytecode.
1180 __ load_klass(r2, r3);
1181 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
1948 __ br(j_not(cc), not_taken);
1949 branch(false, false);
1950 __ bind(not_taken);
1951 __ profile_not_taken_branch(r0);
1952 }
1953
1954 void TemplateTable::if_nullcmp(Condition cc)
1955 {
1956 transition(atos, vtos);
1957 // assume branch is more often taken than not (loops use backward branches)
1958 Label not_taken;
1959 if (cc == equal)
1960 __ cbnz(r0, not_taken);
1961 else
1962 __ cbz(r0, not_taken);
1963 branch(false, false);
1964 __ bind(not_taken);
1965 __ profile_not_taken_branch(r0);
1966 }
1967
1968 void TemplateTable::if_acmp(Condition cc)
1969 {
1970 transition(atos, vtos);
1971 // assume branch is more often taken than not (loops use backward branches)
1972 Label not_taken;
1973 __ pop_ptr(r1);
1974 __ cmpoop(r1, r0);
1975 __ br(j_not(cc), not_taken);
1976 branch(false, false);
1977 __ bind(not_taken);
1978 __ profile_not_taken_branch(r0);
1979 }
1980
1981 void TemplateTable::ret() {
1982 transition(vtos, vtos);
1983 locals_index(r1);
1984 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1985 __ profile_ret(r1, r2);
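  // Reconstruct the return bcp from the stored bci:
  // rbcp = ConstMethod* + codes_offset() + bci.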
1986 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1987 __ lea(rbcp, Address(rbcp, r1));
1988 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1989 __ dispatch_next(vtos, 0, /*generate_poll*/true);
1990 }
1991
1992 void TemplateTable::wide_ret() {
1993 transition(vtos, vtos);
1994 locals_index_wide(r1);
1995 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1996 __ profile_ret(r1, r2);
1997 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1998 __ lea(rbcp, Address(rbcp, r1));
1999 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2000 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2194 assert(_desc->calls_vm(),
2195 "inconsistent calls_vm information"); // call in remove_activation
2196
2197 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2198 assert(state == vtos, "only valid state");
2199
2200 __ ldr(c_rarg1, aaddress(0));
2201 __ load_klass(r3, c_rarg1);
2202 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2203 Label skip_register_finalizer;
2204 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2205
2206 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2207
2208 __ bind(skip_register_finalizer);
2209 }
2210
2211 // Issue a StoreStore barrier after all stores but before return
2212 // from any constructor for any class with a final field. We don't
2213 // know if this method is such a constructor, so we always do so.
2214 if (_desc->bytecode() == Bytecodes::_return)
2215 __ membar(MacroAssembler::StoreStore);
2216
2217 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2218 Label no_safepoint;
2219 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2220 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2221 __ push(state);
2222 __ push_cont_fastpath(rthread);
2223 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2224 __ pop_cont_fastpath(rthread);
2225 __ pop(state);
2226 __ bind(no_safepoint);
2227 }
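  // The tbz above reads the thread's polling word and only calls into the VM
  // when the safepoint poll bit is armed; the tos value is saved and restored
  // around the call because call_VM clobbers it.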
2228
2229 // Narrow result if state is itos but result type is smaller.
2230 // Need to narrow in the return bytecode rather than in generate_return_entry
2231 // since compiled code callers expect the result to already be narrowed.
2232 if (state == itos) {
2233 __ narrow(r0);
2234 }
2585 }
2586 // c_rarg1: object pointer or null
2587 // c_rarg2: cache entry pointer
2588 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2589 InterpreterRuntime::post_field_access),
2590 c_rarg1, c_rarg2);
2591 __ load_field_entry(cache, index);
2592 __ bind(L1);
2593 }
2594 }
2595
2596 void TemplateTable::pop_and_check_object(Register r)
2597 {
2598 __ pop_ptr(r);
2599 __ null_check(r); // for field access must check obj.
2600 __ verify_oop(r);
2601 }
2602
2603 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2604 {
2605 const Register cache = r4;
2606 const Register obj = r4;
2607 const Register index = r3;
2608 const Register tos_state = r3;
2609 const Register off = r19;
2610 const Register flags = r6;
2611 const Register bc = r4; // uses same reg as obj, so don't mix them
2612
2613 resolve_cache_and_index_for_field(byte_no, cache, index);
2614 jvmti_post_field_access(cache, index, is_static, false);
2615 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2616
2617 if (!is_static) {
2618 // obj is on the stack
2619 pop_and_check_object(obj);
2620 }
2621
2622 // 8179954: We need to make sure that the code generated for
2623 // volatile accesses forms a sequentially-consistent set of
2624 // operations when combined with STLR and LDAR. Without a leading
2625 // membar it's possible for a simple Dekker test to fail if loads
2626 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2627 // the stores in one method and we interpret the loads in another.
2628 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2629 Label notVolatile;
2630 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2631 __ membar(MacroAssembler::AnyAny);
2632 __ bind(notVolatile);
2633 }
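  // Dekker-style sketch of the failure this prevents (illustrative only):
  // two threads each do "STLR flag; LDR other_flag; DMB". Both plain loads
  // may complete before either store-release becomes visible, so both threads
  // can read 0. The leading full barrier before a volatile load restores
  // sequential consistency when mixed with STLR/LDAR code.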
2634
2653 __ b(Done);
2654
2655 __ bind(notByte);
2656 __ cmp(tos_state, (u1)ztos);
2657 __ br(Assembler::NE, notBool);
2658
2659 // ztos (same code as btos)
2660 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2661 __ push(ztos);
2662 // Rewrite bytecode to be faster
2663 if (rc == may_rewrite) {
2664 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2665 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2666 }
2667 __ b(Done);
2668
2669 __ bind(notBool);
2670 __ cmp(tos_state, (u1)atos);
2671 __ br(Assembler::NE, notObj);
2672 // atos
2673 do_oop_load(_masm, field, r0, IN_HEAP);
2674 __ push(atos);
2675 if (rc == may_rewrite) {
2676 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2677 }
2678 __ b(Done);
2679
2680 __ bind(notObj);
2681 __ cmp(tos_state, (u1)itos);
2682 __ br(Assembler::NE, notInt);
2683 // itos
2684 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2685 __ push(itos);
2686 // Rewrite bytecode to be faster
2687 if (rc == may_rewrite) {
2688 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2689 }
2690 __ b(Done);
2691
2692 __ bind(notInt);
2693 __ cmp(tos_state, (u1)ctos);
2694 __ br(Assembler::NE, notChar);
2695 // ctos
2696 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2697 __ push(ctos);
2698 // Rewrite bytecode to be faster
2819 // c_rarg1: object pointer set up above (null if static)
2820 // c_rarg2: cache entry pointer
2821 // c_rarg3: jvalue object on the stack
2822 __ call_VM(noreg,
2823 CAST_FROM_FN_PTR(address,
2824 InterpreterRuntime::post_field_modification),
2825 c_rarg1, c_rarg2, c_rarg3);
2826 __ load_field_entry(cache, index);
2827 __ bind(L1);
2828 }
2829 }
2830
2831 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2832 transition(vtos, vtos);
2833
2834 const Register cache = r2;
2835 const Register index = r3;
2836 const Register tos_state = r3;
2837 const Register obj = r2;
2838 const Register off = r19;
2839 const Register flags = r0;
2840 const Register bc = r4;
2841
2842 resolve_cache_and_index_for_field(byte_no, cache, index);
2843 jvmti_post_field_mod(cache, index, is_static);
2844 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2845
2846 Label Done;
2847 __ mov(r5, flags);
2848
2849 {
2850 Label notVolatile;
2851 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2852 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2853 __ bind(notVolatile);
2854 }
2855
2856 // field address
2857 const Address field(obj, off);
2858
2859 Label notByte, notBool, notInt, notShort, notChar,
2860 notLong, notFloat, notObj, notDouble;
2861
2862 assert(btos == 0, "change code, btos != 0");
2863 __ cbnz(tos_state, notByte);
2864
2865 // Don't rewrite putstatic, only putfield
2866 if (is_static) rc = may_not_rewrite;
2867
2868 // btos
2869 {
2870 __ pop(btos);
2871 if (!is_static) pop_and_check_object(obj);
2880 __ cmp(tos_state, (u1)ztos);
2881 __ br(Assembler::NE, notBool);
2882
2883 // ztos
2884 {
2885 __ pop(ztos);
2886 if (!is_static) pop_and_check_object(obj);
2887 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
2888 if (rc == may_rewrite) {
2889 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2890 }
2891 __ b(Done);
2892 }
2893
2894 __ bind(notBool);
2895 __ cmp(tos_state, (u1)atos);
2896 __ br(Assembler::NE, notObj);
2897
2898 // atos
2899 {
2900 __ pop(atos);
2901 if (!is_static) pop_and_check_object(obj);
2902 // Store into the field
2903 // Clobbers: r10, r11, r3
2904 do_oop_store(_masm, field, r0, IN_HEAP);
2905 if (rc == may_rewrite) {
2906 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2907 }
2908 __ b(Done);
2909 }
2910
2911 __ bind(notObj);
2912 __ cmp(tos_state, (u1)itos);
2913 __ br(Assembler::NE, notInt);
2914
2915 // itos
2916 {
2917 __ pop(itos);
2918 if (!is_static) pop_and_check_object(obj);
2919 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
2920 if (rc == may_rewrite) {
2921 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2922 }
2923 __ b(Done);
2924 }
2925
2926 __ bind(notInt);
2927 __ cmp(tos_state, (u1)ctos);
2928 __ br(Assembler::NE, notChar);
2993 {
2994 __ pop(dtos);
2995 if (!is_static) pop_and_check_object(obj);
2996 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
2997 if (rc == may_rewrite) {
2998 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2999 }
3000 }
3001
3002 #ifdef ASSERT
3003 __ b(Done);
3004
3005 __ bind(notDouble);
3006 __ stop("Bad state");
3007 #endif
3008
3009 __ bind(Done);
3010
3011 {
3012 Label notVolatile;
3013 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3014 __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3015 __ bind(notVolatile);
3016 }
3017 }
3018
3019 void TemplateTable::putfield(int byte_no)
3020 {
3021 putfield_or_static(byte_no, false);
3022 }
3023
3024 void TemplateTable::nofast_putfield(int byte_no) {
3025 putfield_or_static(byte_no, false, may_not_rewrite);
3026 }
3027
3028 void TemplateTable::putstatic(int byte_no) {
3029 putfield_or_static(byte_no, true);
3030 }
3031
3032 void TemplateTable::jvmti_post_fast_field_mod() {
3033 if (JvmtiExport::can_post_field_modification()) {
3034 // Check to see if a field modification watch has been set before
3035 // we take the time to call into the VM.
3036 Label L2;
3037 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3038 __ ldrw(c_rarg3, Address(rscratch1));
3039 __ cbzw(c_rarg3, L2);
3040 __ pop_ptr(r19); // copy the object pointer from tos
3041 __ verify_oop(r19);
3042 __ push_ptr(r19); // put the object pointer back on tos
3043 // Save tos values before call_VM() clobbers them. Since we have
3044 // to do it for every data type, we use the saved values as the
3045 // jvalue object.
3046 switch (bytecode()) { // load values into the jvalue object
3047 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3048 case Bytecodes::_fast_bputfield: // fall through
3049 case Bytecodes::_fast_zputfield: // fall through
3050 case Bytecodes::_fast_sputfield: // fall through
3051 case Bytecodes::_fast_cputfield: // fall through
3052 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3053 case Bytecodes::_fast_dputfield: __ push_d(); break;
3054 case Bytecodes::_fast_fputfield: __ push_f(); break;
3055 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3056
3057 default:
3058 ShouldNotReachHere();
3059 }
3060 __ mov(c_rarg3, esp); // points to jvalue on the stack
3061 // access constant pool cache entry
3062 __ load_field_entry(c_rarg2, r0);
3063 __ verify_oop(r19);
3064 // r19: object pointer copied above
3065 // c_rarg2: cache entry pointer
3066 // c_rarg3: jvalue object on the stack
3067 __ call_VM(noreg,
3068 CAST_FROM_FN_PTR(address,
3069 InterpreterRuntime::post_field_modification),
3070 r19, c_rarg2, c_rarg3);
3071
3072 switch (bytecode()) { // restore tos values
3073 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3074 case Bytecodes::_fast_bputfield: // fall through
3075 case Bytecodes::_fast_zputfield: // fall through
3076 case Bytecodes::_fast_sputfield: // fall through
3077 case Bytecodes::_fast_cputfield: // fall through
3078 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3079 case Bytecodes::_fast_dputfield: __ pop_d(); break;
3080 case Bytecodes::_fast_fputfield: __ pop_f(); break;
3081 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3082 default: break;
3083 }
3084 __ bind(L2);
3085 }
3086 }
3087
3088 void TemplateTable::fast_storefield(TosState state)
3089 {
3090 transition(state, vtos);
3091
3092 ByteSize base = ConstantPoolCache::base_offset();
3100 load_resolved_field_entry(r2, r2, noreg, r1, r5);
3101 __ verify_field_offset(r1);
3102
3103 {
3104 Label notVolatile;
3105 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3106 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3107 __ bind(notVolatile);
3108 }
3109
3110 Label notVolatile;
3111
3112 // Get object from stack
3113 pop_and_check_object(r2);
3114
3115 // field address
3116 const Address field(r2, r1);
3117
3118 // access field
3119 switch (bytecode()) {
3120 case Bytecodes::_fast_aputfield:
3121 // Clobbers: r10, r11, r3
3122 do_oop_store(_masm, field, r0, IN_HEAP);
3123 break;
3124 case Bytecodes::_fast_lputfield:
3125 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3126 break;
3127 case Bytecodes::_fast_iputfield:
3128 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3129 break;
3130 case Bytecodes::_fast_zputfield:
3131 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3132 break;
3133 case Bytecodes::_fast_bputfield:
3134 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3135 break;
3136 case Bytecodes::_fast_sputfield:
3137 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3138 break;
3139 case Bytecodes::_fast_cputfield:
3195 // r0: object
3196 __ verify_oop(r0);
3197 __ null_check(r0);
3198 const Address field(r0, r1);
3199
3200 // 8179954: We need to make sure that the code generated for
3201 // volatile accesses forms a sequentially-consistent set of
3202 // operations when combined with STLR and LDAR. Without a leading
3203 // membar it's possible for a simple Dekker test to fail if loads
3204 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3205 // the stores in one method and we interpret the loads in another.
3206 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3207 Label notVolatile;
3208 __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3209 __ membar(MacroAssembler::AnyAny);
3210 __ bind(notVolatile);
3211 }
3212
3213 // access field
3214 switch (bytecode()) {
3215 case Bytecodes::_fast_agetfield:
3216 do_oop_load(_masm, field, r0, IN_HEAP);
3217 __ verify_oop(r0);
3218 break;
3219 case Bytecodes::_fast_lgetfield:
3220 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3221 break;
3222 case Bytecodes::_fast_igetfield:
3223 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3224 break;
3225 case Bytecodes::_fast_bgetfield:
3226 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3227 break;
3228 case Bytecodes::_fast_sgetfield:
3229 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3230 break;
3231 case Bytecodes::_fast_cgetfield:
3232 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3233 break;
3234 case Bytecodes::_fast_fgetfield:
3615 Label initialize_header;
3616
3617 __ get_cpool_and_tags(r4, r0);
3618 // Make sure the class we're about to instantiate has been resolved.
3619 // This is done before loading the InstanceKlass to be consistent with the
3620 // order in which the ConstantPool is updated (see ConstantPool::klass_at_put).
3621 const int tags_offset = Array<u1>::base_offset_in_bytes();
3622 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3623 __ lea(rscratch1, Address(rscratch1, tags_offset));
3624 __ ldarb(rscratch1, rscratch1);
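  // The load-acquire (ldarb) of the tag pairs with the release store performed
  // when the constant pool entry is resolved, so once the tag reads
  // JVM_CONSTANT_Class the resolved klass fetched below is visible too.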
3625 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3626 __ br(Assembler::NE, slow_case);
3627
3628 // get InstanceKlass
3629 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3630
3631 // make sure klass is initialized
3632 assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3633 __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3634
3635 // get instance_size in InstanceKlass (scaled to a count of bytes)
3636 __ ldrw(r3,
3637 Address(r4,
3638 Klass::layout_helper_offset()));
3639 // test to see if it is malformed in some way
3640 __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3641
3642 // Allocate the instance:
3643 // If TLAB is enabled:
3644 // Try to allocate in the TLAB.
3645 // If fails, go to the slow path.
3646 // Initialize the allocation.
3647 // Exit.
3648 //
3649 // Go to slow path.
3650
3651 if (UseTLAB) {
3652 __ tlab_allocate(r0, r3, 0, noreg, r1, slow_case);
3653
3654 if (ZeroTLAB) {
3655 // the fields have been already cleared
3656 __ b(initialize_header);
3657 }
3658
3659 // The object is initialized before the header. If the object size is
3660 // zero, go directly to the header initialization.
3661 int header_size = oopDesc::header_size() * HeapWordSize;
3662 assert(is_aligned(header_size, BytesPerLong), "oop header size must be 8-byte-aligned");
3663 __ sub(r3, r3, header_size);
3664 __ cbz(r3, initialize_header);
3665
3666 // Initialize object fields
3667 {
3668 __ add(r2, r0, header_size);
3669 Label loop;
3670 __ bind(loop);
3671 __ str(zr, Address(__ post(r2, BytesPerLong)));
3672 __ sub(r3, r3, BytesPerLong);
3673 __ cbnz(r3, loop);
3674 }
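    // r3 held the body size in bytes (header already excluded); each iteration
    // zeroes one 8-byte word via the post-indexed store until it reaches zero.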
3675
3676 // initialize object header only.
3677 __ bind(initialize_header);
3678 if (UseCompactObjectHeaders) {
3679 __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3680 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3681 } else {
3682 __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3683 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3684 __ store_klass_gap(r0, zr); // zero klass gap for compressed oops
3685 __ store_klass(r0, r4); // store klass last
3686 }
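    // With compact object headers the prototype mark word already encodes the
    // (compressed) klass, so a single store initializes the whole header;
    // otherwise mark, klass gap and klass are stored separately, klass last so
    // the object does not appear typed before the rest of the header is set.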
3687
3688 if (DTraceAllocProbes) {
3689 // Trigger dtrace event for fastpath
3690 __ push(atos); // save the return value
3691 __ call_VM_leaf(
3692 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3693 __ pop(atos); // restore the return value
3694
3695 }
3696 __ b(done);
3697 }
3698
3699 // slow case
3700 __ bind(slow_case);
3701 __ get_constant_pool(c_rarg1);
3702 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3703 __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3704 __ verify_oop(r0);
3705
3706 // continue
3707 __ bind(done);
3708 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3709 __ membar(Assembler::StoreStore);
3710 }
3711
3712 void TemplateTable::newarray() {
3713 transition(itos, atos);
3714 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3715 __ mov(c_rarg2, r0);
3716 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3717 c_rarg1, c_rarg2);
3761 __ bind(quicked);
3762 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3763 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3764
3765 __ bind(resolved);
3766 __ load_klass(r19, r3);
3767
3768 // Generate subtype check. Blows r2, r5. Object in r3.
3769 // Superklass in r0. Subklass in r19.
3770 __ gen_subtype_check(r19, ok_is_subtype);
3771
3772 // Come here on failure
3773 __ push(r3);
3774 // object is at TOS
3775 __ b(Interpreter::_throw_ClassCastException_entry);
3776
3777 // Come here on success
3778 __ bind(ok_is_subtype);
3779   __ mov(r0, r3); // Restore object from r3
3780
3781 // Collect counts on whether this test sees nulls a lot or not.
3782 if (ProfileInterpreter) {
3783 __ b(done);
3784 __ bind(is_null);
3785 __ profile_null_seen(r2);
3786 } else {
3787 __ bind(is_null); // same as 'done'
3788 }
3789 __ bind(done);
3790 }
3791
3792 void TemplateTable::instanceof() {
3793 transition(atos, itos);
3794 Label done, is_null, ok_is_subtype, quicked, resolved;
3795 __ cbz(r0, is_null);
3796
3797 // Get cpool & tags index
3798 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3799 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3800 // See if bytecode has already been quicked
3801 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3802 __ lea(r1, Address(rscratch1, r19));
3803 __ ldarb(r1, r1);
3804 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3805 __ br(Assembler::EQ, quicked);
3806
3807 __ push(atos); // save receiver for result, and for GC
3808 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3886 // in the assembly code structure as well
3887 //
3888 // Stack layout:
3889 //
3890 // [expressions ] <--- esp = expression stack top
3891 // ..
3892 // [expressions ]
3893 // [monitor entry] <--- monitor block top = expression stack bot
3894 // ..
3895 // [monitor entry]
3896 // [frame data ] <--- monitor block bot
3897 // ...
3898 // [saved rfp ] <--- rfp
3899 void TemplateTable::monitorenter()
3900 {
3901 transition(atos, vtos);
3902
3903 // check for null object
3904 __ null_check(r0);
3905
3906 const Address monitor_block_top(
3907 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3908 const Address monitor_block_bot(
3909 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3910 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3911
3912 Label allocated;
3913
3914 // initialize entry pointer
3915 __ mov(c_rarg1, zr); // points to free slot or null
3916
3917 // find a free slot in the monitor block (result in c_rarg1)
3918 {
3919 Label entry, loop, exit;
3920 __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
3921 __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
3922 // c_rarg3 points to current entry, starting with top-most entry
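    // monitor_block_top stores a stack-slot offset relative to rfp, not a raw
    // pointer; the ldr/lea pair above converts it back to an absolute address.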
3923
3924 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3925
3987 // c_rarg1: points to monitor entry
3988 __ bind(allocated);
3989
3990 // Increment bcp to point to the next bytecode, so exception
3991 // handling for async. exceptions works correctly.
3992 // The object has already been popped from the stack, so the
3993 // expression stack looks correct.
3994 __ increment(rbcp);
3995
3996 // store object
3997 __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
3998 __ lock_object(c_rarg1);
3999
4000 // check to make sure this monitor doesn't cause stack overflow after locking
4001 __ save_bcp(); // in case of exception
4002 __ generate_stack_overflow_check(0);
4003
4004 // The bcp has already been incremented. Just need to dispatch to
4005 // next instruction.
4006 __ dispatch_next(vtos);
4007 }
4008
4009
4010 void TemplateTable::monitorexit()
4011 {
4012 transition(atos, vtos);
4013
4014 // check for null object
4015 __ null_check(r0);
4016
4017 const Address monitor_block_top(
4018 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4019 const Address monitor_block_bot(
4020 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4021 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4022
4023 Label found;
4024
4025 // find matching slot
4026 {
4027 Label entry, loop;
4028 __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4029 __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4030 // c_rarg1 points to current entry, starting with top-most entry
4031
4032 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4033 // of monitor block
4034 __ b(entry);
4035
4036 __ bind(loop);
// ===========================================================================
// Second listing: the same file with the Valhalla (inline type) changes
// applied, shown for comparison with the baseline version above.
// ===========================================================================
26 #include "asm/macroAssembler.inline.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "compiler/compilerDefinitions.inline.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "gc/shared/collectedHeap.hpp"
31 #include "gc/shared/tlab_globals.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/method.inline.hpp"
39 #include "oops/objArrayKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/arguments.hpp"
47 #include "runtime/frame.inline.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/stubRoutines.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "utilities/powerOfTwo.hpp"
52
53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
54
55 // Address computation: local variables
56
57 static inline Address iaddress(int n) {
58 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
59 }
60
61 static inline Address laddress(int n) {
62 return iaddress(n + 1);
63 }
64
65 static inline Address faddress(int n) {
66 return iaddress(n);
157 Address src,
158 Register dst,
159 DecoratorSet decorators) {
160 __ load_heap_oop(dst, src, r10, r11, decorators);
161 }
162
163 Address TemplateTable::at_bcp(int offset) {
164 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
165 return Address(rbcp, offset);
166 }
167
168 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
169 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
170 int byte_no)
171 {
172 assert_different_registers(bc_reg, temp_reg);
173 if (!RewriteBytecodes) return;
174 Label L_patch_done;
175
176 switch (bc) {
177 case Bytecodes::_fast_vputfield:
178 case Bytecodes::_fast_aputfield:
179 case Bytecodes::_fast_bputfield:
180 case Bytecodes::_fast_zputfield:
181 case Bytecodes::_fast_cputfield:
182 case Bytecodes::_fast_dputfield:
183 case Bytecodes::_fast_fputfield:
184 case Bytecodes::_fast_iputfield:
185 case Bytecodes::_fast_lputfield:
186 case Bytecodes::_fast_sputfield:
187 {
188 // We skip bytecode quickening for putfield instructions when
189 // the put_code written to the constant pool cache is zero.
190 // This is required so that every execution of this instruction
191 // calls out to InterpreterRuntime::resolve_get_put to do
192 // additional, required work.
193 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
194 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
195 __ load_field_entry(temp_reg, bc_reg);
196 if (byte_no == f1_byte) {
197 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
742 locals_index_wide(r1);
743 __ ldr(r0, aaddress(r1));
744 }
745
746 void TemplateTable::index_check(Register array, Register index)
747 {
748 // destroys r1, rscratch1
749 // sign extend index for use by indexed load
750 // __ movl2ptr(index, index);
751 // check index
752 Register length = rscratch1;
753 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
754 __ cmpw(index, length);
755 if (index != r1) {
756 // ??? convention: move aberrant index into r1 for exception message
757 assert(r1 != array, "different registers");
758 __ mov(r1, index);
759 }
760 Label ok;
761 __ br(Assembler::LO, ok);
762 // ??? convention: move array into r3 for exception message
763 __ mov(r3, array);
764 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
765 __ br(rscratch1);
766 __ bind(ok);
767 }
768
769 void TemplateTable::iaload()
770 {
771 transition(itos, itos);
772 __ mov(r1, r0);
773 __ pop_ptr(r0);
774 // r0: array
775 // r1: index
776 index_check(r0, r1); // leaves index in r1, kills rscratch1
777 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
778 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
779 }
780
781 void TemplateTable::laload()
782 {
783 transition(itos, ltos);
784 __ mov(r1, r0);
785 __ pop_ptr(r0);
805 void TemplateTable::daload()
806 {
807 transition(itos, dtos);
808 __ mov(r1, r0);
809 __ pop_ptr(r0);
810 // r0: array
811 // r1: index
812 index_check(r0, r1); // leaves index in r1, kills rscratch1
813 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
814 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
815 }
816
817 void TemplateTable::aaload()
818 {
819 transition(itos, atos);
820 __ mov(r1, r0);
821 __ pop_ptr(r0);
822 // r0: array
823 // r1: index
824 index_check(r0, r1); // leaves index in r1, kills rscratch1
825 __ profile_array_type<ArrayLoadData>(r2, r0, r4);
826 if (UseArrayFlattening) {
827 Label is_flat_array, done;
828
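    // Valhalla: a flat array stores its elements inline rather than as oops,
    // so loading an element requires buffering it as a heap instance; that
    // path is delegated to InterpreterRuntime::flat_array_load below.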
829 __ test_flat_array_oop(r0, rscratch1 /*temp*/, is_flat_array);
830 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
831 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
832
833 __ b(done);
834 __ bind(is_flat_array);
835 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), r0, r1);
836 // Ensure the stores to copy the inline field contents are visible
837 // before any subsequent store that publishes this reference.
838 __ membar(Assembler::StoreStore);
839 __ bind(done);
840 } else {
841 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
842 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
843 }
844 __ profile_element_type(r2, r0, r4);
845 }
846
847 void TemplateTable::baload()
848 {
849 transition(itos, itos);
850 __ mov(r1, r0);
851 __ pop_ptr(r0);
852 // r0: array
853 // r1: index
854 index_check(r0, r1); // leaves index in r1, kills rscratch1
855 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
856 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
857 }
858
859 void TemplateTable::caload()
860 {
861 transition(itos, itos);
862 __ mov(r1, r0);
863 __ pop_ptr(r0);
864 // r0: array
1111 // r1: index
1112 // r3: array
1113 index_check(r3, r1); // prefer index in r1
1114 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1115 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1116 }
1117
1118 void TemplateTable::dastore() {
1119 transition(dtos, vtos);
1120 __ pop_i(r1);
1121 __ pop_ptr(r3);
1122 // v0: value
1123 // r1: index
1124 // r3: array
1125 index_check(r3, r1); // prefer index in r1
1126 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1127 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1128 }
1129
1130 void TemplateTable::aastore() {
1131 Label is_null, is_flat_array, ok_is_subtype, done;
1132 transition(vtos, vtos);
1133 // stack: ..., array, index, value
1134 __ ldr(r0, at_tos()); // value
1135 __ ldr(r2, at_tos_p1()); // index
1136 __ ldr(r3, at_tos_p2()); // array
1137
1138 index_check(r3, r2); // kills r1
1139
1140 __ profile_array_type<ArrayStoreData>(r4, r3, r5);
1141 __ profile_multiple_element_types(r4, r0, r5, r6);
1142
1143 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1144 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1145 // Be careful not to clobber r4 below
1146
1147 // do array store check - check for null value first
1148 __ cbz(r0, is_null);
1149
1150 // Move array class to r5
1151 __ load_klass(r5, r3);
1152
1153 if (UseArrayFlattening) {
1154 __ ldrw(r6, Address(r5, Klass::layout_helper_offset()));
1155 __ test_flat_array_layout(r6, is_flat_array);
1156 }
1157
1158 // Move subklass into r1
1159 __ load_klass(r1, r0);
1160
1161 // Move array element superklass into r0
1162 __ ldr(r0, Address(r5, ObjArrayKlass::element_klass_offset()));
1163 // The element address (array + base offset + index*oopSize) was already
1164 // formed via r4 in element_address above; r2 is now free.
1164
1165 // Generate subtype check. Blows r2, r5
1166 // Superklass in r0. Subklass in r1.
1167
1168 // is "r1 <: r0" ? (value subclass <: array element superclass)
1169 __ gen_subtype_check(r1, ok_is_subtype, false);
1170
1171 // Come here on failure
1172 // object is at TOS
1173 __ b(Interpreter::_throw_ArrayStoreException_entry);
1174
1175 // Come here on success
1176 __ bind(ok_is_subtype);
1177
1178 // Get the value we will store
1179 __ ldr(r0, at_tos());
1180 // Now store using the appropriate barrier
1181 // Clobbers: r10, r11, r3
1182 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1183 __ b(done);
1184
1185 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1186 __ bind(is_null);
1187 if (Arguments::is_valhalla_enabled()) {
1188 Label is_null_into_value_array_npe, store_null;
1189
1190 if (UseArrayFlattening) {
1191 __ test_flat_array_oop(r3, rscratch1, is_flat_array);
1192 }
1193
1194 // No way to store null in a null-free array
1195 __ test_null_free_array_oop(r3, rscratch1, is_null_into_value_array_npe);
1196 __ b(store_null);
1197
1198 __ bind(is_null_into_value_array_npe);
1199 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1200
1201 __ bind(store_null);
1202 }
1203
1204 // Store a null
1205 // Clobbers: r10, r11, r3
1206 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1207 __ b(done);
1208
1209 if (UseArrayFlattening) {
1210 Label is_type_ok;
1211 __ bind(is_flat_array); // Store non-null value to flat
1212
1213 __ ldr(r0, at_tos()); // value
1214 __ ldr(r3, at_tos_p1()); // index
1215 __ ldr(r2, at_tos_p2()); // array
1216 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), r0, r2, r3);
1217 }
1218
1219 // Pop stack arguments
1220 __ bind(done);
1221 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1222 }
1223
1224 void TemplateTable::bastore()
1225 {
1226 transition(itos, vtos);
1227 __ pop_i(r1);
1228 __ pop_ptr(r3);
1229 // r0: value
1230 // r1: index
1231 // r3: array
1232 index_check(r3, r1); // prefer index in r1
1233
1234 // Need to check whether array is boolean or byte
1235 // since both types share the bastore bytecode.
1236 __ load_klass(r2, r3);
1237 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2004 __ br(j_not(cc), not_taken);
2005 branch(false, false);
2006 __ bind(not_taken);
2007 __ profile_not_taken_branch(r0);
2008 }
2009
2010 void TemplateTable::if_nullcmp(Condition cc)
2011 {
2012 transition(atos, vtos);
2013 // assume branch is more often taken than not (loops use backward branches)
2014 Label not_taken;
2015 if (cc == equal)
2016 __ cbnz(r0, not_taken);
2017 else
2018 __ cbz(r0, not_taken);
2019 branch(false, false);
2020 __ bind(not_taken);
2021 __ profile_not_taken_branch(r0);
2022 }
2023
2024 void TemplateTable::if_acmp(Condition cc) {
2025 transition(atos, vtos);
2026 // assume branch is more often taken than not (loops use backward branches)
2027 Label taken, not_taken;
2028 __ pop_ptr(r1);
2029
2030 __ profile_acmp(r2, r1, r0, r4);
2031
2032 Register is_inline_type_mask = rscratch1;
2033 __ mov(is_inline_type_mask, markWord::inline_type_pattern);
2034
2035 if (Arguments::is_valhalla_enabled()) {
2036 __ cmp(r1, r0);
2037 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2038
2039 // might be substitutable, test if either r0 or r1 is null
2040 __ andr(r2, r0, r1);
2041 __ cbz(r2, (cc == equal) ? not_taken : taken);
2042
2043 // and both are values ?
2044 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2045 __ andr(r2, r2, is_inline_type_mask);
2046 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2047 __ andr(r4, r4, is_inline_type_mask);
2048 __ andr(r2, r2, r4);
2049 __ cmp(r2, is_inline_type_mask);
2050 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
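    // r2 now holds (mark1 & pattern) & (mark2 & pattern): it equals the
    // pattern only if both mark words carry the inline-type bits, so a single
    // compare tests both operands.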
2051
2052 // same value klass ?
2053 __ load_metadata(r2, r1);
2054 __ load_metadata(r4, r0);
2055 __ cmp(r2, r4);
2056 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2057
2058 // Know both are the same type, let's test for substitutability...
2059 if (cc == equal) {
2060 invoke_is_substitutable(r0, r1, taken, not_taken);
2061 } else {
2062 invoke_is_substitutable(r0, r1, not_taken, taken);
2063 }
2064 __ stop("Not reachable");
2065 }
2066
2067 __ cmpoop(r1, r0);
2068 __ br(j_not(cc), not_taken);
2069 __ bind(taken);
2070 branch(false, false);
2071 __ bind(not_taken);
2072 __ profile_not_taken_branch(r0, true);
2073 }
2074
2075 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2076 Label& is_subst, Label& not_subst) {
2077
2078 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2079   // On return, r0 holds the answer; branch to the matching outcome.
2080 __ cbz(r0, not_subst);
2081 __ b(is_subst);
2082 }
2083
2084
2085 void TemplateTable::ret() {
2086 transition(vtos, vtos);
2087 locals_index(r1);
2088 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2089 __ profile_ret(r1, r2);
2090 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2091 __ lea(rbcp, Address(rbcp, r1));
2092 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2093 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2094 }
2095
2096 void TemplateTable::wide_ret() {
2097 transition(vtos, vtos);
2098 locals_index_wide(r1);
2099 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2100 __ profile_ret(r1, r2);
2101 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2102 __ lea(rbcp, Address(rbcp, r1));
2103 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2104 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2298 assert(_desc->calls_vm(),
2299 "inconsistent calls_vm information"); // call in remove_activation
2300
2301 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2302 assert(state == vtos, "only valid state");
2303
2304 __ ldr(c_rarg1, aaddress(0));
2305 __ load_klass(r3, c_rarg1);
2306 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2307 Label skip_register_finalizer;
2308 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2309
2310 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2311
2312 __ bind(skip_register_finalizer);
2313 }
2314
2315 // Issue a StoreStore barrier after all stores but before return
2316 // from any constructor for any class with a final field. We don't
2317 // know if this method is such a constructor, so we always do so.
2318 if (_desc->bytecode() == Bytecodes::_return
2319 || _desc->bytecode() == Bytecodes::_return_register_finalizer)
2320 __ membar(MacroAssembler::StoreStore);
2321
2322 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2323 Label no_safepoint;
2324 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2325 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2326 __ push(state);
2327 __ push_cont_fastpath(rthread);
2328 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2329 __ pop_cont_fastpath(rthread);
2330 __ pop(state);
2331 __ bind(no_safepoint);
2332 }
2333
2334 // Narrow result if state is itos but result type is smaller.
2335 // Need to narrow in the return bytecode rather than in generate_return_entry
2336 // since compiled code callers expect the result to already be narrowed.
2337 if (state == itos) {
2338 __ narrow(r0);
2339 }
2690 }
2691 // c_rarg1: object pointer or null
2692 // c_rarg2: cache entry pointer
2693 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2694 InterpreterRuntime::post_field_access),
2695 c_rarg1, c_rarg2);
2696 __ load_field_entry(cache, index);
2697 __ bind(L1);
2698 }
2699 }
2700
2701 void TemplateTable::pop_and_check_object(Register r)
2702 {
2703 __ pop_ptr(r);
2704 __ null_check(r); // for field access must check obj.
2705 __ verify_oop(r);
2706 }
2707
2708 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2709 {
2710 const Register cache = r2;
2711 const Register obj = r4;
2712 const Register klass = r5;
2713 const Register inline_klass = r7;
2714 const Register field_index = r23;
2715 const Register index = r3;
2716 const Register tos_state = r3;
2717 const Register off = r19;
2718 const Register flags = r6;
2719 const Register bc = r4; // uses same reg as obj, so don't mix them
2720
2721 resolve_cache_and_index_for_field(byte_no, cache, index);
2722 jvmti_post_field_access(cache, index, is_static, false);
2723
2724 // Valhalla extras
2725 __ load_unsigned_short(field_index, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
2726 __ ldr(klass, Address(cache, ResolvedFieldEntry::field_holder_offset()));
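  // Valhalla: the field index and holder klass are loaded up front, before
  // the cache/index registers are clobbered, for use on the flat-field paths
  // further down (e.g. read_flat_field).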
2727
2728 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2729
2730 if (!is_static) {
2731 // obj is on the stack
2732 pop_and_check_object(obj);
2733 }
2734
2735 // 8179954: We need to make sure that the code generated for
2736 // volatile accesses forms a sequentially-consistent set of
2737 // operations when combined with STLR and LDAR. Without a leading
2738 // membar it's possible for a simple Dekker test to fail if loads
2739 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2740 // the stores in one method and we interpret the loads in another.
2741 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2742 Label notVolatile;
2743 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2744 __ membar(MacroAssembler::AnyAny);
2745 __ bind(notVolatile);
2746 }
2747
2766 __ b(Done);
2767
2768 __ bind(notByte);
2769 __ cmp(tos_state, (u1)ztos);
2770 __ br(Assembler::NE, notBool);
2771
2772 // ztos (same code as btos)
2773 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2774 __ push(ztos);
2775 // Rewrite bytecode to be faster
2776 if (rc == may_rewrite) {
2777 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2778 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2779 }
2780 __ b(Done);
2781
2782 __ bind(notBool);
2783 __ cmp(tos_state, (u1)atos);
2784 __ br(Assembler::NE, notObj);
2785 // atos
2786 if (!Arguments::is_valhalla_enabled()) {
2787 do_oop_load(_masm, field, r0, IN_HEAP);
2788 __ push(atos);
2789 if (rc == may_rewrite) {
2790 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2791 }
2792 __ b(Done);
2793 } else { // Valhalla
2794 if (is_static) {
2795 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2796 __ push(atos);
2797 __ b(Done);
2798 } else {
2799 Label is_flat;
2800 __ test_field_is_flat(flags, noreg /* temp */, is_flat);
2801 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2802 __ push(atos);
2803 if (rc == may_rewrite) {
2804 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2805 }
2806 __ b(Done);
2807 __ bind(is_flat);
2808 // field is flat (null-free or nullable with a null-marker)
2809 __ mov(r0, obj);
2810 __ read_flat_field(cache, field_index, off, inline_klass /* temp */, r0);
2811 __ verify_oop(r0);
2812 __ push(atos);
2813 if (rc == may_rewrite) {
2814 patch_bytecode(Bytecodes::_fast_vgetfield, bc, r1);
2815 }
2816 __ b(Done);
2817 }
2818 }
2819
2820 __ bind(notObj);
2821 __ cmp(tos_state, (u1)itos);
2822 __ br(Assembler::NE, notInt);
2823 // itos
2824 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2825 __ push(itos);
2826 // Rewrite bytecode to be faster
2827 if (rc == may_rewrite) {
2828 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2829 }
2830 __ b(Done);
2831
2832 __ bind(notInt);
2833 __ cmp(tos_state, (u1)ctos);
2834 __ br(Assembler::NE, notChar);
2835 // ctos
2836 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2837 __ push(ctos);
2838 // Rewrite bytecode to be faster
2959 // c_rarg1: object pointer set up above (null if static)
2960 // c_rarg2: cache entry pointer
2961 // c_rarg3: jvalue object on the stack
2962 __ call_VM(noreg,
2963 CAST_FROM_FN_PTR(address,
2964 InterpreterRuntime::post_field_modification),
2965 c_rarg1, c_rarg2, c_rarg3);
2966 __ load_field_entry(cache, index);
2967 __ bind(L1);
2968 }
2969 }
2970
2971 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2972 transition(vtos, vtos);
2973
2974 const Register cache = r2;
2975 const Register index = r3;
2976 const Register tos_state = r3;
2977 const Register obj = r2;
2978 const Register off = r19;
2979 const Register flags = r6;
2980 const Register bc = r4;
2981
2982 resolve_cache_and_index_for_field(byte_no, cache, index);
2983 jvmti_post_field_mod(cache, index, is_static);
2984 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2985
2986 Label Done;
2987 {
2988 Label notVolatile;
2989 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2990 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2991 __ bind(notVolatile);
2992 }
2993
2994 // field address
2995 const Address field(obj, off);
2996
2997 Label notByte, notBool, notInt, notShort, notChar,
2998 notLong, notFloat, notObj, notDouble;
2999
3000 assert(btos == 0, "change code, btos != 0");
3001 __ cbnz(tos_state, notByte);
3002
3003 // Don't rewrite putstatic, only putfield
3004 if (is_static) rc = may_not_rewrite;
3005
3006 // btos
3007 {
3008 __ pop(btos);
3009 if (!is_static) pop_and_check_object(obj);
3018 __ cmp(tos_state, (u1)ztos);
3019 __ br(Assembler::NE, notBool);
3020
3021 // ztos
3022 {
3023 __ pop(ztos);
3024 if (!is_static) pop_and_check_object(obj);
3025 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3026 if (rc == may_rewrite) {
3027 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
3028 }
3029 __ b(Done);
3030 }
3031
3032 __ bind(notBool);
3033 __ cmp(tos_state, (u1)atos);
3034 __ br(Assembler::NE, notObj);
3035
3036 // atos
3037 {
3038 if (!Arguments::is_valhalla_enabled()) {
3039 __ pop(atos);
3040 if (!is_static) pop_and_check_object(obj);
3041 // Store into the field
3042 // Clobbers: r10, r11, r3
3043 do_oop_store(_masm, field, r0, IN_HEAP);
3044 if (rc == may_rewrite) {
3045 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
3046 }
3047 __ b(Done);
3048 } else { // Valhalla
3049 __ pop(atos);
3050 if (is_static) {
3051 Label is_nullable;
3052 __ test_field_is_not_null_free_inline_type(flags, noreg /* temp */, is_nullable);
3053 __ null_check(r0); // FIXME JDK-8341120
3054 __ bind(is_nullable);
3055 do_oop_store(_masm, field, r0, IN_HEAP);
3056 __ b(Done);
3057 } else {
3058 Label null_free_reference, is_flat, rewrite_inline;
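      // Three cases to distinguish: a flat field (value stored inline in the
      // holder, possibly with a null marker), a null-free reference field
      // (the store must reject null), and an ordinary reference field.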
3059 __ test_field_is_flat(flags, noreg /* temp */, is_flat);
3060 __ test_field_is_null_free_inline_type(flags, noreg /* temp */, null_free_reference);
3061 pop_and_check_object(obj);
3062 // Store into the field
3063 // Clobbers: r10, r11, r3
3064 do_oop_store(_masm, field, r0, IN_HEAP);
3065 if (rc == may_rewrite) {
3066 patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
3067 }
3068 __ b(Done);
3069 // Implementation of the inline type semantic
3070 __ bind(null_free_reference);
3071 __ null_check(r0); // FIXME JDK-8341120
3072 pop_and_check_object(obj);
3073 // Store into the field
3074 // Clobbers: r10, r11, r3
3075 do_oop_store(_masm, field, r0, IN_HEAP);
3076 __ b(rewrite_inline);
3077 __ bind(is_flat);
3078 pop_and_check_object(r7);
3079 __ write_flat_field(cache, off, index, flags, r7);
3080 __ bind(rewrite_inline);
3081 if (rc == may_rewrite) {
3082 patch_bytecode(Bytecodes::_fast_vputfield, bc, r19, true, byte_no);
3083 }
3084 __ b(Done);
3085 }
3086 } // Valhalla
3087 }

  __ bind(notObj);
  __ cmp(tos_state, (u1)itos);
  __ br(Assembler::NE, notInt);

  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notInt);
  __ cmp(tos_state, (u1)ctos);
  __ br(Assembler::NE, notChar);

  // ctos
  {
    __ pop(ctos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notChar);
  __ cmp(tos_state, (u1)stos);
  __ br(Assembler::NE, notShort);

  // stos
  {
    __ pop(stos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notShort);
  __ cmp(tos_state, (u1)ltos);
  __ br(Assembler::NE, notLong);

  // ltos
  {
    __ pop(ltos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notLong);
  __ cmp(tos_state, (u1)ftos);
  __ br(Assembler::NE, notFloat);

  // ftos
  {
    __ pop(ftos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(notFloat);
#ifdef ASSERT
  __ cmp(tos_state, (u1)dtos);
  __ br(Assembler::NE, notDouble);
#endif

  // dtos
  {
    __ pop(dtos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
    if (rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
    }
  }

#ifdef ASSERT
  __ b(Done);

  __ bind(notDouble);
  __ stop("Bad state");
#endif

  __ bind(Done);

  {
    Label notVolatile;
    __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
    __ bind(notVolatile);
  }
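  // The trailing StoreLoad|StoreStore barrier above drains a volatile
  // store before any subsequent access, e.g. in a Dekker-style handshake:
  //
  //   flag = true;            // the volatile store above
  //   membar StoreLoad        // (here)
  //   if (otherFlag) { ... }  // a later load must not pass the store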
}

void TemplateTable::putfield(int byte_no)
{
  putfield_or_static(byte_no, false);
}

void TemplateTable::nofast_putfield(int byte_no) {
  putfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}

void TemplateTable::jvmti_post_fast_field_mod() {
  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ ldrw(c_rarg3, Address(rscratch1));
    __ cbzw(c_rarg3, L2);
    __ pop_ptr(r19);  // copy the object pointer from tos
    __ verify_oop(r19);
    __ push_ptr(r19); // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {          // load values into the jvalue object
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(r0); break;
    case Bytecodes::_fast_dputfield: __ push_d(); break;
    case Bytecodes::_fast_fputfield: __ push_f(); break;
    case Bytecodes::_fast_lputfield: __ push_l(r0); break;

    default:
      ShouldNotReachHere();
    }
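    // After the push the expression stack itself serves as the jvalue:
    // esp (captured into c_rarg3 below) points at the saved value, e.g.
    //
    //   [saved tos value]  <--- esp == jvalue*
    //   [object pointer ]
    //   ...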
    __ mov(c_rarg3, esp); // points to jvalue on the stack
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, r0);
    __ verify_oop(r19);
    // r19: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               r19, c_rarg2, c_rarg3);

    switch (bytecode()) {          // restore tos values
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
    case Bytecodes::_fast_dputfield: __ pop_d(); break;
    case Bytecodes::_fast_fputfield: __ pop_f(); break;
    case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
    default: break;
    }
    __ bind(L2);
  }
}

void TemplateTable::fast_storefield(TosState state)
{
  transition(state, vtos);

  ByteSize base = ConstantPoolCache::base_offset();

  jvmti_post_fast_field_mod();

  // access constant pool cache
  __ load_field_entry(r2, r1);

  // R1: field offset, R2: field holder, R5: flags
  load_resolved_field_entry(r2, r2, noreg, r1, r5);
  __ verify_field_offset(r1);

  {
    Label notVolatile;
    __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
    __ bind(notVolatile);
  }

  Label notVolatile;

  // Get object from stack
  pop_and_check_object(r2);

  // field address
  const Address field(r2, r1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vputfield:
    {
      Label is_flat, done;
      __ test_field_is_flat(r5, noreg /* temp */, is_flat);
      __ null_check(r0);
      do_oop_store(_masm, field, r0, IN_HEAP);
      __ b(done);
      __ bind(is_flat);
      __ load_field_entry(r4, r5);
      // Re-shuffle registers to match the VM call's calling convention
      __ mov(r19, r1);
      __ mov(r7, r2);
      __ write_flat_field(r4, r19, r6, r8, r7);
      __ bind(done);
    }
    break;
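  // A flat field has no header of its own: its payload is embedded in
  // the holder, so the flat path above copies the value field-by-field
  // via write_flat_field rather than writing a single reference.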
  case Bytecodes::_fast_aputfield:
    // Clobbers: r10, r11, r3
    do_oop_store(_masm, field, r0, IN_HEAP);
    break;
  case Bytecodes::_fast_lputfield:
    __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_iputfield:
    __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_zputfield:
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_bputfield:
    __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_sputfield:
    __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_cputfield:

  // r0: object
  __ verify_oop(r0);
  __ null_check(r0);
  const Address field(r0, r1);

  // 8179954: We need to make sure that the code generated for
  // volatile accesses forms a sequentially-consistent set of
  // operations when combined with STLR and LDAR.  Without a leading
  // membar it's possible for a simple Dekker test to fail if loads
  // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
  // the stores in one method and we interpret the loads in another.
  if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
    Label notVolatile;
    __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::AnyAny);
    __ bind(notVolatile);
  }
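  // AnyAny is a full fence: the volatile load below cannot be satisfied
  // before earlier accesses complete, so an LDR-based load still pairs
  // correctly with STLR-based volatile stores emitted elsewhere, e.g.
  //
  //   T1: data = 1; STLR flag = 1;     T2: if (LDR flag) { read data }
  //
  // T2 must not observe the flag set and then read stale data.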

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vgetfield:
    {
      Register index = r4, tmp = r7;
      // field is flat
      __ load_unsigned_short(index, Address(r2, in_bytes(ResolvedFieldEntry::field_index_offset())));
      __ read_flat_field(r2, index, r1, tmp /* temp */, r0);
      __ verify_oop(r0);
    }
    break;
  case Bytecodes::_fast_agetfield:
    do_oop_load(_masm, field, r0, IN_HEAP);
    __ verify_oop(r0);
    break;
  case Bytecodes::_fast_lgetfield:
    __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_igetfield:
    __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_bgetfield:
    __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_sgetfield:
    __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_cgetfield:
    __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
    break;
  case Bytecodes::_fast_fgetfield:
  Label initialize_header;

  __ get_cpool_and_tags(r4, r0);
  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading InstanceKlass to be consistent with the
  // order in which the ConstantPool is updated (see ConstantPool::klass_at_put).
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
  __ lea(rscratch1, Address(rscratch1, tags_offset));
  __ ldarb(rscratch1, rscratch1);
  __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
  __ br(Assembler::NE, slow_case);

  // get InstanceKlass
  __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);

  // make sure klass is initialized
  assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
  __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
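  // clinit_barrier falls through only when the klass in r4 is fully
  // initialized (or is being initialized by the current thread);
  // otherwise it branches to slow_case, so the inline allocation below
  // never creates an instance of an uninitialized class.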

  __ allocate_instance(r4, r0, r3, r1, true, slow_case);
  __ b(done);

  // slow case
  __ bind(slow_case);
  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(r0);

  // continue
  __ bind(done);
  // Must prevent reordering of stores for object initialization
  // with stores that publish the new object.
  __ membar(Assembler::StoreStore);
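  // Without this barrier another thread could observe the published
  // reference before the initializing stores, e.g.
  //
  //   <header/field stores>   // initialization (above)
  //   membar StoreStore       // (here)
  //   str obj, [published]    // a later publishing store cannot be
  //                           // reordered ahead of the initialization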
}

void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  __ mov(c_rarg2, r0);
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);

  __ bind(quicked);
  __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
  __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass

  __ bind(resolved);
  __ load_klass(r19, r3);

  // Generate subtype check.  Blows r2, r5.  Object in r3.
  // Superklass in r0.  Subklass in r19.
  __ gen_subtype_check(r19, ok_is_subtype);

  // Come here on failure
  __ push(r3);
  // object is at TOS
  __ b(Interpreter::_throw_ClassCastException_entry);

  // Come here on success
  __ bind(ok_is_subtype);
  __ mov(r0, r3); // Restore object from r3

  __ b(done);
  __ bind(is_null);

  // Collect counts on whether this test sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ profile_null_seen(r2);
  }

  __ bind(done);
}

void TemplateTable::instanceof() {
  transition(atos, itos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ cbz(r0, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
  __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
  // See if bytecode has already been quicked
  __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
  __ lea(r1, Address(rscratch1, r19));
  __ ldarb(r1, r1);
  __ cmp(r1, (u1)JVM_CONSTANT_Class);
  __ br(Assembler::EQ, quicked);

  __ push(atos); // save receiver for result, and for GC
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
//-----------------------------------------------------------------------------
// Synchronization
//
// Note: monitorenter & exit are symmetric routines; this is reflected
//       in the assembly code structure as well
//
// Stack layout:
//
// [expressions  ] <--- esp               = expression stack top
// ..
// [expressions  ]
// [monitor entry] <--- monitor block top = expression stack bot
// ..
// [monitor entry]
// [frame data   ] <--- monitor block bot
// ...
// [saved rfp    ] <--- rfp
void TemplateTable::monitorenter()
{
  transition(atos, vtos);

  // check for null object
  __ null_check(r0);

  Label is_inline_type;
  __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
  __ test_markword_is_inline_type(rscratch1, is_inline_type);

  const Address monitor_block_top(
        rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
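  // monitor_block_top holds an offset relative to rfp; the search below
  // first derelativizes it into an absolute address, while the block
  // bottom is a fixed slot address computed directly from rfp.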

  Label allocated;

  // initialize entry pointer
  __ mov(c_rarg1, zr); // points to free slot or null

  // find a free slot in the monitor block (result in c_rarg1)
  {
    Label entry, loop, exit;
    __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
    __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
    // c_rarg3 points to current entry, starting with top-most entry

    __ lea(c_rarg2, monitor_block_bot); // points to word before bottom

  // c_rarg1: points to monitor entry
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception
  // handling for async. exceptions works correctly.
  // The object has already been popped from the stack, so the
  // expression stack looks correct.
  __ increment(rbcp);

  // store object
  __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
  __ lock_object(c_rarg1);

  // check to make sure this monitor doesn't cause stack overflow after locking
  __ save_bcp(); // in case of exception
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to
  // next instruction.
  __ dispatch_next(vtos);

  __ bind(is_inline_type);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_identity_exception), r0);
  __ should_not_reach_here();
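  // Value objects have no identity and therefore no monitor to enter;
  // synchronizing on one always ends here with an exception.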
}


void TemplateTable::monitorexit()
{
  transition(atos, vtos);

  // check for null object
  __ null_check(r0);

  const int is_inline_type_mask = markWord::inline_type_pattern;
  Label has_identity;
  __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
  __ mov(rscratch2, is_inline_type_mask);
  __ andr(rscratch1, rscratch1, rscratch2);
  __ cmp(rscratch1, rscratch2);
  __ br(Assembler::NE, has_identity);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();
  __ bind(has_identity);
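  // The mark word test above is, in effect:
  //
  //   if ((mark & inline_type_pattern) == inline_type_pattern)
  //     throw IllegalMonitorStateException;  // value object: never locked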

  const Address monitor_block_top(
        rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Label found;

  // find matching slot
  {
    Label entry, loop;
    __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
    __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
    // c_rarg1 points to current entry, starting with top-most entry

    __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
                                        // of monitor block
    __ b(entry);

    __ bind(loop);