156 Address src,
157 Register dst,
158 DecoratorSet decorators) {
159 __ load_heap_oop(dst, src, r10, r11, decorators);
160 }
161
162 Address TemplateTable::at_bcp(int offset) {
163 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
164 return Address(rbcp, offset);
165 }
166
167 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
168 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
169 int byte_no)
170 {
171 assert_different_registers(bc_reg, temp_reg);
172 if (!RewriteBytecodes) return;
173 Label L_patch_done;
174
175 switch (bc) {
176 case Bytecodes::_fast_aputfield:
177 case Bytecodes::_fast_bputfield:
178 case Bytecodes::_fast_zputfield:
179 case Bytecodes::_fast_cputfield:
180 case Bytecodes::_fast_dputfield:
181 case Bytecodes::_fast_fputfield:
182 case Bytecodes::_fast_iputfield:
183 case Bytecodes::_fast_lputfield:
184 case Bytecodes::_fast_sputfield:
185 {
186 // We skip bytecode quickening for putfield instructions when
187 // the put_code written to the constant pool cache is zero.
188 // This is required so that every execution of this instruction
189 // calls out to InterpreterRuntime::resolve_get_put to do
190 // additional, required work.
191 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
192 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
193 __ load_field_entry(temp_reg, bc_reg);
194 if (byte_no == f1_byte) {
195 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
740 locals_index_wide(r1);
741 __ ldr(r0, aaddress(r1));
742 }
743
744 void TemplateTable::index_check(Register array, Register index)
745 {
746 // destroys r1, rscratch1
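// Note: on the out-of-bounds path the index is left in r1 and the array is
// moved into r3 for the exception entry; the array bytecodes below rely on
// this convention.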
747 // sign extend index for use by indexed load
748 // __ movl2ptr(index, index);
749 // check index
750 Register length = rscratch1;
751 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
752 __ cmpw(index, length);
753 if (index != r1) {
754 // ??? convention: move aberrant index into r1 for exception message
755 assert(r1 != array, "different registers");
756 __ mov(r1, index);
757 }
758 Label ok;
759 __ br(Assembler::LO, ok);
760 // ??? convention: move array into r3 for exception message
761 __ mov(r3, array);
762 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
763 __ br(rscratch1);
764 __ bind(ok);
765 }
766
767 void TemplateTable::iaload()
768 {
769 transition(itos, itos);
770 __ mov(r1, r0);
771 __ pop_ptr(r0);
772 // r0: array
773 // r1: index
774 index_check(r0, r1); // leaves index in r1, kills rscratch1
775 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
776 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
777 }
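// iaload above and the remaining array-load templates share the same
// addressing scheme: the index is pre-biased by
// (base_offset_in_bytes >> log2(element size)) and then rescaled by the uxtw
// shift, so the effective address is
//   array + base_offset_in_bytes + index * element_size
// (this assumes the element base offset is a multiple of the element size).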
778
779 void TemplateTable::laload()
780 {
781 transition(itos, ltos);
782 __ mov(r1, r0);
783 __ pop_ptr(r0);
803 void TemplateTable::daload()
804 {
805 transition(itos, dtos);
806 __ mov(r1, r0);
807 __ pop_ptr(r0);
808 // r0: array
809 // r1: index
810 index_check(r0, r1); // leaves index in r1, kills rscratch1
811 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
812 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
813 }
814
815 void TemplateTable::aaload()
816 {
817 transition(itos, atos);
818 __ mov(r1, r0);
819 __ pop_ptr(r0);
820 // r0: array
821 // r1: index
822 index_check(r0, r1); // leaves index in r1, kills rscratch1
823 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
824 do_oop_load(_masm,
825 Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
826 r0,
827 IS_ARRAY);
828 }
829
830 void TemplateTable::baload()
831 {
832 transition(itos, itos);
833 __ mov(r1, r0);
834 __ pop_ptr(r0);
835 // r0: array
836 // r1: index
837 index_check(r0, r1); // leaves index in r1, kills rscratch1
838 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
839 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
840 }
841
842 void TemplateTable::caload()
843 {
844 transition(itos, itos);
845 __ mov(r1, r0);
846 __ pop_ptr(r0);
847 // r0: array
1094 // r1: index
1095 // r3: array
1096 index_check(r3, r1); // prefer index in r1
1097 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1098 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1099 }
1100
1101 void TemplateTable::dastore() {
1102 transition(dtos, vtos);
1103 __ pop_i(r1);
1104 __ pop_ptr(r3);
1105 // v0: value
1106 // r1: index
1107 // r3: array
1108 index_check(r3, r1); // prefer index in r1
1109 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1110 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1111 }
1112
1113 void TemplateTable::aastore() {
1114 Label is_null, ok_is_subtype, done;
1115 transition(vtos, vtos);
1116 // stack: ..., array, index, value
1117 __ ldr(r0, at_tos()); // value
1118 __ ldr(r2, at_tos_p1()); // index
1119 __ ldr(r3, at_tos_p2()); // array
1120
1121 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1122
1123 index_check(r3, r2); // kills r1
1124 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
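// r4 now holds the biased index used by element_address, so the subtype
// check below may clobber r2 without disturbing the store address.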
1125
1126 // do array store check - check for null value first
1127 __ cbz(r0, is_null);
1128
1129 // Move subklass into r1
1130 __ load_klass(r1, r0);
1131 // Move superklass into r0
1132 __ load_klass(r0, r3);
1133 __ ldr(r0, Address(r0,
1134 ObjArrayKlass::element_klass_offset()));
1135 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1136
1137 // Generate subtype check. Blows r2, r5
1138 // Superklass in r0. Subklass in r1.
1139 __ gen_subtype_check(r1, ok_is_subtype);
1140
1141 // Come here on failure
1142 // object is at TOS
1143 __ b(Interpreter::_throw_ArrayStoreException_entry);
1144
1145 // Come here on success
1146 __ bind(ok_is_subtype);
1147
1148 // Get the value we will store
1149 __ ldr(r0, at_tos());
1150 // Now store using the appropriate barrier
1151 // Clobbers: r10, r11, r3
1152 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1153 __ b(done);
1154
1155 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1156 __ bind(is_null);
1157 __ profile_null_seen(r2);
1158
1159 // Store a null
1160 // Clobbers: r10, r11, r3
1161 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1162
1163 // Pop stack arguments
1164 __ bind(done);
1165 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1166 }
1167
1168 void TemplateTable::bastore()
1169 {
1170 transition(itos, vtos);
1171 __ pop_i(r1);
1172 __ pop_ptr(r3);
1173 // r0: value
1174 // r1: index
1175 // r3: array
1176 index_check(r3, r1); // prefer index in r1
1177
1178 // Need to check whether array is boolean or byte
1179 // since both types share the bastore bytecode.
1180 __ load_klass(r2, r3);
1181 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
1948 __ br(j_not(cc), not_taken);
1949 branch(false, false);
1950 __ bind(not_taken);
1951 __ profile_not_taken_branch(r0);
1952 }
1953
1954 void TemplateTable::if_nullcmp(Condition cc)
1955 {
1956 transition(atos, vtos);
1957 // assume branch is more often taken than not (loops use backward branches)
1958 Label not_taken;
1959 if (cc == equal)
1960 __ cbnz(r0, not_taken);
1961 else
1962 __ cbz(r0, not_taken);
1963 branch(false, false);
1964 __ bind(not_taken);
1965 __ profile_not_taken_branch(r0);
1966 }
1967
1968 void TemplateTable::if_acmp(Condition cc)
1969 {
1970 transition(atos, vtos);
1971 // assume branch is more often taken than not (loops use backward branches)
1972 Label not_taken;
1973 __ pop_ptr(r1);
1974 __ cmpoop(r1, r0);
1975 __ br(j_not(cc), not_taken);
1976 branch(false, false);
1977 __ bind(not_taken);
1978 __ profile_not_taken_branch(r0);
1979 }
1980
1981 void TemplateTable::ret() {
1982 transition(vtos, vtos);
1983 locals_index(r1);
1984 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1985 __ profile_ret(r1, r2);
1986 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1987 __ lea(rbcp, Address(rbcp, r1));
1988 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
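// rbcp now points at ConstMethod::codes()[bci], i.e. the bytecode to resume
// at; dispatch_next() fetches and dispatches from that address.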
1989 __ dispatch_next(vtos, 0, /*generate_poll*/true);
1990 }
1991
1992 void TemplateTable::wide_ret() {
1993 transition(vtos, vtos);
1994 locals_index_wide(r1);
1995 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1996 __ profile_ret(r1, r2);
1997 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1998 __ lea(rbcp, Address(rbcp, r1));
1999 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2000 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2194 assert(_desc->calls_vm(),
2195 "inconsistent calls_vm information"); // call in remove_activation
2196
2197 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2198 assert(state == vtos, "only valid state");
2199
2200 __ ldr(c_rarg1, aaddress(0));
2201 __ load_klass(r3, c_rarg1);
2202 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2203 Label skip_register_finalizer;
2204 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2205
2206 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2207
2208 __ bind(skip_register_finalizer);
2209 }
2210
2211 // Issue a StoreStore barrier after all stores but before return
2212 // from any constructor for any class with a final field. We don't
2213 // know if this is a finalizer, so we always do so.
2214 if (_desc->bytecode() == Bytecodes::_return)
2215 __ membar(MacroAssembler::StoreStore);
2216
2217 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2218 Label no_safepoint;
2219 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2220 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2221 __ push(state);
2222 __ push_cont_fastpath(rthread);
2223 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2224 __ pop_cont_fastpath(rthread);
2225 __ pop(state);
2226 __ bind(no_safepoint);
2227 }
2228
2229 // Narrow result if state is itos but result type is smaller.
2230 // Need to narrow in the return bytecode rather than in generate_return_entry
2231 // since compiled code callers expect the result to already be narrowed.
2232 if (state == itos) {
2233 __ narrow(r0);
2234 }
2585 }
2586 // c_rarg1: object pointer or null
2587 // c_rarg2: cache entry pointer
2588 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2589 InterpreterRuntime::post_field_access),
2590 c_rarg1, c_rarg2);
2591 __ load_field_entry(cache, index);
2592 __ bind(L1);
2593 }
2594 }
2595
2596 void TemplateTable::pop_and_check_object(Register r)
2597 {
2598 __ pop_ptr(r);
2599 __ null_check(r); // for field access must check obj.
2600 __ verify_oop(r);
2601 }
2602
2603 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2604 {
2605 const Register cache = r4;
2606 const Register obj = r4;
2607 const Register index = r3;
2608 const Register tos_state = r3;
2609 const Register off = r19;
2610 const Register flags = r6;
2611 const Register bc = r4; // uses same reg as obj, so don't mix them
2612
2613 resolve_cache_and_index_for_field(byte_no, cache, index);
2614 jvmti_post_field_access(cache, index, is_static, false);
2615 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2616
2617 if (!is_static) {
2618 // obj is on the stack
2619 pop_and_check_object(obj);
2620 }
2621
2622 // 8179954: We need to make sure that the code generated for
2623 // volatile accesses forms a sequentially-consistent set of
2624 // operations when combined with STLR and LDAR. Without a leading
2625 // membar it's possible for a simple Dekker test to fail if loads
2626 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2627 // the stores in one method and we interpret the loads in another.
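// Roughly: a thread whose most recent volatile store was an STLR could have
// a following plain-load volatile read satisfied before that store becomes
// visible, which breaks the Dekker idiom; the full barrier (AnyAny) ahead of
// the volatile load rules that reordering out.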
2628 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
2629 Label notVolatile;
2630 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2631 __ membar(MacroAssembler::AnyAny);
2632 __ bind(notVolatile);
2633 }
2634
2653 __ b(Done);
2654
2655 __ bind(notByte);
2656 __ cmp(tos_state, (u1)ztos);
2657 __ br(Assembler::NE, notBool);
2658
2659 // ztos (same code as btos)
2660 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2661 __ push(ztos);
2662 // Rewrite bytecode to be faster
2663 if (rc == may_rewrite) {
2664 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2665 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2666 }
2667 __ b(Done);
2668
2669 __ bind(notBool);
2670 __ cmp(tos_state, (u1)atos);
2671 __ br(Assembler::NE, notObj);
2672 // atos
2673 do_oop_load(_masm, field, r0, IN_HEAP);
2674 __ push(atos);
2675 if (rc == may_rewrite) {
2676 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2677 }
2678 __ b(Done);
2679
2680 __ bind(notObj);
2681 __ cmp(tos_state, (u1)itos);
2682 __ br(Assembler::NE, notInt);
2683 // itos
2684 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2685 __ push(itos);
2686 // Rewrite bytecode to be faster
2687 if (rc == may_rewrite) {
2688 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2689 }
2690 __ b(Done);
2691
2692 __ bind(notInt);
2693 __ cmp(tos_state, (u1)ctos);
2694 __ br(Assembler::NE, notChar);
2695 // ctos
2696 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2697 __ push(ctos);
2698 // Rewrite bytecode to be faster
2819 // c_rarg1: object pointer set up above (null if static)
2820 // c_rarg2: cache entry pointer
2821 // c_rarg3: jvalue object on the stack
2822 __ call_VM(noreg,
2823 CAST_FROM_FN_PTR(address,
2824 InterpreterRuntime::post_field_modification),
2825 c_rarg1, c_rarg2, c_rarg3);
2826 __ load_field_entry(cache, index);
2827 __ bind(L1);
2828 }
2829 }
2830
2831 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2832 transition(vtos, vtos);
2833
2834 const Register cache = r2;
2835 const Register index = r3;
2836 const Register tos_state = r3;
2837 const Register obj = r2;
2838 const Register off = r19;
2839 const Register flags = r0;
2840 const Register bc = r4;
2841
2842 resolve_cache_and_index_for_field(byte_no, cache, index);
2843 jvmti_post_field_mod(cache, index, is_static);
2844 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2845
2846 Label Done;
2847 __ mov(r5, flags);
2848
2849 {
2850 Label notVolatile;
2851 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2852 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2853 __ bind(notVolatile);
2854 }
2855
2856 // field address
2857 const Address field(obj, off);
2858
2859 Label notByte, notBool, notInt, notShort, notChar,
2860 notLong, notFloat, notObj, notDouble;
2861
2862 assert(btos == 0, "change code, btos != 0");
2863 __ cbnz(tos_state, notByte);
2864
2865 // Don't rewrite putstatic, only putfield
2866 if (is_static) rc = may_not_rewrite;
2867
2868 // btos
2869 {
2870 __ pop(btos);
2871 if (!is_static) pop_and_check_object(obj);
2880 __ cmp(tos_state, (u1)ztos);
2881 __ br(Assembler::NE, notBool);
2882
2883 // ztos
2884 {
2885 __ pop(ztos);
2886 if (!is_static) pop_and_check_object(obj);
2887 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
2888 if (rc == may_rewrite) {
2889 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2890 }
2891 __ b(Done);
2892 }
2893
2894 __ bind(notBool);
2895 __ cmp(tos_state, (u1)atos);
2896 __ br(Assembler::NE, notObj);
2897
2898 // atos
2899 {
2900 __ pop(atos);
2901 if (!is_static) pop_and_check_object(obj);
2902 // Store into the field
2903 // Clobbers: r10, r11, r3
2904 do_oop_store(_masm, field, r0, IN_HEAP);
2905 if (rc == may_rewrite) {
2906 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2907 }
2908 __ b(Done);
2909 }
2910
2911 __ bind(notObj);
2912 __ cmp(tos_state, (u1)itos);
2913 __ br(Assembler::NE, notInt);
2914
2915 // itos
2916 {
2917 __ pop(itos);
2918 if (!is_static) pop_and_check_object(obj);
2919 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
2920 if (rc == may_rewrite) {
2921 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2922 }
2923 __ b(Done);
2924 }
2925
2926 __ bind(notInt);
2927 __ cmp(tos_state, (u1)ctos);
2928 __ br(Assembler::NE, notChar);
2993 {
2994 __ pop(dtos);
2995 if (!is_static) pop_and_check_object(obj);
2996 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
2997 if (rc == may_rewrite) {
2998 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2999 }
3000 }
3001
3002 #ifdef ASSERT
3003 __ b(Done);
3004
3005 __ bind(notDouble);
3006 __ stop("Bad state");
3007 #endif
3008
3009 __ bind(Done);
3010
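// The trailing barrier below pairs with the leading StoreStore|LoadStore
// barrier issued before the store: together they give the volatile store the
// usual release-plus-trailing-fence shape, since the plain store itself is
// not an STLR.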
3011 {
3012 Label notVolatile;
3013 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3014 __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3015 __ bind(notVolatile);
3016 }
3017 }
3018
3019 void TemplateTable::putfield(int byte_no)
3020 {
3021 putfield_or_static(byte_no, false);
3022 }
3023
3024 void TemplateTable::nofast_putfield(int byte_no) {
3025 putfield_or_static(byte_no, false, may_not_rewrite);
3026 }
3027
3028 void TemplateTable::putstatic(int byte_no) {
3029 putfield_or_static(byte_no, true);
3030 }
3031
3032 void TemplateTable::jvmti_post_fast_field_mod() {
3033 if (JvmtiExport::can_post_field_modification()) {
3034 // Check to see if a field modification watch has been set before
3035 // we take the time to call into the VM.
3036 Label L2;
3037 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3038 __ ldrw(c_rarg3, Address(rscratch1));
3039 __ cbzw(c_rarg3, L2);
3040 __ pop_ptr(r19); // copy the object pointer from tos
3041 __ verify_oop(r19);
3042 __ push_ptr(r19); // put the object pointer back on tos
3043 // Save tos values before call_VM() clobbers them. Since we have
3044 // to do it for every data type, we use the saved values as the
3045 // jvalue object.
3046 switch (bytecode()) { // load values into the jvalue object
3047 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3048 case Bytecodes::_fast_bputfield: // fall through
3049 case Bytecodes::_fast_zputfield: // fall through
3050 case Bytecodes::_fast_sputfield: // fall through
3051 case Bytecodes::_fast_cputfield: // fall through
3052 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3053 case Bytecodes::_fast_dputfield: __ push_d(); break;
3054 case Bytecodes::_fast_fputfield: __ push_f(); break;
3055 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3056
3057 default:
3058 ShouldNotReachHere();
3059 }
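// The value(s) just pushed stay on the expression stack and serve as the
// jvalue payload: c_rarg3 is set to esp below so the VM reads the operand in
// place, and the matching pops afterwards restore the tos state.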
3060 __ mov(c_rarg3, esp); // points to jvalue on the stack
3061 // access constant pool cache entry
3062 __ load_field_entry(c_rarg2, r0);
3063 __ verify_oop(r19);
3064 // r19: object pointer copied above
3065 // c_rarg2: cache entry pointer
3066 // c_rarg3: jvalue object on the stack
3067 __ call_VM(noreg,
3068 CAST_FROM_FN_PTR(address,
3069 InterpreterRuntime::post_field_modification),
3070 r19, c_rarg2, c_rarg3);
3071
3072 switch (bytecode()) { // restore tos values
3073 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3074 case Bytecodes::_fast_bputfield: // fall through
3075 case Bytecodes::_fast_zputfield: // fall through
3076 case Bytecodes::_fast_sputfield: // fall through
3077 case Bytecodes::_fast_cputfield: // fall through
3078 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3079 case Bytecodes::_fast_dputfield: __ pop_d(); break;
3080 case Bytecodes::_fast_fputfield: __ pop_f(); break;
3081 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3082 default: break;
3083 }
3084 __ bind(L2);
3085 }
3086 }
3087
3088 void TemplateTable::fast_storefield(TosState state)
3089 {
3090 transition(state, vtos);
3091
3092 ByteSize base = ConstantPoolCache::base_offset();
3100 load_resolved_field_entry(r2, r2, noreg, r1, r5);
3101 __ verify_field_offset(r1);
3102
3103 {
3104 Label notVolatile;
3105 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3106 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3107 __ bind(notVolatile);
3108 }
3109
3110 Label notVolatile;
3111
3112 // Get object from stack
3113 pop_and_check_object(r2);
3114
3115 // field address
3116 const Address field(r2, r1);
3117
3118 // access field
3119 switch (bytecode()) {
3120 case Bytecodes::_fast_aputfield:
3121 // Clobbers: r10, r11, r3
3122 do_oop_store(_masm, field, r0, IN_HEAP);
3123 break;
3124 case Bytecodes::_fast_lputfield:
3125 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3126 break;
3127 case Bytecodes::_fast_iputfield:
3128 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3129 break;
3130 case Bytecodes::_fast_zputfield:
3131 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3132 break;
3133 case Bytecodes::_fast_bputfield:
3134 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3135 break;
3136 case Bytecodes::_fast_sputfield:
3137 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3138 break;
3139 case Bytecodes::_fast_cputfield:
3195 // r0: object
3196 __ verify_oop(r0);
3197 __ null_check(r0);
3198 const Address field(r0, r1);
3199
3200 // 8179954: We need to make sure that the code generated for
3201 // volatile accesses forms a sequentially-consistent set of
3202 // operations when combined with STLR and LDAR. Without a leading
3203 // membar it's possible for a simple Dekker test to fail if loads
3204 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3205 // the stores in one method and we interpret the loads in another.
3206 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3207 Label notVolatile;
3208 __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3209 __ membar(MacroAssembler::AnyAny);
3210 __ bind(notVolatile);
3211 }
3212
3213 // access field
3214 switch (bytecode()) {
3215 case Bytecodes::_fast_agetfield:
3216 do_oop_load(_masm, field, r0, IN_HEAP);
3217 __ verify_oop(r0);
3218 break;
3219 case Bytecodes::_fast_lgetfield:
3220 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3221 break;
3222 case Bytecodes::_fast_igetfield:
3223 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3224 break;
3225 case Bytecodes::_fast_bgetfield:
3226 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3227 break;
3228 case Bytecodes::_fast_sgetfield:
3229 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3230 break;
3231 case Bytecodes::_fast_cgetfield:
3232 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3233 break;
3234 case Bytecodes::_fast_fgetfield:
3615 Label initialize_header;
3616
3617 __ get_cpool_and_tags(r4, r0);
3618 // Make sure the class we're about to instantiate has been resolved.
3619 // This is done before loading InstanceKlass to be consistent with the order
3620 // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
3621 const int tags_offset = Array<u1>::base_offset_in_bytes();
3622 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3623 __ lea(rscratch1, Address(rscratch1, tags_offset));
3624 __ ldarb(rscratch1, rscratch1);
3625 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3626 __ br(Assembler::NE, slow_case);
3627
3628 // get InstanceKlass
3629 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3630
3631 // make sure klass is initialized
3632 assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3633 __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3634
3635 // get instance_size in InstanceKlass (scaled to a count of bytes)
3636 __ ldrw(r3,
3637 Address(r4,
3638 Klass::layout_helper_offset()));
3639 // test to see if it is malformed in some way
3640 __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3641
3642 // Allocate the instance:
3643 // If TLAB is enabled:
3644 // Try to allocate in the TLAB.
3645 // If fails, go to the slow path.
3646 // Initialize the allocation.
3647 // Exit.
3648 //
3649 // Go to slow path.
3650
3651 if (UseTLAB) {
3652 __ tlab_allocate(r0, r3, 0, noreg, r1, slow_case);
3653
3654 if (ZeroTLAB) {
3655 // the fields have been already cleared
3656 __ b(initialize_header);
3657 }
3658
3659 // The object is initialized before the header. If the object size is
3660 // zero, go directly to the header initialization.
3661 int header_size = oopDesc::header_size() * HeapWordSize;
3662 assert(is_aligned(header_size, BytesPerLong), "oop header size must be 8-byte-aligned");
3663 __ sub(r3, r3, header_size);
3664 __ cbz(r3, initialize_header);
3665
3666 // Initialize object fields
3667 {
3668 __ add(r2, r0, header_size);
3669 Label loop;
3670 __ bind(loop);
3671 __ str(zr, Address(__ post(r2, BytesPerLong)));
3672 __ sub(r3, r3, BytesPerLong);
3673 __ cbnz(r3, loop);
3674 }
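// The loop above clears (instance size - header_size) bytes, 8 at a time;
// this relies on the layout-helper instance size being a byte count that is
// 8-byte aligned (heap words are 8 bytes here), so r3 reaches exactly zero.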
3675
3676 // initialize object header only.
3677 __ bind(initialize_header);
3678 if (UseCompactObjectHeaders) {
3679 __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3680 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3681 } else {
3682 __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3683 __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3684 __ store_klass_gap(r0, zr); // zero klass gap for compressed oops
3685 __ store_klass(r0, r4); // store klass last
3686 }
3687
3688 if (DTraceAllocProbes) {
3689 // Trigger dtrace event for fastpath
3690 __ push(atos); // save the return value
3691 __ call_VM_leaf(
3692 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3693 __ pop(atos); // restore the return value
3694
3695 }
3696 __ b(done);
3697 }
3698
3699 // slow case
3700 __ bind(slow_case);
3701 __ get_constant_pool(c_rarg1);
3702 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3703 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3704 __ verify_oop(r0);
3705
3706 // continue
3707 __ bind(done);
3708 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3709 __ membar(Assembler::StoreStore);
3710 }
3711
3712 void TemplateTable::newarray() {
3713 transition(itos, atos);
3714 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3715 __ mov(c_rarg2, r0);
3716 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3717 c_rarg1, c_rarg2);
3761 __ bind(quicked);
3762 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3763 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3764
3765 __ bind(resolved);
3766 __ load_klass(r19, r3);
3767
3768 // Generate subtype check. Blows r2, r5. Object in r3.
3769 // Superklass in r0. Subklass in r19.
3770 __ gen_subtype_check(r19, ok_is_subtype);
3771
3772 // Come here on failure
3773 __ push(r3);
3774 // object is at TOS
3775 __ b(Interpreter::_throw_ClassCastException_entry);
3776
3777 // Come here on success
3778 __ bind(ok_is_subtype);
3779 __ mov(r0, r3); // Restore object from r3 into r0
3780
3781 // Collect counts on whether this test sees nulls a lot or not.
3782 if (ProfileInterpreter) {
3783 __ b(done);
3784 __ bind(is_null);
3785 __ profile_null_seen(r2);
3786 } else {
3787 __ bind(is_null); // same as 'done'
3788 }
3789 __ bind(done);
3790 }
3791
3792 void TemplateTable::instanceof() {
3793 transition(atos, itos);
3794 Label done, is_null, ok_is_subtype, quicked, resolved;
3795 __ cbz(r0, is_null);
3796
3797 // Get cpool & tags index
3798 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3799 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3800 // See if bytecode has already been quicked
3801 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3802 __ lea(r1, Address(rscratch1, r19));
3803 __ ldarb(r1, r1);
3804 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3805 __ br(Assembler::EQ, quicked);
3806
3807 __ push(atos); // save receiver for result, and for GC
3808 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3886 // in the assembly code structure as well
3887 //
3888 // Stack layout:
3889 //
3890 // [expressions ] <--- esp = expression stack top
3891 // ..
3892 // [expressions ]
3893 // [monitor entry] <--- monitor block top = expression stack bot
3894 // ..
3895 // [monitor entry]
3896 // [frame data ] <--- monitor block bot
3897 // ...
3898 // [saved rfp ] <--- rfp
3899 void TemplateTable::monitorenter()
3900 {
3901 transition(atos, vtos);
3902
3903 // check for null object
3904 __ null_check(r0);
3905
3906 const Address monitor_block_top(
3907 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3908 const Address monitor_block_bot(
3909 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3910 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3911
3912 Label allocated;
3913
3914 // initialize entry pointer
3915 __ mov(c_rarg1, zr); // points to free slot or null
3916
3917 // find a free slot in the monitor block (result in c_rarg1)
3918 {
3919 Label entry, loop, exit;
3920 __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
3921 __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
3922 // c_rarg3 points to current entry, starting with top-most entry
3923
3924 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3925
3987 // c_rarg1: points to monitor entry
3988 __ bind(allocated);
3989
3990 // Increment bcp to point to the next bytecode, so exception
3991 // handling for async. exceptions works correctly.
3992 // The object has already been popped from the stack, so the
3993 // expression stack looks correct.
3994 __ increment(rbcp);
3995
3996 // store object
3997 __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
3998 __ lock_object(c_rarg1);
3999
4000 // check to make sure this monitor doesn't cause stack overflow after locking
4001 __ save_bcp(); // in case of exception
4002 __ generate_stack_overflow_check(0);
4003
4004 // The bcp has already been incremented. Just need to dispatch to
4005 // next instruction.
4006 __ dispatch_next(vtos);
4007 }
4008
4009
4010 void TemplateTable::monitorexit()
4011 {
4012 transition(atos, vtos);
4013
4014 // check for null object
4015 __ null_check(r0);
4016
4017 const Address monitor_block_top(
4018 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4019 const Address monitor_block_bot(
4020 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4021 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4022
4023 Label found;
4024
4025 // find matching slot
4026 {
4027 Label entry, loop;
4028 __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4029 __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4030 // c_rarg1 points to current entry, starting with top-most entry
4031
4032 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4033 // of monitor block
4034 __ b(entry);
4035
4036 __ bind(loop);
156 Address src,
157 Register dst,
158 DecoratorSet decorators) {
159 __ load_heap_oop(dst, src, r10, r11, decorators);
160 }
161
162 Address TemplateTable::at_bcp(int offset) {
163 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
164 return Address(rbcp, offset);
165 }
166
167 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
168 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
169 int byte_no)
170 {
171 assert_different_registers(bc_reg, temp_reg);
172 if (!RewriteBytecodes) return;
173 Label L_patch_done;
174
175 switch (bc) {
176 case Bytecodes::_fast_vputfield:
177 case Bytecodes::_fast_aputfield:
178 case Bytecodes::_fast_bputfield:
179 case Bytecodes::_fast_zputfield:
180 case Bytecodes::_fast_cputfield:
181 case Bytecodes::_fast_dputfield:
182 case Bytecodes::_fast_fputfield:
183 case Bytecodes::_fast_iputfield:
184 case Bytecodes::_fast_lputfield:
185 case Bytecodes::_fast_sputfield:
186 {
187 // We skip bytecode quickening for putfield instructions when
188 // the put_code written to the constant pool cache is zero.
189 // This is required so that every execution of this instruction
190 // calls out to InterpreterRuntime::resolve_get_put to do
191 // additional, required work.
192 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
193 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
194 __ load_field_entry(temp_reg, bc_reg);
195 if (byte_no == f1_byte) {
196 __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
741 locals_index_wide(r1);
742 __ ldr(r0, aaddress(r1));
743 }
744
745 void TemplateTable::index_check(Register array, Register index)
746 {
747 // destroys r1, rscratch1
748 // sign extend index for use by indexed load
749 // __ movl2ptr(index, index);
750 // check index
751 Register length = rscratch1;
752 __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
753 __ cmpw(index, length);
754 if (index != r1) {
755 // ??? convention: move aberrant index into r1 for exception message
756 assert(r1 != array, "different registers");
757 __ mov(r1, index);
758 }
759 Label ok;
760 __ br(Assembler::LO, ok);
761 // ??? convention: move array into r3 for exception message
762 __ mov(r3, array);
763 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
764 __ br(rscratch1);
765 __ bind(ok);
766 }
767
768 void TemplateTable::iaload()
769 {
770 transition(itos, itos);
771 __ mov(r1, r0);
772 __ pop_ptr(r0);
773 // r0: array
774 // r1: index
775 index_check(r0, r1); // leaves index in r1, kills rscratch1
776 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
777 __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
778 }
779
780 void TemplateTable::laload()
781 {
782 transition(itos, ltos);
783 __ mov(r1, r0);
784 __ pop_ptr(r0);
804 void TemplateTable::daload()
805 {
806 transition(itos, dtos);
807 __ mov(r1, r0);
808 __ pop_ptr(r0);
809 // r0: array
810 // r1: index
811 index_check(r0, r1); // leaves index in r1, kills rscratch1
812 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
813 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
814 }
815
816 void TemplateTable::aaload()
817 {
818 transition(itos, atos);
819 __ mov(r1, r0);
820 __ pop_ptr(r0);
821 // r0: array
822 // r1: index
823 index_check(r0, r1); // leaves index in r1, kills rscratch1
824 __ profile_array_type<ArrayLoadData>(r2, r0, r4);
825 if (UseArrayFlattening) {
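// A non-flat object array takes the ordinary oop load below; a flat array
// calls InterpreterRuntime::flat_array_load to materialize the element.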
826 Label is_flat_array, done;
827
828 __ test_flat_array_oop(r0, rscratch1 /*temp*/, is_flat_array);
829 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
830 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
831
832 __ b(done);
833 __ bind(is_flat_array);
834 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), r0, r1);
835 // Ensure the stores to copy the inline field contents are visible
836 // before any subsequent store that publishes this reference.
837 __ membar(Assembler::StoreStore);
838 __ bind(done);
839 } else {
840 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
841 do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
842 }
843 __ profile_element_type(r2, r0, r4);
844 }
845
846 void TemplateTable::baload()
847 {
848 transition(itos, itos);
849 __ mov(r1, r0);
850 __ pop_ptr(r0);
851 // r0: array
852 // r1: index
853 index_check(r0, r1); // leaves index in r1, kills rscratch1
854 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
855 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
856 }
857
858 void TemplateTable::caload()
859 {
860 transition(itos, itos);
861 __ mov(r1, r0);
862 __ pop_ptr(r0);
863 // r0: array
1110 // r1: index
1111 // r3: array
1112 index_check(r3, r1); // prefer index in r1
1113 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1114 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1115 }
1116
1117 void TemplateTable::dastore() {
1118 transition(dtos, vtos);
1119 __ pop_i(r1);
1120 __ pop_ptr(r3);
1121 // v0: value
1122 // r1: index
1123 // r3: array
1124 index_check(r3, r1); // prefer index in r1
1125 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1126 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1127 }
1128
1129 void TemplateTable::aastore() {
1130 Label is_null, is_flat_array, ok_is_subtype, done;
1131 transition(vtos, vtos);
1132 // stack: ..., array, index, value
1133 __ ldr(r0, at_tos()); // value
1134 __ ldr(r2, at_tos_p1()); // index
1135 __ ldr(r3, at_tos_p2()); // array
1136
1137 index_check(r3, r2); // kills r1
1138
1139 __ profile_array_type<ArrayStoreData>(r4, r3, r5);
1140 __ profile_multiple_element_types(r4, r0, r5, r6);
1141
1142 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1143 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1144 // Be careful not to clobber r4 below
1145
1146 // do array store check - check for null value first
1147 __ cbz(r0, is_null);
1148
1149 // Move array class to r5
1150 __ load_klass(r5, r3);
1151
1152 if (UseArrayFlattening) {
1153 __ ldrw(r6, Address(r5, Klass::layout_helper_offset()));
1154 __ test_flat_array_layout(r6, is_flat_array);
1155 }
1156
1157 // Move subklass into r1
1158 __ load_klass(r1, r0);
1159
1160 // Move array element superklass into r0
1161 __ ldr(r0, Address(r5, ObjArrayKlass::element_klass_offset()));
1162 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1163
1164 // Generate subtype check. Blows r2, r5
1165 // Superklass in r0. Subklass in r1.
1166
1167 // is "r1 <: r0" ? (value subclass <: array element superclass)
1168 __ gen_subtype_check(r1, ok_is_subtype, false);
1169
1170 // Come here on failure
1171 // object is at TOS
1172 __ b(Interpreter::_throw_ArrayStoreException_entry);
1173
1174 // Come here on success
1175 __ bind(ok_is_subtype);
1176
1177 // Get the value we will store
1178 __ ldr(r0, at_tos());
1179 // Now store using the appropriate barrier
1180 // Clobbers: r10, r11, r3
1181 do_oop_store(_masm, element_address, r0, IS_ARRAY);
1182 __ b(done);
1183
1184 // Have a null in r0, r3=array, r2=index. Store null at ary[idx]
1185 __ bind(is_null);
1186 if (EnableValhalla) {
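// Storing null: a flat array branches to the flat-array path, a null-free
// (non-flat) array throws NullPointerException, and only an ordinary
// reference array falls through to the plain null store below.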
1187 Label is_null_into_value_array_npe, store_null;
1188
1189 if (UseArrayFlattening) {
1190 __ test_flat_array_oop(r3, rscratch1, is_flat_array);
1191 }
1192
1193 // No way to store null in a null-free array
1194 __ test_null_free_array_oop(r3, rscratch1, is_null_into_value_array_npe);
1195 __ b(store_null);
1196
1197 __ bind(is_null_into_value_array_npe);
1198 __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1199
1200 __ bind(store_null);
1201 }
1202
1203 // Store a null
1204 // Clobbers: r10, r11, r3
1205 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1206 __ b(done);
1207
1208 if (UseArrayFlattening) {
1209 Label is_type_ok;
1210 __ bind(is_flat_array); // Store non-null value to flat
1211
1212 __ ldr(r0, at_tos()); // value
1213 __ ldr(r3, at_tos_p1()); // index
1214 __ ldr(r2, at_tos_p2()); // array
1215 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), r0, r2, r3);
1216 }
1217
1218 // Pop stack arguments
1219 __ bind(done);
1220 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1221 }
1222
1223 void TemplateTable::bastore()
1224 {
1225 transition(itos, vtos);
1226 __ pop_i(r1);
1227 __ pop_ptr(r3);
1228 // r0: value
1229 // r1: index
1230 // r3: array
1231 index_check(r3, r1); // prefer index in r1
1232
1233 // Need to check whether array is boolean or byte
1234 // since both types share the bastore bytecode.
1235 __ load_klass(r2, r3);
1236 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
2003 __ br(j_not(cc), not_taken);
2004 branch(false, false);
2005 __ bind(not_taken);
2006 __ profile_not_taken_branch(r0);
2007 }
2008
2009 void TemplateTable::if_nullcmp(Condition cc)
2010 {
2011 transition(atos, vtos);
2012 // assume branch is more often taken than not (loops use backward branches)
2013 Label not_taken;
2014 if (cc == equal)
2015 __ cbnz(r0, not_taken);
2016 else
2017 __ cbz(r0, not_taken);
2018 branch(false, false);
2019 __ bind(not_taken);
2020 __ profile_not_taken_branch(r0);
2021 }
2022
2023 void TemplateTable::if_acmp(Condition cc) {
2024 transition(atos, vtos);
2025 // assume branch is more often taken than not (loops use backward branches)
2026 Label taken, not_taken;
2027 __ pop_ptr(r1);
2028
2029 __ profile_acmp(r2, r1, r0, r4);
2030
2031 Register is_inline_type_mask = rscratch1;
2032 __ mov(is_inline_type_mask, markWord::inline_type_pattern);
2033
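// With EnableValhalla the ladder below works as follows: identical references
// decide the result immediately; if either operand is null (per the AND test)
// the comparison resolves to not-equal; only when both mark words carry the
// inline type pattern and both operands share the same klass do we fall back
// to the is_substitutable runtime call.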
2034 if (EnableValhalla) {
2035 __ cmp(r1, r0);
2036 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2037
2038 // might be substitutable, test if either r0 or r1 is null
2039 __ andr(r2, r0, r1);
2040 __ cbz(r2, (cc == equal) ? not_taken : taken);
2041
2042 // and both are values ?
2043 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2044 __ andr(r2, r2, is_inline_type_mask);
2045 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2046 __ andr(r4, r4, is_inline_type_mask);
2047 __ andr(r2, r2, r4);
2048 __ cmp(r2, is_inline_type_mask);
2049 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2050
2051 // same value klass ?
2052 __ load_metadata(r2, r1);
2053 __ load_metadata(r4, r0);
2054 __ cmp(r2, r4);
2055 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2056
2057 // Know both are the same type, let's test for substitutability...
2058 if (cc == equal) {
2059 invoke_is_substitutable(r0, r1, taken, not_taken);
2060 } else {
2061 invoke_is_substitutable(r0, r1, not_taken, taken);
2062 }
2063 __ stop("Not reachable");
2064 }
2065
2066 __ cmpoop(r1, r0);
2067 __ br(j_not(cc), not_taken);
2068 __ bind(taken);
2069 branch(false, false);
2070 __ bind(not_taken);
2071 __ profile_not_taken_branch(r0, true);
2072 }
2073
2074 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2075 Label& is_subst, Label& not_subst) {
2076
2077 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2078 // Restored... r0 answer, jmp to outcome...
2079 __ cbz(r0, not_subst);
2080 __ b(is_subst);
2081 }
2082
2083
2084 void TemplateTable::ret() {
2085 transition(vtos, vtos);
2086 locals_index(r1);
2087 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2088 __ profile_ret(r1, r2);
2089 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2090 __ lea(rbcp, Address(rbcp, r1));
2091 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2092 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2093 }
2094
2095 void TemplateTable::wide_ret() {
2096 transition(vtos, vtos);
2097 locals_index_wide(r1);
2098 __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2099 __ profile_ret(r1, r2);
2100 __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2101 __ lea(rbcp, Address(rbcp, r1));
2102 __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2103 __ dispatch_next(vtos, 0, /*generate_poll*/true);
2297 assert(_desc->calls_vm(),
2298 "inconsistent calls_vm information"); // call in remove_activation
2299
2300 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2301 assert(state == vtos, "only valid state");
2302
2303 __ ldr(c_rarg1, aaddress(0));
2304 __ load_klass(r3, c_rarg1);
2305 __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2306 Label skip_register_finalizer;
2307 __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2308
2309 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2310
2311 __ bind(skip_register_finalizer);
2312 }
2313
2314 // Issue a StoreStore barrier after all stores but before return
2315 // from any constructor for any class with a final field. We don't
2316 // know if this is a finalizer, so we always do so.
2317 if (_desc->bytecode() == Bytecodes::_return
2318 || _desc->bytecode() == Bytecodes::_return_register_finalizer)
2319 __ membar(MacroAssembler::StoreStore);
2320
2321 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2322 Label no_safepoint;
2323 __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2324 __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2325 __ push(state);
2326 __ push_cont_fastpath(rthread);
2327 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2328 __ pop_cont_fastpath(rthread);
2329 __ pop(state);
2330 __ bind(no_safepoint);
2331 }
2332
2333 // Narrow result if state is itos but result type is smaller.
2334 // Need to narrow in the return bytecode rather than in generate_return_entry
2335 // since compiled code callers expect the result to already be narrowed.
2336 if (state == itos) {
2337 __ narrow(r0);
2338 }
2689 }
2690 // c_rarg1: object pointer or null
2691 // c_rarg2: cache entry pointer
2692 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2693 InterpreterRuntime::post_field_access),
2694 c_rarg1, c_rarg2);
2695 __ load_field_entry(cache, index);
2696 __ bind(L1);
2697 }
2698 }
2699
2700 void TemplateTable::pop_and_check_object(Register r)
2701 {
2702 __ pop_ptr(r);
2703 __ null_check(r); // for field access must check obj.
2704 __ verify_oop(r);
2705 }
2706
2707 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2708 {
2709 const Register cache = r2;
2710 const Register obj = r4;
2711 const Register klass = r5;
2712 const Register inline_klass = r7;
2713 const Register field_index = r23;
2714 const Register index = r3;
2715 const Register tos_state = r3;
2716 const Register off = r19;
2717 const Register flags = r6;
2718 const Register bc = r4; // uses same reg as obj, so don't mix them
2719
2720 resolve_cache_and_index_for_field(byte_no, cache, index);
2721 jvmti_post_field_access(cache, index, is_static, false);
2722
2723 // Valhalla extras
2724 __ load_unsigned_short(field_index, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
2725 __ ldr(klass, Address(cache, ResolvedFieldEntry::field_holder_offset()));
2726
2727 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2728
2729 if (!is_static) {
2730 // obj is on the stack
2731 pop_and_check_object(obj);
2732 }
2733
2734 // 8179954: We need to make sure that the code generated for
2735 // volatile accesses forms a sequentially-consistent set of
2736 // operations when combined with STLR and LDAR. Without a leading
2737 // membar it's possible for a simple Dekker test to fail if loads
2738 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
2739 // the stores in one method and we interpret the loads in another.
2740 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
2741 Label notVolatile;
2742 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2743 __ membar(MacroAssembler::AnyAny);
2744 __ bind(notVolatile);
2745 }
2746
2765 __ b(Done);
2766
2767 __ bind(notByte);
2768 __ cmp(tos_state, (u1)ztos);
2769 __ br(Assembler::NE, notBool);
2770
2771 // ztos (same code as btos)
2772 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2773 __ push(ztos);
2774 // Rewrite bytecode to be faster
2775 if (rc == may_rewrite) {
2776 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2777 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2778 }
2779 __ b(Done);
2780
2781 __ bind(notBool);
2782 __ cmp(tos_state, (u1)atos);
2783 __ br(Assembler::NE, notObj);
2784 // atos
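// With EnableValhalla: a static field just loads the oop with no rewriting;
// a non-flat instance field loads the oop and rewrites to _fast_agetfield;
// a flat field is materialized via read_flat_field and rewrites to
// _fast_vgetfield instead.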
2785 if (!EnableValhalla) {
2786 do_oop_load(_masm, field, r0, IN_HEAP);
2787 __ push(atos);
2788 if (rc == may_rewrite) {
2789 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2790 }
2791 __ b(Done);
2792 } else { // Valhalla
2793 if (is_static) {
2794 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2795 __ push(atos);
2796 __ b(Done);
2797 } else {
2798 Label is_flat, rewrite_inline;
2799 __ test_field_is_flat(flags, noreg /*temp*/, is_flat);
2800 __ load_heap_oop(r0, field, rscratch1, rscratch2);
2801 __ push(atos);
2802 if (rc == may_rewrite) {
2803 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2804 }
2805 __ b(Done);
2806 __ bind(is_flat);
2807 // field is flat (null-free or nullable with a null-marker)
2808 __ mov(r0, obj);
2809 __ read_flat_field(cache, field_index, off, inline_klass /* temp */, r0);
2810 __ verify_oop(r0);
2811 __ push(atos);
2812 __ bind(rewrite_inline);
2813 if (rc == may_rewrite) {
2814 patch_bytecode(Bytecodes::_fast_vgetfield, bc, r1);
2815 }
2816 __ b(Done);
2817 }
2818 }
2819
2820 __ bind(notObj);
2821 __ cmp(tos_state, (u1)itos);
2822 __ br(Assembler::NE, notInt);
2823 // itos
2824 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2825 __ push(itos);
2826 // Rewrite bytecode to be faster
2827 if (rc == may_rewrite) {
2828 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2829 }
2830 __ b(Done);
2831
2832 __ bind(notInt);
2833 __ cmp(tos_state, (u1)ctos);
2834 __ br(Assembler::NE, notChar);
2835 // ctos
2836 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2837 __ push(ctos);
2838 // Rewrite bytecode to be faster
2959 // c_rarg1: object pointer set up above (null if static)
2960 // c_rarg2: cache entry pointer
2961 // c_rarg3: jvalue object on the stack
2962 __ call_VM(noreg,
2963 CAST_FROM_FN_PTR(address,
2964 InterpreterRuntime::post_field_modification),
2965 c_rarg1, c_rarg2, c_rarg3);
2966 __ load_field_entry(cache, index);
2967 __ bind(L1);
2968 }
2969 }
2970
2971 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2972 transition(vtos, vtos);
2973
2974 const Register cache = r2;
2975 const Register index = r3;
2976 const Register tos_state = r3;
2977 const Register obj = r2;
2978 const Register off = r19;
2979 const Register flags = r6;
2980 const Register bc = r4;
2981 const Register inline_klass = r5;
2982
2983 resolve_cache_and_index_for_field(byte_no, cache, index);
2984 jvmti_post_field_mod(cache, index, is_static);
2985 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2986
2987 Label Done;
2988 {
2989 Label notVolatile;
2990 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2991 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2992 __ bind(notVolatile);
2993 }
2994
2995 // field address
2996 const Address field(obj, off);
2997
2998 Label notByte, notBool, notInt, notShort, notChar,
2999 notLong, notFloat, notObj, notDouble;
3000
3001 assert(btos == 0, "change code, btos != 0");
3002 __ cbnz(tos_state, notByte);
3003
3004 // Don't rewrite putstatic, only putfield
3005 if (is_static) rc = may_not_rewrite;
3006
3007 // btos
3008 {
3009 __ pop(btos);
3010 if (!is_static) pop_and_check_object(obj);
3019 __ cmp(tos_state, (u1)ztos);
3020 __ br(Assembler::NE, notBool);
3021
3022 // ztos
3023 {
3024 __ pop(ztos);
3025 if (!is_static) pop_and_check_object(obj);
3026 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3027 if (rc == may_rewrite) {
3028 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
3029 }
3030 __ b(Done);
3031 }
3032
3033 __ bind(notBool);
3034 __ cmp(tos_state, (u1)atos);
3035 __ br(Assembler::NE, notObj);
3036
3037 // atos
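// With EnableValhalla this splits into: a plain (nullable, non-flat) instance
// field takes the barriered oop store and rewrites to _fast_aputfield; a
// null-free reference field is null-checked first and, like a flat field
// (written via write_flat_field), rewrites to _fast_vputfield; statics store
// directly (after a null check when null-free) and are never rewritten.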
3038 {
3039 if (!EnableValhalla) {
3040 __ pop(atos);
3041 if (!is_static) pop_and_check_object(obj);
3042 // Store into the field
3043 // Clobbers: r10, r11, r3
3044 do_oop_store(_masm, field, r0, IN_HEAP);
3045 if (rc == may_rewrite) {
3046 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
3047 }
3048 __ b(Done);
3049 } else { // Valhalla
3050 __ pop(atos);
3051 if (is_static) {
3052 Label is_nullable;
3053 __ test_field_is_not_null_free_inline_type(flags, noreg /* temp */, is_nullable);
3054 __ null_check(r0); // FIXME JDK-8341120
3055 __ bind(is_nullable);
3056 do_oop_store(_masm, field, r0, IN_HEAP);
3057 __ b(Done);
3058 } else {
3059 Label null_free_reference, is_flat, rewrite_inline;
3060 __ test_field_is_flat(flags, noreg /*temp*/, is_flat);
3061 __ test_field_is_null_free_inline_type(flags, noreg /*temp*/, null_free_reference);
3062 pop_and_check_object(obj);
3063 // Store into the field
3064 // Clobbers: r10, r11, r3
3065 do_oop_store(_masm, field, r0, IN_HEAP);
3066 if (rc == may_rewrite) {
3067 patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
3068 }
3069 __ b(Done);
3070 // Implementation of the inline type semantic
3071 __ bind(null_free_reference);
3072 __ null_check(r0); // FIXME JDK-8341120
3073 pop_and_check_object(obj);
3074 // Store into the field
3075 // Clobbers: r10, r11, r3
3076 do_oop_store(_masm, field, r0, IN_HEAP);
3077 __ b(rewrite_inline);
3078 __ bind(is_flat);
3079 pop_and_check_object(r7);
3080 __ write_flat_field(cache, off, r3, r6, r7);
3081 __ bind(rewrite_inline);
3082 if (rc == may_rewrite) {
3083 patch_bytecode(Bytecodes::_fast_vputfield, bc, r19, true, byte_no);
3084 }
3085 __ b(Done);
3086 }
3087 } // Valhalla
3088 }
3089
3090 __ bind(notObj);
3091 __ cmp(tos_state, (u1)itos);
3092 __ br(Assembler::NE, notInt);
3093
3094 // itos
3095 {
3096 __ pop(itos);
3097 if (!is_static) pop_and_check_object(obj);
3098 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3099 if (rc == may_rewrite) {
3100 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
3101 }
3102 __ b(Done);
3103 }
3104
3105 __ bind(notInt);
3106 __ cmp(tos_state, (u1)ctos);
3107 __ br(Assembler::NE, notChar);
3172 {
3173 __ pop(dtos);
3174 if (!is_static) pop_and_check_object(obj);
3175 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
3176 if (rc == may_rewrite) {
3177 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
3178 }
3179 }
3180
3181 #ifdef ASSERT
3182 __ b(Done);
3183
3184 __ bind(notDouble);
3185 __ stop("Bad state");
3186 #endif
3187
3188 __ bind(Done);
3189
3190 {
3191 Label notVolatile;
3192 __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3193 __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3194 __ bind(notVolatile);
3195 }
3196 }
3197
3198 void TemplateTable::putfield(int byte_no)
3199 {
3200 putfield_or_static(byte_no, false);
3201 }
3202
3203 void TemplateTable::nofast_putfield(int byte_no) {
3204 putfield_or_static(byte_no, false, may_not_rewrite);
3205 }
3206
3207 void TemplateTable::putstatic(int byte_no) {
3208 putfield_or_static(byte_no, true);
3209 }
3210
3211 void TemplateTable::jvmti_post_fast_field_mod() {
3212 if (JvmtiExport::can_post_field_modification()) {
3213 // Check to see if a field modification watch has been set before
3214 // we take the time to call into the VM.
3215 Label L2;
3216 __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3217 __ ldrw(c_rarg3, Address(rscratch1));
3218 __ cbzw(c_rarg3, L2);
3219 __ pop_ptr(r19); // copy the object pointer from tos
3220 __ verify_oop(r19);
3221 __ push_ptr(r19); // put the object pointer back on tos
3222 // Save tos values before call_VM() clobbers them. Since we have
3223 // to do it for every data type, we use the saved values as the
3224 // jvalue object.
3225 switch (bytecode()) { // load values into the jvalue object
3226 case Bytecodes::_fast_vputfield: //fall through
3227 case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3228 case Bytecodes::_fast_bputfield: // fall through
3229 case Bytecodes::_fast_zputfield: // fall through
3230 case Bytecodes::_fast_sputfield: // fall through
3231 case Bytecodes::_fast_cputfield: // fall through
3232 case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3233 case Bytecodes::_fast_dputfield: __ push_d(); break;
3234 case Bytecodes::_fast_fputfield: __ push_f(); break;
3235 case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3236
3237 default:
3238 ShouldNotReachHere();
3239 }
3240 __ mov(c_rarg3, esp); // points to jvalue on the stack
3241 // access constant pool cache entry
3242 __ load_field_entry(c_rarg2, r0);
3243 __ verify_oop(r19);
3244 // r19: object pointer copied above
3245 // c_rarg2: cache entry pointer
3246 // c_rarg3: jvalue object on the stack
3247 __ call_VM(noreg,
3248 CAST_FROM_FN_PTR(address,
3249 InterpreterRuntime::post_field_modification),
3250 r19, c_rarg2, c_rarg3);
3251
3252 switch (bytecode()) { // restore tos values
3253 case Bytecodes::_fast_vputfield: // fall through
3254 case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3255 case Bytecodes::_fast_bputfield: // fall through
3256 case Bytecodes::_fast_zputfield: // fall through
3257 case Bytecodes::_fast_sputfield: // fall through
3258 case Bytecodes::_fast_cputfield: // fall through
3259 case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3260 case Bytecodes::_fast_dputfield: __ pop_d(); break;
3261 case Bytecodes::_fast_fputfield: __ pop_f(); break;
3262 case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3263 default: break;
3264 }
3265 __ bind(L2);
3266 }
3267 }
3268
3269 void TemplateTable::fast_storefield(TosState state)
3270 {
3271 transition(state, vtos);
3272
3273 ByteSize base = ConstantPoolCache::base_offset();
3281 load_resolved_field_entry(r2, r2, noreg, r1, r5);
3282 __ verify_field_offset(r1);
3283
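  // Volatile stores need release semantics: the StoreStore|LoadStore barrier
  // below keeps earlier stores and loads from being reordered past the field
  // store that follows.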
3284 {
3285 Label notVolatile;
3286 __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3287 __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3288 __ bind(notVolatile);
3289 }
3290
3293 // Get object from stack
3294 pop_and_check_object(r2);
3295
3296 // field address
3297 const Address field(r2, r1);
3298
3299 // access field
3300 switch (bytecode()) {
3301 case Bytecodes::_fast_vputfield:
3302 {
3303 Label is_flat, has_null_marker, done;
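      // A flat (inlined) value-class field has its payload embedded in the
      // holder object and is written via write_flat_field; a non-flat value
      // field is stored as an ordinary oop after the null check.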
3304 __ test_field_is_flat(r5, noreg /* temp */, is_flat);
3305 __ null_check(r0);
3306 do_oop_store(_masm, field, r0, IN_HEAP);
3307 __ b(done);
3308 __ bind(is_flat);
3309 __ load_field_entry(r4, r5);
3310 // Re-shuffle registers to match the calling convention of the VM call
3311 __ mov(r19, r1);
3312 __ mov(r7, r2);
3313 __ write_flat_field(r4, r19, r6, r8, r7);
3314 __ bind(done);
3315 }
3316 break;
3317 case Bytecodes::_fast_aputfield:
3318 // Clobbers: r10, r11, r3
3319 do_oop_store(_masm, field, r0, IN_HEAP);
3320 break;
3321 case Bytecodes::_fast_lputfield:
3322 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3323 break;
3324 case Bytecodes::_fast_iputfield:
3325 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3326 break;
3327 case Bytecodes::_fast_zputfield:
3328 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3329 break;
3330 case Bytecodes::_fast_bputfield:
3331 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3332 break;
3333 case Bytecodes::_fast_sputfield:
3334 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3335 break;
3336 case Bytecodes::_fast_cputfield:
3392 // r0: object
3393 __ verify_oop(r0);
3394 __ null_check(r0);
3395 const Address field(r0, r1);
3396
3397 // 8179954: We need to make sure that the code generated for
3398 // volatile accesses forms a sequentially-consistent set of
3399 // operations when combined with STLR and LDAR. Without a leading
3400 // membar it's possible for a simple Dekker test to fail if loads
3401 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3402 // the stores in one method and we interpret the loads in another.
3403 if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3404 Label notVolatile;
3405 __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3406 __ membar(MacroAssembler::AnyAny);
3407 __ bind(notVolatile);
3408 }
3409
3410 // access field
3411 switch (bytecode()) {
3412 case Bytecodes::_fast_vgetfield:
3413 {
3414 Register index = r4, tmp = r7;
3415 // field is flat
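      // read_flat_field reconstructs the flattened payload as a value
      // object (buffering it on the heap as needed) and leaves the
      // resulting oop in r0.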
3416 __ load_unsigned_short(index, Address(r2, in_bytes(ResolvedFieldEntry::field_index_offset())));
3417 __ read_flat_field(r2, index, r1, tmp /* temp */, r0);
3418 __ verify_oop(r0);
3419 }
3420 break;
3421 case Bytecodes::_fast_agetfield:
3422 do_oop_load(_masm, field, r0, IN_HEAP);
3423 __ verify_oop(r0);
3424 break;
3425 case Bytecodes::_fast_lgetfield:
3426 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3427 break;
3428 case Bytecodes::_fast_igetfield:
3429 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3430 break;
3431 case Bytecodes::_fast_bgetfield:
3432 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3433 break;
3434 case Bytecodes::_fast_sgetfield:
3435 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3436 break;
3437 case Bytecodes::_fast_cgetfield:
3438 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3439 break;
3440 case Bytecodes::_fast_fgetfield:
3821 Label initialize_header;
3822
3823 __ get_cpool_and_tags(r4, r0);
3824 // Make sure the class we're about to instantiate has been resolved.
3825 // This is done before loading InstanceKlass to be consistent with the order
3826 // in which the constant pool is updated (see ConstantPool::klass_at_put).
3827 const int tags_offset = Array<u1>::base_offset_in_bytes();
3828 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3829 __ lea(rscratch1, Address(rscratch1, tags_offset));
3830 __ ldarb(rscratch1, rscratch1);
3831 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3832 __ br(Assembler::NE, slow_case);
3833
3834 // get InstanceKlass
3835 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3836
3837 // make sure klass is initialized
3838 assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3839 __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3840
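  // Fast path: allocate_instance tries to allocate and initialize the
  // instance inline (typically from the TLAB); on failure it branches to
  // slow_case, which calls InterpreterRuntime::_new below.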
3841 __ allocate_instance(r4, r0, r3, r1, true, slow_case);
3842 __ b(done);
3843
3844 // slow case
3845 __ bind(slow_case);
3846 __ get_constant_pool(c_rarg1);
3847 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3848 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3849 __ verify_oop(r0);
3850
3851 // continue
3852 __ bind(done);
3853 // Must prevent reordering of stores for object initialization with stores that publish the new object.
3854 __ membar(Assembler::StoreStore);
3855 }
3856
3857 void TemplateTable::newarray() {
3858 transition(itos, atos);
3859 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3860 __ mov(c_rarg2, r0);
3861 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3862 c_rarg1, c_rarg2);
3906 __ bind(quicked);
3907 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3908 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3909
3910 __ bind(resolved);
3911 __ load_klass(r19, r3);
3912
3913 // Generate subtype check. Blows r2, r5. Object in r3.
3914 // Superklass in r0. Subklass in r19.
3915 __ gen_subtype_check(r19, ok_is_subtype);
3916
3917 // Come here on failure
3918 __ push(r3);
3919 // object is at TOS
3920 __ b(Interpreter::_throw_ClassCastException_entry);
3921
3922 // Come here on success
3923 __ bind(ok_is_subtype);
3924 __ mov(r0, r3); // Restore object from r3 into r0
3925
3926 __ b(done);
3927 __ bind(is_null);
3928
3929 // Collect counts on whether this test sees nulls a lot or not.
3930 if (ProfileInterpreter) {
3931 __ profile_null_seen(r2);
3932 }
3933
3934 __ bind(done);
3935 }
3936
3937 void TemplateTable::instanceof() {
3938 transition(atos, itos);
3939 Label done, is_null, ok_is_subtype, quicked, resolved;
3940 __ cbz(r0, is_null);
3941
3942 // Get cpool & tags index
3943 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3944 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3945 // See if bytecode has already been quickened
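    // Note: the tag is loaded with ldarb (load-acquire) so that, if we see
    // JVM_CONSTANT_Class here, we also see the resolved Klass* that was
    // published when the entry was resolved (cf. ConstantPool::klass_at_put
    // and the same pattern in TemplateTable::_new above).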
3946 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3947 __ lea(r1, Address(rscratch1, r19));
3948 __ ldarb(r1, r1);
3949 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3950 __ br(Assembler::EQ, quicked);
3951
3952 __ push(atos); // save receiver for result, and for GC
3953 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4031 // in the assembly code structure as well
4032 //
4033 // Stack layout:
4034 //
4035 // [expressions ] <--- esp = expression stack top
4036 // ..
4037 // [expressions ]
4038 // [monitor entry] <--- monitor block top = expression stack bot
4039 // ..
4040 // [monitor entry]
4041 // [frame data ] <--- monitor block bot
4042 // ...
4043 // [saved rfp ] <--- rfp
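//
// Note: monitor block top is stored in the frame as an offset (in stack-slot
// units) relative to rfp and is "derelativized" with an ldr/lea pair before
// use; monitor block bottom corresponds to the frame's initial sp, i.e. the
// bottom of the monitor area.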
4044 void TemplateTable::monitorenter()
4045 {
4046 transition(atos, vtos);
4047
4048 // check for null object
4049 __ null_check(r0);
4050
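  // Value (inline-type) objects have no identity and cannot be used for
  // synchronization; if the mark word carries the inline-type pattern we
  // branch to is_inline_type below and let the runtime throw an identity
  // exception.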
4051 Label is_inline_type;
4052 __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4053 __ test_markword_is_inline_type(rscratch1, is_inline_type);
4054
4055 const Address monitor_block_top(
4056 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4057 const Address monitor_block_bot(
4058 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4059 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4060
4061 Label allocated;
4062
4063 // initialize entry pointer
4064 __ mov(c_rarg1, zr); // points to free slot or null
4065
4066 // find a free slot in the monitor block (result in c_rarg1)
4067 {
4068 Label entry, loop, exit;
4069 __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
4070 __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
4071 // c_rarg3 points to current entry, starting with top-most entry
4072
4073 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4074
4136 // c_rarg1: points to monitor entry
4137 __ bind(allocated);
4138
4139 // Increment bcp to point to the next bytecode so that exception
4140 // handling for asynchronous exceptions works correctly.
4141 // The object has already been popped from the stack, so the
4142 // expression stack looks correct.
4143 __ increment(rbcp);
4144
4145 // store object
4146 __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
4147 __ lock_object(c_rarg1);
4148
4149 // check to make sure this monitor doesn't cause stack overflow after locking
4150 __ save_bcp(); // in case of exception
4151 __ generate_stack_overflow_check(0);
4152
4153 // The bcp has already been incremented. Just need to dispatch to
4154 // next instruction.
4155 __ dispatch_next(vtos);
4156
4157 __ bind(is_inline_type);
4158 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4159 InterpreterRuntime::throw_identity_exception), r0);
4160 __ should_not_reach_here();
4161 }
4162
4163
4164 void TemplateTable::monitorexit()
4165 {
4166 transition(atos, vtos);
4167
4168 // check for null object
4169 __ null_check(r0);
4170
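  // monitorexit on a value (inline-type) object is likewise illegal:
  // check (mark & inline_type_pattern) == inline_type_pattern and, if so,
  // let the runtime throw IllegalMonitorStateException.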
4171 const int is_inline_type_mask = markWord::inline_type_pattern;
4172 Label has_identity;
4173 __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4174 __ mov(rscratch2, is_inline_type_mask);
4175 __ andr(rscratch1, rscratch1, rscratch2);
4176 __ cmp(rscratch1, rscratch2);
4177 __ br(Assembler::NE, has_identity);
4178 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4179 InterpreterRuntime::throw_illegal_monitor_state_exception));
4180 __ should_not_reach_here();
4181 __ bind(has_identity);
4182
4183 const Address monitor_block_top(
4184 rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4185 const Address monitor_block_bot(
4186 rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4187 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4188
4189 Label found;
4190
4191 // find matching slot
4192 {
4193 Label entry, loop;
4194 __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4195 __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4196 // c_rarg1 points to current entry, starting with top-most entry
4197
4198 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4199 // of monitor block
4200 __ b(entry);
4201
4202 __ bind(loop);
|