src/hotspot/cpu/aarch64/templateTable_aarch64.cpp

  26 #include "asm/macroAssembler.inline.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "compiler/compilerDefinitions.inline.hpp"
  29 #include "gc/shared/barrierSetAssembler.hpp"
  30 #include "gc/shared/collectedHeap.hpp"
  31 #include "gc/shared/tlab_globals.hpp"
  32 #include "interpreter/interpreter.hpp"
  33 #include "interpreter/interpreterRuntime.hpp"
  34 #include "interpreter/interp_masm.hpp"
  35 #include "interpreter/templateTable.hpp"
  36 #include "memory/universe.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/method.inline.hpp"
  39 #include "oops/objArrayKlass.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "oops/resolvedFieldEntry.hpp"
  42 #include "oops/resolvedIndyEntry.hpp"
  43 #include "oops/resolvedMethodEntry.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/methodHandles.hpp"

  46 #include "runtime/frame.inline.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubRoutines.hpp"
  49 #include "runtime/synchronizer.hpp"
  50 #include "utilities/powerOfTwo.hpp"
  51 
  52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  53 
  54 // Address computation: local variables
  55 
  56 static inline Address iaddress(int n) {
  57   return Address(rlocals, Interpreter::local_offset_in_bytes(n));
  58 }
  59 
  60 static inline Address laddress(int n) {
  61   return iaddress(n + 1);
  62 }
  63 
  64 static inline Address faddress(int n) {
  65   return iaddress(n);

 152                         Address src,
 153                         Register dst,
 154                         DecoratorSet decorators) {
 155   __ load_heap_oop(dst, src, r10, r11, decorators);
 156 }
 157 
 158 Address TemplateTable::at_bcp(int offset) {
 159   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 160   return Address(rbcp, offset);
 161 }
 162 
 163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 164                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 165                                    int byte_no)
 166 {
 167   assert_different_registers(bc_reg, temp_reg);
 168   if (!RewriteBytecodes)  return;
 169   Label L_patch_done;
 170 
 171   switch (bc) {

 172   case Bytecodes::_fast_aputfield:
 173   case Bytecodes::_fast_bputfield:
 174   case Bytecodes::_fast_zputfield:
 175   case Bytecodes::_fast_cputfield:
 176   case Bytecodes::_fast_dputfield:
 177   case Bytecodes::_fast_fputfield:
 178   case Bytecodes::_fast_iputfield:
 179   case Bytecodes::_fast_lputfield:
 180   case Bytecodes::_fast_sputfield:
 181     {
 182       // We skip bytecode quickening for putfield instructions when
 183       // the put_code written to the constant pool cache is zero.
 184       // This is required so that every execution of this instruction
 185       // calls out to InterpreterRuntime::resolve_get_put to do
 186       // additional, required work.
 187       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 188       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 189       __ load_field_entry(temp_reg, bc_reg);
 190       if (byte_no == f1_byte) {
 191         __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 736   locals_index_wide(r1);
 737   __ ldr(r0, aaddress(r1));
 738 }
 739 
 740 void TemplateTable::index_check(Register array, Register index)
 741 {
 742   // destroys r1, rscratch1
 743   // sign extend index for use by indexed load
 744   // __ movl2ptr(index, index);
 745   // check index
 746   Register length = rscratch1;
 747   __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
 748   __ cmpw(index, length);
 749   if (index != r1) {
 750     // ??? convention: move aberrant index into r1 for exception message
 751     assert(r1 != array, "different registers");
 752     __ mov(r1, index);
 753   }
 754   Label ok;
 755   __ br(Assembler::LO, ok);
 756     // ??? convention: move array into r3 for exception message
 757   __ mov(r3, array);
 758   __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
 759   __ br(rscratch1);
 760   __ bind(ok);
 761 }
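
A note on the idiom: the cmpw/br(LO, ok) pair above is the standard single-branch bounds check. Because a negative index reinterpreted as unsigned exceeds any valid array length, one unsigned comparison rejects both the negative and the too-large cases. A minimal C++ sketch of the same trick (illustrative names, not HotSpot code):

    #include <cstdint>
    #include <stdexcept>

    // One unsigned compare covers both index < 0 and index >= length:
    // a negative int32_t becomes a huge uint32_t.
    inline void index_check_sketch(int32_t index, int32_t length) {
      if (static_cast<uint32_t>(index) >= static_cast<uint32_t>(length)) {
        throw std::out_of_range("ArrayIndexOutOfBoundsException");
      }
    }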
 762 
 763 void TemplateTable::iaload()
 764 {
 765   transition(itos, itos);
 766   __ mov(r1, r0);
 767   __ pop_ptr(r0);
 768   // r0: array
 769   // r1: index
 770   index_check(r0, r1); // leaves index in r1, kills rscratch1
 771   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
 772   __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
 773 }
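
The add/uxtw pair in iaload folds the array header offset into the scaled index: addr = array + ((index + base/4) << 2) = array + base + index*4, so the header add comes for free in the addressing mode. A small C++ sketch of the arithmetic (illustrative, not HotSpot code):

    #include <cstdint>
    #include <cstring>

    // Pre-bias the element index by base/4, then scale by 4:
    // equivalent to array + base + index*4.
    inline int32_t iaload_sketch(const char* array, uint32_t index, uint32_t base) {
      uint64_t biased = uint64_t(index) + (base >> 2);   // add(r1, r1, base >> 2)
      int32_t v;
      std::memcpy(&v, array + (biased << 2), sizeof v);  // Address(r0, r1, uxtw(2))
      return v;
    }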
 774 
 775 void TemplateTable::laload()
 776 {
 777   transition(itos, ltos);
 778   __ mov(r1, r0);
 779   __ pop_ptr(r0);

 799 void TemplateTable::daload()
 800 {
 801   transition(itos, dtos);
 802   __ mov(r1, r0);
 803   __ pop_ptr(r0);
 804   // r0: array
 805   // r1: index
 806   index_check(r0, r1); // leaves index in r1, kills rscratch1
 807   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
 808   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
 809 }
 810 
 811 void TemplateTable::aaload()
 812 {
 813   transition(itos, atos);
 814   __ mov(r1, r0);
 815   __ pop_ptr(r0);
 816   // r0: array
 817   // r1: index
 818   index_check(r0, r1); // leaves index in r1, kills rscratch1
 819   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
 820   do_oop_load(_masm,
 821               Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
 822               r0,
 823               IS_ARRAY);

 824 }
 825 
 826 void TemplateTable::baload()
 827 {
 828   transition(itos, itos);
 829   __ mov(r1, r0);
 830   __ pop_ptr(r0);
 831   // r0: array
 832   // r1: index
 833   index_check(r0, r1); // leaves index in r1, kills rscratch1
 834   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
 835   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
 836 }
 837 
 838 void TemplateTable::caload()
 839 {
 840   transition(itos, itos);
 841   __ mov(r1, r0);
 842   __ pop_ptr(r0);
 843   // r0: array

1090   // r1:  index
1091   // r3:  array
1092   index_check(r3, r1); // prefer index in r1
1093   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1094   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1095 }
1096 
1097 void TemplateTable::dastore() {
1098   transition(dtos, vtos);
1099   __ pop_i(r1);
1100   __ pop_ptr(r3);
1101   // v0: value
1102   // r1:  index
1103   // r3:  array
1104   index_check(r3, r1); // prefer index in r1
1105   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1106   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1107 }
1108 
1109 void TemplateTable::aastore() {
1110   Label is_null, ok_is_subtype, done;
1111   transition(vtos, vtos);
1112   // stack: ..., array, index, value
1113   __ ldr(r0, at_tos());    // value
1114   __ ldr(r2, at_tos_p1()); // index
1115   __ ldr(r3, at_tos_p2()); // array
1116 
1117   Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1118 
1119   index_check(r3, r2);     // kills r1
1120   __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1121 
1122   // do array store check - check for null value first
1123   __ cbz(r0, is_null);
1124 
1125   // Move subklass into r1
1126   __ load_klass(r1, r0);
1127   // Move superklass into r0
1128   __ load_klass(r0, r3);
1129   __ ldr(r0, Address(r0,
1130                      ObjArrayKlass::element_klass_offset()));
1131   // Compress array + index*oopSize + 12 into a single register.  Frees r2.
1132 
1133   // Generate subtype check.  Blows r2, r5
1134   // Superklass in r0.  Subklass in r1.
1135   __ gen_subtype_check(r1, ok_is_subtype);
1136 
1137   // Come here on failure
1138   // object is at TOS
1139   __ b(Interpreter::_throw_ArrayStoreException_entry);
1140 
1141   // Come here on success
1142   __ bind(ok_is_subtype);
1143 
1144   // Get the value we will store
1145   __ ldr(r0, at_tos());
1146   // Now store using the appropriate barrier
1147   // Clobbers: r10, r11, r3
1148   do_oop_store(_masm, element_address, r0, IS_ARRAY);
1149   __ b(done);
1150 
1151   // Have a null in r0, r3=array, r2=index.  Store null at ary[idx]
1152   __ bind(is_null);
1153   __ profile_null_seen(r2);
1154 
1155   // Store a null
1156   // Clobbers: r10, r11, r3
1157   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1158 
1159   // Pop stack arguments
1160   __ bind(done);
1161   __ add(esp, esp, 3 * Interpreter::stackElementSize);
1162 }
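
For reference, the store check aastore emits boils down to: a null may be stored into any object array, while a non-null value must be a subtype of the array's element class. A hypothetical C++ model (types and helpers here are illustrative placeholders, not HotSpot's):

    #include <stdexcept>

    struct Klass {
      // placeholder subtype test; the template uses gen_subtype_check
      bool is_subtype_of(const Klass* k) const { return this == k; }
    };
    struct Object   { const Klass* klass; };
    struct ObjArray { const Klass* element_klass; Object** data; };

    void aastore_sketch(ObjArray* a, int i, Object* v) {
      if (v != nullptr && !v->klass->is_subtype_of(a->element_klass)) {
        throw std::runtime_error("ArrayStoreException");  // _throw_ArrayStoreException_entry
      }
      a->data[i] = v;  // the real store goes through do_oop_store barriers
    }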
1163 
1164 void TemplateTable::bastore()
1165 {
1166   transition(itos, vtos);
1167   __ pop_i(r1);
1168   __ pop_ptr(r3);
1169   // r0: value
1170   // r1: index
1171   // r3: array
1172   index_check(r3, r1); // prefer index in r1
1173 
1174   // Need to check whether array is boolean or byte
1175   // since both types share the bastore bytecode.
1176   __ load_klass(r2, r3);
1177   __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));

1944   __ br(j_not(cc), not_taken);
1945   branch(false, false);
1946   __ bind(not_taken);
1947   __ profile_not_taken_branch(r0);
1948 }
1949 
1950 void TemplateTable::if_nullcmp(Condition cc)
1951 {
1952   transition(atos, vtos);
1953   // assume branch is more often taken than not (loops use backward branches)
1954   Label not_taken;
1955   if (cc == equal)
1956     __ cbnz(r0, not_taken);
1957   else
1958     __ cbz(r0, not_taken);
1959   branch(false, false);
1960   __ bind(not_taken);
1961   __ profile_not_taken_branch(r0);
1962 }
1963 
1964 void TemplateTable::if_acmp(Condition cc)
1965 {
1966   transition(atos, vtos);
1967   // assume branch is more often taken than not (loops use backward branches)
1968   Label not_taken;
1969   __ pop_ptr(r1);
1970   __ cmpoop(r1, r0);
1971   __ br(j_not(cc), not_taken);
1972   branch(false, false);
1973   __ bind(not_taken);
1974   __ profile_not_taken_branch(r0);
1975 }
1976 
1977 void TemplateTable::ret() {
1978   transition(vtos, vtos);
1979   locals_index(r1);
1980   __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1981   __ profile_ret(r1, r2);
1982   __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1983   __ lea(rbcp, Address(rbcp, r1));
1984   __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1985   __ dispatch_next(vtos, 0, /*generate_poll*/true);
1986 }
1987 
1988 void TemplateTable::wide_ret() {
1989   transition(vtos, vtos);
1990   locals_index_wide(r1);
1991   __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1992   __ profile_ret(r1, r2);
1993   __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1994   __ lea(rbcp, Address(rbcp, r1));
1995   __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1996   __ dispatch_next(vtos, 0, /*generate_poll*/true);

2190   assert(_desc->calls_vm(),
2191          "inconsistent calls_vm information"); // call in remove_activation
2192 
2193   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2194     assert(state == vtos, "only valid state");
2195 
2196     __ ldr(c_rarg1, aaddress(0));
2197     __ load_klass(r3, c_rarg1);
2198     __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2199     Label skip_register_finalizer;
2200     __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2201 
2202     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2203 
2204     __ bind(skip_register_finalizer);
2205   }
2206 
2207   // Issue a StoreStore barrier after all stores but before return
2208   // from any constructor for any class with a final field.  We don't
2209   // know if this is a constructor, so we always do so.
2210   if (_desc->bytecode() == Bytecodes::_return)
2211     __ membar(MacroAssembler::StoreStore);
2212 
2213   if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2214     Label no_safepoint;
2215     __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2216     __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2217     __ push(state);
2218     __ push_cont_fastpath(rthread);
2219     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2220     __ pop_cont_fastpath(rthread);
2221     __ pop(state);
2222     __ bind(no_safepoint);
2223   }
2224 
2225   // Narrow result if state is itos but result type is smaller.
2226   // Need to narrow in the return bytecode rather than in generate_return_entry
2227   // since compiled code callers expect the result to already be narrowed.
2228   if (state == itos) {
2229     __ narrow(r0);
2230   }
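
The safepoint check above is a single tbz on the thread-local polling word; the VM call is taken only when the poll bit is armed. A hypothetical sketch of the shape of that test (bit position and names are placeholders):

    #include <cstdint>

    constexpr std::uintptr_t poll_bit = std::uintptr_t(1) << 0;  // placeholder position

    // tbz on the poll bit: fall through when clear, call the VM when set.
    inline bool safepoint_armed(std::uintptr_t polling_word) {
      return (polling_word & poll_bit) != 0;
    }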

2582     }
2583     // c_rarg1: object pointer or null
2584     // c_rarg2: cache entry pointer
2585     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2586                                        InterpreterRuntime::post_field_access),
2587                c_rarg1, c_rarg2);
2588     __ load_field_entry(cache, index);
2589     __ bind(L1);
2590   }
2591 }
2592 
2593 void TemplateTable::pop_and_check_object(Register r)
2594 {
2595   __ pop_ptr(r);
2596   __ null_check(r);  // for field access must check obj.
2597   __ verify_oop(r);
2598 }
2599 
2600 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2601 {
2602   const Register cache     = r4;
2603   const Register obj       = r4;
2604   const Register index     = r3;
2605   const Register tos_state = r3;
2606   const Register off       = r19;
2607   const Register flags     = r6;
2608   const Register bc        = r4; // uses same reg as obj, so don't mix them
2609 
2610   resolve_cache_and_index_for_field(byte_no, cache, index);
2611   jvmti_post_field_access(cache, index, is_static, false);
2612   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2613 
2614   if (!is_static) {
2615     // obj is on the stack
2616     pop_and_check_object(obj);
2617   }
2618 
2619   // 8179954: We need to make sure that the code generated for
2620   // volatile accesses forms a sequentially-consistent set of
2621   // operations when combined with STLR and LDAR.  Without a leading
2622   // membar it's possible for a simple Dekker test to fail if loads
2623   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
2624   // the stores in one method and we interpret the loads in another.
2625   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2626     Label notVolatile;
2627     __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2628     __ membar(MacroAssembler::AnyAny);
2629     __ bind(notVolatile);
2630   }
2631 
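
The JDK-8179954 comment describes a store-buffering (Dekker) failure. A C++ litmus sketch of the point (approximate, using standard atomics rather than AArch64 instructions): with a full fence between each thread's store and load, the outcome r1 == 0 && r2 == 0 is forbidden; with only a release store and a plain load (the STLR vs. LDR;DMB mix the comment warns about) it becomes possible, which is why the interpreter emits the leading AnyAny barrier before volatile loads.

    #include <atomic>

    std::atomic<int> X{0}, Y{0};
    int r1, r2;

    void thread1() {
      X.store(1, std::memory_order_release);                // STLR-like store
      std::atomic_thread_fence(std::memory_order_seq_cst);  // leading full barrier
      r1 = Y.load(std::memory_order_relaxed);
    }
    void thread2() {
      Y.store(1, std::memory_order_release);
      std::atomic_thread_fence(std::memory_order_seq_cst);
      r2 = X.load(std::memory_order_relaxed);
    }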

2650   __ b(Done);
2651 
2652   __ bind(notByte);
2653   __ cmp(tos_state, (u1)ztos);
2654   __ br(Assembler::NE, notBool);
2655 
2656   // ztos (same code as btos)
2657   __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2658   __ push(ztos);
2659   // Rewrite bytecode to be faster
2660   if (rc == may_rewrite) {
2661     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2662     patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2663   }
2664   __ b(Done);
2665 
2666   __ bind(notBool);
2667   __ cmp(tos_state, (u1)atos);
2668   __ br(Assembler::NE, notObj);
2669   // atos
2670   do_oop_load(_masm, field, r0, IN_HEAP);
2671   __ push(atos);
2672   if (rc == may_rewrite) {
2673     patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2674   }
2675   __ b(Done);
2676 
2677   __ bind(notObj);
2678   __ cmp(tos_state, (u1)itos);
2679   __ br(Assembler::NE, notInt);
2680   // itos
2681   __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2682   __ push(itos);
2683   // Rewrite bytecode to be faster
2684   if (rc == may_rewrite) {
2685     patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2686   }
2687   __ b(Done);
2688 
2689   __ bind(notInt);
2690   __ cmp(tos_state, (u1)ctos);
2691   __ br(Assembler::NE, notChar);
2692   // ctos
2693   __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2694   __ push(ctos);
2695   // Rewrite bytecode to be faster

2816     // c_rarg1: object pointer set up above (null if static)
2817     // c_rarg2: cache entry pointer
2818     // c_rarg3: jvalue object on the stack
2819     __ call_VM(noreg,
2820                CAST_FROM_FN_PTR(address,
2821                                 InterpreterRuntime::post_field_modification),
2822                c_rarg1, c_rarg2, c_rarg3);
2823     __ load_field_entry(cache, index);
2824     __ bind(L1);
2825   }
2826 }
2827 
2828 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2829   transition(vtos, vtos);
2830 
2831   const Register cache     = r2;
2832   const Register index     = r3;
2833   const Register tos_state = r3;
2834   const Register obj       = r2;
2835   const Register off       = r19;
2836   const Register flags     = r0;
2837   const Register bc        = r4;
2838 
2839   resolve_cache_and_index_for_field(byte_no, cache, index);
2840   jvmti_post_field_mod(cache, index, is_static);
2841   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2842 
2843   Label Done;
2844   __ mov(r5, flags);
2845 
2846   {
2847     Label notVolatile;
2848     __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2849     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2850     __ bind(notVolatile);
2851   }
2852 
2853   // field address
2854   const Address field(obj, off);
2855 
2856   Label notByte, notBool, notInt, notShort, notChar,
2857         notLong, notFloat, notObj, notDouble;
2858 
2859   assert(btos == 0, "change code, btos != 0");
2860   __ cbnz(tos_state, notByte);
2861 
2862   // Don't rewrite putstatic, only putfield
2863   if (is_static) rc = may_not_rewrite;
2864 
2865   // btos
2866   {
2867     __ pop(btos);
2868     if (!is_static) pop_and_check_object(obj);

2877   __ cmp(tos_state, (u1)ztos);
2878   __ br(Assembler::NE, notBool);
2879 
2880   // ztos
2881   {
2882     __ pop(ztos);
2883     if (!is_static) pop_and_check_object(obj);
2884     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
2885     if (rc == may_rewrite) {
2886       patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2887     }
2888     __ b(Done);
2889   }
2890 
2891   __ bind(notBool);
2892   __ cmp(tos_state, (u1)atos);
2893   __ br(Assembler::NE, notObj);
2894 
2895   // atos
2896   {
2897     __ pop(atos);
2898     if (!is_static) pop_and_check_object(obj);
2899     // Store into the field
2900     // Clobbers: r10, r11, r3
2901     do_oop_store(_masm, field, r0, IN_HEAP);
2902     if (rc == may_rewrite) {
2903       patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2904     }
2905     __ b(Done);
2906   }
2907 
2908   __ bind(notObj);
2909   __ cmp(tos_state, (u1)itos);
2910   __ br(Assembler::NE, notInt);
2911 
2912   // itos
2913   {
2914     __ pop(itos);
2915     if (!is_static) pop_and_check_object(obj);
2916     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
2917     if (rc == may_rewrite) {
2918       patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2919     }
2920     __ b(Done);
2921   }
2922 
2923   __ bind(notInt);
2924   __ cmp(tos_state, (u1)ctos);
2925   __ br(Assembler::NE, notChar);

2990   {
2991     __ pop(dtos);
2992     if (!is_static) pop_and_check_object(obj);
2993     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
2994     if (rc == may_rewrite) {
2995       patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2996     }
2997   }
2998 
2999 #ifdef ASSERT
3000   __ b(Done);
3001 
3002   __ bind(notDouble);
3003   __ stop("Bad state");
3004 #endif
3005 
3006   __ bind(Done);
3007 
3008   {
3009     Label notVolatile;
3010     __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3011     __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3012     __ bind(notVolatile);
3013   }
3014 }
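
The two notVolatile blocks in putfield_or_static pair up: the leading StoreStore|LoadStore is the release half before the store, and the trailing StoreLoad|StoreStore keeps the store ahead of any later volatile read. Approximated with C++ atomics (a sketch, not the actual lowering):

    #include <atomic>

    std::atomic<int> g_field{0};

    void volatile_put_sketch(int v) {
      g_field.store(v, std::memory_order_release);          // leading barriers + str
      std::atomic_thread_fence(std::memory_order_seq_cst);  // trailing StoreLoad
    }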
3015 
3016 void TemplateTable::putfield(int byte_no)
3017 {
3018   putfield_or_static(byte_no, false);
3019 }
3020 
3021 void TemplateTable::nofast_putfield(int byte_no) {
3022   putfield_or_static(byte_no, false, may_not_rewrite);
3023 }
3024 
3025 void TemplateTable::putstatic(int byte_no) {
3026   putfield_or_static(byte_no, true);
3027 }
3028 
3029 void TemplateTable::jvmti_post_fast_field_mod() {
3030   if (JvmtiExport::can_post_field_modification()) {
3031     // Check to see if a field modification watch has been set before
3032     // we take the time to call into the VM.
3033     Label L2;
3034     __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3035     __ ldrw(c_rarg3, Address(rscratch1));
3036     __ cbzw(c_rarg3, L2);
3037     __ pop_ptr(r19);                  // copy the object pointer from tos
3038     __ verify_oop(r19);
3039     __ push_ptr(r19);                 // put the object pointer back on tos
3040     // Save tos values before call_VM() clobbers them. Since we have
3041     // to do it for every data type, we use the saved values as the
3042     // jvalue object.
3043     switch (bytecode()) {          // load values into the jvalue object
3044     case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3045     case Bytecodes::_fast_bputfield: // fall through
3046     case Bytecodes::_fast_zputfield: // fall through
3047     case Bytecodes::_fast_sputfield: // fall through
3048     case Bytecodes::_fast_cputfield: // fall through
3049     case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3050     case Bytecodes::_fast_dputfield: __ push_d(); break;
3051     case Bytecodes::_fast_fputfield: __ push_f(); break;
3052     case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3053 
3054     default:
3055       ShouldNotReachHere();
3056     }
3057     __ mov(c_rarg3, esp);             // points to jvalue on the stack
3058     // access constant pool cache entry
3059     __ load_field_entry(c_rarg2, r0);
3060     __ verify_oop(r19);
3061     // r19: object pointer copied above
3062     // c_rarg2: cache entry pointer
3063     // c_rarg3: jvalue object on the stack
3064     __ call_VM(noreg,
3065                CAST_FROM_FN_PTR(address,
3066                                 InterpreterRuntime::post_field_modification),
3067                r19, c_rarg2, c_rarg3);
3068 
3069     switch (bytecode()) {             // restore tos values
3070     case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3071     case Bytecodes::_fast_bputfield: // fall through
3072     case Bytecodes::_fast_zputfield: // fall through
3073     case Bytecodes::_fast_sputfield: // fall through
3074     case Bytecodes::_fast_cputfield: // fall through
3075     case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3076     case Bytecodes::_fast_dputfield: __ pop_d(); break;
3077     case Bytecodes::_fast_fputfield: __ pop_f(); break;
3078     case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3079     default: break;
3080     }
3081     __ bind(L2);
3082   }
3083 }
3084 
3085 void TemplateTable::fast_storefield(TosState state)
3086 {
3087   transition(state, vtos);
3088 
3089   ByteSize base = ConstantPoolCache::base_offset();

3097   load_resolved_field_entry(r2, r2, noreg, r1, r5);
3098   __ verify_field_offset(r1);
3099 
3100   {
3101     Label notVolatile;
3102     __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3103     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3104     __ bind(notVolatile);
3105   }
3106 
3107   Label notVolatile;
3108 
3109   // Get object from stack
3110   pop_and_check_object(r2);
3111 
3112   // field address
3113   const Address field(r2, r1);
3114 
3115   // access field
3116   switch (bytecode()) {
3117   case Bytecodes::_fast_aputfield:
3118     // Clobbers: r10, r11, r3
3119     do_oop_store(_masm, field, r0, IN_HEAP);
3120     break;
3121   case Bytecodes::_fast_lputfield:
3122     __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3123     break;
3124   case Bytecodes::_fast_iputfield:
3125     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3126     break;
3127   case Bytecodes::_fast_zputfield:
3128     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3129     break;
3130   case Bytecodes::_fast_bputfield:
3131     __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3132     break;
3133   case Bytecodes::_fast_sputfield:
3134     __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3135     break;
3136   case Bytecodes::_fast_cputfield:

3192   // r0: object
3193   __ verify_oop(r0);
3194   __ null_check(r0);
3195   const Address field(r0, r1);
3196 
3197   // 8179954: We need to make sure that the code generated for
3198   // volatile accesses forms a sequentially-consistent set of
3199   // operations when combined with STLR and LDAR.  Without a leading
3200   // membar it's possible for a simple Dekker test to fail if loads
3201   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
3202   // the stores in one method and we interpret the loads in another.
3203   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3204     Label notVolatile;
3205     __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3206     __ membar(MacroAssembler::AnyAny);
3207     __ bind(notVolatile);
3208   }
3209 
3210   // access field
3211   switch (bytecode()) {
3212   case Bytecodes::_fast_agetfield:
3213     do_oop_load(_masm, field, r0, IN_HEAP);
3214     __ verify_oop(r0);
3215     break;
3216   case Bytecodes::_fast_lgetfield:
3217     __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3218     break;
3219   case Bytecodes::_fast_igetfield:
3220     __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3221     break;
3222   case Bytecodes::_fast_bgetfield:
3223     __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3224     break;
3225   case Bytecodes::_fast_sgetfield:
3226     __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3227     break;
3228   case Bytecodes::_fast_cgetfield:
3229     __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3230     break;
3231   case Bytecodes::_fast_fgetfield:

3612   Label initialize_header;
3613 
3614   __ get_cpool_and_tags(r4, r0);
3615   // Make sure the class we're about to instantiate has been resolved.
3616   // This is done before loading InstanceKlass to be consistent with the
3617   // order in which the constant pool is updated (see ConstantPool::klass_at_put).
3618   const int tags_offset = Array<u1>::base_offset_in_bytes();
3619   __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3620   __ lea(rscratch1, Address(rscratch1, tags_offset));
3621   __ ldarb(rscratch1, rscratch1);
3622   __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3623   __ br(Assembler::NE, slow_case);
3624 
3625   // get InstanceKlass
3626   __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3627 
3628   // make sure klass is initialized
3629   assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3630   __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
3631 
3632   // get instance_size in InstanceKlass (scaled to a count of bytes)
3633   __ ldrw(r3,
3634           Address(r4,
3635                   Klass::layout_helper_offset()));
3636   // test to see if it is malformed in some way
3637   __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3638 
3639   // Allocate the instance:
3640   //  If TLAB is enabled:
3641   //    Try to allocate in the TLAB.
3642   //    If it fails, go to the slow path.
3643   //    Initialize the allocation.
3644   //    Exit.
3645   //
3646   //  Go to slow path.
3647 
3648   if (UseTLAB) {
3649     __ tlab_allocate(r0, r3, 0, noreg, r1, slow_case);
3650 
3651     if (ZeroTLAB) {
3652       // the fields have been already cleared
3653       __ b(initialize_header);
3654     }
3655 
3656     // The object is initialized before the header.  If the object size is
3657     // zero, go directly to the header initialization.
3658     int header_size = oopDesc::header_size() * HeapWordSize;
3659     assert(is_aligned(header_size, BytesPerLong), "oop header size must be 8-byte-aligned");
3660     __ sub(r3, r3, header_size);
3661     __ cbz(r3, initialize_header);
3662 
3663     // Initialize object fields
3664     {
3665       __ add(r2, r0, header_size);
3666       Label loop;
3667       __ bind(loop);
3668       __ str(zr, Address(__ post(r2, BytesPerLong)));
3669       __ sub(r3, r3, BytesPerLong);
3670       __ cbnz(r3, loop);
3671     }
3672 
3673     // initialize object header only.
3674     __ bind(initialize_header);
3675     if (UseCompactObjectHeaders) {
3676       __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3677       __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3678     } else {
3679       __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3680       __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3681       __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3682       __ store_klass(r0, r4);      // store klass last
3683     }
3684 
3685     if (DTraceAllocProbes) {
3686       // Trigger dtrace event for fastpath
3687       __ push(atos); // save the return value
3688       __ call_VM_leaf(
3689            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3690       __ pop(atos); // restore the return value
3691 
3692     }
3693     __ b(done);
3694   }
3695 
3696   // slow case
3697   __ bind(slow_case);
3698   __ get_constant_pool(c_rarg1);
3699   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3700   __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3701   __ verify_oop(r0);
3702 
3703   // continue
3704   __ bind(done);
3705   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3706   __ membar(Assembler::StoreStore);
3707 }
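
The fast path above is a thread-local pointer bump, which is why no atomics appear in it. A hypothetical C++ sketch (field names invented; the real emission is MacroAssembler::tlab_allocate):

    #include <cstddef>

    struct TlabSketch { char* top; char* end; };

    // Bump-the-pointer allocation; return nullptr to take the slow path.
    inline void* tlab_allocate_sketch(TlabSketch* tlab, std::size_t bytes) {
      char* obj = tlab->top;
      if (static_cast<std::size_t>(tlab->end - obj) < bytes) return nullptr;
      tlab->top = obj + bytes;  // thread-local, so no CAS is needed
      return obj;
    }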
3708 
3709 void TemplateTable::newarray() {
3710   transition(itos, atos);
3711   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3712   __ mov(c_rarg2, r0);
3713   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3714           c_rarg1, c_rarg2);

3758   __ bind(quicked);
3759   __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3760   __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3761 
3762   __ bind(resolved);
3763   __ load_klass(r19, r3);
3764 
3765   // Generate subtype check.  Blows r2, r5.  Object in r3.
3766   // Superklass in r0.  Subklass in r19.
3767   __ gen_subtype_check(r19, ok_is_subtype);
3768 
3769   // Come here on failure
3770   __ push(r3);
3771   // object is at TOS
3772   __ b(Interpreter::_throw_ClassCastException_entry);
3773 
3774   // Come here on success
3775   __ bind(ok_is_subtype);
3776   __ mov(r0, r3); // Restore object in r3
3777 
3778   // Collect counts on whether this test sees nulls a lot or not.
3779   if (ProfileInterpreter) {
3780     __ b(done);
3781     __ bind(is_null);
3782     __ profile_null_seen(r2);
3783   } else {
3784     __ bind(is_null);   // same as 'done'
3785   }
3786   __ bind(done);
3787 }
3788 
3789 void TemplateTable::instanceof() {
3790   transition(atos, itos);
3791   Label done, is_null, ok_is_subtype, quicked, resolved;
3792   __ cbz(r0, is_null);
3793 
3794   // Get cpool & tags index
3795   __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3796   __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3797   // See if bytecode has already been quicked
3798   __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3799   __ lea(r1, Address(rscratch1, r19));
3800   __ ldarb(r1, r1);
3801   __ cmp(r1, (u1)JVM_CONSTANT_Class);
3802   __ br(Assembler::EQ, quicked);
3803 
3804   __ push(atos); // save receiver for result, and for GC
3805   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

3883 //       in the assembly code structure as well
3884 //
3885 // Stack layout:
3886 //
3887 // [expressions  ] <--- esp               = expression stack top
3888 // ..
3889 // [expressions  ]
3890 // [monitor entry] <--- monitor block top = expression stack bot
3891 // ..
3892 // [monitor entry]
3893 // [frame data   ] <--- monitor block bot
3894 // ...
3895 // [saved rfp    ] <--- rfp
3896 void TemplateTable::monitorenter()
3897 {
3898   transition(atos, vtos);
3899 
3900   // check for null object
3901   __ null_check(r0);
3902 
3903   const Address monitor_block_top(
3904         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3905   const Address monitor_block_bot(
3906         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3907   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3908 
3909   Label allocated;
3910 
3911   // initialize entry pointer
3912   __ mov(c_rarg1, zr); // points to free slot or null
3913 
3914   // find a free slot in the monitor block (result in c_rarg1)
3915   {
3916     Label entry, loop, exit;
3917     __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
3918     __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
3919     // c_rarg3 points to current entry, starting with top-most entry
3920 
3921     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3922 

3984   // c_rarg1: points to monitor entry
3985   __ bind(allocated);
3986 
3987   // Increment bcp to point to the next bytecode, so exception
3988   // handling for async. exceptions works correctly.
3989   // The object has already been popped from the stack, so the
3990   // expression stack looks correct.
3991   __ increment(rbcp);
3992 
3993   // store object
3994   __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
3995   __ lock_object(c_rarg1);
3996 
3997   // check to make sure this monitor doesn't cause stack overflow after locking
3998   __ save_bcp();  // in case of exception
3999   __ generate_stack_overflow_check(0);
4000 
4001   // The bcp has already been incremented. Just need to dispatch to
4002   // next instruction.
4003   __ dispatch_next(vtos);
4004 }
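
The free-slot search sketched by the stack-layout comment walks the monitor block from the top entry toward the bottom, remembering any unused entry and stopping early if the object already owns one. A hypothetical C++ model of that scan (types simplified):

    struct MonitorSketch { void* obj; };

    MonitorSketch* find_free_slot(MonitorSketch* top, MonitorSketch* bot,
                                  void* obj) {
      MonitorSketch* free_slot = nullptr;
      for (MonitorSketch* cur = top; cur != bot; ++cur) {
        if (cur->obj == nullptr)  free_slot = cur;  // remember a free entry
        else if (cur->obj == obj) break;            // object already has one
      }
      return free_slot;
    }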
4005 
4006 
4007 void TemplateTable::monitorexit()
4008 {
4009   transition(atos, vtos);
4010 
4011   // check for null object
4012   __ null_check(r0);
4013 
4014   const Address monitor_block_top(
4015         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4016   const Address monitor_block_bot(
4017         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4018   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4019 
4020   Label found;
4021 
4022   // find matching slot
4023   {
4024     Label entry, loop;
4025     __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4026     __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4027     // c_rarg1 points to current entry, starting with top-most entry
4028 
4029     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4030                                         // of monitor block
4031     __ b(entry);
4032 
4033     __ bind(loop);

  26 #include "asm/macroAssembler.inline.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "compiler/compilerDefinitions.inline.hpp"
  29 #include "gc/shared/barrierSetAssembler.hpp"
  30 #include "gc/shared/collectedHeap.hpp"
  31 #include "gc/shared/tlab_globals.hpp"
  32 #include "interpreter/interpreter.hpp"
  33 #include "interpreter/interpreterRuntime.hpp"
  34 #include "interpreter/interp_masm.hpp"
  35 #include "interpreter/templateTable.hpp"
  36 #include "memory/universe.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/method.inline.hpp"
  39 #include "oops/objArrayKlass.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "oops/resolvedFieldEntry.hpp"
  42 #include "oops/resolvedIndyEntry.hpp"
  43 #include "oops/resolvedMethodEntry.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/arguments.hpp"
  47 #include "runtime/frame.inline.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "runtime/stubRoutines.hpp"
  50 #include "runtime/synchronizer.hpp"
  51 #include "utilities/powerOfTwo.hpp"
  52 
  53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  54 
  55 // Address computation: local variables
  56 
  57 static inline Address iaddress(int n) {
  58   return Address(rlocals, Interpreter::local_offset_in_bytes(n));
  59 }
  60 
  61 static inline Address laddress(int n) {
  62   return iaddress(n + 1);
  63 }
  64 
  65 static inline Address faddress(int n) {
  66   return iaddress(n);

 153                         Address src,
 154                         Register dst,
 155                         DecoratorSet decorators) {
 156   __ load_heap_oop(dst, src, r10, r11, decorators);
 157 }
 158 
 159 Address TemplateTable::at_bcp(int offset) {
 160   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 161   return Address(rbcp, offset);
 162 }
 163 
 164 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 165                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 166                                    int byte_no)
 167 {
 168   assert_different_registers(bc_reg, temp_reg);
 169   if (!RewriteBytecodes)  return;
 170   Label L_patch_done;
 171 
 172   switch (bc) {
 173   case Bytecodes::_fast_vputfield:
 174   case Bytecodes::_fast_aputfield:
 175   case Bytecodes::_fast_bputfield:
 176   case Bytecodes::_fast_zputfield:
 177   case Bytecodes::_fast_cputfield:
 178   case Bytecodes::_fast_dputfield:
 179   case Bytecodes::_fast_fputfield:
 180   case Bytecodes::_fast_iputfield:
 181   case Bytecodes::_fast_lputfield:
 182   case Bytecodes::_fast_sputfield:
 183     {
 184       // We skip bytecode quickening for putfield instructions when
 185       // the put_code written to the constant pool cache is zero.
 186       // This is required so that every execution of this instruction
 187       // calls out to InterpreterRuntime::resolve_get_put to do
 188       // additional, required work.
 189       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 190       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 191       __ load_field_entry(temp_reg, bc_reg);
 192       if (byte_no == f1_byte) {
 193         __ lea(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 738   locals_index_wide(r1);
 739   __ ldr(r0, aaddress(r1));
 740 }
 741 
 742 void TemplateTable::index_check(Register array, Register index)
 743 {
 744   // destroys r1, rscratch1
 745   // sign extend index for use by indexed load
 746   // __ movl2ptr(index, index);
 747   // check index
 748   Register length = rscratch1;
 749   __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
 750   __ cmpw(index, length);
 751   if (index != r1) {
 752     // ??? convention: move aberrant index into r1 for exception message
 753     assert(r1 != array, "different registers");
 754     __ mov(r1, index);
 755   }
 756   Label ok;
 757   __ br(Assembler::LO, ok);
 758   // ??? convention: move array into r3 for exception message
 759   __ mov(r3, array);
 760   __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
 761   __ br(rscratch1);
 762   __ bind(ok);
 763 }
 764 
 765 void TemplateTable::iaload()
 766 {
 767   transition(itos, itos);
 768   __ mov(r1, r0);
 769   __ pop_ptr(r0);
 770   // r0: array
 771   // r1: index
 772   index_check(r0, r1); // leaves index in r1, kills rscratch1
 773   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
 774   __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
 775 }
 776 
 777 void TemplateTable::laload()
 778 {
 779   transition(itos, ltos);
 780   __ mov(r1, r0);
 781   __ pop_ptr(r0);

 801 void TemplateTable::daload()
 802 {
 803   transition(itos, dtos);
 804   __ mov(r1, r0);
 805   __ pop_ptr(r0);
 806   // r0: array
 807   // r1: index
 808   index_check(r0, r1); // leaves index in r1, kills rscratch1
 809   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
 810   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
 811 }
 812 
 813 void TemplateTable::aaload()
 814 {
 815   transition(itos, atos);
 816   __ mov(r1, r0);
 817   __ pop_ptr(r0);
 818   // r0: array
 819   // r1: index
 820   index_check(r0, r1); // leaves index in r1, kills rscratch1
 821   __ profile_array_type<ArrayLoadData>(r2, r0, r4);
 822   if (UseArrayFlattening) {
 823     Label is_flat_array, done;
 824 
 825     __ test_flat_array_oop(r0, rscratch1 /*temp*/, is_flat_array);
 826     __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
 827     do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
 828 
 829     __ b(done);
 830     __ bind(is_flat_array);
 831     __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), r0, r1);
 832     // Ensure the stores that copy the inline field contents are visible
 833     // before any subsequent store that publishes this reference.
 834     __ membar(Assembler::StoreStore);
 835     __ bind(done);
 836   } else {
 837     __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
 838     do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
 839   }
 840   __ profile_element_type(r2, r0, r4);
 841 }
 842 
 843 void TemplateTable::baload()
 844 {
 845   transition(itos, itos);
 846   __ mov(r1, r0);
 847   __ pop_ptr(r0);
 848   // r0: array
 849   // r1: index
 850   index_check(r0, r1); // leaves index in r1, kills rscratch1
 851   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
 852   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
 853 }
 854 
 855 void TemplateTable::caload()
 856 {
 857   transition(itos, itos);
 858   __ mov(r1, r0);
 859   __ pop_ptr(r0);
 860   // r0: array

1107   // r1:  index
1108   // r3:  array
1109   index_check(r3, r1); // prefer index in r1
1110   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1111   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg, noreg);
1112 }
1113 
1114 void TemplateTable::dastore() {
1115   transition(dtos, vtos);
1116   __ pop_i(r1);
1117   __ pop_ptr(r3);
1118   // v0: value
1119   // r1:  index
1120   // r3:  array
1121   index_check(r3, r1); // prefer index in r1
1122   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1123   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125 
1126 void TemplateTable::aastore() {
1127   Label is_null, is_flat_array, ok_is_subtype, done;
1128   transition(vtos, vtos);
1129   // stack: ..., array, index, value
1130   __ ldr(r0, at_tos());    // value
1131   __ ldr(r2, at_tos_p1()); // index
1132   __ ldr(r3, at_tos_p2()); // array
1133 
1134   index_check(r3, r2);     // kills r1
1135 
1136   __ profile_array_type<ArrayStoreData>(r4, r3, r5);
1137   __ profile_multiple_element_types(r4, r0, r5, r6);
1138 
1139   __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1140   Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1141   // Be careful not to clobber r4 below
1142 
1143   // do array store check - check for null value first
1144   __ cbz(r0, is_null);
1145 
1146   // Move array class to r5
1147   __ load_klass(r5, r3);
1148 
1149   if (UseArrayFlattening) {
1150     __ ldrw(r6, Address(r5, Klass::layout_helper_offset()));
1151     __ test_flat_array_layout(r6, is_flat_array);
1152   }
1153 
1154   // Move subklass into r1
1155   __ load_klass(r1, r0);
1156 
1157   // Move array element superklass into r0
1158   __ ldr(r0, Address(r5, ObjArrayKlass::element_klass_offset()));
1159   // Compress array + index*oopSize + 12 into a single register.  Frees r2.
1160 
1161   // Generate subtype check.  Blows r2, r5
1162   // Superklass in r0.  Subklass in r1.
1163 
1164   // is "r1 <: r0" ? (value subclass <: array element superclass)
1165   __ gen_subtype_check(r1, ok_is_subtype, false);
1166 
1167   // Come here on failure
1168   // object is at TOS
1169   __ b(Interpreter::_throw_ArrayStoreException_entry);
1170 
1171   // Come here on success
1172   __ bind(ok_is_subtype);
1173 
1174   // Get the value we will store
1175   __ ldr(r0, at_tos());
1176   // Now store using the appropriate barrier
1177   // Clobbers: r10, r11, r3
1178   do_oop_store(_masm, element_address, r0, IS_ARRAY);
1179   __ b(done);
1180 
1181   // Have a null in r0, r3=array, r2=index.  Store null at ary[idx]
1182   __ bind(is_null);
1183   if (Arguments::is_valhalla_enabled()) {
1184     Label is_null_into_value_array_npe, store_null;
1185 
1186     if (UseArrayFlattening) {
1187       __ test_flat_array_oop(r3, rscratch1, is_flat_array);
1188     }
1189 
1190     // No way to store null in a null-free array
1191     __ test_null_free_array_oop(r3, rscratch1, is_null_into_value_array_npe);
1192     __ b(store_null);
1193 
1194     __ bind(is_null_into_value_array_npe);
1195     __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1196 
1197     __ bind(store_null);
1198   }
1199 
1200   // Store a null
1201   // Clobbers: r10, r11, r3
1202   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1203   __ b(done);
1204 
1205   if (UseArrayFlattening) {
1206     Label is_type_ok;
1207     __ bind(is_flat_array); // Store non-null value to flat
1208 
1209     __ ldr(r0, at_tos());    // value
1210     __ ldr(r3, at_tos_p1()); // index
1211     __ ldr(r2, at_tos_p2()); // array
1212     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), r0, r2, r3);
1213   }
1214 
1215   // Pop stack arguments
1216   __ bind(done);
1217   __ add(esp, esp, 3 * Interpreter::stackElementSize);
1218 }
1219 
1220 void TemplateTable::bastore()
1221 {
1222   transition(itos, vtos);
1223   __ pop_i(r1);
1224   __ pop_ptr(r3);
1225   // r0: value
1226   // r1: index
1227   // r3: array
1228   index_check(r3, r1); // prefer index in r1
1229 
1230   // Need to check whether array is boolean or byte
1231   // since both types share the bastore bytecode.
1232   __ load_klass(r2, r3);
1233   __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));

2000   __ br(j_not(cc), not_taken);
2001   branch(false, false);
2002   __ bind(not_taken);
2003   __ profile_not_taken_branch(r0);
2004 }
2005 
2006 void TemplateTable::if_nullcmp(Condition cc)
2007 {
2008   transition(atos, vtos);
2009   // assume branch is more often taken than not (loops use backward branches)
2010   Label not_taken;
2011   if (cc == equal)
2012     __ cbnz(r0, not_taken);
2013   else
2014     __ cbz(r0, not_taken);
2015   branch(false, false);
2016   __ bind(not_taken);
2017   __ profile_not_taken_branch(r0);
2018 }
2019 
2020 void TemplateTable::if_acmp(Condition cc) {
2021   transition(atos, vtos);
2022   // assume branch is more often taken than not (loops use backward branches)
2023   Label taken, not_taken;
2024   __ pop_ptr(r1);
2025 
2026   __ profile_acmp(r2, r1, r0, r4);
2027 
2028   Register is_inline_type_mask = rscratch1;
2029   __ mov(is_inline_type_mask, markWord::inline_type_pattern);
2030 
2031   if (Arguments::is_valhalla_enabled()) {
2032     __ cmp(r1, r0);
2033     __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2034 
2035     // might be substitutable, test if either r0 or r1 is null
2036     __ andr(r2, r0, r1);
2037     __ cbz(r2, (cc == equal) ? not_taken : taken);
2038 
2039     // and both are values ?
2040     __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2041     __ andr(r2, r2, is_inline_type_mask);
2042     __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2043     __ andr(r4, r4, is_inline_type_mask);
2044     __ andr(r2, r2, r4);
2045     __ cmp(r2,  is_inline_type_mask);
2046     __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2047 
2048     // same value klass ?
2049     __ load_metadata(r2, r1);
2050     __ load_metadata(r4, r0);
2051     __ cmp(r2, r4);
2052     __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2053 
2054     // Know both are the same type, let's test for substitutability...
2055     if (cc == equal) {
2056       invoke_is_substitutable(r0, r1, taken, not_taken);
2057     } else {
2058       invoke_is_substitutable(r0, r1, not_taken, taken);
2059     }
2060     __ stop("Not reachable");
2061   }
2062 
2063   __ cmpoop(r1, r0);
2064   __ br(j_not(cc), not_taken);
2065   __ bind(taken);
2066   branch(false, false);
2067   __ bind(not_taken);
2068   __ profile_not_taken_branch(r0, true);
2069 }
2070 
2071 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2072                                             Label& is_subst, Label& not_subst) {
2073 
2074   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2075   // Back from the VM call: r0 holds the answer; branch to the outcome.
2076   __ cbz(r0, not_subst);
2077   __ b(is_subst);
2078 }
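
Taken together, the acmp fast paths above decide most comparisons without the runtime call: identical references are equal; a null on either side, a non-value mark word, or differing classes settle it as plain reference equality; only two non-null value objects of the same class reach is_substitutable. A hypothetical C++ model (types and helper invented):

    struct ObjSketch { const void* klass; bool is_value; };

    bool is_substitutable_stub(const ObjSketch*, const ObjSketch*) { return false; }  // runtime call stand-in

    bool acmp_sketch(const ObjSketch* a, const ObjSketch* b) {
      if (a == b) return true;
      if (a == nullptr || b == nullptr) return false;
      if (!a->is_value || !b->is_value) return false;  // mark-word pattern test
      if (a->klass != b->klass) return false;
      return is_substitutable_stub(a, b);
    }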
2079 
2080 
2081 void TemplateTable::ret() {
2082   transition(vtos, vtos);
2083   locals_index(r1);
2084   __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2085   __ profile_ret(r1, r2);
2086   __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2087   __ lea(rbcp, Address(rbcp, r1));
2088   __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2089   __ dispatch_next(vtos, 0, /*generate_poll*/true);
2090 }
2091 
2092 void TemplateTable::wide_ret() {
2093   transition(vtos, vtos);
2094   locals_index_wide(r1);
2095   __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2096   __ profile_ret(r1, r2);
2097   __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2098   __ lea(rbcp, Address(rbcp, r1));
2099   __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
2100   __ dispatch_next(vtos, 0, /*generate_poll*/true);

2294   assert(_desc->calls_vm(),
2295          "inconsistent calls_vm information"); // call in remove_activation
2296 
2297   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2298     assert(state == vtos, "only valid state");
2299 
2300     __ ldr(c_rarg1, aaddress(0));
2301     __ load_klass(r3, c_rarg1);
2302     __ ldrb(r3, Address(r3, Klass::misc_flags_offset()));
2303     Label skip_register_finalizer;
2304     __ tbz(r3, exact_log2(KlassFlags::_misc_has_finalizer), skip_register_finalizer);
2305 
2306     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), c_rarg1);
2307 
2308     __ bind(skip_register_finalizer);
2309   }
2310 
2311   // Issue a StoreStore barrier after all stores but before return
2312   // from any constructor for any class with a final field.  We don't
2313   // know if this is a constructor, so we always do so.
2314   if (_desc->bytecode() == Bytecodes::_return
2315       || _desc->bytecode() == Bytecodes::_return_register_finalizer)
2316     __ membar(MacroAssembler::StoreStore);
2317 
2318   if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2319     Label no_safepoint;
2320     __ ldr(rscratch1, Address(rthread, JavaThread::polling_word_offset()));
2321     __ tbz(rscratch1, log2i_exact(SafepointMechanism::poll_bit()), no_safepoint);
2322     __ push(state);
2323     __ push_cont_fastpath(rthread);
2324     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
2325     __ pop_cont_fastpath(rthread);
2326     __ pop(state);
2327     __ bind(no_safepoint);
2328   }
2329 
2330   // Narrow result if state is itos but result type is smaller.
2331   // Need to narrow in the return bytecode rather than in generate_return_entry
2332   // since compiled code callers expect the result to already be narrowed.
2333   if (state == itos) {
2334     __ narrow(r0);
2335   }

2687     }
2688     // c_rarg1: object pointer or null
2689     // c_rarg2: cache entry pointer
2690     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2691                                        InterpreterRuntime::post_field_access),
2692                c_rarg1, c_rarg2);
2693     __ load_field_entry(cache, index);
2694     __ bind(L1);
2695   }
2696 }
2697 
2698 void TemplateTable::pop_and_check_object(Register r)
2699 {
2700   __ pop_ptr(r);
2701   __ null_check(r);  // for field access must check obj.
2702   __ verify_oop(r);
2703 }
2704 
2705 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2706 {
2707   const Register cache     = r2;
2708   const Register obj       = r4;
2709   const Register index     = r3;
2710   const Register tos_state = r3;
2711   const Register off       = r19;
2712   const Register flags     = r6;
2713   const Register bc        = r4; // uses same reg as obj, so don't mix them
2714 
2715   resolve_cache_and_index_for_field(byte_no, cache, index);
2716   jvmti_post_field_access(cache, index, is_static, false);
2717 
2718   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2719 
2720   if (!is_static) {
2721     // obj is on the stack
2722     pop_and_check_object(obj);
2723   }
2724 
2725   // 8179954: We need to make sure that the code generated for
2726   // volatile accesses forms a sequentially-consistent set of
2727   // operations when combined with STLR and LDAR.  Without a leading
2728   // membar it's possible for a simple Dekker test to fail if loads
2729   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
2730   // the stores in one method and we interpret the loads in another.
2731   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2732     Label notVolatile;
2733     __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2734     __ membar(MacroAssembler::AnyAny);
2735     __ bind(notVolatile);
2736   }
2737 

2756   __ b(Done);
2757 
2758   __ bind(notByte);
2759   __ cmp(tos_state, (u1)ztos);
2760   __ br(Assembler::NE, notBool);
2761 
2762   // ztos (same code as btos)
2763   __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2764   __ push(ztos);
2765   // Rewrite bytecode to be faster
2766   if (rc == may_rewrite) {
2767     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2768     patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2769   }
2770   __ b(Done);
2771 
2772   __ bind(notBool);
2773   __ cmp(tos_state, (u1)atos);
2774   __ br(Assembler::NE, notObj);
2775   // atos
2776   if (!Arguments::is_valhalla_enabled()) {
2777     do_oop_load(_masm, field, r0, IN_HEAP);
2778     __ push(atos);
2779     if (rc == may_rewrite) {
2780       patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2781     }
2782     __ b(Done);
2783   } else { // Valhalla
2784     if (is_static) {
2785       __ load_heap_oop(r0, field, rscratch1, rscratch2);
2786       __ push(atos);
2787       __ b(Done);
2788     } else {
2789       Label is_flat;
2790       __ test_field_is_flat(flags, noreg /* temp */, is_flat);
2791       __ load_heap_oop(r0, field, rscratch1, rscratch2);
2792       __ push(atos);
2793       if (rc == may_rewrite) {
2794         patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2795       }
2796       __ b(Done);
2797       __ bind(is_flat);
2798       // field is flat (null-free or nullable with a null-marker)
2799       __ mov(r0, obj);
2800       __ read_flat_field(cache, r0);
2801       __ verify_oop(r0);
2802       __ push(atos);
2803       if (rc == may_rewrite) {
2804         patch_bytecode(Bytecodes::_fast_vgetfield, bc, r1);
2805       }
2806       __ b(Done);
2807     }
2808   }
2809 
2810   __ bind(notObj);
2811   __ cmp(tos_state, (u1)itos);
2812   __ br(Assembler::NE, notInt);
2813   // itos
2814   __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2815   __ push(itos);
2816   // Rewrite bytecode to be faster
2817   if (rc == may_rewrite) {
2818     patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2819   }
2820   __ b(Done);
2821 
2822   __ bind(notInt);
2823   __ cmp(tos_state, (u1)ctos);
2824   __ br(Assembler::NE, notChar);
2825   // ctos
2826   __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2827   __ push(ctos);
2828   // Rewrite bytecode to be faster

2949     // c_rarg1: object pointer set up above (null if static)
2950     // c_rarg2: cache entry pointer
2951     // c_rarg3: jvalue object on the stack
2952     __ call_VM(noreg,
2953                CAST_FROM_FN_PTR(address,
2954                                 InterpreterRuntime::post_field_modification),
2955                c_rarg1, c_rarg2, c_rarg3);
2956     __ load_field_entry(cache, index);
2957     __ bind(L1);
2958   }
2959 }
2960 
2961 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2962   transition(vtos, vtos);
2963 
2964   const Register cache     = r2;
2965   const Register index     = r3;
2966   const Register tos_state = r3;
2967   const Register obj       = r2;
2968   const Register off       = r19;
2969   const Register flags     = r6;
2970   const Register bc        = r4;
2971 
2972   resolve_cache_and_index_for_field(byte_no, cache, index);
2973   jvmti_post_field_mod(cache, index, is_static);
2974   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2975 
2976   Label Done;
2977   {
2978     Label notVolatile;
2979     __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
2980     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2981     __ bind(notVolatile);
2982   }
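       // Sketch of the overall fence placement for a volatile store
       // (illustrative, not a literal instruction listing):
       //   membar(StoreStore|LoadStore)   // release fence, emitted above
       //   str   value, [field]           // the plain store below
       //   membar(StoreLoad|StoreStore)   // trailing fence, after Done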
2983 
2984   // field address
2985   const Address field(obj, off);
2986 
2987   Label notByte, notBool, notInt, notShort, notChar,
2988         notLong, notFloat, notObj, notDouble;
2989 
2990   assert(btos == 0, "change code, btos != 0");
2991   __ cbnz(tos_state, notByte);
2992 
2993   // Don't rewrite putstatic, only putfield
2994   if (is_static) rc = may_not_rewrite;
2995 
2996   // btos
2997   {
2998     __ pop(btos);
2999     if (!is_static) pop_and_check_object(obj);

3008   __ cmp(tos_state, (u1)ztos);
3009   __ br(Assembler::NE, notBool);
3010 
3011   // ztos
3012   {
3013     __ pop(ztos);
3014     if (!is_static) pop_and_check_object(obj);
3015     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3016     if (rc == may_rewrite) {
3017       patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
3018     }
3019     __ b(Done);
3020   }
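       // Illustrative note: unlike getfield, a boolean store must be
       // normalized; access_store_at(T_BOOLEAN, ...) is expected to truncate
       // the value to its low bit before the byte store, so e.g. an int
       // source value of 2 is written as 0 rather than a stray non-zero byte.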
3021 
3022   __ bind(notBool);
3023   __ cmp(tos_state, (u1)atos);
3024   __ br(Assembler::NE, notObj);
3025 
3026   // atos
3027   {
3028     if (!Arguments::is_valhalla_enabled()) {
3029       __ pop(atos);
3030       if (!is_static) pop_and_check_object(obj);
3031       // Store into the field
3032       // Clobbers: r10, r11, r3
3033       do_oop_store(_masm, field, r0, IN_HEAP);
3034       if (rc == may_rewrite) {
3035         patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
3036       }
3037       __ b(Done);
3038     } else { // Valhalla
3039       __ pop(atos);
3040       if (is_static) {
3041         Label is_nullable;
3042         __ test_field_is_not_null_free_inline_type(flags, noreg /* temp */, is_nullable);
3043         __ null_check(r0);  // FIXME JDK-8341120
3044         __ bind(is_nullable);
3045         do_oop_store(_masm, field, r0, IN_HEAP);
3046         __ b(Done);
3047       } else {
3048         Label null_free_reference, is_flat, rewrite_inline;
3049         __ test_field_is_flat(flags, noreg /* temp */, is_flat);
3050         __ test_field_is_null_free_inline_type(flags, noreg /* temp */, null_free_reference);
3051         pop_and_check_object(obj);
3052         // Store into the field
3053         // Clobbers: r10, r11, r3
3054         do_oop_store(_masm, field, r0, IN_HEAP);
3055         if (rc == may_rewrite) {
3056           patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
3057         }
3058         __ b(Done);
3059         // Implementation of the inline type semantic
3060         __ bind(null_free_reference);
3061         __ null_check(r0);  // FIXME JDK-8341120
3062         pop_and_check_object(obj);
3063         // Store into the field
3064         // Clobbers: r10, r11, r3
3065         do_oop_store(_masm, field, r0, IN_HEAP);
3066         __ b(rewrite_inline);
3067         __ bind(is_flat);
3068         pop_and_check_object(r7);
3069         __ write_flat_field(cache, off, index, flags, r7);
3070         __ bind(rewrite_inline);
3071         if (rc == may_rewrite) {
3072           patch_bytecode(Bytecodes::_fast_vputfield, bc, r19, true, byte_no);
3073         }
3074         __ b(Done);
3075       }
3076     } // Valhalla
3077   }
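       // Summary of the three Valhalla store shapes above (illustrative):
       //   plain reference  -> ordinary oop store, may rewrite to _fast_aputfield
       //   null-free field  -> explicit null check, oop store, _fast_vputfield
       //   flat field       -> write_flat_field copies the payload into the
       //                       holder, then also rewrites to _fast_vputfield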
3078 
3079   __ bind(notObj);
3080   __ cmp(tos_state, (u1)itos);
3081   __ br(Assembler::NE, notInt);
3082 
3083   // itos
3084   {
3085     __ pop(itos);
3086     if (!is_static) pop_and_check_object(obj);
3087     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3088     if (rc == may_rewrite) {
3089       patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
3090     }
3091     __ b(Done);
3092   }
3093 
3094   __ bind(notInt);
3095   __ cmp(tos_state, (u1)ctos);
3096   __ br(Assembler::NE, notChar);

3161   {
3162     __ pop(dtos);
3163     if (!is_static) pop_and_check_object(obj);
3164     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg, noreg);
3165     if (rc == may_rewrite) {
3166       patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
3167     }
3168   }
3169 
3170 #ifdef ASSERT
3171   __ b(Done);
3172 
3173   __ bind(notDouble);
3174   __ stop("Bad state");
3175 #endif
3176 
3177   __ bind(Done);
3178 
3179   {
3180     Label notVolatile;
3181     __ tbz(flags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3182     __ membar(MacroAssembler::StoreLoad | MacroAssembler::StoreStore);
3183     __ bind(notVolatile);
3184   }
3185 }
3186 
3187 void TemplateTable::putfield(int byte_no)
3188 {
3189   putfield_or_static(byte_no, false);
3190 }
3191 
3192 void TemplateTable::nofast_putfield(int byte_no) {
3193   putfield_or_static(byte_no, false, may_not_rewrite);
3194 }
3195 
3196 void TemplateTable::putstatic(int byte_no) {
3197   putfield_or_static(byte_no, true);
3198 }
3199 
3200 void TemplateTable::jvmti_post_fast_field_mod() {
3201   if (JvmtiExport::can_post_field_modification()) {
3202     // Check to see if a field modification watch has been set before
3203     // we take the time to call into the VM.
3204     Label L2;
3205     __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3206     __ ldrw(c_rarg3, Address(rscratch1));
3207     __ cbzw(c_rarg3, L2);
3208     __ pop_ptr(r19);                  // copy the object pointer from tos
3209     __ verify_oop(r19);
3210     __ push_ptr(r19);                 // put the object pointer back on tos
3211     // Save tos values before call_VM() clobbers them. Since we have
3212     // to do it for every data type, we use the saved values as the
3213     // jvalue object.
3214     switch (bytecode()) {          // load values into the jvalue object
3215     case Bytecodes::_fast_vputfield: // fall through
3216     case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3217     case Bytecodes::_fast_bputfield: // fall through
3218     case Bytecodes::_fast_zputfield: // fall through
3219     case Bytecodes::_fast_sputfield: // fall through
3220     case Bytecodes::_fast_cputfield: // fall through
3221     case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3222     case Bytecodes::_fast_dputfield: __ push_d(); break;
3223     case Bytecodes::_fast_fputfield: __ push_f(); break;
3224     case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3225 
3226     default:
3227       ShouldNotReachHere();
3228     }
3229     __ mov(c_rarg3, esp);             // points to jvalue on the stack
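         // Illustrative note: no separate jvalue buffer is allocated -- the
         // tos value pushed above sits at esp, and the VM reads it through
         // c_rarg3 as a jvalue union.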
3230     // access constant pool cache entry
3231     __ load_field_entry(c_rarg2, r0);
3232     __ verify_oop(r19);
3233     // r19: object pointer copied above
3234     // c_rarg2: cache entry pointer
3235     // c_rarg3: jvalue object on the stack
3236     __ call_VM(noreg,
3237                CAST_FROM_FN_PTR(address,
3238                                 InterpreterRuntime::post_field_modification),
3239                r19, c_rarg2, c_rarg3);
3240 
3241     switch (bytecode()) {             // restore tos values
3242     case Bytecodes::_fast_vputfield: // fall through
3243     case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3244     case Bytecodes::_fast_bputfield: // fall through
3245     case Bytecodes::_fast_zputfield: // fall through
3246     case Bytecodes::_fast_sputfield: // fall through
3247     case Bytecodes::_fast_cputfield: // fall through
3248     case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3249     case Bytecodes::_fast_dputfield: __ pop_d(); break;
3250     case Bytecodes::_fast_fputfield: __ pop_f(); break;
3251     case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3252     default: break;
3253     }
3254     __ bind(L2);
3255   }
3256 }
3257 
3258 void TemplateTable::fast_storefield(TosState state)
3259 {
3260   transition(state, vtos);
3261 
3262   ByteSize base = ConstantPoolCache::base_offset();

3270   load_resolved_field_entry(r2, r2, noreg, r1, r5);
3271   __ verify_field_offset(r1);
3272 
3273   {
3274     Label notVolatile;
3275     __ tbz(r5, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3276     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3277     __ bind(notVolatile);
3278   }
3279 
3280   Label notVolatile;
3281 
3282   // Get object from stack
3283   pop_and_check_object(r2);
3284 
3285   // field address
3286   const Address field(r2, r1);
3287 
3288   // access field
3289   switch (bytecode()) {
3290   case Bytecodes::_fast_vputfield:
3291     {
3292       Label is_flat, done;
3293       __ test_field_is_flat(r5, noreg /* temp */, is_flat);
3294       __ null_check(r0);
3295       do_oop_store(_masm, field, r0, IN_HEAP);
3296       __ b(done);
3297       __ bind(is_flat);
3298       __ load_field_entry(r4, r5);
3299       // Re-shuffle registers to match the VM call's calling convention
3300       __ mov(r19, r1);
3301       __ mov(r7, r2);
3302       __ write_flat_field(r4, r19, r6, r8, r7);
3303       __ bind(done);
3304     }
3305     break;
3306   case Bytecodes::_fast_aputfield:
3307     // Clobbers: r10, r11, r3
3308     do_oop_store(_masm, field, r0, IN_HEAP);
3309     break;
3310   case Bytecodes::_fast_lputfield:
3311     __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg, noreg);
3312     break;
3313   case Bytecodes::_fast_iputfield:
3314     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg, noreg);
3315     break;
3316   case Bytecodes::_fast_zputfield:
3317     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg, noreg);
3318     break;
3319   case Bytecodes::_fast_bputfield:
3320     __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg, noreg);
3321     break;
3322   case Bytecodes::_fast_sputfield:
3323     __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg, noreg);
3324     break;
3325   case Bytecodes::_fast_cputfield:

3381   // r0: object
3382   __ verify_oop(r0);
3383   __ null_check(r0);
3384   const Address field(r0, r1);
3385 
3386   // 8179954: We need to make sure that the code generated for
3387   // volatile accesses forms a sequentially-consistent set of
3388   // operations when combined with STLR and LDAR.  Without a leading
3389   // membar it's possible for a simple Dekker test to fail if loads
3390   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
3391   // the stores in one method and we interpret the loads in another.
3392   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3393     Label notVolatile;
3394     __ tbz(r3, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3395     __ membar(MacroAssembler::AnyAny);
3396     __ bind(notVolatile);
3397   }
3398 
3399   // access field
3400   switch (bytecode()) {
3401   case Bytecodes::_fast_vgetfield:
3402     {
3403       // field is flat
3404       __ read_flat_field(r2, r0);
3405       __ verify_oop(r0);
3406     }
3407     break;
3408   case Bytecodes::_fast_agetfield:
3409     do_oop_load(_masm, field, r0, IN_HEAP);
3410     __ verify_oop(r0);
3411     break;
3412   case Bytecodes::_fast_lgetfield:
3413     __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3414     break;
3415   case Bytecodes::_fast_igetfield:
3416     __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3417     break;
3418   case Bytecodes::_fast_bgetfield:
3419     __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3420     break;
3421   case Bytecodes::_fast_sgetfield:
3422     __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3423     break;
3424   case Bytecodes::_fast_cgetfield:
3425     __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3426     break;
3427   case Bytecodes::_fast_fgetfield:

3808   Label initialize_header;
3809 
3810   __ get_cpool_and_tags(r4, r0);
3811   // Make sure the class we're about to instantiate has been resolved.
3812   // This is done before loading the InstanceKlass so that it is consistent
3813   // with the order in which the constant pool is updated (see ConstantPool::klass_at_put)
3814   const int tags_offset = Array<u1>::base_offset_in_bytes();
3815   __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3816   __ lea(rscratch1, Address(rscratch1, tags_offset));
3817   __ ldarb(rscratch1, rscratch1);
3818   __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3819   __ br(Assembler::NE, slow_case);
3820 
3821   // get InstanceKlass
3822   __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3823 
3824   // make sure klass is initialized
3825   assert(VM_Version::supports_fast_class_init_checks(), "Optimization requires support for fast class initialization checks");
3826   __ clinit_barrier(r4, rscratch1, nullptr /*L_fast_path*/, &slow_case);
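       // Illustrative note: with no L_fast_path supplied, clinit_barrier
       // loads the class init state and branches to slow_case unless the
       // klass is fully initialized; racy and initializing-thread cases are
       // then sorted out in InterpreterRuntime::_new on the slow path.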
3827 
3828   __ allocate_instance(r4, r0, r3, r1, true, slow_case);
3829   __ b(done);
3830 
3831   // slow case
3832   __ bind(slow_case);
3833   __ get_constant_pool(c_rarg1);
3834   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3835   __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3836   __ verify_oop(r0);
3837 
3838   // continue
3839   __ bind(done);
3840   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3841   __ membar(Assembler::StoreStore);
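       // Sketch of the race the barrier prevents (illustrative Java):
       //   p = new Foo();   // header/field initialization stores, then
       //   shared = p;      // a later publishing store
       // Without StoreStore ordering, another CPU could observe 'shared'
       // before the initialization stores and see a partially-constructed
       // object.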
3842 }
3843 
3844 void TemplateTable::newarray() {
3845   transition(itos, atos);
3846   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3847   __ mov(c_rarg2, r0);
3848   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3849           c_rarg1, c_rarg2);

3893   __ bind(quicked);
3894   __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3895   __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3896 
3897   __ bind(resolved);
3898   __ load_klass(r19, r3);
3899 
3900   // Generate subtype check.  Blows r2, r5.  Object in r3.
3901   // Superklass in r0.  Subklass in r19.
3902   __ gen_subtype_check(r19, ok_is_subtype);
3903 
3904   // Come here on failure
3905   __ push(r3);
3906   // object is at TOS
3907   __ b(Interpreter::_throw_ClassCastException_entry);
3908 
3909   // Come here on success
3910   __ bind(ok_is_subtype);
3911   __ mov(r0, r3); // Restore object from r3
3912 
3913   __ b(done);
3914   __ bind(is_null);
3915 
3916   // Collect counts on whether this test sees nulls a lot or not.
3917   if (ProfileInterpreter) {
3918     __ profile_null_seen(r2);
3919   }
3920 
3921   __ bind(done);
3922 }
3923 
3924 void TemplateTable::instanceof() {
3925   transition(atos, itos);
3926   Label done, is_null, ok_is_subtype, quicked, resolved;
3927   __ cbz(r0, is_null);
3928 
3929   // Get cpool & tags index
3930   __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3931   __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3932   // See if bytecode has already been quickened
3933   __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3934   __ lea(r1, Address(rscratch1, r19));
3935   __ ldarb(r1, r1);
3936   __ cmp(r1, (u1)JVM_CONSTANT_Class);
3937   __ br(Assembler::EQ, quicked);
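       // Illustrative note: the ldarb above is an acquire load of the tag,
       // which presumably pairs with a releasing tag store on the resolution
       // side (see ConstantPool::klass_at_put), so observing
       // JVM_CONSTANT_Class implies the resolved klass entry is visible too.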
3938 
3939   __ push(atos); // save receiver for result, and for GC
3940   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

4018 //       in the assembly code structure as well
4019 //
4020 // Stack layout:
4021 //
4022 // [expressions  ] <--- esp               = expression stack top
4023 // ..
4024 // [expressions  ]
4025 // [monitor entry] <--- monitor block top = expression stack bot
4026 // ..
4027 // [monitor entry]
4028 // [frame data   ] <--- monitor block bot
4029 // ...
4030 // [saved rfp    ] <--- rfp
4031 void TemplateTable::monitorenter()
4032 {
4033   transition(atos, vtos);
4034 
4035   // check for null object
4036   __ null_check(r0);
4037 
4038   Label is_inline_type;
4039   __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4040   __ test_markword_is_inline_type(rscratch1, is_inline_type);
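       // Illustrative note: value objects have no identity and therefore no
       // monitor; the mark word pattern test above routes them to the
       // throw_identity_exception call at the end of this method instead of
       // ever reaching the locking code.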
4041 
4042   const Address monitor_block_top(
4043         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4044   const Address monitor_block_bot(
4045         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4046   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4047 
4048   Label allocated;
4049 
4050   // initialize entry pointer
4051   __ mov(c_rarg1, zr); // points to free slot or null
4052 
4053   // find a free slot in the monitor block (result in c_rarg1)
4054   {
4055     Label entry, loop, exit;
4056     __ ldr(c_rarg3, monitor_block_top); // derelativize pointer
4057     __ lea(c_rarg3, Address(rfp, c_rarg3, Address::lsl(Interpreter::logStackElementSize)));
4058     // c_rarg3 points to current entry, starting with top-most entry
4059 
4060     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom of monitor block
4061 

4123   // c_rarg1: points to monitor entry
4124   __ bind(allocated);
4125 
4126   // Increment bcp to point to the next bytecode, so exception
4127   // handling for asynchronous exceptions works correctly.
4128   // The object has already been popped from the stack, so the
4129   // expression stack looks correct.
4130   __ increment(rbcp);
4131 
4132   // store object
4133   __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset()));
4134   __ lock_object(c_rarg1);
4135 
4136   // check to make sure this monitor doesn't cause stack overflow after locking
4137   __ save_bcp();  // in case of exception
4138   __ generate_stack_overflow_check(0);
4139 
4140   // The bcp has already been incremented. Just need to dispatch to
4141   // next instruction.
4142   __ dispatch_next(vtos);
4143 
4144   __ bind(is_inline_type);
4145   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4146                     InterpreterRuntime::throw_identity_exception), r0);
4147   __ should_not_reach_here();
4148 }
4149 
4150 
4151 void TemplateTable::monitorexit()
4152 {
4153   transition(atos, vtos);
4154 
4155   // check for null object
4156   __ null_check(r0);
4157 
4158   const int is_inline_type_mask = markWord::inline_type_pattern;
4159   Label has_identity;
4160   __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4161   __ mov(rscratch2, is_inline_type_mask);
4162   __ andr(rscratch1, rscratch1, rscratch2);
4163   __ cmp(rscratch1, rscratch2);
4164   __ br(Assembler::NE, has_identity);
4165   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4166                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4167   __ should_not_reach_here();
4168   __ bind(has_identity);
4169 
4170   const Address monitor_block_top(
4171         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4172   const Address monitor_block_bot(
4173         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4174   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4175 
4176   Label found;
4177 
4178   // find matching slot
4179   {
4180     Label entry, loop;
4181     __ ldr(c_rarg1, monitor_block_top); // derelativize pointer
4182     __ lea(c_rarg1, Address(rfp, c_rarg1, Address::lsl(Interpreter::logStackElementSize)));
4183     // c_rarg1 points to current entry, starting with top-most entry
4184 
4185     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4186                                         // of monitor block
4187     __ b(entry);
4188 
4189     __ bind(loop);
< prev index next >