src/hotspot/cpu/aarch64/templateTable_aarch64.cpp

 128   case TemplateTable::equal        : return Assembler::NE;
 129   case TemplateTable::not_equal    : return Assembler::EQ;
 130   case TemplateTable::less         : return Assembler::GE;
 131   case TemplateTable::less_equal   : return Assembler::GT;
 132   case TemplateTable::greater      : return Assembler::LE;
 133   case TemplateTable::greater_equal: return Assembler::LT;
 134   }
 135   ShouldNotReachHere();
 136   return Assembler::EQ;
 137 }
 138 
 139 
 140 // Miscellaneous helper routines
 141 // Store an oop (or NULL) at the Address described by dst.
 142 // If val == noreg this means store a NULL
 143 static void do_oop_store(InterpreterMacroAssembler* _masm,
 144                          Address dst,
 145                          Register val,
 146                          DecoratorSet decorators) {
 147   assert(val == noreg || val == r0, "parameter is just for looks");
 148   __ store_heap_oop(dst, val, r10, r1, decorators);
 149 }
 150 
 151 static void do_oop_load(InterpreterMacroAssembler* _masm,
 152                         Address src,
 153                         Register dst,
 154                         DecoratorSet decorators) {
 155   __ load_heap_oop(dst, src, r10, r1, decorators);
 156 }
 157 
 158 Address TemplateTable::at_bcp(int offset) {
 159   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 160   return Address(rbcp, offset);
 161 }
 162 
 163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 164                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 165                                    int byte_no)
 166 {
 167   if (!RewriteBytecodes)  return;
 168   Label L_patch_done;
 169 
 170   switch (bc) {
 171   case Bytecodes::_fast_aputfield:
 172   case Bytecodes::_fast_bputfield:
 173   case Bytecodes::_fast_zputfield:
 174   case Bytecodes::_fast_cputfield:
 175   case Bytecodes::_fast_dputfield:
 176   case Bytecodes::_fast_fputfield:
 177   case Bytecodes::_fast_iputfield:
 178   case Bytecodes::_fast_lputfield:
 179   case Bytecodes::_fast_sputfield:
 180     {
 181       // We skip bytecode quickening for putfield instructions when
 182       // the put_code written to the constant pool cache is zero.
 183       // This is required so that every execution of this instruction
 184       // calls out to InterpreterRuntime::resolve_get_put to do
 185       // additional, required work.
 186       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 187       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 188       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 189       __ movw(bc_reg, bc);
 190       __ cbzw(temp_reg, L_patch_done);  // don't patch

 310 
 311 void TemplateTable::ldc(bool wide)
 312 {
 313   transition(vtos, vtos);
 314   Label call_ldc, notFloat, notClass, notInt, Done;
 315 
 316   if (wide) {
 317     __ get_unsigned_2_byte_index_at_bcp(r1, 1);
 318   } else {
 319     __ load_unsigned_byte(r1, at_bcp(1));
 320   }
 321   __ get_cpool_and_tags(r2, r0);
 322 
 323   const int base_offset = ConstantPool::header_size() * wordSize;
 324   const int tags_offset = Array<u1>::base_offset_in_bytes();
 325 
 326   // get type
 327   __ add(r3, r1, tags_offset);
 328   __ lea(r3, Address(r0, r3));
 329   __ ldarb(r3, r3);
 330 
 331   // unresolved class - get the resolved class
 332   __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClass);
 333   __ br(Assembler::EQ, call_ldc);
 334 
 335   // unresolved class in error state - call into runtime to throw the error
 336   // from the first resolution attempt
 337   __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClassInError);
 338   __ br(Assembler::EQ, call_ldc);
 339 
 340   // resolved class - need to call vm to get java mirror of the class
 341   __ cmp(r3, (u1)JVM_CONSTANT_Class);
 342   __ br(Assembler::NE, notClass);
 343 
 344   __ bind(call_ldc);
 345   __ mov(c_rarg1, wide);
 346   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), c_rarg1);
 347   __ push_ptr(r0);
 348   __ verify_oop(r0);
 349   __ b(Done);

 727 }
 728 
 729 void TemplateTable::index_check(Register array, Register index)
 730 {
 731   // destroys r1, rscratch1
 732   // check array
 733   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 734   // sign extend index for use by indexed load
 735   // __ movl2ptr(index, index);
 736   // check index
 737   Register length = rscratch1;
 738   __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
 739   __ cmpw(index, length);
 740   if (index != r1) {
 741     // ??? convention: move aberrant index into r1 for exception message
 742     assert(r1 != array, "different registers");
 743     __ mov(r1, index);
 744   }
 745   Label ok;
 746   __ br(Assembler::LO, ok);
 747   // ??? convention: move array into r3 for exception message
 748   __ mov(r3, array);
 749   __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
 750   __ br(rscratch1);
 751   __ bind(ok);
 752 }
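The bounds check above leans on an unsigned compare: cmpw followed by
br(Assembler::LO) treats the index as unsigned, so a negative index wraps to
a huge value and fails the same test as an out-of-range one. A minimal C++
sketch of the emitted logic (the throw helper is hypothetical):

    // Sketch only: one unsigned compare covers both (index < 0) and
    // (index >= length), exactly like cmpw + br(LO) above.
    static inline void index_check_sketch(int32_t index, uint32_t length) {
      if ((uint32_t)index >= length) {
        throw_array_index_out_of_bounds(index);  // hypothetical helper
      }
    }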
 753 
 754 void TemplateTable::iaload()
 755 {
 756   transition(itos, itos);
 757   __ mov(r1, r0);
 758   __ pop_ptr(r0);
 759   // r0: array
 760   // r1: index
 761   index_check(r0, r1); // leaves index in r1, kills rscratch1
 762   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
 763   __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
 764 }
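Note how iaload folds the array header into the index before scaling: after
the add, r1 holds index + base_offset/4, and the uxtw(2) addressing mode
shifts that back up by 2 and adds the array base, giving
array + base_offset + index*4 in a single scaled load. An illustrative C++
rendering of the same arithmetic:

    // Illustrative only: mirrors add(r1, r1, base_offset >> 2) combined
    // with the Address(r0, r1, Address::uxtw(2)) addressing mode.
    static inline int32_t* iaload_address_sketch(char* array, uint32_t index,
                                                 int base_offset) {
      uint64_t biased = (uint64_t)index + (base_offset >> 2); // index + header/4
      return (int32_t*)(array + (biased << 2));  // array + header + index*4
    }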
 765 
 766 void TemplateTable::laload()
 767 {
 768   transition(itos, ltos);
 769   __ mov(r1, r0);
 770   __ pop_ptr(r0);

 790 void TemplateTable::daload()
 791 {
 792   transition(itos, dtos);
 793   __ mov(r1, r0);
 794   __ pop_ptr(r0);
 795   // r0: array
 796   // r1: index
 797   index_check(r0, r1); // leaves index in r1, kills rscratch1
 798   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
 799   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
 800 }
 801 
 802 void TemplateTable::aaload()
 803 {
 804   transition(itos, atos);
 805   __ mov(r1, r0);
 806   __ pop_ptr(r0);
 807   // r0: array
 808   // r1: index
 809   index_check(r0, r1); // leaves index in r1, kills rscratch1
 810   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
 811   do_oop_load(_masm,
 812               Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
 813               r0,
 814               IS_ARRAY);
 815 }
 816 
 817 void TemplateTable::baload()
 818 {
 819   transition(itos, itos);
 820   __ mov(r1, r0);
 821   __ pop_ptr(r0);
 822   // r0: array
 823   // r1: index
 824   index_check(r0, r1); // leaves index in r1, kills rscratch1
 825   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
 826   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
 827 }
 828 
 829 void TemplateTable::caload()
 830 {
 831   transition(itos, itos);
 832   __ mov(r1, r0);
 833   __ pop_ptr(r0);
 834   // r0: array

1081   // r1:  index
1082   // r3:  array
1083   index_check(r3, r1); // prefer index in r1
1084   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1085   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg);
1086 }
1087 
1088 void TemplateTable::dastore() {
1089   transition(dtos, vtos);
1090   __ pop_i(r1);
1091   __ pop_ptr(r3);
1092   // v0: value
1093   // r1:  index
1094   // r3:  array
1095   index_check(r3, r1); // prefer index in r1
1096   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1097   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1098 }
1099 
1100 void TemplateTable::aastore() {
1101   Label is_null, ok_is_subtype, done;
1102   transition(vtos, vtos);
1103   // stack: ..., array, index, value
1104   __ ldr(r0, at_tos());    // value
1105   __ ldr(r2, at_tos_p1()); // index
1106   __ ldr(r3, at_tos_p2()); // array
1107 
1108   Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1109 
1110   index_check(r3, r2);     // kills r1
1111   __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1112 
1113   // do array store check - check for NULL value first
1114   __ cbz(r0, is_null);
1115 
1116   // Move subklass into r1
1117   __ load_klass(r1, r0);
1118   // Move superklass into r0
1119   __ load_klass(r0, r3);
1120   __ ldr(r0, Address(r0,
1121                      ObjArrayKlass::element_klass_offset()));
1122   // Compress array + index*oopSize + 12 into a single register.  Frees r2.
1123 
1124   // Generate subtype check.  Blows r2, r5
1125   // Superklass in r0.  Subklass in r1.
1126   __ gen_subtype_check(r1, ok_is_subtype);
1127 
1128   // Come here on failure
1129   // object is at TOS
1130   __ b(Interpreter::_throw_ArrayStoreException_entry);
1131 
1132   // Come here on success
1133   __ bind(ok_is_subtype);
1134 
1135   // Get the value we will store
1136   __ ldr(r0, at_tos());
1137   // Now store using the appropriate barrier
1138   do_oop_store(_masm, element_address, r0, IS_ARRAY);
1139   __ b(done);
1140 
1141   // Have a NULL in r0, r3=array, r2=index.  Store NULL at ary[idx]
1142   __ bind(is_null);
1143   __ profile_null_seen(r2);
1144 
1145   // Store a NULL
1146   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1147 
1148   // Pop stack arguments
1149   __ bind(done);
1150   __ add(esp, esp, 3 * Interpreter::stackElementSize);
1151 }
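Logically, aastore implements the JLS array store check: a null value is
always storable, any other value must be a subtype of the array's element
klass, and a failed check raises ArrayStoreException. A hedged C++ sketch of
the control flow generated above (the helpers are stand-ins for the emitted
code paths, not VM functions):

    // Sketch of the aastore paths, under assumed helper names.
    void aastore_sketch(oop array, int index, oop value) {
      if (value == NULL) {
        store_with_barrier(array, index, NULL);    // profile_null_seen path
      } else if (is_subtype_of(klass_of(value), element_klass_of(array))) {
        store_with_barrier(array, index, value);   // do_oop_store path
      } else {
        throw_array_store_exception();   // _throw_ArrayStoreException_entry
      }
    }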
1152 
1153 void TemplateTable::bastore()
1154 {
1155   transition(itos, vtos);
1156   __ pop_i(r1);
1157   __ pop_ptr(r3);
1158   // r0: value
1159   // r1: index
1160   // r3: array
1161   index_check(r3, r1); // prefer index in r1
1162 
1163   // Need to check whether array is boolean or byte
1164   // since both types share the bastore bytecode.
1165   __ load_klass(r2, r3);
1166   __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));

1937   __ br(j_not(cc), not_taken);
1938   branch(false, false);
1939   __ bind(not_taken);
1940   __ profile_not_taken_branch(r0);
1941 }
1942 
1943 void TemplateTable::if_nullcmp(Condition cc)
1944 {
1945   transition(atos, vtos);
1946   // assume branch is more often taken than not (loops use backward branches)
1947   Label not_taken;
1948   if (cc == equal)
1949     __ cbnz(r0, not_taken);
1950   else
1951     __ cbz(r0, not_taken);
1952   branch(false, false);
1953   __ bind(not_taken);
1954   __ profile_not_taken_branch(r0);
1955 }
1956 
1957 void TemplateTable::if_acmp(Condition cc)
1958 {
1959   transition(atos, vtos);
1960   // assume branch is more often taken than not (loops use backward branches)
1961   Label not_taken;
1962   __ pop_ptr(r1);
1963   __ cmpoop(r1, r0);
1964   __ br(j_not(cc), not_taken);
1965   branch(false, false);
1966   __ bind(not_taken);
1967   __ profile_not_taken_branch(r0);
1968 }
1969 
1970 void TemplateTable::ret() {
1971   transition(vtos, vtos);
1972   // We might be moving to a safepoint.  The thread which calls
1973   // Interpreter::notice_safepoints() will effectively flush its cache
1974   // when it makes a system call, but we need to do something to
1975   // ensure that we see the changed dispatch table.
1976   __ membar(MacroAssembler::LoadLoad);
1977 
1978   locals_index(r1);
1979   __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
1980   __ profile_ret(r1, r2);
1981   __ ldr(rbcp, Address(rmethod, Method::const_offset()));
1982   __ lea(rbcp, Address(rbcp, r1));
1983   __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
1984   __ dispatch_next(vtos, 0, /*generate_poll*/true);
1985 }
1986 
1987 void TemplateTable::wide_ret() {
1988   transition(vtos, vtos);
1989   locals_index_wide(r1);

2287   if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2288     __ load_resolved_method_at_index(byte_no, temp, Rcache);
2289     __ load_method_holder(temp, temp);
2290     __ clinit_barrier(temp, rscratch1, NULL, &clinit_barrier_slow);
2291   }
2292 }
2293 
2294 // The Rcache and index registers must be set before call
2295 // N.B. unlike x86, the cache already includes the index offset
2296 void TemplateTable::load_field_cp_cache_entry(Register obj,
2297                                               Register cache,
2298                                               Register index,
2299                                               Register off,
2300                                               Register flags,
2301                                               bool is_static = false) {
2302   assert_different_registers(cache, index, flags, off);
2303 
2304   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2305   // Field offset
2306   __ ldr(off, Address(cache, in_bytes(cp_base_offset +
2307                                           ConstantPoolCacheEntry::f2_offset())));
2308   // Flags
2309   __ ldrw(flags, Address(cache, in_bytes(cp_base_offset +
2310                                            ConstantPoolCacheEntry::flags_offset())));
2311 
2312   // klass overwrite register
2313   if (is_static) {
2314     __ ldr(obj, Address(cache, in_bytes(cp_base_offset +
2315                                         ConstantPoolCacheEntry::f1_offset())));
2316     const int mirror_offset = in_bytes(Klass::java_mirror_offset());
2317     __ ldr(obj, Address(obj, mirror_offset));
2318     __ resolve_oop_handle(obj);
2319   }
2320 }
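Each resolved field entry thus supplies three values: f2 is the field's byte
offset, flags carries the tos state and attribute bits, and for statics f1
holds the field holder's Klass*, which is chased to its java mirror so that
obj points at the java.lang.Class instance used as the base of the access.
A commented sketch of the same reads (simplified; member names follow
ConstantPoolCacheEntry):

    // off   = entry->f2;     // byte offset of the field in the object
    // flags = entry->flags;  // tos state, volatile bit, etc.
    // if (is_static)
    //   obj = resolve_oop_handle(((Klass*)entry->f1)->java_mirror());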
2321 
2322 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2323                                                Register method,
2324                                                Register itable_index,
2325                                                Register flags,
2326                                                bool is_invokevirtual,
2327                                                bool is_invokevfinal, /*unused*/
2328                                                bool is_invokedynamic) {
2329   // setup registers
2330   const Register cache = rscratch2;

2388     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2389                                        InterpreterRuntime::post_field_access),
2390                c_rarg1, c_rarg2, c_rarg3);
2391     __ get_cache_and_index_at_bcp(cache, index, 1);
2392     __ bind(L1);
2393   }
2394 }
2395 
2396 void TemplateTable::pop_and_check_object(Register r)
2397 {
2398   __ pop_ptr(r);
2399   __ null_check(r);  // for field access must check obj.
2400   __ verify_oop(r);
2401 }
2402 
2403 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2404 {
2405   const Register cache = r2;
2406   const Register index = r3;
2407   const Register obj   = r4;
2408   const Register off   = r19;
2409   const Register flags = r0;
2410   const Register raw_flags = r6;
2411   const Register bc    = r4; // uses same reg as obj, so don't mix them
2412 
2413   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2414   jvmti_post_field_access(cache, index, is_static, false);
2415   load_field_cp_cache_entry(obj, cache, index, off, raw_flags, is_static);
2416 
2417   if (!is_static) {
2418     // obj is on the stack
2419     pop_and_check_object(obj);
2420   }
2421 
2422   // 8179954: We need to make sure that the code generated for
2423   // volatile accesses forms a sequentially-consistent set of
2424   // operations when combined with STLR and LDAR.  Without a leading
2425   // membar it's possible for a simple Dekker test to fail if loads
2426   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
2427   // the stores in one method and we interpret the loads in another.
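       // (A concrete illustration, assumed rather than quoted from the bug
       // report: thread 1 runs "stlr [x]; ldr [y]", thread 2 runs
       // "stlr [y]; ldr [x]". Without a leading full barrier both plain
       // LDRs may return the old values, an outcome sequential consistency
       // forbids; the AnyAny membar below rules it out.)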
2428   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()){
2429     Label notVolatile;
2430     __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2431     __ membar(MacroAssembler::AnyAny);
2432     __ bind(notVolatile);
2433   }
2434 
2435   const Address field(obj, off);
2436 
2437   Label Done, notByte, notBool, notInt, notShort, notChar,
2438               notLong, notFloat, notObj, notDouble;
2439 
2440   // x86 uses a shift and mask, or wings it with a shift plus an assert that
2441   // the mask is not needed; aarch64 just uses a bitfield extract
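       // i.e. flags = (raw_flags >> tos_state_shift) & ((1 << tos_state_bits) - 1)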
2442   __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2443            ConstantPoolCacheEntry::tos_state_bits);
2444 
2445   assert(btos == 0, "change code, btos != 0");
2446   __ cbnz(flags, notByte);
2447 
2448   // Don't rewrite getstatic, only getfield
2449   if (is_static) rc = may_not_rewrite;
2450 
2451   // btos
2452   __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2453   __ push(btos);
2454   // Rewrite bytecode to be faster
2455   if (rc == may_rewrite) {
2456     patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2457   }
2458   __ b(Done);
2459 
2460   __ bind(notByte);
2461   __ cmp(flags, (u1)ztos);
2462   __ br(Assembler::NE, notBool);
2463 
2464   // ztos (same code as btos)
2465   __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2466   __ push(ztos);
2467   // Rewrite bytecode to be faster
2468   if (rc == may_rewrite) {
2469     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2470     patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2471   }
2472   __ b(Done);
2473 
2474   __ bind(notBool);
2475   __ cmp(flags, (u1)atos);
2476   __ br(Assembler::NE, notObj);
2477   // atos
2478   do_oop_load(_masm, field, r0, IN_HEAP);
2479   __ push(atos);
2480   if (rc == may_rewrite) {
2481     patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2482   }
2483   __ b(Done);
2484 
2485   __ bind(notObj);
2486   __ cmp(flags, (u1)itos);
2487   __ br(Assembler::NE, notInt);
2488   // itos
2489   __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2490   __ push(itos);
2491   // Rewrite bytecode to be faster
2492   if (rc == may_rewrite) {
2493     patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2494   }
2495   __ b(Done);
2496 
2497   __ bind(notInt);
2498   __ cmp(flags, (u1)ctos);
2499   __ br(Assembler::NE, notChar);
2500   // ctos
2501   __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2502   __ push(ctos);
2503   // Rewrite bytecode to be faster

2633     // c_rarg1: object pointer set up above (NULL if static)
2634     // c_rarg2: cache entry pointer
2635     // c_rarg3: jvalue object on the stack
2636     __ call_VM(noreg,
2637                CAST_FROM_FN_PTR(address,
2638                                 InterpreterRuntime::post_field_modification),
2639                c_rarg1, c_rarg2, c_rarg3);
2640     __ get_cache_and_index_at_bcp(cache, index, 1);
2641     __ bind(L1);
2642   }
2643 }
2644 
2645 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2646   transition(vtos, vtos);
2647 
2648   const Register cache = r2;
2649   const Register index = r3;
2650   const Register obj   = r2;
2651   const Register off   = r19;
2652   const Register flags = r0;
2653   const Register bc    = r4;
2654 
2655   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2656   jvmti_post_field_mod(cache, index, is_static);
2657   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2658 
2659   Label Done;
2660   __ mov(r5, flags);
2661 
2662   {
2663     Label notVolatile;
2664     __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2665     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2666     __ bind(notVolatile);
2667   }
2668 
2669   // field address
2670   const Address field(obj, off);
2671 
2672   Label notByte, notBool, notInt, notShort, notChar,
2673         notLong, notFloat, notObj, notDouble;
2674 
2675   // x86 uses a shift and mask, or wings it with a shift plus an assert that
2676   // the mask is not needed; aarch64 just uses a bitfield extract
2677   __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift,  ConstantPoolCacheEntry::tos_state_bits);
2678 
2679   assert(btos == 0, "change code, btos != 0");
2680   __ cbnz(flags, notByte);
2681 
2682   // Don't rewrite putstatic, only putfield
2683   if (is_static) rc = may_not_rewrite;
2684 
2685   // btos
2686   {
2687     __ pop(btos);
2688     if (!is_static) pop_and_check_object(obj);
2689     __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2690     if (rc == may_rewrite) {
2691       patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2692     }
2693     __ b(Done);
2694   }

2697   __ cmp(flags, (u1)ztos);
2698   __ br(Assembler::NE, notBool);
2699 
2700   // ztos
2701   {
2702     __ pop(ztos);
2703     if (!is_static) pop_and_check_object(obj);
2704     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2705     if (rc == may_rewrite) {
2706       patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2707     }
2708     __ b(Done);
2709   }
2710 
2711   __ bind(notBool);
2712   __ cmp(flags, (u1)atos);
2713   __ br(Assembler::NE, notObj);
2714 
2715   // atos
2716   {
2717     __ pop(atos);
2718     if (!is_static) pop_and_check_object(obj);
2719     // Store into the field
2720     do_oop_store(_masm, field, r0, IN_HEAP);
2721     if (rc == may_rewrite) {
2722       patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2723     }
2724     __ b(Done);
2725   }
2726 
2727   __ bind(notObj);
2728   __ cmp(flags, (u1)itos);
2729   __ br(Assembler::NE, notInt);
2730 
2731   // itos
2732   {
2733     __ pop(itos);
2734     if (!is_static) pop_and_check_object(obj);
2735     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2736     if (rc == may_rewrite) {
2737       patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2738     }
2739     __ b(Done);
2740   }
2741 
2742   __ bind(notInt);
2743   __ cmp(flags, (u1)ctos);
2744   __ br(Assembler::NE, notChar);

2844 void TemplateTable::putstatic(int byte_no) {
2845   putfield_or_static(byte_no, true);
2846 }
2847 
2848 void TemplateTable::jvmti_post_fast_field_mod()
2849 {
2850   if (JvmtiExport::can_post_field_modification()) {
2851     // Check to see if a field modification watch has been set before
2852     // we take the time to call into the VM.
2853     Label L2;
2854     __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2855     __ ldrw(c_rarg3, Address(rscratch1));
2856     __ cbzw(c_rarg3, L2);
2857     __ pop_ptr(r19);                  // copy the object pointer from tos
2858     __ verify_oop(r19);
2859     __ push_ptr(r19);                 // put the object pointer back on tos
2860     // Save tos values before call_VM() clobbers them. Since we have
2861     // to do it for every data type, we use the saved values as the
2862     // jvalue object.
2863     switch (bytecode()) {          // load values into the jvalue object
2864     case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
2865     case Bytecodes::_fast_bputfield: // fall through
2866     case Bytecodes::_fast_zputfield: // fall through
2867     case Bytecodes::_fast_sputfield: // fall through
2868     case Bytecodes::_fast_cputfield: // fall through
2869     case Bytecodes::_fast_iputfield: __ push_i(r0); break;
2870     case Bytecodes::_fast_dputfield: __ push_d(); break;
2871     case Bytecodes::_fast_fputfield: __ push_f(); break;
2872     case Bytecodes::_fast_lputfield: __ push_l(r0); break;
2873 
2874     default:
2875       ShouldNotReachHere();
2876     }
2877     __ mov(c_rarg3, esp);             // points to jvalue on the stack
2878     // access constant pool cache entry
2879     __ get_cache_entry_pointer_at_bcp(c_rarg2, r0, 1);
2880     __ verify_oop(r19);
2881     // r19: object pointer copied above
2882     // c_rarg2: cache entry pointer
2883     // c_rarg3: jvalue object on the stack
2884     __ call_VM(noreg,
2885                CAST_FROM_FN_PTR(address,
2886                                 InterpreterRuntime::post_field_modification),
2887                r19, c_rarg2, c_rarg3);
2888 
2889     switch (bytecode()) {             // restore tos values
2890     case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
2891     case Bytecodes::_fast_bputfield: // fall through
2892     case Bytecodes::_fast_zputfield: // fall through
2893     case Bytecodes::_fast_sputfield: // fall through
2894     case Bytecodes::_fast_cputfield: // fall through
2895     case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
2896     case Bytecodes::_fast_dputfield: __ pop_d(); break;
2897     case Bytecodes::_fast_fputfield: __ pop_f(); break;
2898     case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
2899     default: break;
2900     }
2901     __ bind(L2);
2902   }
2903 }
2904 
2905 void TemplateTable::fast_storefield(TosState state)
2906 {
2907   transition(state, vtos);
2908 
2909   ByteSize base = ConstantPoolCache::base_offset();

2923   // replace index with field offset from cache entry
2924   __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2925 
2926   {
2927     Label notVolatile;
2928     __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2929     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2930     __ bind(notVolatile);
2931   }
2932 
2933   Label notVolatile;
2934 
2935   // Get object from stack
2936   pop_and_check_object(r2);
2937 
2938   // field address
2939   const Address field(r2, r1);
2940 
2941   // access field
2942   switch (bytecode()) {
2943   case Bytecodes::_fast_aputfield:
2944     do_oop_store(_masm, field, r0, IN_HEAP);
2945     break;
2946   case Bytecodes::_fast_lputfield:
2947     __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
2948     break;
2949   case Bytecodes::_fast_iputfield:
2950     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2951     break;
2952   case Bytecodes::_fast_zputfield:
2953     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2954     break;
2955   case Bytecodes::_fast_bputfield:
2956     __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2957     break;
2958   case Bytecodes::_fast_sputfield:
2959     __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
2960     break;
2961   case Bytecodes::_fast_cputfield:
2962     __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);

3020   // r0: object
3021   __ verify_oop(r0);
3022   __ null_check(r0);
3023   const Address field(r0, r1);
3024 
3025   // 8179954: We need to make sure that the code generated for
3026   // volatile accesses forms a sequentially-consistent set of
3027   // operations when combined with STLR and LDAR.  Without a leading
3028   // membar it's possible for a simple Dekker test to fail if loads
3029   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
3030   // the stores in one method and we interpret the loads in another.
3031   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3032     Label notVolatile;
3033     __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3034     __ membar(MacroAssembler::AnyAny);
3035     __ bind(notVolatile);
3036   }
3037 
3038   // access field
3039   switch (bytecode()) {
3040   case Bytecodes::_fast_agetfield:
3041     do_oop_load(_masm, field, r0, IN_HEAP);
3042     __ verify_oop(r0);
3043     break;
3044   case Bytecodes::_fast_lgetfield:
3045     __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3046     break;
3047   case Bytecodes::_fast_igetfield:
3048     __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3049     break;
3050   case Bytecodes::_fast_bgetfield:
3051     __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3052     break;
3053   case Bytecodes::_fast_sgetfield:
3054     __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3055     break;
3056   case Bytecodes::_fast_cgetfield:
3057     __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3058     break;
3059   case Bytecodes::_fast_fgetfield:

3450   // %%% should make a type profile for any invokedynamic that takes a ref argument
3451   // profile this call
3452   __ profile_call(rbcp);
3453   __ profile_arguments_type(r3, rmethod, r13, false);
3454 
3455   __ verify_oop(r0);
3456 
3457   __ jump_from_interpreted(rmethod, r0);
3458 }
3459 
3460 
3461 //-----------------------------------------------------------------------------
3462 // Allocation
3463 
3464 void TemplateTable::_new() {
3465   transition(vtos, atos);
3466 
3467   __ get_unsigned_2_byte_index_at_bcp(r3, 1);
3468   Label slow_case;
3469   Label done;
3470   Label initialize_header;
3471   Label initialize_object; // including clearing the fields
3472 
3473   __ get_cpool_and_tags(r4, r0);
3474   // Make sure the class we're about to instantiate has been resolved.
3475   // This is done before loading the InstanceKlass to be consistent with the order
3476   // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
3477   const int tags_offset = Array<u1>::base_offset_in_bytes();
3478   __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3479   __ lea(rscratch1, Address(rscratch1, tags_offset));
3480   __ ldarb(rscratch1, rscratch1);
3481   __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3482   __ br(Assembler::NE, slow_case);
3483 
3484   // get InstanceKlass
3485   __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3486 
3487   // make sure klass is initialized & doesn't have finalizer
3488   // make sure klass is fully initialized
3489   __ ldrb(rscratch1, Address(r4, InstanceKlass::init_state_offset()));
3490   __ cmp(rscratch1, (u1)InstanceKlass::fully_initialized);
3491   __ br(Assembler::NE, slow_case);
3492 
3493   // get instance_size in InstanceKlass (scaled to a count of bytes)
3494   __ ldrw(r3,
3495           Address(r4,
3496                   Klass::layout_helper_offset()));
3497   // test to see if it has a finalizer or is malformed in some way
3498   __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3499 
3500   // Allocate the instance:
3501   //  If TLAB is enabled:
3502   //    Try to allocate in the TLAB.
3503   //    If fails, go to the slow path.
3504   //  Else If inline contiguous allocations are enabled:
3505   //    Try to allocate in eden.
3506   //    If fails due to heap end, go to slow path.
3507   //
3508   //  If TLAB is enabled OR inline contiguous is enabled:
3509   //    Initialize the allocation.
3510   //    Exit.
3511   //
3512   //  Go to slow path.
3513   const bool allow_shared_alloc =
3514     Universe::heap()->supports_inline_contig_alloc();
3515 
3516   if (UseTLAB) {
3517     __ tlab_allocate(r0, r3, 0, noreg, r1, slow_case);
3518 
3519     if (ZeroTLAB) {
3520       // the fields have been already cleared
3521       __ b(initialize_header);
3522     } else {
3523       // initialize both the header and fields
3524       __ b(initialize_object);
3525     }
3526   } else {
3527     // Allocation in the shared Eden, if allowed.
3528     //
3529     // r3: instance size in bytes
3530     if (allow_shared_alloc) {
3531       __ eden_allocate(r0, r3, 0, r10, slow_case);
3532     }
3533   }
3534 
3535   // If UseTLAB or allow_shared_alloc are true, the object was created above and
3536   // needs to be initialized. Otherwise, skip to the slow path.
3537   if (UseTLAB || allow_shared_alloc) {
3538     // The object is initialized before the header.  If the object size is
3539     // zero, go directly to the header initialization.
3540     __ bind(initialize_object);
3541     __ sub(r3, r3, sizeof(oopDesc));
3542     __ cbz(r3, initialize_header);
3543 
3544     // Initialize object fields
3545     {
3546       __ add(r2, r0, sizeof(oopDesc));
3547       Label loop;
3548       __ bind(loop);
3549       __ str(zr, Address(__ post(r2, BytesPerLong)));
3550       __ sub(r3, r3, BytesPerLong);
3551       __ cbnz(r3, loop);
3552     }
3553 
3554     // initialize object header only.
3555     __ bind(initialize_header);
3556     __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3557     __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3558     __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3559     __ store_klass(r0, r4);      // store klass last
3560 
3561     {
3562       SkipIfEqual skip(_masm, &DTraceAllocProbes, false);
3563       // Trigger dtrace event for fastpath
3564       __ push(atos); // save the return value
3565       __ call_VM_leaf(
3566            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3567       __ pop(atos); // restore the return value
3568 
3569     }
3570     __ b(done);
3571   }
3572 
3573   // slow case
3574   __ bind(slow_case);
3575   __ get_constant_pool(c_rarg1);
3576   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3577   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3578   __ verify_oop(r0);
3579 
3580   // continue
3581   __ bind(done);
3582   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3583   __ membar(Assembler::StoreStore);
3584 }
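The TLAB fast path above is plain bump-the-pointer allocation; anything that
cannot be satisfied by a pointer bump ends up in slow_case and the runtime
call. A compact C++ sketch of what tlab_allocate amounts to (names are
descriptive, not the VM's exact accessors):

    // Bump-pointer allocation sketch; NULL means take the slow path.
    static inline char* tlab_allocate_sketch(char*& tlab_top, char* tlab_end,
                                             size_t size_in_bytes) {
      char* obj = tlab_top;
      if (size_in_bytes > (size_t)(tlab_end - obj)) return NULL;
      tlab_top = obj + size_in_bytes;  // bump
      return obj;  // caller zeroes fields (unless ZeroTLAB) and sets the header
    }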
3585 
3586 void TemplateTable::newarray() {
3587   transition(itos, atos);
3588   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3589   __ mov(c_rarg2, r0);
3590   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3591           c_rarg1, c_rarg2);
3592   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3593   __ membar(Assembler::StoreStore);
3594 }
3595 
3596 void TemplateTable::anewarray() {
3597   transition(itos, atos);
3598   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3599   __ get_constant_pool(c_rarg1);
3600   __ mov(c_rarg3, r0);
3601   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3602           c_rarg1, c_rarg2, c_rarg3);
3603   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3604   __ membar(Assembler::StoreStore);
3605 }
3606 
3607 void TemplateTable::arraylength() {
3608   transition(atos, itos);
3609   __ null_check(r0, arrayOopDesc::length_offset_in_bytes());
3610   __ ldrw(r0, Address(r0, arrayOopDesc::length_offset_in_bytes()));
3611 }
3612 
3613 void TemplateTable::checkcast()
3614 {
3615   transition(atos, atos);
3616   Label done, is_null, ok_is_subtype, quicked, resolved;
3617   __ cbz(r0, is_null);
3618 
3619   // Get cpool & tags index
3620   __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3621   __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3622   // See if bytecode has already been quicked
3623   __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3624   __ lea(r1, Address(rscratch1, r19));
3625   __ ldarb(r1, r1);
3626   __ cmp(r1, (u1)JVM_CONSTANT_Class);
3627   __ br(Assembler::EQ, quicked);
3628 
3629   __ push(atos); // save receiver for result, and for GC
3630   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3631   // vm_result_2 has metadata result
3632   __ get_vm_result_2(r0, rthread);
3633   __ pop(r3); // restore receiver
3634   __ b(resolved);
3635 
3636   // Get superklass in r0 and subklass in r3
3637   __ bind(quicked);
3638   __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3639   __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3640 
3641   __ bind(resolved);
3642   __ load_klass(r19, r3);
3643 
3644   // Generate subtype check.  Blows r2, r5.  Object in r3.
3645   // Superklass in r0.  Subklass in r19.
3646   __ gen_subtype_check(r19, ok_is_subtype);
3647 
3648   // Come here on failure
3649   __ push(r3);
3650   // object is at TOS
3651   __ b(Interpreter::_throw_ClassCastException_entry);
3652 
3653   // Come here on success
3654   __ bind(ok_is_subtype);
3655   __ mov(r0, r3); // Restore object from r3
3656 
3657   // Collect counts on whether this test sees NULLs a lot or not.
3658   if (ProfileInterpreter) {
3659     __ b(done);
3660     __ bind(is_null);
3661     __ profile_null_seen(r2);
3662   } else {
3663     __ bind(is_null);   // same as 'done'
3664   }
3665   __ bind(done);
3666 }
3667 
3668 void TemplateTable::instanceof() {
3669   transition(atos, itos);
3670   Label done, is_null, ok_is_subtype, quicked, resolved;
3671   __ cbz(r0, is_null);
3672 
3673   // Get cpool & tags index
3674   __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3675   __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3676   // See if bytecode has already been quicked
3677   __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3678   __ lea(r1, Address(rscratch1, r19));
3679   __ ldarb(r1, r1);
3680   __ cmp(r1, (u1)JVM_CONSTANT_Class);
3681   __ br(Assembler::EQ, quicked);
3682 
3683   __ push(atos); // save receiver for result, and for GC
3684   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3685   // vm_result_2 has metadata result
3686   __ get_vm_result_2(r0, rthread);
3687   __ pop(r3); // restore receiver
3688   __ verify_oop(r3);
3689   __ load_klass(r3, r3);
3690   __ b(resolved);
3691 
3692   // Get superklass in r0 and subklass in r3
3693   __ bind(quicked);
3694   __ load_klass(r3, r0);
3695   __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1);
3696 
3697   __ bind(resolved);
3698 
3699   // Generate subtype check.  Blows r2, r5

3763 //       in the assembly code structure as well
3764 //
3765 // Stack layout:
3766 //
3767 // [expressions  ] <--- esp               = expression stack top
3768 // ..
3769 // [expressions  ]
3770 // [monitor entry] <--- monitor block top = expression stack bot
3771 // ..
3772 // [monitor entry]
3773 // [frame data   ] <--- monitor block bot
3774 // ...
3775 // [saved rbp    ] <--- rbp
3776 void TemplateTable::monitorenter()
3777 {
3778   transition(atos, vtos);
3779 
3780   // check for NULL object
3781   __ null_check(r0);
3782 
3783   const Address monitor_block_top(
3784         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3785   const Address monitor_block_bot(
3786         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3787   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
3788 
3789   Label allocated;
3790 
3791   // initialize entry pointer
3792   __ mov(c_rarg1, zr); // points to free slot or NULL
3793 
3794   // find a free slot in the monitor block (result in c_rarg1)
3795   {
3796     Label entry, loop, exit;
3797     __ ldr(c_rarg3, monitor_block_top); // points to current entry,
3798                                         // starting with top-most entry
3799     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3800 
3801     __ b(entry);
3802 

3852   // c_rarg1: points to monitor entry
3853   __ bind(allocated);
3854 
3855   // Increment bcp to point to the next bytecode, so exception
3856   // handling for async. exceptions works correctly.
3857   // The object has already been popped from the stack, so the
3858   // expression stack looks correct.
3859   __ increment(rbcp);
3860 
3861   // store object
3862   __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
3863   __ lock_object(c_rarg1);
3864 
3865   // check to make sure this monitor doesn't cause stack overflow after locking
3866   __ save_bcp();  // in case of exception
3867   __ generate_stack_overflow_check(0);
3868 
3869   // The bcp has already been incremented. Just need to dispatch to
3870   // next instruction.
3871   __ dispatch_next(vtos);
3872 }
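The elided slot-search loop (lines 3802-3852 are not shown in this excerpt)
walks the monitor block from the top-most entry toward the bottom, remembering
a free entry (obj == NULL) as the comment at line 3792 indicates. A simplified
C++ sketch of such a scan (types, accessors, and the early-exit detail are
illustrative):

    // Sketch of the monitorenter slot scan over the frame's monitor block.
    BasicObjectLock* find_slot_sketch(BasicObjectLock* top,
                                      BasicObjectLock* bot, oop obj) {
      BasicObjectLock* slot = NULL;
      for (BasicObjectLock* e = top; e != bot; e++) {
        if (e->obj() == NULL) slot = e;      // remember a free entry
        else if (e->obj() == obj) return e;  // object already here: reuse
      }
      return slot;  // NULL => allocate a fresh monitor entry
    }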
3873 
3874 
3875 void TemplateTable::monitorexit()
3876 {
3877   transition(atos, vtos);
3878 
3879   // check for NULL object
3880   __ null_check(r0);
3881 
3882   const Address monitor_block_top(
3883         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3884   const Address monitor_block_bot(
3885         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
3886   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
3887 
3888   Label found;
3889 
3890   // find matching slot
3891   {
3892     Label entry, loop;
3893     __ ldr(c_rarg1, monitor_block_top); // points to current entry,
3894                                         // starting with top-most entry
3895     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3896                                         // of monitor block
3897     __ b(entry);
3898 
3899     __ bind(loop);
3900     // check if current entry is for same object
3901     __ ldr(rscratch1, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));

 128   case TemplateTable::equal        : return Assembler::NE;
 129   case TemplateTable::not_equal    : return Assembler::EQ;
 130   case TemplateTable::less         : return Assembler::GE;
 131   case TemplateTable::less_equal   : return Assembler::GT;
 132   case TemplateTable::greater      : return Assembler::LE;
 133   case TemplateTable::greater_equal: return Assembler::LT;
 134   }
 135   ShouldNotReachHere();
 136   return Assembler::EQ;
 137 }
 138 
 139 
 140 // Miscellaneous helper routines
 141 // Store an oop (or NULL) at the Address described by dst.
 142 // If val == noreg this means store a NULL
 143 static void do_oop_store(InterpreterMacroAssembler* _masm,
 144                          Address dst,
 145                          Register val,
 146                          DecoratorSet decorators) {
 147   assert(val == noreg || val == r0, "parameter is just for looks");
 148   __ store_heap_oop(dst, val, r10, r1, noreg, decorators);
 149 }
 150 
 151 static void do_oop_load(InterpreterMacroAssembler* _masm,
 152                         Address src,
 153                         Register dst,
 154                         DecoratorSet decorators) {
 155   __ load_heap_oop(dst, src, r10, r1, decorators);
 156 }
 157 
 158 Address TemplateTable::at_bcp(int offset) {
 159   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 160   return Address(rbcp, offset);
 161 }
 162 
 163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 164                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 165                                    int byte_no)
 166 {
 167   if (!RewriteBytecodes)  return;
 168   Label L_patch_done;
 169 
 170   switch (bc) {
 171   case Bytecodes::_fast_qputfield:
 172   case Bytecodes::_fast_aputfield:
 173   case Bytecodes::_fast_bputfield:
 174   case Bytecodes::_fast_zputfield:
 175   case Bytecodes::_fast_cputfield:
 176   case Bytecodes::_fast_dputfield:
 177   case Bytecodes::_fast_fputfield:
 178   case Bytecodes::_fast_iputfield:
 179   case Bytecodes::_fast_lputfield:
 180   case Bytecodes::_fast_sputfield:
 181     {
 182       // We skip bytecode quickening for putfield instructions when
 183       // the put_code written to the constant pool cache is zero.
 184       // This is required so that every execution of this instruction
 185       // calls out to InterpreterRuntime::resolve_get_put to do
 186       // additional, required work.
 187       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 188       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 189       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 190       __ movw(bc_reg, bc);
 191       __ cbzw(temp_reg, L_patch_done);  // don't patch

 311 
 312 void TemplateTable::ldc(bool wide)
 313 {
 314   transition(vtos, vtos);
 315   Label call_ldc, notFloat, notClass, notInt, Done;
 316 
 317   if (wide) {
 318     __ get_unsigned_2_byte_index_at_bcp(r1, 1);
 319   } else {
 320     __ load_unsigned_byte(r1, at_bcp(1));
 321   }
 322   __ get_cpool_and_tags(r2, r0);
 323 
 324   const int base_offset = ConstantPool::header_size() * wordSize;
 325   const int tags_offset = Array<u1>::base_offset_in_bytes();
 326 
 327   // get type
 328   __ add(r3, r1, tags_offset);
 329   __ lea(r3, Address(r0, r3));
 330   __ ldarb(r3, r3);
 331   __ andr(r3, r3, ~JVM_CONSTANT_QDescBit);
 332 
 333   // unresolved class - get the resolved class
 334   __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClass);
 335   __ br(Assembler::EQ, call_ldc);
 336 
 337   // unresolved class in error state - call into runtime to throw the error
 338   // from the first resolution attempt
 339   __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClassInError);
 340   __ br(Assembler::EQ, call_ldc);
 341 
 342   // resolved class - need to call vm to get java mirror of the class
 343   __ cmp(r3, (u1)JVM_CONSTANT_Class);
 344   __ br(Assembler::NE, notClass);
 345 
 346   __ bind(call_ldc);
 347   __ mov(c_rarg1, wide);
 348   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), c_rarg1);
 349   __ push_ptr(r0);
 350   __ verify_oop(r0);
 351   __ b(Done);

 729 }
 730 
 731 void TemplateTable::index_check(Register array, Register index)
 732 {
 733   // destroys r1, rscratch1
 734   // check array
 735   __ null_check(array, arrayOopDesc::length_offset_in_bytes());
 736   // sign extend index for use by indexed load
 737   // __ movl2ptr(index, index);
 738   // check index
 739   Register length = rscratch1;
 740   __ ldrw(length, Address(array, arrayOopDesc::length_offset_in_bytes()));
 741   __ cmpw(index, length);
 742   if (index != r1) {
 743     // ??? convention: move aberrant index into r1 for exception message
 744     assert(r1 != array, "different registers");
 745     __ mov(r1, index);
 746   }
 747   Label ok;
 748   __ br(Assembler::LO, ok);
 749   // ??? convention: move array into r3 for exception message
 750   __ mov(r3, array);
 751   __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
 752   __ br(rscratch1);
 753   __ bind(ok);
 754 }
 755 
 756 void TemplateTable::iaload()
 757 {
 758   transition(itos, itos);
 759   __ mov(r1, r0);
 760   __ pop_ptr(r0);
 761   // r0: array
 762   // r1: index
 763   index_check(r0, r1); // leaves index in r1, kills rscratch1
 764   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2);
 765   __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
 766 }
 767 
 768 void TemplateTable::laload()
 769 {
 770   transition(itos, ltos);
 771   __ mov(r1, r0);
 772   __ pop_ptr(r0);

 792 void TemplateTable::daload()
 793 {
 794   transition(itos, dtos);
 795   __ mov(r1, r0);
 796   __ pop_ptr(r0);
 797   // r0: array
 798   // r1: index
 799   index_check(r0, r1); // leaves index in r1, kills rscratch1
 800   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
 801   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
 802 }
 803 
 804 void TemplateTable::aaload()
 805 {
 806   transition(itos, atos);
 807   __ mov(r1, r0);
 808   __ pop_ptr(r0);
 809   // r0: array
 810   // r1: index
 811   index_check(r0, r1); // leaves index in r1, kills rscratch1
 812   __ profile_array(r2, r0, r4);
 813   if (UseFlatArray) {
 814     Label is_flat_array, done;
 815 
 816     __ test_flattened_array_oop(r0, r8 /*temp*/, is_flat_array);
 817     __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
 818     do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
 819 
 820     __ b(done);
 821     __ bind(is_flat_array);
 822     __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_load), r0, r1);
 823     __ bind(done);
 824   } else {
 825     __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
 826     do_oop_load(_masm, Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)), r0, IS_ARRAY);
 827   }
 828   __ profile_element(r2, r0, r4);
 829 }
 830 
 831 void TemplateTable::baload()
 832 {
 833   transition(itos, itos);
 834   __ mov(r1, r0);
 835   __ pop_ptr(r0);
 836   // r0: array
 837   // r1: index
 838   index_check(r0, r1); // leaves index in r1, kills rscratch1
 839   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0);
 840   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
 841 }
 842 
 843 void TemplateTable::caload()
 844 {
 845   transition(itos, itos);
 846   __ mov(r1, r0);
 847   __ pop_ptr(r0);
 848   // r0: array

1095   // r1:  index
1096   // r3:  array
1097   index_check(r3, r1); // prefer index in r1
1098   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2);
1099   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg);
1100 }
1101 
1102 void TemplateTable::dastore() {
1103   transition(dtos, vtos);
1104   __ pop_i(r1);
1105   __ pop_ptr(r3);
1106   // v0: value
1107   // r1:  index
1108   // r3:  array
1109   index_check(r3, r1); // prefer index in r1
1110   __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
1111   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1112 }
1113 
1114 void TemplateTable::aastore() {
1115   Label is_null, is_flat_array, ok_is_subtype, done;
1116   transition(vtos, vtos);
1117   // stack: ..., array, index, value
1118   __ ldr(r0, at_tos());    // value
1119   __ ldr(r2, at_tos_p1()); // index
1120   __ ldr(r3, at_tos_p2()); // array
1121 
1122   index_check(r3, r2);     // kills r1
1123 
1124   __ profile_array(r4, r3, r5);
1125   __ profile_element(r4, r0, r5);
1126 
1127   __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1128   Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1129   // Be careful not to clobber r4 below
1130 
1131   // do array store check - check for NULL value first
1132   __ cbz(r0, is_null);
1133 
1134   // Move array class to r5
1135   __ load_klass(r5, r3);
1136 
1137   if (UseFlatArray) {
1138     __ ldrw(r6, Address(r5, Klass::layout_helper_offset()));
1139     __ test_flattened_array_layout(r6, is_flat_array);
1140   }
1141 
1142   // Move subklass into r1
1143   __ load_klass(r1, r0);
1144 
1145   // Move array element superklass into r0
1146   __ ldr(r0, Address(r5, ObjArrayKlass::element_klass_offset()));
1147   // Compress array + index*oopSize + 12 into a single register.  Frees r2.
1148 
1149   // Generate subtype check.  Blows r2, r5
1150   // Superklass in r0.  Subklass in r1.
1151 
1152   // is "r1 <: r0" ? (value subclass <: array element superclass)
1153   __ gen_subtype_check(r1, ok_is_subtype, false);
1154 
1155   // Come here on failure
1156   // object is at TOS
1157   __ b(Interpreter::_throw_ArrayStoreException_entry);
1158 
1159   // Come here on success
1160   __ bind(ok_is_subtype);
1161 
1162   // Get the value we will store
1163   __ ldr(r0, at_tos());
1164   // Now store using the appropriate barrier
1165   do_oop_store(_masm, element_address, r0, IS_ARRAY);
1166   __ b(done);
1167 
1168   // Have a NULL in r0, r3=array, r2=index.  Store NULL at ary[idx]
1169   __ bind(is_null);
1170   if (EnableValhalla) {
1171     Label is_null_into_value_array_npe, store_null;
1172 
1173     // No way to store null in flat null-free array
1174     __ test_null_free_array_oop(r3, r8, is_null_into_value_array_npe);
1175     __ b(store_null);
1176 
1177     __ bind(is_null_into_value_array_npe);
1178     __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1179 
1180     __ bind(store_null);
1181   }
1182 
1183   // Store a NULL
1184   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1185   __ b(done);
1186 
1187   if (EnableValhalla) {
1188     Label is_type_ok;
1189     __ bind(is_flat_array); // Store non-null value to flat
1190 
1191     // Simplistic type check...
1192     // r0 - value, r2 - index, r3 - array.
1193 
1194     // Profile the not-null value's klass.
1195     // Load value class
1196     __ load_klass(r1, r0);
1197 
1198     // Move element klass into r7
1199     __ ldr(r7, Address(r5, ArrayKlass::element_klass_offset()));
1200 
1201     // flat value array needs exact type match
1202     // is "r1 == r7" (value subclass == array element superclass)
1203 
1204     __ cmp(r7, r1);
1205     __ br(Assembler::EQ, is_type_ok);
1206 
1207     __ b(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1208 
1209     __ bind(is_type_ok);
1210     // r1: value's klass
1211     // r3: array
1212     // r5: array klass
1213     __ test_klass_is_empty_inline_type(r1, r7, done);
1214 
1215     // calc dst for copy
1216     __ ldrw(r7, at_tos_p1()); // index
1217     __ data_for_value_array_index(r3, r5, r7, r7);
1218 
1219     // ...and src for copy
1220     __ ldr(r6, at_tos());  // value
1221     __ data_for_oop(r6, r6, r1);
1222 
1223     __ mov(r4, r1);  // Shuffle arguments to avoid conflict with c_rarg1
1224     __ access_value_copy(IN_HEAP, r6, r7, r4);
1225   }
1226 
1227   // Pop stack arguments
1228   __ bind(done);
1229   __ add(esp, esp, 3 * Interpreter::stackElementSize);
1230 }
1231 
1232 void TemplateTable::bastore()
1233 {
1234   transition(itos, vtos);
1235   __ pop_i(r1);
1236   __ pop_ptr(r3);
1237   // r0: value
1238   // r1: index
1239   // r3: array
1240   index_check(r3, r1); // prefer index in r1
1241 
1242   // Need to check whether array is boolean or byte
1243   // since both types share the bastore bytecode.
1244   __ load_klass(r2, r3);
1245   __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));

2016   __ br(j_not(cc), not_taken);
2017   branch(false, false);
2018   __ bind(not_taken);
2019   __ profile_not_taken_branch(r0);
2020 }
2021 
2022 void TemplateTable::if_nullcmp(Condition cc)
2023 {
2024   transition(atos, vtos);
2025   // assume branch is more often taken than not (loops use backward branches)
2026   Label not_taken;
2027   if (cc == equal)
2028     __ cbnz(r0, not_taken);
2029   else
2030     __ cbz(r0, not_taken);
2031   branch(false, false);
2032   __ bind(not_taken);
2033   __ profile_not_taken_branch(r0);
2034 }
2035 
2036 void TemplateTable::if_acmp(Condition cc) {
2037   transition(atos, vtos);
2038   // assume branch is more often taken than not (loops use backward branches)
2039   Label taken, not_taken;
2040   __ pop_ptr(r1);
2041 
2042   __ profile_acmp(r2, r1, r0, r4);
2043 
2044   Register is_inline_type_mask = rscratch1;
2045   __ mov(is_inline_type_mask, markWord::inline_type_pattern);
2046 
2047   if (EnableValhalla) {
2048     __ cmp(r1, r0);
2049     __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2050 
2051     // might be substitutable, test if either r0 or r1 is null
2052     __ andr(r2, r0, r1);
2053     __ cbz(r2, (cc == equal) ? not_taken : taken);
2054 
2055     // and both are values ?
2056     __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2057     __ andr(r2, r2, is_inline_type_mask);
2058     __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2059     __ andr(r4, r4, is_inline_type_mask);
2060     __ andr(r2, r2, r4);
2061     __ cmp(r2,  is_inline_type_mask);
2062     __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2063 
2064     // same value klass ?
2065     __ load_metadata(r2, r1);
2066     __ load_metadata(r4, r0);
2067     __ cmp(r2, r4);
2068     __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2069 
2070     // Know both are the same type, let's test for substitutability...
2071     if (cc == equal) {
2072       invoke_is_substitutable(r0, r1, taken, not_taken);
2073     } else {
2074       invoke_is_substitutable(r0, r1, not_taken, taken);
2075     }
2076     __ stop("Not reachable");
2077   }
2078 
2079   __ cmpoop(r1, r0);
2080   __ br(j_not(cc), not_taken);
2081   __ bind(taken);
2082   branch(false, false);
2083   __ bind(not_taken);
2084   __ profile_not_taken_branch(r0, true);
2085 }
2086 
2087 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2088                                             Label& is_subst, Label& not_subst) {
2089 
2090   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2091   // r0 holds the answer; jump to the outcome.
2092   __ cbz(r0, not_subst);
2093   __ b(is_subst);
2094 }
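Taken together, the EnableValhalla path in if_acmp implements the following
decision ladder before any runtime call. A hedged C++ sketch (the predicates
stand in for the mark-word and metadata checks above):

    // Sketch of if_acmp's equality decision when EnableValhalla is set.
    bool acmp_equal_sketch(oop a, oop b) {
      if (a == b) return true;                   // same reference
      if (a == NULL || b == NULL) return false;  // only one can be null here
      if (!has_inline_type_mark(a) ||            // markWord::inline_type_pattern
          !has_inline_type_mark(b)) return false;
      if (klass_of(a) != klass_of(b)) return false;  // different value classes
      return is_substitutable(a, b);  // InterpreterRuntime::is_substitutable
    }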
2095 
2096 
2097 void TemplateTable::ret() {
2098   transition(vtos, vtos);
2099   // We might be moving to a safepoint.  The thread which calls
2100   // Interpreter::notice_safepoints() will effectively flush its cache
2101   // when it makes a system call, but we need to do something to
2102   // ensure that we see the changed dispatch table.
2103   __ membar(MacroAssembler::LoadLoad);
2104 
2105   locals_index(r1);
2106   __ ldr(r1, aaddress(r1)); // get return bci, compute return bcp
2107   __ profile_ret(r1, r2);
2108   __ ldr(rbcp, Address(rmethod, Method::const_offset()));
2109   __ lea(rbcp, Address(rbcp, r1));
2110   __ add(rbcp, rbcp, in_bytes(ConstMethod::codes_offset()));
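  // rbcp now points at ConstMethod::codes()[bci]; the bytecodes are laid
  // out inline in the ConstMethod, codes_offset bytes from its start.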
2111   __ dispatch_next(vtos, 0, /*generate_poll*/true);
2112 }
2113 
2114 void TemplateTable::wide_ret() {
2115   transition(vtos, vtos);
2116   locals_index_wide(r1);

2414   if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2415     __ load_resolved_method_at_index(byte_no, temp, Rcache);
2416     __ load_method_holder(temp, temp);
2417     __ clinit_barrier(temp, rscratch1, NULL, &clinit_barrier_slow);
2418   }
2419 }
2420 
2421 // The Rcache and index registers must be set before call
 2422 // n.b. unlike x86, the cache already includes the index offset
2423 void TemplateTable::load_field_cp_cache_entry(Register obj,
2424                                               Register cache,
2425                                               Register index,
2426                                               Register off,
2427                                               Register flags,
2428                                               bool is_static = false) {
2429   assert_different_registers(cache, index, flags, off);
2430 
2431   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2432   // Field offset
2433   __ ldr(off, Address(cache, in_bytes(cp_base_offset +
2434                                       ConstantPoolCacheEntry::f2_offset())));
2435   // Flags
2436   __ ldrw(flags, Address(cache, in_bytes(cp_base_offset +
2437                                          ConstantPoolCacheEntry::flags_offset())));
2438 
2439   // klass overwrite register
2440   if (is_static) {
2441     __ ldr(obj, Address(cache, in_bytes(cp_base_offset +
2442                                         ConstantPoolCacheEntry::f1_offset())));
2443     const int mirror_offset = in_bytes(Klass::java_mirror_offset());
2444     __ ldr(obj, Address(obj, mirror_offset));
2445     __ resolve_oop_handle(obj);
2446   }
2447 }
2448 
2449 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2450                                                Register method,
2451                                                Register itable_index,
2452                                                Register flags,
2453                                                bool is_invokevirtual,
2454                                                bool is_invokevfinal, /*unused*/
2455                                                bool is_invokedynamic) {
2456   // setup registers
2457   const Register cache = rscratch2;

2515     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2516                                        InterpreterRuntime::post_field_access),
2517                c_rarg1, c_rarg2, c_rarg3);
2518     __ get_cache_and_index_at_bcp(cache, index, 1);
2519     __ bind(L1);
2520   }
2521 }
2522 
2523 void TemplateTable::pop_and_check_object(Register r)
2524 {
2525   __ pop_ptr(r);
2526   __ null_check(r);  // for field access must check obj.
2527   __ verify_oop(r);
2528 }
2529 
2530 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc)
2531 {
2532   const Register cache = r2;
2533   const Register index = r3;
2534   const Register obj   = r4;
2535   const Register klass = r5;
2536   const Register inline_klass = r7;
2537   const Register off   = r19;
2538   const Register flags = r0;
2539   const Register raw_flags = r6;
2540   const Register bc    = r4; // uses same reg as obj, so don't mix them
2541 
2542   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2543   jvmti_post_field_access(cache, index, is_static, false);
2544   load_field_cp_cache_entry(obj, cache, index, off, raw_flags, is_static);
2545 
2546   if (!is_static) {
2547     // obj is on the stack
2548     pop_and_check_object(obj);
2549   }
2550 
2551   // 8179954: We need to make sure that the code generated for
2552   // volatile accesses forms a sequentially-consistent set of
2553   // operations when combined with STLR and LDAR.  Without a leading
2554   // membar it's possible for a simple Dekker test to fail if loads
2555   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
2556   // the stores in one method and we interpret the loads in another.
 2557   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
2558     Label notVolatile;
2559     __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2560     __ membar(MacroAssembler::AnyAny);
2561     __ bind(notVolatile);
2562   }
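  // Illustration (not generated code), under the assumption stated above
  // that C2 uses STLR for volatile stores while this code uses LDR;DMB
  // for volatile loads: in a Dekker-style exchange
  //   T1: STLR x=1; load y          T2: STLR y=1; load x
  // both plain loads may observe 0, because STLR orders only against a
  // later LDAR, not a later LDR. The leading full barrier closes that gap.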
2563 
2564   const Address field(obj, off);
2565 
2566   Label Done, notByte, notBool, notInt, notShort, notChar,
2567               notLong, notFloat, notObj, notDouble;
2568 
2569   if (!is_static) {
2570     __ ldr(klass, Address(cache, in_bytes(ConstantPoolCache::base_offset() +
2571                                           ConstantPoolCacheEntry::f1_offset())));
2572   }
2573 
 2574   // x86 uses a shift and mask, or a bare shift plus an assert that
 2575   // the mask is not needed. aarch64 just uses a bitfield extract.
2576   __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
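  // ubfxw extracts tos_state_bits bits starting at bit tos_state_shift
  // from raw_flags and zero-extends them, leaving just the field's
  // TosState in flags.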

2577 
2578   assert(btos == 0, "change code, btos != 0");
2579   __ cbnz(flags, notByte);
2580 
2581   // Don't rewrite getstatic, only getfield
2582   if (is_static) rc = may_not_rewrite;
2583 
2584   // btos
2585   __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2586   __ push(btos);
2587   // Rewrite bytecode to be faster
2588   if (rc == may_rewrite) {
2589     patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2590   }
2591   __ b(Done);
2592 
2593   __ bind(notByte);
2594   __ cmp(flags, (u1)ztos);
2595   __ br(Assembler::NE, notBool);
2596 
2597   // ztos (same code as btos)
2598   __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2599   __ push(ztos);
2600   // Rewrite bytecode to be faster
2601   if (rc == may_rewrite) {
2602     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2603     patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2604   }
2605   __ b(Done);
2606 
2607   __ bind(notBool);
2608   __ cmp(flags, (u1)atos);
2609   __ br(Assembler::NE, notObj);
2610   // atos
2611   if (!EnableValhalla) {
2612     do_oop_load(_masm, field, r0, IN_HEAP);
2613     __ push(atos);
2614     if (rc == may_rewrite) {
2615       patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2616     }
2617     __ b(Done);
2618   } else { // Valhalla
2619     if (is_static) {
2620       __ load_heap_oop(r0, field);
2621       Label is_null_free_inline_type, uninitialized;
 2622       // The code below must handle the case where the static field has not been initialized yet
2623       __ test_field_is_null_free_inline_type(raw_flags, noreg /*temp*/, is_null_free_inline_type);
2624         // field is not a null free inline type
2625         __ push(atos);
2626         __ b(Done);
2627       // field is a null free inline type, must not return null even if uninitialized
2628       __ bind(is_null_free_inline_type);
2629         __ cbz(r0, uninitialized);
2630           __ push(atos);
2631           __ b(Done);
2632         __ bind(uninitialized);
2633           __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2634           Label slow_case, finish;
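                // If the holder class is fully initialized, its default value
                // oop must already exist and can be read directly; otherwise
                // fall back to the runtime to create and install it.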
2635           __ ldrb(rscratch1, Address(cache, InstanceKlass::init_state_offset()));
2636           __ cmp(rscratch1, (u1)InstanceKlass::fully_initialized);
2637           __ br(Assembler::NE, slow_case);
2638           __ get_default_value_oop(klass, off /* temp */, r0);
2639         __ b(finish);
2640         __ bind(slow_case);
2641           __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field), obj, raw_flags);
2642           __ bind(finish);
2643           __ verify_oop(r0);
2644           __ push(atos);
2645           __ b(Done);
2646     } else {
2647       Label is_inlined, nonnull, is_inline_type, rewrite_inline;
2648       __ test_field_is_null_free_inline_type(raw_flags, noreg /*temp*/, is_inline_type);
2649         // Non-inline field case
2650         __ load_heap_oop(r0, field);
2651         __ push(atos);
2652         if (rc == may_rewrite) {
2653           patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2654         }
2655         __ b(Done);
2656       __ bind(is_inline_type);
2657         __ test_field_is_inlined(raw_flags, noreg /* temp */, is_inlined);
2658          // field is not inlined
2659           __ load_heap_oop(r0, field);
2660           __ cbnz(r0, nonnull);
2661             __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2662             __ get_inline_type_field_klass(klass, raw_flags, inline_klass);
2663             __ get_default_value_oop(inline_klass, klass /* temp */, r0);
2664           __ bind(nonnull);
2665           __ verify_oop(r0);
2666           __ push(atos);
2667           __ b(rewrite_inline);
2668         __ bind(is_inlined);
2669         // field is inlined
2670           __ andw(raw_flags, raw_flags, ConstantPoolCacheEntry::field_index_mask);
2671           __ mov(r0, obj);
2672           __ read_inlined_field(klass, raw_flags, off, inline_klass /* temp */, r0);
2673           __ verify_oop(r0);
2674           __ push(atos);
2675       __ bind(rewrite_inline);
2676       if (rc == may_rewrite) {
2677         patch_bytecode(Bytecodes::_fast_qgetfield, bc, r1);
2678       }
2679       __ b(Done);
2680     }
2681   }

2682 
2683   __ bind(notObj);
2684   __ cmp(flags, (u1)itos);
2685   __ br(Assembler::NE, notInt);
2686   // itos
2687   __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2688   __ push(itos);
2689   // Rewrite bytecode to be faster
2690   if (rc == may_rewrite) {
2691     patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2692   }
2693   __ b(Done);
2694 
2695   __ bind(notInt);
2696   __ cmp(flags, (u1)ctos);
2697   __ br(Assembler::NE, notChar);
2698   // ctos
2699   __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2700   __ push(ctos);
2701   // Rewrite bytecode to be faster

2831     // c_rarg1: object pointer set up above (NULL if static)
2832     // c_rarg2: cache entry pointer
2833     // c_rarg3: jvalue object on the stack
2834     __ call_VM(noreg,
2835                CAST_FROM_FN_PTR(address,
2836                                 InterpreterRuntime::post_field_modification),
2837                c_rarg1, c_rarg2, c_rarg3);
2838     __ get_cache_and_index_at_bcp(cache, index, 1);
2839     __ bind(L1);
2840   }
2841 }
2842 
2843 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2844   transition(vtos, vtos);
2845 
2846   const Register cache = r2;
2847   const Register index = r3;
2848   const Register obj   = r2;
2849   const Register off   = r19;
2850   const Register flags = r0;
2851   const Register flags2 = r6;
2852   const Register bc    = r4;
2853   const Register inline_klass = r5;
2854 
2855   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2856   jvmti_post_field_mod(cache, index, is_static);
2857   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2858 
2859   Label Done;
2860   __ mov(r5, flags);
2861 
2862   {
2863     Label notVolatile;
2864     __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2865     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
2866     __ bind(notVolatile);
2867   }
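  // Leading half of a volatile store: StoreStore | LoadStore orders all
  // earlier memory accesses before the field store that follows.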
2868 
2869   // field address
2870   const Address field(obj, off);
2871 
2872   Label notByte, notBool, notInt, notShort, notChar,
2873         notLong, notFloat, notObj, notDouble;
2874 
2875   __ mov(flags2, flags);
2876 
 2877   // x86 uses a shift and mask, or a bare shift plus an assert that
 2878   // the mask is not needed. aarch64 just uses a bitfield extract.
 2879   __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2880 
2881   assert(btos == 0, "change code, btos != 0");
2882   __ cbnz(flags, notByte);
2883 
2884   // Don't rewrite putstatic, only putfield
2885   if (is_static) rc = may_not_rewrite;
2886 
2887   // btos
2888   {
2889     __ pop(btos);
2890     if (!is_static) pop_and_check_object(obj);
2891     __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2892     if (rc == may_rewrite) {
2893       patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2894     }
2895     __ b(Done);
2896   }

2899   __ cmp(flags, (u1)ztos);
2900   __ br(Assembler::NE, notBool);
2901 
2902   // ztos
2903   {
2904     __ pop(ztos);
2905     if (!is_static) pop_and_check_object(obj);
2906     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2907     if (rc == may_rewrite) {
2908       patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2909     }
2910     __ b(Done);
2911   }
2912 
2913   __ bind(notBool);
2914   __ cmp(flags, (u1)atos);
2915   __ br(Assembler::NE, notObj);
2916 
2917   // atos
2918   {
2919      if (!EnableValhalla) {
2920       __ pop(atos);
2921       if (!is_static) pop_and_check_object(obj);
2922       // Store into the field
2923       do_oop_store(_masm, field, r0, IN_HEAP);
2924       if (rc == may_rewrite) {
2925         patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2926       }
2927       __ b(Done);
2928      } else { // Valhalla
2929       __ pop(atos);
2930       if (is_static) {
2931         Label is_inline_type;
2932          __ test_field_is_not_null_free_inline_type(flags2, noreg /* temp */, is_inline_type);
2933          __ null_check(r0);
2934          __ bind(is_inline_type);
2935          do_oop_store(_masm, field, r0, IN_HEAP);
2936          __ b(Done);
2937       } else {
2938         Label is_inline_type, is_inlined, rewrite_not_inline, rewrite_inline;
2939         __ test_field_is_null_free_inline_type(flags2, noreg /*temp*/, is_inline_type);
2940         // Not an inline type
2941         pop_and_check_object(obj);
2942         // Store into the field
2943         do_oop_store(_masm, field, r0, IN_HEAP);
2944         __ bind(rewrite_not_inline);
2945         if (rc == may_rewrite) {
2946           patch_bytecode(Bytecodes::_fast_aputfield, bc, r19, true, byte_no);
2947         }
2948         __ b(Done);
2949         // Implementation of the inline type semantic
2950         __ bind(is_inline_type);
2951         __ null_check(r0);
2952         __ test_field_is_inlined(flags2, noreg /*temp*/, is_inlined);
2953         // field is not inlined
2954         pop_and_check_object(obj);
2955         // Store into the field
2956         do_oop_store(_masm, field, r0, IN_HEAP);
2957         __ b(rewrite_inline);
2958         __ bind(is_inlined);
2959         // field is inlined
2960         pop_and_check_object(obj);
2961         assert_different_registers(r0, inline_klass, obj, off);
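              // Flattened store: data_for_oop yields the address of the value's
              // field payload (past the object header), which is then copied into
              // the inlined slot at obj + off by access_value_copy.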
2962         __ load_klass(inline_klass, r0);
2963         __ data_for_oop(r0, r0, inline_klass);
2964         __ add(obj, obj, off);
2965         __ access_value_copy(IN_HEAP, r0, obj, inline_klass);
2966         __ bind(rewrite_inline);
2967         if (rc == may_rewrite) {
2968           patch_bytecode(Bytecodes::_fast_qputfield, bc, r19, true, byte_no);
2969         }
2970         __ b(Done);
2971       }
2972      }  // Valhalla
2973   }
2974 
2975   __ bind(notObj);
2976   __ cmp(flags, (u1)itos);
2977   __ br(Assembler::NE, notInt);
2978 
2979   // itos
2980   {
2981     __ pop(itos);
2982     if (!is_static) pop_and_check_object(obj);
2983     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2984     if (rc == may_rewrite) {
2985       patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2986     }
2987     __ b(Done);
2988   }
2989 
2990   __ bind(notInt);
2991   __ cmp(flags, (u1)ctos);
2992   __ br(Assembler::NE, notChar);

3092 void TemplateTable::putstatic(int byte_no) {
3093   putfield_or_static(byte_no, true);
3094 }
3095 
3096 void TemplateTable::jvmti_post_fast_field_mod()
3097 {
3098   if (JvmtiExport::can_post_field_modification()) {
3099     // Check to see if a field modification watch has been set before
3100     // we take the time to call into the VM.
3101     Label L2;
3102     __ lea(rscratch1, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3103     __ ldrw(c_rarg3, Address(rscratch1));
3104     __ cbzw(c_rarg3, L2);
3105     __ pop_ptr(r19);                  // copy the object pointer from tos
3106     __ verify_oop(r19);
3107     __ push_ptr(r19);                 // put the object pointer back on tos
3108     // Save tos values before call_VM() clobbers them. Since we have
3109     // to do it for every data type, we use the saved values as the
3110     // jvalue object.
3111     switch (bytecode()) {          // load values into the jvalue object
 3112     case Bytecodes::_fast_qputfield: // fall through
3113     case Bytecodes::_fast_aputfield: __ push_ptr(r0); break;
3114     case Bytecodes::_fast_bputfield: // fall through
3115     case Bytecodes::_fast_zputfield: // fall through
3116     case Bytecodes::_fast_sputfield: // fall through
3117     case Bytecodes::_fast_cputfield: // fall through
3118     case Bytecodes::_fast_iputfield: __ push_i(r0); break;
3119     case Bytecodes::_fast_dputfield: __ push_d(); break;
3120     case Bytecodes::_fast_fputfield: __ push_f(); break;
3121     case Bytecodes::_fast_lputfield: __ push_l(r0); break;
3122 
3123     default:
3124       ShouldNotReachHere();
3125     }
3126     __ mov(c_rarg3, esp);             // points to jvalue on the stack
3127     // access constant pool cache entry
3128     __ get_cache_entry_pointer_at_bcp(c_rarg2, r0, 1);
3129     __ verify_oop(r19);
3130     // r19: object pointer copied above
3131     // c_rarg2: cache entry pointer
3132     // c_rarg3: jvalue object on the stack
3133     __ call_VM(noreg,
3134                CAST_FROM_FN_PTR(address,
3135                                 InterpreterRuntime::post_field_modification),
3136                r19, c_rarg2, c_rarg3);
3137 
3138     switch (bytecode()) {             // restore tos values
 3139     case Bytecodes::_fast_qputfield: // fall through
3140     case Bytecodes::_fast_aputfield: __ pop_ptr(r0); break;
3141     case Bytecodes::_fast_bputfield: // fall through
3142     case Bytecodes::_fast_zputfield: // fall through
3143     case Bytecodes::_fast_sputfield: // fall through
3144     case Bytecodes::_fast_cputfield: // fall through
3145     case Bytecodes::_fast_iputfield: __ pop_i(r0); break;
3146     case Bytecodes::_fast_dputfield: __ pop_d(); break;
3147     case Bytecodes::_fast_fputfield: __ pop_f(); break;
3148     case Bytecodes::_fast_lputfield: __ pop_l(r0); break;
3149     default: break;
3150     }
3151     __ bind(L2);
3152   }
3153 }
3154 
3155 void TemplateTable::fast_storefield(TosState state)
3156 {
3157   transition(state, vtos);
3158 
3159   ByteSize base = ConstantPoolCache::base_offset();

3173   // replace index with field offset from cache entry
3174   __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3175 
3176   {
3177     Label notVolatile;
3178     __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3179     __ membar(MacroAssembler::StoreStore | MacroAssembler::LoadStore);
3180     __ bind(notVolatile);
3181   }
3182 
3183   Label notVolatile;
3184 
3185   // Get object from stack
3186   pop_and_check_object(r2);
3187 
3188   // field address
3189   const Address field(r2, r1);
3190 
3191   // access field
3192   switch (bytecode()) {
3193   case Bytecodes::_fast_qputfield: //fall through
3194    {
3195       Label is_inlined, done;
3196       __ null_check(r0);
3197       __ test_field_is_inlined(r3, noreg /* temp */, is_inlined);
3198       // field is not inlined
3199       do_oop_store(_masm, field, r0, IN_HEAP);
3200       __ b(done);
3201       __ bind(is_inlined);
3202       // field is inlined
3203       __ load_klass(r4, r0);
3204       __ data_for_oop(r0, r0, r4);
3205       __ lea(rscratch1, field);
3206       __ access_value_copy(IN_HEAP, r0, rscratch1, r4);
3207       __ bind(done);
3208     }
3209     break;
3210   case Bytecodes::_fast_aputfield:
3211     do_oop_store(_masm, field, r0, IN_HEAP);
3212     break;
3213   case Bytecodes::_fast_lputfield:
3214     __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
3215     break;
3216   case Bytecodes::_fast_iputfield:
3217     __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3218     break;
3219   case Bytecodes::_fast_zputfield:
3220     __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
3221     break;
3222   case Bytecodes::_fast_bputfield:
3223     __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
3224     break;
3225   case Bytecodes::_fast_sputfield:
3226     __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
3227     break;
3228   case Bytecodes::_fast_cputfield:
3229     __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);

3287   // r0: object
3288   __ verify_oop(r0);
3289   __ null_check(r0);
3290   const Address field(r0, r1);
3291 
3292   // 8179954: We need to make sure that the code generated for
3293   // volatile accesses forms a sequentially-consistent set of
3294   // operations when combined with STLR and LDAR.  Without a leading
3295   // membar it's possible for a simple Dekker test to fail if loads
3296   // use LDR;DMB but stores use STLR.  This can happen if C2 compiles
3297   // the stores in one method and we interpret the loads in another.
3298   if (!CompilerConfig::is_c1_or_interpreter_only_no_jvmci()) {
3299     Label notVolatile;
3300     __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3301     __ membar(MacroAssembler::AnyAny);
3302     __ bind(notVolatile);
3303   }
3304 
3305   // access field
3306   switch (bytecode()) {
3307   case Bytecodes::_fast_qgetfield:
3308     {
3309       Register index = r4, klass = r5, inline_klass = r6, tmp = r7;
3310       Label is_inlined, nonnull, Done;
3311       __ test_field_is_inlined(r3, noreg /* temp */, is_inlined);
3312         // field is not inlined
3313         __ load_heap_oop(r0, field);
3314         __ cbnz(r0, nonnull);
3315           __ andw(index, r3, ConstantPoolCacheEntry::field_index_mask);
3316           __ ldr(klass, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3317                                              ConstantPoolCacheEntry::f1_offset())));
3318           __ get_inline_type_field_klass(klass, index, inline_klass);
3319           __ get_default_value_oop(inline_klass, tmp /* temp */, r0);
3320         __ bind(nonnull);
3321         __ verify_oop(r0);
3322         __ b(Done);
3323       __ bind(is_inlined);
3324       // field is inlined
3325         __ andw(index, r3, ConstantPoolCacheEntry::field_index_mask);
3326         __ ldr(klass, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3327                                            ConstantPoolCacheEntry::f1_offset())));
3328         __ read_inlined_field(klass, index, r1, tmp /* temp */, r0);
3329         __ verify_oop(r0);
3330       __ bind(Done);
3331     }
3332     break;
3333   case Bytecodes::_fast_agetfield:
3334     do_oop_load(_masm, field, r0, IN_HEAP);
3335     __ verify_oop(r0);
3336     break;
3337   case Bytecodes::_fast_lgetfield:
3338     __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3339     break;
3340   case Bytecodes::_fast_igetfield:
3341     __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3342     break;
3343   case Bytecodes::_fast_bgetfield:
3344     __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3345     break;
3346   case Bytecodes::_fast_sgetfield:
3347     __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3348     break;
3349   case Bytecodes::_fast_cgetfield:
3350     __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3351     break;
3352   case Bytecodes::_fast_fgetfield:

3743   // %%% should make a type profile for any invokedynamic that takes a ref argument
3744   // profile this call
3745   __ profile_call(rbcp);
3746   __ profile_arguments_type(r3, rmethod, r13, false);
3747 
3748   __ verify_oop(r0);
3749 
3750   __ jump_from_interpreted(rmethod, r0);
3751 }
3752 
3753 
3754 //-----------------------------------------------------------------------------
3755 // Allocation
3756 
3757 void TemplateTable::_new() {
3758   transition(vtos, atos);
3759 
3760   __ get_unsigned_2_byte_index_at_bcp(r3, 1);
3761   Label slow_case;
3762   Label done;
3763   Label is_not_value;
3764   Label initialize_header;
3765   Label initialize_object; // including clearing the fields
3766 
3767   __ get_cpool_and_tags(r4, r0);
3768   // Make sure the class we're about to instantiate has been resolved.
3769   // This is done before loading InstanceKlass to be consistent with the order
 3770   // in which the constant pool is updated (see ConstantPool::klass_at_put)
3771   const int tags_offset = Array<u1>::base_offset_in_bytes();
3772   __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3773   __ lea(rscratch1, Address(rscratch1, tags_offset));
3774   __ ldarb(rscratch1, rscratch1);
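  // ldarb is a load-acquire; presumably it pairs with a releasing store of
  // the tag on the resolution side, giving the ordering with the klass
  // update that the comment above requires.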
3775   __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3776   __ br(Assembler::NE, slow_case);
3777 
3778   // get InstanceKlass
3779   __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3780 
3781   __ ldrb(rscratch1, Address(r4, InstanceKlass::kind_offset()));
3782   __ cmp(rscratch1, (u1)InstanceKlass::_kind_inline_type);
3783   __ br(Assembler::NE, is_not_value);
3784 
3785   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_InstantiationError));
3786 
3787   __ bind(is_not_value);
3788 
 3789   // make sure klass is fully initialized and doesn't have a finalizer
3791   __ ldrb(rscratch1, Address(r4, InstanceKlass::init_state_offset()));
3792   __ cmp(rscratch1, (u1)InstanceKlass::fully_initialized);
3793   __ br(Assembler::NE, slow_case);
3794 
3795   __ allocate_instance(r4, r0, r3, r1, true, slow_case);
3796   __ b(done);

3797 
3798   // slow case
3799   __ bind(slow_case);
3800   __ get_constant_pool(c_rarg1);
3801   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3802   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3803   __ verify_oop(r0);
3804 
3805   // continue
3806   __ bind(done);
3807   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3808   __ membar(Assembler::StoreStore);
3809 }
3810 
3811 void TemplateTable::defaultvalue() {
3812   transition(vtos, atos);
3813   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3814   __ get_constant_pool(c_rarg1);
3815   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
3816           c_rarg1, c_rarg2);
3817   __ verify_oop(r0);
3818   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3819   __ membar(Assembler::StoreStore);
3820 }
3821 
3822 void TemplateTable::withfield() {
3823   transition(vtos, atos);
3824   resolve_cache_and_index(f2_byte, c_rarg1 /*cache*/, c_rarg2 /*index*/, sizeof(u2));
3825 
3826   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3827 
 3828   // n.b. unlike x86, cache is now rcpool plus the indexed offset
3829   __ lea(c_rarg1, Address(c_rarg1, in_bytes(cp_base_offset)));
3830 
3831   __ lea(c_rarg2, at_tos());
3832   call_VM(r1, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), c_rarg1, c_rarg2);
3833   // new value type is returned in r1
3834   // stack adjustment is returned in r0
3835   __ verify_oop(r1);
3836   __ add(esp, esp, r0);
3837   __ mov(r0, r1);
3838 }
3839 
3840 void TemplateTable::newarray() {
3841   transition(itos, atos);
3842   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3843   __ mov(c_rarg2, r0);
3844   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3845           c_rarg1, c_rarg2);
3846   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3847   __ membar(Assembler::StoreStore);
3848 }
3849 
3850 void TemplateTable::anewarray() {
3851   transition(itos, atos);
3852   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3853   __ get_constant_pool(c_rarg1);
3854   __ mov(c_rarg3, r0);
3855   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3856           c_rarg1, c_rarg2, c_rarg3);
3857   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3858   __ membar(Assembler::StoreStore);
3859 }
3860 
3861 void TemplateTable::arraylength() {
3862   transition(atos, itos);
3863   __ null_check(r0, arrayOopDesc::length_offset_in_bytes());
3864   __ ldrw(r0, Address(r0, arrayOopDesc::length_offset_in_bytes()));
3865 }
3866 
3867 void TemplateTable::checkcast()
3868 {
3869   transition(atos, atos);
3870   Label done, is_null, ok_is_subtype, quicked, resolved;
3871   __ cbz(r0, is_null);
3872 
3873   // Get cpool & tags index
3874   __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3875   __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3876   // See if bytecode has already been quicked
3877   __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3878   __ lea(r1, Address(rscratch1, r19));
3879   __ ldarb(r1, r1);
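  // The tag byte may carry JVM_CONSTANT_QDescBit for a Q-descriptor
  // (null-free inline type); masking it off lets both flavours compare
  // equal to JVM_CONSTANT_Class below.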
3880   __ andr(r1, r1, ~JVM_CONSTANT_QDescBit);
3881   __ cmp(r1, (u1)JVM_CONSTANT_Class);
3882   __ br(Assembler::EQ, quicked);
3883 
3884   __ push(atos); // save receiver for result, and for GC
3885   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3886   // vm_result_2 has metadata result
3887   __ get_vm_result_2(r0, rthread);
3888   __ pop(r3); // restore receiver
3889   __ b(resolved);
3890 
3891   // Get superklass in r0 and subklass in r3
3892   __ bind(quicked);
3893   __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3894   __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3895 
3896   __ bind(resolved);
3897   __ load_klass(r19, r3);
3898 
3899   // Generate subtype check.  Blows r2, r5.  Object in r3.
3900   // Superklass in r0.  Subklass in r19.
3901   __ gen_subtype_check(r19, ok_is_subtype);
3902 
3903   // Come here on failure
3904   __ push(r3);
3905   // object is at TOS
3906   __ b(Interpreter::_throw_ClassCastException_entry);
3907 
3908   // Come here on success
3909   __ bind(ok_is_subtype);
 3910   __ mov(r0, r3); // Restore object from r3
3911 
3912   __ b(done);
3913   __ bind(is_null);
3914 
3915   // Collect counts on whether this test sees NULLs a lot or not.
3916   if (ProfileInterpreter) {
 3917     __ profile_null_seen(r2);
3918   }
3919 
3920   if (EnableValhalla) {
3921     // Get cpool & tags index
3922     __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3923     __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3924      // See if bytecode has already been quicked
3925     __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3926     __ lea(r1, Address(rscratch1, r19));
3927     __ ldarb(r1, r1);
3928     // See if CP entry is a Q-descriptor
 3929     __ andr(r1, r1, JVM_CONSTANT_QDescBit);
 3930     __ cmp(r1, (u1)JVM_CONSTANT_QDescBit);
3931     __ br(Assembler::NE, done);
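    // The entry is a Q-descriptor: null can never be cast to a null-free
    // inline type, so throw NullPointerException.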
3932     __ b(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
3933   }
3934 
3935   __ bind(done);
3936 }
3937 
3938 void TemplateTable::instanceof() {
3939   transition(atos, itos);
3940   Label done, is_null, ok_is_subtype, quicked, resolved;
3941   __ cbz(r0, is_null);
3942 
3943   // Get cpool & tags index
3944   __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3945   __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3946   // See if bytecode has already been quicked
3947   __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3948   __ lea(r1, Address(rscratch1, r19));
3949   __ ldarb(r1, r1);
3950   __ andr(r1, r1, ~JVM_CONSTANT_QDescBit);
3951   __ cmp(r1, (u1)JVM_CONSTANT_Class);
3952   __ br(Assembler::EQ, quicked);
3953 
3954   __ push(atos); // save receiver for result, and for GC
3955   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3956   // vm_result_2 has metadata result
3957   __ get_vm_result_2(r0, rthread);
3958   __ pop(r3); // restore receiver
3959   __ verify_oop(r3);
3960   __ load_klass(r3, r3);
3961   __ b(resolved);
3962 
3963   // Get superklass in r0 and subklass in r3
3964   __ bind(quicked);
3965   __ load_klass(r3, r0);
3966   __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1);
3967 
3968   __ bind(resolved);
3969 
3970   // Generate subtype check.  Blows r2, r5

4034 //       in the assembly code structure as well
4035 //
4036 // Stack layout:
4037 //
4038 // [expressions  ] <--- esp               = expression stack top
4039 // ..
4040 // [expressions  ]
4041 // [monitor entry] <--- monitor block top = expression stack bot
4042 // ..
4043 // [monitor entry]
4044 // [frame data   ] <--- monitor block bot
4045 // ...
 4046 // [saved rfp    ] <--- rfp
4047 void TemplateTable::monitorenter()
4048 {
4049   transition(atos, vtos);
4050 
4051   // check for NULL object
4052   __ null_check(r0);
4053 
4054   Label is_inline_type;
4055   __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4056   __ test_markword_is_inline_type(rscratch1, is_inline_type);
4057 
4058   const Address monitor_block_top(
4059         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4060   const Address monitor_block_bot(
4061         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4062   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4063 
4064   Label allocated;
4065 
4066   // initialize entry pointer
4067   __ mov(c_rarg1, zr); // points to free slot or NULL
4068 
4069   // find a free slot in the monitor block (result in c_rarg1)
4070   {
4071     Label entry, loop, exit;
4072     __ ldr(c_rarg3, monitor_block_top); // points to current entry,
4073                                         // starting with top-most entry
4074     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4075 
4076     __ b(entry);
4077 

4127   // c_rarg1: points to monitor entry
4128   __ bind(allocated);
4129 
4130   // Increment bcp to point to the next bytecode, so exception
4131   // handling for async. exceptions work correctly.
 4132   // The object has already been popped from the stack, so the
4133   // expression stack looks correct.
4134   __ increment(rbcp);
4135 
4136   // store object
4137   __ str(r0, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
4138   __ lock_object(c_rarg1);
4139 
4140   // check to make sure this monitor doesn't cause stack overflow after locking
4141   __ save_bcp();  // in case of exception
4142   __ generate_stack_overflow_check(0);
4143 
4144   // The bcp has already been incremented. Just need to dispatch to
4145   // next instruction.
4146   __ dispatch_next(vtos);
4147 
4148   __ bind(is_inline_type);
4149   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4150                     InterpreterRuntime::throw_illegal_monitor_state_exception));
4151   __ should_not_reach_here();
4152 }
4153 
4154 
4155 void TemplateTable::monitorexit()
4156 {
4157   transition(atos, vtos);
4158 
4159   // check for NULL object
4160   __ null_check(r0);
4161 
4162   const int is_inline_type_mask = markWord::inline_type_pattern;
4163   Label has_identity;
4164   __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
4165   __ mov(rscratch2, is_inline_type_mask);
4166   __ andr(rscratch1, rscratch1, rscratch2);
4167   __ cmp(rscratch1, rscratch2);
4168   __ br(Assembler::NE, has_identity);
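  // The mark word matches the inline-type pattern: inline types have no
  // identity and can never have been locked, so monitorexit is illegal.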
4169   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4170                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4171   __ should_not_reach_here();
4172   __ bind(has_identity);
4173 
4174   const Address monitor_block_top(
4175         rfp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4176   const Address monitor_block_bot(
4177         rfp, frame::interpreter_frame_initial_sp_offset * wordSize);
4178   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4179 
4180   Label found;
4181 
4182   // find matching slot
4183   {
4184     Label entry, loop;
4185     __ ldr(c_rarg1, monitor_block_top); // points to current entry,
4186                                         // starting with top-most entry
4187     __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
4188                                         // of monitor block
4189     __ b(entry);
4190 
4191     __ bind(loop);
4192     // check if current entry is for same object
4193     __ ldr(rscratch1, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));