src/hotspot/cpu/x86/templateTable_x86.cpp

  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodCounters.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/oop.inline.hpp"

  40 #include "oops/resolvedFieldEntry.hpp"
  41 #include "oops/resolvedIndyEntry.hpp"
  42 #include "oops/resolvedMethodEntry.hpp"
  43 #include "prims/jvmtiExport.hpp"
  44 #include "prims/methodHandles.hpp"
  45 #include "runtime/frame.inline.hpp"
  46 #include "runtime/safepointMechanism.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubRoutines.hpp"
  49 #include "runtime/synchronizer.hpp"
  50 #include "utilities/macros.hpp"
  51 
  52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  53 
  54 // Global Register Names
  55 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  56 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  57 
  58 // Address Computation: local variables
  59 static inline Address iaddress(int n) {

 166 static void do_oop_load(InterpreterMacroAssembler* _masm,
 167                         Address src,
 168                         Register dst,
 169                         DecoratorSet decorators = 0) {
 170   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 171 }
 172 
 173 Address TemplateTable::at_bcp(int offset) {
 174   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 175   return Address(rbcp, offset);
 176 }
 177 
 178 
 179 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 180                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 181                                    int byte_no) {
 182   if (!RewriteBytecodes)  return;
 183   Label L_patch_done;
 184 
 185   switch (bc) {

 186   case Bytecodes::_fast_aputfield:
 187   case Bytecodes::_fast_bputfield:
 188   case Bytecodes::_fast_zputfield:
 189   case Bytecodes::_fast_cputfield:
 190   case Bytecodes::_fast_dputfield:
 191   case Bytecodes::_fast_fputfield:
 192   case Bytecodes::_fast_iputfield:
 193   case Bytecodes::_fast_lputfield:
 194   case Bytecodes::_fast_sputfield:
 195     {
 196       // We skip bytecode quickening for putfield instructions when
 197       // the put_code written to the constant pool cache is zero.
 198       // This is required so that every execution of this instruction
 199       // calls out to InterpreterRuntime::resolve_get_put to do
 200       // additional, required work.
 201       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 202       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 203       __ load_field_entry(temp_reg, bc_reg);
 204       if (byte_no == f1_byte) {
 205         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
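
Editorial aside, not part of the webrev: the guard this hunk emits can be read as plain C++. ResolvedFieldEntrySketch is a stub standing in for ResolvedFieldEntry, whose get_code/put_code bytes are what the *_offset() loads above fetch.

    // Hedged sketch of the quickening guard generated above (stubbed types).
    struct ResolvedFieldEntrySketch { unsigned char get_code, put_code; };

    static bool may_quicken(const ResolvedFieldEntrySketch* e, bool use_get_code) {
      // A zero code means the entry is unresolved: keep the slow bytecode so
      // every execution still calls InterpreterRuntime::resolve_get_put.
      unsigned char code = use_get_code ? e->get_code : e->put_code;
      return code != 0;   // non-zero: safe to patch in the _fast_* bytecode
    }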

 364   __ sarl(rax, 16);
 365 }
 366 
 367 void TemplateTable::ldc(LdcType type) {
 368   transition(vtos, vtos);
 369   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 370   Label call_ldc, notFloat, notClass, notInt, Done;
 371 
 372   if (is_ldc_wide(type)) {
 373     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 374   } else {
 375     __ load_unsigned_byte(rbx, at_bcp(1));
 376   }
 377 
 378   __ get_cpool_and_tags(rcx, rax);
 379   const int base_offset = ConstantPool::header_size() * wordSize;
 380   const int tags_offset = Array<u1>::base_offset_in_bytes();
 381 
 382   // get type
 383   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));

 384 
 385   // unresolved class - get the resolved class
 386   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 387   __ jccb(Assembler::equal, call_ldc);
 388 
 389   // unresolved class in error state - call into runtime to throw the error
 390   // from the first resolution attempt
 391   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 392   __ jccb(Assembler::equal, call_ldc);
 393 
 394   // resolved class - need to call vm to get java mirror of the class
 395   __ cmpl(rdx, JVM_CONSTANT_Class);
 396   __ jcc(Assembler::notEqual, notClass);
 397 
 398   __ bind(call_ldc);
 399 
 400   __ movl(rarg, is_ldc_wide(type) ? 1 : 0);
 401   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 402 
 403   __ push(atos);
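
Editorial aside (restating the compares above; the enum names below stand in for the jvm.h tag constants): the three class-like tags all funnel into the same runtime call at call_ldc.

    // Sketch of the ldc tag dispatch for class-like constants.
    enum TagSketch { UnresolvedClass, UnresolvedClassInError, ResolvedClass, Other };

    static bool ldc_needs_runtime_sketch(TagSketch tag) {
      switch (tag) {
        case UnresolvedClass:         // resolve now, then get the mirror
        case UnresolvedClassInError:  // rethrow the first resolution error
        case ResolvedClass:           // still need the VM for the java mirror
          return true;                // call_ldc: InterpreterRuntime::ldc(wide)
        default:
          return false;               // notClass: int/float handled inline below
      }
    }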

 813                     Address(rdx, rax,
 814                             Address::times_4,
 815                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 816                     noreg, noreg);
 817 }
 818 
 819 void TemplateTable::daload() {
 820   transition(itos, dtos);
 821   // rax: index
 822   // rdx: array
 823   index_check(rdx, rax); // kills rbx
 824   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 825                     Address(rdx, rax,
 826                             Address::times_8,
 827                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 828                     noreg, noreg);
 829 }
 830 
 831 void TemplateTable::aaload() {
 832   transition(itos, atos);
 833   // rax: index
 834   // rdx: array
 835   index_check(rdx, rax); // kills rbx
 836   do_oop_load(_masm,
 837               Address(rdx, rax,
 838                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 839                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 840               rax,
 841               IS_ARRAY);
 842 }
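
Editorial note on the addressing mode above: with compressed oops each element is a 4-byte narrowOop, otherwise a full pointer, hence times_4 versus times_ptr. A minimal sketch of the effective address, with header standing for arrayOopDesc::base_offset_in_bytes(T_OBJECT):

    // addr = array base + object-array header + index * element size
    static char* aaload_address_sketch(char* array_base, long index,
                                       bool use_compressed_oops, int header) {
      long scale = use_compressed_oops ? 4 : (long)sizeof(void*);
      return array_base + header + index * scale;
    }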
 843 
 844 void TemplateTable::baload() {
 845   transition(itos, itos);
 846   // rax: index
 847   // rdx: array
 848   index_check(rdx, rax); // kills rbx
 849   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 850                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 851                     noreg, noreg);
 852 }
 853 
 854 void TemplateTable::caload() {
 855   transition(itos, itos);
 856   // rax: index
 857   // rdx: array
 858   index_check(rdx, rax); // kills rbx
 859   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 860                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 861                     noreg, noreg);

1107   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1108                      Address(rdx, rbx, Address::times_4,
1109                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1110                      noreg /* ftos */, noreg, noreg, noreg);
1111 }
1112 
1113 void TemplateTable::dastore() {
1114   transition(dtos, vtos);
1115   __ pop_i(rbx);
1116   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1117   // rbx:  index
1118   // rdx:  array
1119   index_check(rdx, rbx); // prefer index in rbx
1120   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1121                      Address(rdx, rbx, Address::times_8,
1122                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1123                      noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125 
1126 void TemplateTable::aastore() {
1127   Label is_null, ok_is_subtype, done;
1128   transition(vtos, vtos);
1129   // stack: ..., array, index, value
1130   __ movptr(rax, at_tos());    // value
1131   __ movl(rcx, at_tos_p1()); // index
1132   __ movptr(rdx, at_tos_p2()); // array
1133 
1134   Address element_address(rdx, rcx,
1135                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1136                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1137 
1138   index_check_without_pop(rdx, rcx);     // kills rbx
1139   __ testptr(rax, rax);
1140   __ jcc(Assembler::zero, is_null);
1141 
1142   // Move subklass into rbx
1143   __ load_klass(rbx, rax, rscratch1);
1144   // Move superklass into rax
1145   __ load_klass(rax, rdx, rscratch1);
1146   __ movptr(rax, Address(rax,
1147                          ObjArrayKlass::element_klass_offset()));
1148 
1149   // Generate subtype check.  Blows rcx, rdi
1150   // Superklass in rax.  Subklass in rbx.
1151   __ gen_subtype_check(rbx, ok_is_subtype);

1152 
1153   // Come here on failure
1154   // object is at TOS
1155   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1156 
1157   // Come here on success
1158   __ bind(ok_is_subtype);
1159 
1160   // Get the value we will store
1161   __ movptr(rax, at_tos());
1162   __ movl(rcx, at_tos_p1()); // index
1163   // Now store using the appropriate barrier
1164   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1165   __ jmp(done);
1166 
1167   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1168   __ bind(is_null);
1169   __ profile_null_seen(rbx);
1170 
1171   // Store a null
1172   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1173 
1174   // Pop stack arguments
1175   __ bind(done);
1176   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1177 }
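
Reading aid, restating the control flow above with stubbed VM types: a null store always succeeds (it only feeds the profile), while a non-null store requires the value's class to be a subtype of the array's element class.

    // Sketch of the aastore legality check; element_klass stands for the
    // ObjArrayKlass::element_klass() value loaded into rax above.
    struct KlassSketch { bool is_subtype_of(const KlassSketch* k) const; };
    struct OopSketch   { const KlassSketch* klass; };

    static bool aastore_allowed_sketch(const OopSketch* value,
                                       const KlassSketch* element_klass) {
      if (value == nullptr) return true;                   // is_null path
      return value->klass->is_subtype_of(element_klass);   // else ArrayStoreException
    }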
1178 
1179 void TemplateTable::bastore() {
1180   transition(itos, vtos);
1181   __ pop_i(rbx);
1182   // rax: value
1183   // rbx: index
1184   // rdx: array
1185   index_check(rdx, rbx); // prefer index in rbx
1186   // Need to check whether array is boolean or byte
1187   // since both types share the bastore bytecode.
1188   __ load_klass(rcx, rdx, rscratch1);
1189   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1190   int diffbit = Klass::layout_helper_boolean_diffbit();
1191   __ testl(rcx, diffbit);
1192   Label L_skip;
1193   __ jccb(Assembler::zero, L_skip);
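
Editorial aside: the diffbit test above tells boolean[] apart from byte[], which share the bastore bytecode; when the bit is set, the value must be truncated to 0/1 before the store (the masking instruction sits just past this hunk). A sketch:

    // Sketch of the truncation the layout-helper diffbit selects.
    static int bastore_value_sketch(int value, int layout_helper, int diffbit) {
      if (layout_helper & diffbit)   // boolean[]: store only the low bit
        value &= 1;
      return value;                  // byte[]: stored as-is
    }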

2322   __ jcc(j_not(cc), not_taken);
2323   branch(false, false);
2324   __ bind(not_taken);
2325   __ profile_not_taken_branch(rax);
2326 }
2327 
2328 void TemplateTable::if_nullcmp(Condition cc) {
2329   transition(atos, vtos);
2330   // assume branch is more often taken than not (loops use backward branches)
2331   Label not_taken;
2332   __ testptr(rax, rax);
2333   __ jcc(j_not(cc), not_taken);
2334   branch(false, false);
2335   __ bind(not_taken);
2336   __ profile_not_taken_branch(rax);
2337 }
2338 
2339 void TemplateTable::if_acmp(Condition cc) {
2340   transition(atos, vtos);
2341   // assume branch is more often taken than not (loops use backward branches)
2342   Label not_taken;
2343   __ pop_ptr(rdx);
2344   __ cmpoop(rdx, rax);
2345   __ jcc(j_not(cc), not_taken);

2346   branch(false, false);
2347   __ bind(not_taken);
2348   __ profile_not_taken_branch(rax);
2349 }
2350 
2351 void TemplateTable::ret() {
2352   transition(vtos, vtos);
2353   locals_index(rbx);
2354   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2355   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2356   __ profile_ret(rbx, rcx);
2357   __ get_method(rax);
2358   __ movptr(rbcp, Address(rax, Method::const_offset()));
2359   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2360                       ConstMethod::codes_offset()));
2361   __ dispatch_next(vtos, 0, true);
2362 }
2363 
2364 void TemplateTable::wide_ret() {
2365   transition(vtos, vtos);
2366   locals_index_wide(rbx);
2367   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2368   __ profile_ret(rbx, rcx);

2598     const Register thread = rdi;
2599     __ get_thread(thread);
2600     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2601 #endif
2602     __ jcc(Assembler::zero, no_safepoint);
2603     __ push(state);
2604     __ push_cont_fastpath();
2605     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2606                                        InterpreterRuntime::at_safepoint));
2607     __ pop_cont_fastpath();
2608     __ pop(state);
2609     __ bind(no_safepoint);
2610   }
2611 
2612   // Narrow result if state is itos but result type is smaller.
2613   // Need to narrow in the return bytecode rather than in generate_return_entry
2614   // since compiled code callers expect the result to already be narrowed.
2615   if (state == itos) {
2616     __ narrow(rax);
2617   }
2618   __ remove_activation(state, rbcp);

2619 
2620   __ jmp(rbcp);
2621 }
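
Editorial aside on narrow(rax) above: sub-int results live on the operand stack as int, so a byte/short/char/boolean return must be truncated here, before compiled callers see it. For example, assuming a byte return type:

    // Sketch: the effect of narrow() for a method declared to return byte.
    static int narrow_to_byte_sketch(int raw) {
      return (int)(signed char)raw;  // keep the low 8 bits, sign-extended
    }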
2622 
2623 // ----------------------------------------------------------------------------
 2624 // Volatile variables demand their effects be made known to all CPUs
2625 // in order.  Store buffers on most chips allow reads & writes to
2626 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2627 // without some kind of memory barrier (i.e., it's not sufficient that
2628 // the interpreter does not reorder volatile references, the hardware
2629 // also must not reorder them).
2630 //
2631 // According to the new Java Memory Model (JMM):
 2632 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2633 //     writes act as acquire & release, so:
2634 // (2) A read cannot let unrelated NON-volatile memory refs that
2635 //     happen after the read float up to before the read.  It's OK for
2636 //     non-volatile memory refs that happen before the volatile read to
2637 //     float down below it.
 2638 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
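
Editorial aside: as a rough C++11 analogy (an analogy only, not the interpreter's mechanism), the obligations listed above are those of sequentially consistent atomics, which is why the volatile-store paths later in this file emit a StoreLoad|StoreStore barrier after the store.

    #include <atomic>

    // Java volatile access modeled with seq_cst atomics: release on store,
    // acquire on load, plus a single total order over all volatile accesses.
    static std::atomic<int> g_field;
    static void volatile_store_sketch(int v) { g_field.store(v, std::memory_order_seq_cst); }
    static int  volatile_load_sketch()       { return g_field.load(std::memory_order_seq_cst); }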

2964     }
 2965     // rax:    object pointer or null
2966     // cache: cache entry pointer
2967     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2968               rax, cache);
2969 
2970     __ load_field_entry(cache, index);
2971     __ bind(L1);
2972   }
2973 }
2974 
2975 void TemplateTable::pop_and_check_object(Register r) {
2976   __ pop_ptr(r);
2977   __ null_check(r);  // for field access must check obj.
2978   __ verify_oop(r);
2979 }
2980 
2981 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2982   transition(vtos, vtos);
2983 
2984   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2985   const Register cache = rcx;
2986   const Register index = rdx;
2987   const Register off   = rbx;
2988   const Register tos_state   = rax;
2989   const Register flags = rdx;
2990   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2991 
2992   resolve_cache_and_index_for_field(byte_no, cache, index);
2993   jvmti_post_field_access(cache, index, is_static, false);
2994   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2995 
2996   if (!is_static) pop_and_check_object(obj);
2997 
2998   const Address field(obj, off, Address::times_1, 0*wordSize);
2999 
3000   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
3001 
3002   // Make sure we don't need to mask edx after the above shift
3003   assert(btos == 0, "change code, btos != 0");
3004   __ testl(tos_state, tos_state);
3005   __ jcc(Assembler::notZero, notByte);
3006 
3007   // btos

3008   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3009   __ push(btos);
3010   // Rewrite bytecode to be faster
3011   if (!is_static && rc == may_rewrite) {
3012     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3013   }
3014   __ jmp(Done);
3015 
3016   __ bind(notByte);
3017   __ cmpl(tos_state, ztos);
3018   __ jcc(Assembler::notEqual, notBool);
3019 
3020   // ztos (same code as btos)
3021   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3022   __ push(ztos);
3023   // Rewrite bytecode to be faster
3024   if (!is_static && rc == may_rewrite) {
3025     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3026     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3027   }
3028   __ jmp(Done);
3029 
3030   __ bind(notBool);
3031   __ cmpl(tos_state, atos);
3032   __ jcc(Assembler::notEqual, notObj);
3033   // atos
3034   do_oop_load(_masm, field, rax);
3035   __ push(atos);
3036   if (!is_static && rc == may_rewrite) {
3037     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3038   }
3039   __ jmp(Done);
3040 
3041   __ bind(notObj);
3042   __ cmpl(tos_state, itos);
3043   __ jcc(Assembler::notEqual, notInt);
3044   // itos
3045   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3046   __ push(itos);
3047   // Rewrite bytecode to be faster
3048   if (!is_static && rc == may_rewrite) {
3049     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3050   }
3051   __ jmp(Done);
3052 
3053   __ bind(notInt);
3054   __ cmpl(tos_state, ctos);
3055   __ jcc(Assembler::notEqual, notChar);
3056   // ctos
3057   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3058   __ push(ctos);
3059   // Rewrite bytecode to be faster
3060   if (!is_static && rc == may_rewrite) {
3061     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3121 #endif
3122 
3123   __ bind(Done);
3124   // [jk] not needed currently
3125   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3126   //                                              Assembler::LoadStore));
3127 }
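
Editorial aside: the compare-and-branch chain above is a switch over the cached TOS state (enum order assumed to match TosState, with btos == 0 as the assert requires). Restated:

    // Sketch of the getfield dispatch; each arm loads the field with the
    // matching access_load_at() kind, pushes that TOS, and may rewrite the
    // bytecode to its _fast_*getfield form.
    enum TosSketch { btos = 0, ztos, ctos, stos, itos, ltos, ftos, dtos, atos };

    static const char* getfield_arm_sketch(int tos_state) {
      switch (tos_state) {
        case btos: return "T_BYTE    -> _fast_bgetfield";
        case ztos: return "T_BOOLEAN -> _fast_bgetfield (btos rewriting reused)";
        case atos: return "oop load  -> _fast_agetfield";
        case itos: return "T_INT     -> _fast_igetfield";
        case ctos: return "T_CHAR    -> _fast_cgetfield";
        default:   return "remaining kinds follow in the elided branches";
      }
    }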
3128 
3129 void TemplateTable::getfield(int byte_no) {
3130   getfield_or_static(byte_no, false);
3131 }
3132 
3133 void TemplateTable::nofast_getfield(int byte_no) {
3134   getfield_or_static(byte_no, false, may_not_rewrite);
3135 }
3136 
3137 void TemplateTable::getstatic(int byte_no) {
3138   getfield_or_static(byte_no, true);
3139 }
3140 
3141 
3142 // The registers cache and index expected to be set before call.
3143 // The function may destroy various registers, just not the cache and index registers.
3144 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3145   // Cache is rcx and index is rdx
3146   const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3147   const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx);   // Object pointer
3148   const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3149 
3150   if (JvmtiExport::can_post_field_modification()) {
3151     // Check to see if a field modification watch has been set before
3152     // we take the time to call into the VM.
3153     Label L1;
3154     assert_different_registers(cache, obj, rax);
3155     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3156     __ testl(rax, rax);
3157     __ jcc(Assembler::zero, L1);
3158 
3159     __ mov(entry, cache);
3160 

3203     // cache: field entry pointer
3204     // value: jvalue object on the stack
3205     __ call_VM(noreg,
3206               CAST_FROM_FN_PTR(address,
3207                               InterpreterRuntime::post_field_modification),
3208               obj, entry, value);
3209     // Reload field entry
3210     __ load_field_entry(cache, index);
3211     __ bind(L1);
3212   }
3213 }
3214 
3215 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3216   transition(vtos, vtos);
3217 
3218   const Register obj = rcx;
3219   const Register cache = rcx;
3220   const Register index = rdx;
3221   const Register tos_state   = rdx;
3222   const Register off   = rbx;
3223   const Register flags = rax;
3224 
3225   resolve_cache_and_index_for_field(byte_no, cache, index);
3226   jvmti_post_field_mod(cache, index, is_static);
3227   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3228 
3229   // [jk] not needed currently
3230   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3231   //                                              Assembler::StoreStore));
3232 
3233   Label notVolatile, Done;
3234 
3235   // Check for volatile store
3236   __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
3237   __ testl(flags, flags);

3238   __ jcc(Assembler::zero, notVolatile);
3239 
3240   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3241   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3242                                                Assembler::StoreStore));
3243   __ jmp(Done);
3244   __ bind(notVolatile);
3245 
3246   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3247 
3248   __ bind(Done);
3249 }
3250 
3251 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3252                                               Register obj, Register off, Register tos_state) {
3253 
3254   // field addresses
3255   const Address field(obj, off, Address::times_1, 0*wordSize);
3256   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3257 
3258   Label notByte, notBool, notInt, notShort, notChar,
3259         notLong, notFloat, notObj;
3260   Label Done;
3261 
3262   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3263 
3264   // Test TOS state
3265   __ testl(tos_state, tos_state);
3266   __ jcc(Assembler::notZero, notByte);
3267 
3268   // btos
3269   {
3270     __ pop(btos);
3271     if (!is_static) pop_and_check_object(obj);
3272     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3273     if (!is_static && rc == may_rewrite) {
3274       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3275     }
3276     __ jmp(Done);
3277   }
3278 
3279   __ bind(notByte);
3280   __ cmpl(tos_state, ztos);
3281   __ jcc(Assembler::notEqual, notBool);
3282 
3283   // ztos
3284   {
3285     __ pop(ztos);
3286     if (!is_static) pop_and_check_object(obj);
3287     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3288     if (!is_static && rc == may_rewrite) {
3289       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3290     }
3291     __ jmp(Done);
3292   }
3293 
3294   __ bind(notBool);
3295   __ cmpl(tos_state, atos);
3296   __ jcc(Assembler::notEqual, notObj);
3297 
3298   // atos
3299   {
3300     __ pop(atos);
3301     if (!is_static) pop_and_check_object(obj);
3302     // Store into the field
3303     do_oop_store(_masm, field, rax);
3304     if (!is_static && rc == may_rewrite) {
3305       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3306     }
3307     __ jmp(Done);
3308   }
3309 
3310   __ bind(notObj);
3311   __ cmpl(tos_state, itos);
3312   __ jcc(Assembler::notEqual, notInt);
3313 
3314   // itos
3315   {
3316     __ pop(itos);
3317     if (!is_static) pop_and_check_object(obj);
3318     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3319     if (!is_static && rc == may_rewrite) {
3320       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3321     }
3322     __ jmp(Done);
3323   }
3324 
3325   __ bind(notInt);
3326   __ cmpl(tos_state, ctos);
3327   __ jcc(Assembler::notEqual, notChar);

3426 }
3427 
3428 void TemplateTable::jvmti_post_fast_field_mod() {
3429 
3430   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3431 
3432   if (JvmtiExport::can_post_field_modification()) {
3433     // Check to see if a field modification watch has been set before
3434     // we take the time to call into the VM.
3435     Label L2;
3436     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3437     __ testl(scratch, scratch);
3438     __ jcc(Assembler::zero, L2);
3439     __ pop_ptr(rbx);                  // copy the object pointer from tos
3440     __ verify_oop(rbx);
3441     __ push_ptr(rbx);                 // put the object pointer back on tos
3442     // Save tos values before call_VM() clobbers them. Since we have
3443     // to do it for every data type, we use the saved values as the
3444     // jvalue object.
3445     switch (bytecode()) {          // load values into the jvalue object

3446     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3447     case Bytecodes::_fast_bputfield: // fall through
3448     case Bytecodes::_fast_zputfield: // fall through
3449     case Bytecodes::_fast_sputfield: // fall through
3450     case Bytecodes::_fast_cputfield: // fall through
3451     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3452     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3453     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3454     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3455 
3456     default:
3457       ShouldNotReachHere();
3458     }
3459     __ mov(scratch, rsp);             // points to jvalue on the stack
3460     // access constant pool cache entry
3461     LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3462     NOT_LP64(__ load_field_entry(rax, rdx));
3463     __ verify_oop(rbx);
3464     // rbx: object pointer copied above
3465     // c_rarg2: cache entry pointer
3466     // c_rarg3: jvalue object on the stack
3467     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3468     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3469 
3470     switch (bytecode()) {             // restore tos values

3471     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3472     case Bytecodes::_fast_bputfield: // fall through
3473     case Bytecodes::_fast_zputfield: // fall through
3474     case Bytecodes::_fast_sputfield: // fall through
3475     case Bytecodes::_fast_cputfield: // fall through
3476     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3477     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3478     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3479     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3480     default: break;
3481     }
3482     __ bind(L2);
3483   }
3484 }
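
Editorial aside: the pushes above lay the operand out so the stack slot can serve directly as the jvalue passed to post_field_modification. For reference, jvalue is the JNI union of all field kinds (layout as in jni.h, with the j-types spelled as plain C types here):

    // jvalue: one storage slot reused for every field type.
    typedef union jvalue {
      unsigned char z;  signed char b;  unsigned short c;  short s;
      int i;  long long j;  float f;  double d;  void* l;
    } jvalue;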
3485 
3486 void TemplateTable::fast_storefield(TosState state) {
3487   transition(state, vtos);
3488 
3489   Register cache = rcx;
3490 
3491   Label notVolatile, Done;
3492 
3493   jvmti_post_fast_field_mod();
3494 
3495   __ push(rax);
3496   __ load_field_entry(rcx, rax);
3497   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
 3498   // RBX: field offset, RAX: TOS, RDX: flags
3499   __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3500   __ pop(rax);

3501 
3502   // Get object from stack
3503   pop_and_check_object(rcx);
3504 
3505   // field address
3506   const Address field(rcx, rbx, Address::times_1);
3507 
3508   // Check for volatile store
3509   __ testl(rdx, rdx);
3510   __ jcc(Assembler::zero, notVolatile);
3511 
3512   fast_storefield_helper(field, rax);
3513   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3514                                                Assembler::StoreStore));
3515   __ jmp(Done);
3516   __ bind(notVolatile);
3517 
3518   fast_storefield_helper(field, rax);
3519 
3520   __ bind(Done);
3521 }
3522 
3523 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3524 
3525   // access field
3526   switch (bytecode()) {
3527   case Bytecodes::_fast_aputfield:
3528     do_oop_store(_masm, field, rax);
3529     break;
3530   case Bytecodes::_fast_lputfield:
3531 #ifdef _LP64
3532     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3533 #else
3534   __ stop("should not be rewritten");
3535 #endif
3536     break;
3537   case Bytecodes::_fast_iputfield:
3538     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3539     break;
3540   case Bytecodes::_fast_zputfield:
3541     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3542     break;
3543   case Bytecodes::_fast_bputfield:
3544     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3545     break;
3546   case Bytecodes::_fast_sputfield:
3547     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3548     break;

3570     Label L1;
3571     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3572     __ testl(rcx, rcx);
3573     __ jcc(Assembler::zero, L1);
3574     // access constant pool cache entry
3575     LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3576     NOT_LP64(__ load_field_entry(rcx, rdx));
3577     __ verify_oop(rax);
3578     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3579     LP64_ONLY(__ mov(c_rarg1, rax));
3580     // c_rarg1: object pointer copied above
3581     // c_rarg2: cache entry pointer
3582     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3583     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3584     __ pop_ptr(rax); // restore object pointer
3585     __ bind(L1);
3586   }
3587 
3588   // access constant pool cache
3589   __ load_field_entry(rcx, rbx);
3590   __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3591 
3592   // rax: object
3593   __ verify_oop(rax);
3594   __ null_check(rax);
3595   Address field(rax, rbx, Address::times_1);
3596 
3597   // access field
3598   switch (bytecode()) {
3599   case Bytecodes::_fast_agetfield:
3600     do_oop_load(_masm, field, rax);
3601     __ verify_oop(rax);
3602     break;
3603   case Bytecodes::_fast_lgetfield:
3604 #ifdef _LP64
3605     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3606 #else
3607   __ stop("should not be rewritten");
3608 #endif
3609     break;
3610   case Bytecodes::_fast_igetfield:
3611     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3612     break;
3613   case Bytecodes::_fast_bgetfield:
3614     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3615     break;
3616   case Bytecodes::_fast_sgetfield:
3617     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3618     break;

4014 
4015   // Note:  rax_callsite is already pushed
4016 
4017   // %%% should make a type profile for any invokedynamic that takes a ref argument
4018   // profile this call
4019   __ profile_call(rbcp);
4020   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4021 
4022   __ verify_oop(rax_callsite);
4023 
4024   __ jump_from_interpreted(rbx_method, rdx);
4025 }
4026 
4027 //-----------------------------------------------------------------------------
4028 // Allocation
4029 
4030 void TemplateTable::_new() {
4031   transition(vtos, atos);
4032   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4033   Label slow_case;
4034   Label slow_case_no_pop;
4035   Label done;
4036   Label initialize_header;
4037 
4038   __ get_cpool_and_tags(rcx, rax);
4039 
4040   // Make sure the class we're about to instantiate has been resolved.
4041   // This is done before loading InstanceKlass to be consistent with the order
4042   // how Constant Pool is updated (see ConstantPool::klass_at_put)
4043   const int tags_offset = Array<u1>::base_offset_in_bytes();
4044   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4045   __ jcc(Assembler::notEqual, slow_case_no_pop);
4046 
4047   // get InstanceKlass
4048   __ load_resolved_klass_at_index(rcx, rcx, rdx);
 4049   __ push(rcx);  // save the contents of klass for initializing the header
4050 
4051   // make sure klass is initialized & doesn't have finalizer
4052   // make sure klass is fully initialized
4053   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4054   __ jcc(Assembler::notEqual, slow_case);
4055 
4056   // get instance_size in InstanceKlass (scaled to a count of bytes)
4057   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4058   // test to see if it has a finalizer or is malformed in some way
4059   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4060   __ jcc(Assembler::notZero, slow_case);
4061 
4062   // Allocate the instance:
4063   //  If TLAB is enabled:
4064   //    Try to allocate in the TLAB.
4065   //    If fails, go to the slow path.
4066   //    Initialize the allocation.
4067   //    Exit.
4068   //
4069   //  Go to slow path.
4070 
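
Reading aid for the plan above (TLAB fields assumed; not the generated code): tlab_allocate is a pointer bump against the thread-local top/end, with every failure funneled to the slow path.

    // Sketch of the TLAB fast path.
    struct TlabSketch { char* top; char* end; };

    static char* tlab_allocate_sketch(TlabSketch* tlab, long size_in_bytes) {
      char* obj = tlab->top;
      if (size_in_bytes > tlab->end - obj)
        return nullptr;               // slow_case: InterpreterRuntime::_new
      tlab->top = obj + size_in_bytes;
      return obj;                     // body zeroed next, unless ZeroTLAB did it
    }
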
4071   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);

4072 
4073   if (UseTLAB) {
4074     NOT_LP64(__ get_thread(thread);)
4075     __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4076     if (ZeroTLAB) {
 4077       // the fields have already been cleared
4078       __ jmp(initialize_header);
4079     }
4080 
4081     // The object is initialized before the header.  If the object size is
4082     // zero, go directly to the header initialization.
4083     __ decrement(rdx, sizeof(oopDesc));
4084     __ jcc(Assembler::zero, initialize_header);
4085 
4086     // Initialize topmost object field, divide rdx by 8, check if odd and
4087     // test if zero.
4088     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
4089     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4090 
4091     // rdx must have been multiple of 8
4092 #ifdef ASSERT
4093     // make sure rdx was multiple of 8
4094     Label L;
4095     // Ignore partial flag stall after shrl() since it is debug VM
4096     __ jcc(Assembler::carryClear, L);
4097     __ stop("object size is not multiple of 2 - adjust this code");
4098     __ bind(L);
4099     // rdx must be > 0, no extra check needed here
4100 #endif
4101 
4102     // initialize remaining object fields: rdx was a multiple of 8
4103     { Label loop;
4104     __ bind(loop);
4105     __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4106     NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4107     __ decrement(rdx);
4108     __ jcc(Assembler::notZero, loop);
4109     }
4110 
4111     // initialize object header only.
4112     __ bind(initialize_header);
4113     __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4114               (intptr_t)markWord::prototype().value()); // header
4115     __ pop(rcx);   // get saved klass back in the register.
4116 #ifdef _LP64
4117     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4118     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4119 #endif
4120     __ store_klass(rax, rcx, rscratch1);  // klass
4121 
4122     {
4123       SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0, rscratch1);
4124       // Trigger dtrace event for fastpath
4125       __ push(atos);
4126       __ call_VM_leaf(
4127            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4128       __ pop(atos);
4129     }
4130 
4131     __ jmp(done);
4132   }
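
Editorial aside: the loop above clears the instance body backwards in 8-byte steps, with rdx holding the remaining size in longwords. An equivalent sketch (word counts assumed):

    // Sketch of the field-clearing loop; header_words = sizeof(oopDesc) / 8.
    static void clear_body_sketch(long long* obj, long header_words, long body_words) {
      for (long i = body_words; i != 0; i--)    // decrement(rdx); jcc(notZero, loop)
        obj[header_words + i - 1] = 0;          // movptr(..., rcx /* zero */)
    }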
4133 
4134   // slow case
4135   __ bind(slow_case);
4136   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4137   __ bind(slow_case_no_pop);
4138 
4139   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4140   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4141 
4142   __ get_constant_pool(rarg1);
4143   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4144   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4145    __ verify_oop(rax);
4146 
4147   // continue
4148   __ bind(done);

4149 }
4150 
4151 void TemplateTable::newarray() {
4152   transition(itos, atos);
4153   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4154   __ load_unsigned_byte(rarg1, at_bcp(1));
4155   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4156           rarg1, rax);
4157 }
4158 
4159 void TemplateTable::anewarray() {
4160   transition(itos, atos);
4161 
4162   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4163   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4164 
4165   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4166   __ get_constant_pool(rarg1);
4167   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4168           rarg1, rarg2, rax);
4169 }
4170 
4171 void TemplateTable::arraylength() {
4172   transition(atos, itos);
4173   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4174 }
4175 
4176 void TemplateTable::checkcast() {
4177   transition(atos, atos);
4178   Label done, is_null, ok_is_subtype, quicked, resolved;
4179   __ testptr(rax, rax); // object is in rax
4180   __ jcc(Assembler::zero, is_null);
4181 
4182   // Get cpool & tags index
4183   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4184   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4185   // See if bytecode has already been quicked
4186   __ cmpb(Address(rdx, rbx,
4187                   Address::times_1,
4188                   Array<u1>::base_offset_in_bytes()),
4189           JVM_CONSTANT_Class);

4190   __ jcc(Assembler::equal, quicked);
4191   __ push(atos); // save receiver for result, and for GC
4192   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4193 
4194   // vm_result_2 has metadata result
4195 #ifndef _LP64
4196   // borrow rdi from locals
4197   __ get_thread(rdi);
4198   __ get_vm_result_2(rax, rdi);
4199   __ restore_locals();
4200 #else
4201   __ get_vm_result_2(rax, r15_thread);
4202 #endif
4203 
4204   __ pop_ptr(rdx); // restore receiver
4205   __ jmpb(resolved);
4206 
4207   // Get superklass in rax and subklass in rbx
4208   __ bind(quicked);
4209   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4210   __ load_resolved_klass_at_index(rax, rcx, rbx);
4211 
4212   __ bind(resolved);
4213   __ load_klass(rbx, rdx, rscratch1);
4214 
4215   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4216   // Superklass in rax.  Subklass in rbx.
4217   __ gen_subtype_check(rbx, ok_is_subtype);
4218 
4219   // Come here on failure
4220   __ push_ptr(rdx);
4221   // object is at TOS
4222   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4223 
4224   // Come here on success
4225   __ bind(ok_is_subtype);
4226   __ mov(rax, rdx); // Restore object in rdx
4227 
4228   // Collect counts on whether this check-cast sees nulls a lot or not.
4229   if (ProfileInterpreter) {
4230     __ jmp(done);
4231     __ bind(is_null);
4232     __ profile_null_seen(rcx);
4233   } else {
4234     __ bind(is_null);   // same as 'done'
4235   }
4236   __ bind(done);
4237 }
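
Reading aid with stubbed types: checkcast lets null through (only recording it in the profile) and throws when the subtype check fails, leaving the object on TOS either way.

    // Sketch of checkcast; cp_klass stands for the resolved constant-pool class.
    struct KlassSketch { bool is_subtype_of(const KlassSketch*) const; };
    struct OopSketch   { const KlassSketch* klass; };

    static const OopSketch* checkcast_sketch(const OopSketch* obj,
                                             const KlassSketch* cp_klass,
                                             bool* threw) {
      *threw = false;
      if (obj == nullptr) return obj;                          // is_null path
      if (!obj->klass->is_subtype_of(cp_klass)) *threw = true; // ClassCastException
      return obj;                                              // object stays on TOS
    }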
4238 
4239 void TemplateTable::instanceof() {
4240   transition(atos, itos);
4241   Label done, is_null, ok_is_subtype, quicked, resolved;
4242   __ testptr(rax, rax);
4243   __ jcc(Assembler::zero, is_null);
4244 
4245   // Get cpool & tags index
4246   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4247   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4248   // See if bytecode has already been quicked
4249   __ cmpb(Address(rdx, rbx,
4250                   Address::times_1,
4251                   Array<u1>::base_offset_in_bytes()),
4252           JVM_CONSTANT_Class);

4253   __ jcc(Assembler::equal, quicked);
4254 
4255   __ push(atos); // save receiver for result, and for GC
4256   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4257   // vm_result_2 has metadata result
4258 
4259 #ifndef _LP64
4260   // borrow rdi from locals
4261   __ get_thread(rdi);
4262   __ get_vm_result_2(rax, rdi);
4263   __ restore_locals();
4264 #else
4265   __ get_vm_result_2(rax, r15_thread);
4266 #endif
4267 
4268   __ pop_ptr(rdx); // restore receiver
4269   __ verify_oop(rdx);
4270   __ load_klass(rdx, rdx, rscratch1);
4271   __ jmpb(resolved);
4272 

4284   // Come here on failure
4285   __ xorl(rax, rax);
4286   __ jmpb(done);
4287   // Come here on success
4288   __ bind(ok_is_subtype);
4289   __ movl(rax, 1);
4290 
4291   // Collect counts on whether this test sees nulls a lot or not.
4292   if (ProfileInterpreter) {
4293     __ jmp(done);
4294     __ bind(is_null);
4295     __ profile_null_seen(rcx);
4296   } else {
4297     __ bind(is_null);   // same as 'done'
4298   }
4299   __ bind(done);
4300   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4301   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4302 }
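
The same check as a value-producing sketch, matching the rax result comment above (stubbed types as before):

    struct KlassSketch { bool is_subtype_of(const KlassSketch*) const; };
    struct OopSketch   { const KlassSketch* klass; };

    // instanceof: 0 for null or a failed check, 1 otherwise.
    static int instanceof_sketch(const OopSketch* obj, const KlassSketch* k) {
      return (obj != nullptr && obj->klass->is_subtype_of(k)) ? 1 : 0;
    }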
4303 
4304 
4305 //----------------------------------------------------------------------------------------------------
4306 // Breakpoints
4307 void TemplateTable::_breakpoint() {
 4308   // Note: We get here even if we are single stepping.
4309   // jbug insists on setting breakpoints at every bytecode
4310   // even if we are in single step mode.
4311 
4312   transition(vtos, vtos);
4313 
4314   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4315 
4316   // get the unpatched byte code
4317   __ get_method(rarg);
4318   __ call_VM(noreg,
4319              CAST_FROM_FN_PTR(address,
4320                               InterpreterRuntime::get_original_bytecode_at),
4321              rarg, rbcp);
4322   __ mov(rbx, rax);  // why?
4323 
4324   // post the breakpoint event

4346 // Note: monitorenter & exit are symmetric routines; which is reflected
4347 //       in the assembly code structure as well
4348 //
4349 // Stack layout:
4350 //
4351 // [expressions  ] <--- rsp               = expression stack top
4352 // ..
4353 // [expressions  ]
4354 // [monitor entry] <--- monitor block top = expression stack bot
4355 // ..
4356 // [monitor entry]
4357 // [frame data   ] <--- monitor block bot
4358 // ...
4359 // [saved rbp    ] <--- rbp
4360 void TemplateTable::monitorenter() {
4361   transition(atos, vtos);
4362 
4363   // check for null object
4364   __ null_check(rax);
4365 
4366   const Address monitor_block_top(
4367         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4368   const Address monitor_block_bot(
4369         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4370   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4371 
4372   Label allocated;
4373 
4374   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4375   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4376   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4377 
4378   // initialize entry pointer
4379   __ xorl(rmon, rmon); // points to free slot or null
4380 
4381   // find a free slot in the monitor block (result in rmon)
4382   {
4383     Label entry, loop, exit;
4384     __ movptr(rtop, monitor_block_top); // derelativize pointer
4385     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

4438   // rmon: points to monitor entry
4439   __ bind(allocated);
4440 
4441   // Increment bcp to point to the next bytecode, so exception
4442   // handling for async. exceptions work correctly.
4443   // The object has already been popped from the stack, so the
4444   // expression stack looks correct.
4445   __ increment(rbcp);
4446 
4447   // store object
4448   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4449   __ lock_object(rmon);
4450 
4451   // check to make sure this monitor doesn't cause stack overflow after locking
4452   __ save_bcp();  // in case of exception
4453   __ generate_stack_overflow_check(0);
4454 
4455   // The bcp has already been incremented. Just need to dispatch to
4456   // next instruction.
4457   __ dispatch_next(vtos);
4458 }
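
Editorial aside, matching the stack layout comment above (BasicObjectLock stubbed; the search loop itself is elided from this hunk): the loop walks the monitor block remembering the last free slot, stopping early if some entry already holds the object being locked.

    // Sketch of the free-slot search between monitor_block_top and _bot.
    struct BasicObjectLockSketch { void* obj; };

    static BasicObjectLockSketch*
    find_monitor_slot_sketch(BasicObjectLockSketch* top, BasicObjectLockSketch* bot,
                             void* lock_obj) {
      BasicObjectLockSketch* free_slot = nullptr;
      for (BasicObjectLockSketch* m = top; m != bot; m++) {
        if (m->obj == nullptr)  free_slot = m;  // remember an unused entry
        if (m->obj == lock_obj) break;          // same object: stop searching
      }
      return free_slot;  // nullptr => grow the monitor block by one entry
    }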
4459 
4460 void TemplateTable::monitorexit() {
4461   transition(atos, vtos);
4462 
4463   // check for null object
4464   __ null_check(rax);
4465 
4466   const Address monitor_block_top(
4467         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4468   const Address monitor_block_bot(
4469         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4470   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4471 
4472   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4473   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4474 
4475   Label found;
4476 
4477   // find matching slot
4478   {
4479     Label entry, loop;
4480     __ movptr(rtop, monitor_block_top); // derelativize pointer
4481     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4482     // rtop points to current entry, starting with top-most entry
4483 
4484     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4485                                         // of monitor block

  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodCounters.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/oop.inline.hpp"
  40 #include "oops/inlineKlass.hpp"
  41 #include "oops/resolvedFieldEntry.hpp"
  42 #include "oops/resolvedIndyEntry.hpp"
  43 #include "oops/resolvedMethodEntry.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/frame.inline.hpp"
  47 #include "runtime/safepointMechanism.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "runtime/stubRoutines.hpp"
  50 #include "runtime/synchronizer.hpp"
  51 #include "utilities/macros.hpp"
  52 
  53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  54 
  55 // Global Register Names
  56 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  57 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  58 
  59 // Address Computation: local variables
  60 static inline Address iaddress(int n) {

 167 static void do_oop_load(InterpreterMacroAssembler* _masm,
 168                         Address src,
 169                         Register dst,
 170                         DecoratorSet decorators = 0) {
 171   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 172 }
 173 
 174 Address TemplateTable::at_bcp(int offset) {
 175   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 176   return Address(rbcp, offset);
 177 }
 178 
 179 
 180 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 181                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 182                                    int byte_no) {
 183   if (!RewriteBytecodes)  return;
 184   Label L_patch_done;
 185 
 186   switch (bc) {
 187   case Bytecodes::_fast_qputfield:
 188   case Bytecodes::_fast_aputfield:
 189   case Bytecodes::_fast_bputfield:
 190   case Bytecodes::_fast_zputfield:
 191   case Bytecodes::_fast_cputfield:
 192   case Bytecodes::_fast_dputfield:
 193   case Bytecodes::_fast_fputfield:
 194   case Bytecodes::_fast_iputfield:
 195   case Bytecodes::_fast_lputfield:
 196   case Bytecodes::_fast_sputfield:
 197     {
 198       // We skip bytecode quickening for putfield instructions when
 199       // the put_code written to the constant pool cache is zero.
 200       // This is required so that every execution of this instruction
 201       // calls out to InterpreterRuntime::resolve_get_put to do
 202       // additional, required work.
 203       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 204       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 205       __ load_field_entry(temp_reg, bc_reg);
 206       if (byte_no == f1_byte) {
 207         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 366   __ sarl(rax, 16);
 367 }
 368 
 369 void TemplateTable::ldc(LdcType type) {
 370   transition(vtos, vtos);
 371   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 372   Label call_ldc, notFloat, notClass, notInt, Done;
 373 
 374   if (is_ldc_wide(type)) {
 375     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 376   } else {
 377     __ load_unsigned_byte(rbx, at_bcp(1));
 378   }
 379 
 380   __ get_cpool_and_tags(rcx, rax);
 381   const int base_offset = ConstantPool::header_size() * wordSize;
 382   const int tags_offset = Array<u1>::base_offset_in_bytes();
 383 
 384   // get type
 385   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));
 386   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
 387 
 388   // unresolved class - get the resolved class
 389   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 390   __ jccb(Assembler::equal, call_ldc);
 391 
 392   // unresolved class in error state - call into runtime to throw the error
 393   // from the first resolution attempt
 394   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 395   __ jccb(Assembler::equal, call_ldc);
 396 
 397   // resolved class - need to call vm to get java mirror of the class
 398   __ cmpl(rdx, JVM_CONSTANT_Class);
 399   __ jcc(Assembler::notEqual, notClass);
 400 
 401   __ bind(call_ldc);
 402 
 403   __ movl(rarg, is_ldc_wide(type) ? 1 : 0);
 404   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 405 
 406   __ push(atos);

 816                     Address(rdx, rax,
 817                             Address::times_4,
 818                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 819                     noreg, noreg);
 820 }
 821 
 822 void TemplateTable::daload() {
 823   transition(itos, dtos);
 824   // rax: index
 825   // rdx: array
 826   index_check(rdx, rax); // kills rbx
 827   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 828                     Address(rdx, rax,
 829                             Address::times_8,
 830                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 831                     noreg, noreg);
 832 }
 833 
 834 void TemplateTable::aaload() {
 835   transition(itos, atos);
 836   Register array = rdx;
 837   Register index = rax;
 838 
 839   index_check(array, index); // kills rbx
 840   __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
 841   if (UseFlatArray) {
 842     Label is_flat_array, done;
 843     __ test_flat_array_oop(array, rbx, is_flat_array);
 844     do_oop_load(_masm,
 845                 Address(array, index,
 846                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 847                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 848                 rax,
 849                 IS_ARRAY);
 850     __ jmp(done);
 851     __ bind(is_flat_array);
 852     __ read_flat_element(array, index, rbx, rcx, rax);
 853     __ bind(done);
 854   } else {
 855     do_oop_load(_masm,
 856                 Address(array, index,
 857                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 858                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 859                 rax,
 860                 IS_ARRAY);
 861   }
 862   __ profile_element_type(rbx, rax, rcx);
 863 }
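
Editorial aside on the Valhalla branch above (semantics as read from this hunk; helper names hypothetical): a flat array stores field-flattened payloads rather than references, so aaload cannot return a pointer into the array, and read_flat_element instead buffers a fresh heap value object.

    // Sketch of the two aaload shapes selected by test_flat_array_oop.
    struct ValueSketch;                        // heap-buffered value object
    struct FlatArraySketch {
      bool is_flat;                            // from the klass layout helper
      ValueSketch* obj_at(int i);              // reference case: plain oop load
      ValueSketch* buffer_element(int i);      // flat case: allocate + copy payload
    };

    static ValueSketch* aaload_sketch(FlatArraySketch* a, int i) {
      return a->is_flat ? a->buffer_element(i) // read_flat_element path
                        : a->obj_at(i);        // do_oop_load path
    }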
 864 
 865 void TemplateTable::baload() {
 866   transition(itos, itos);
 867   // rax: index
 868   // rdx: array
 869   index_check(rdx, rax); // kills rbx
 870   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 871                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 872                     noreg, noreg);
 873 }
 874 
 875 void TemplateTable::caload() {
 876   transition(itos, itos);
 877   // rax: index
 878   // rdx: array
 879   index_check(rdx, rax); // kills rbx
 880   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 881                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 882                     noreg, noreg);

1128   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1129                      Address(rdx, rbx, Address::times_4,
1130                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1131                      noreg /* ftos */, noreg, noreg, noreg);
1132 }
1133 
1134 void TemplateTable::dastore() {
1135   transition(dtos, vtos);
1136   __ pop_i(rbx);
1137   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1138   // rbx:  index
1139   // rdx:  array
1140   index_check(rdx, rbx); // prefer index in rbx
1141   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1142                      Address(rdx, rbx, Address::times_8,
1143                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1144                      noreg /* dtos */, noreg, noreg, noreg);
1145 }
1146 
1147 void TemplateTable::aastore() {
1148   Label is_null, is_flat_array, ok_is_subtype, done;
1149   transition(vtos, vtos);
1150   // stack: ..., array, index, value
1151   __ movptr(rax, at_tos());    // value
1152   __ movl(rcx, at_tos_p1()); // index
1153   __ movptr(rdx, at_tos_p2()); // array
1154 
1155   Address element_address(rdx, rcx,
1156                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1157                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1158 
1159   index_check_without_pop(rdx, rcx);     // kills rbx
1160 
1161   __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1162   __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1163 
1164   __ testptr(rax, rax);
1165   __ jcc(Assembler::zero, is_null);
1166 
1167   // Move array class to rdi
1168   __ load_klass(rdi, rdx, rscratch1);
1169   if (UseFlatArray) {
1170     __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1171     __ test_flat_array_layout(rbx, is_flat_array);
1172   }
1173 
1174   // Move subklass into rbx
1175   __ load_klass(rbx, rax, rscratch1);
1176   // Move array element superklass into rax
1177   __ movptr(rax, Address(rdi,

1178                          ObjArrayKlass::element_klass_offset()));
1179 
1180   // Generate subtype check.  Blows rcx, rdi
1181   // Superklass in rax.  Subklass in rbx.
1182   // is "rbx <: rax" ? (value subclass <: array element superclass)
1183   __ gen_subtype_check(rbx, ok_is_subtype, false);
1184 
1185   // Come here on failure
1186   // object is at TOS
1187   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1188 
1189   // Come here on success
1190   __ bind(ok_is_subtype);
1191 
1192   // Get the value we will store
1193   __ movptr(rax, at_tos());
1194   __ movl(rcx, at_tos_p1()); // index
1195   // Now store using the appropriate barrier
1196   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1197   __ jmp(done);
1198 
1199   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1200   __ bind(is_null);
1201   if (EnableValhalla) {
1202     Label is_null_into_value_array_npe, store_null;
1203 
1204     // No way to store null in null-free array
1205     __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1206     __ jmp(store_null);
1207 
1208     __ bind(is_null_into_value_array_npe);
1209     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1210 
1211     __ bind(store_null);
1212   }
1213   // Store a null
1214   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1215   __ jmp(done);
1216 
1217   if (UseFlatArray) {
1218     Label is_type_ok;
1219     __ bind(is_flat_array); // Store non-null value to flat
1220 
1221     // Simplistic type check...
1222 
1223     // Profile the not-null value's klass.
1224     __ load_klass(rbx, rax, rscratch1);
1225     // Move element klass into rax
1226     __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1227     // flat value array needs exact type match
1228     // is "rax == rbx" (value subclass == array element superclass)
1229     __ cmpptr(rax, rbx);
1230     __ jccb(Assembler::equal, is_type_ok);
1231 
1232     __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1233 
1234     __ bind(is_type_ok);
1235     // rbx: value's klass
1236     // rdx: array
1237     // rdi: array klass
1238     __ test_klass_is_empty_inline_type(rbx, rax, done);
1239 
1240     // calc dst for copy
1241     __ movl(rax, at_tos_p1()); // index
1242     __ data_for_value_array_index(rdx, rdi, rax, rax);
1243 
1244     // ...and src for copy
1245     __ movptr(rcx, at_tos());  // value
1246     __ data_for_oop(rcx, rcx, rbx);
1247 
1248     __ access_value_copy(IN_HEAP, rcx, rax, rbx);
1249   }
1250   // Pop stack arguments
1251   __ bind(done);
1252   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1253 }
1254 
1255 void TemplateTable::bastore() {
1256   transition(itos, vtos);
1257   __ pop_i(rbx);
1258   // rax: value
1259   // rbx: index
1260   // rdx: array
1261   index_check(rdx, rbx); // prefer index in rbx
1262   // Need to check whether array is boolean or byte
1263   // since both types share the bastore bytecode.
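  // e.g. both of these javac outputs dispatch here:
  //   boolean[] bz; bz[i] = flag;   // T_BOOLEAN: value is masked to 0/1
  //   byte[]    bb; bb[i] = b;      // T_BYTE: value is stored as-is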
1264   __ load_klass(rcx, rdx, rscratch1);
1265   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1266   int diffbit = Klass::layout_helper_boolean_diffbit();
1267   __ testl(rcx, diffbit);
1268   Label L_skip;
1269   __ jccb(Assembler::zero, L_skip);

2398   __ jcc(j_not(cc), not_taken);
2399   branch(false, false);
2400   __ bind(not_taken);
2401   __ profile_not_taken_branch(rax);
2402 }
2403 
2404 void TemplateTable::if_nullcmp(Condition cc) {
2405   transition(atos, vtos);
2406   // assume branch is more often taken than not (loops use backward branches)
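  // e.g. javac compiles "if (obj == null) { ... }" to ifnull/ifnonnull,
  // both of which are handled here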
2407   Label not_taken;
2408   __ testptr(rax, rax);
2409   __ jcc(j_not(cc), not_taken);
2410   branch(false, false);
2411   __ bind(not_taken);
2412   __ profile_not_taken_branch(rax);
2413 }
2414 
2415 void TemplateTable::if_acmp(Condition cc) {
2416   transition(atos, vtos);
2417   // assume branch is more often taken than not (loops use backward branches)
2418   Label taken, not_taken;
2419   __ pop_ptr(rdx);
2420 
2421   __ profile_acmp(rbx, rdx, rax, rcx);
2422 
2423   const int is_inline_type_mask = markWord::inline_type_pattern;
2424   if (EnableValhalla) {
2425     __ cmpoop(rdx, rax);
2426     __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2427 
2428     // might be substitutable, test if either rax or rdx is null
2429     __ testptr(rax, rax);
2430     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2431     __ testptr(rdx, rdx);
2432     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2433 
2434     // are both operands inline types?
2435     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2436     __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2437     __ andptr(rbx, is_inline_type_mask);
2438     __ cmpptr(rbx, is_inline_type_mask);
2439     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2440 
2441     // same value klass ?
2442     __ load_metadata(rbx, rdx);
2443     __ load_metadata(rcx, rax);
2444     __ cmpptr(rbx, rcx);
2445     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2446 
2447     // Both are the same value type; test for substitutability...
2448     if (cc == equal) {
2449       invoke_is_substitutable(rax, rdx, taken, not_taken);
2450     } else {
2451       invoke_is_substitutable(rax, rdx, not_taken, taken);
2452     }
2453     __ stop("Not reachable");
2454   }
2455 
2456   __ cmpoop(rdx, rax);
2457   __ jcc(j_not(cc), not_taken);
2458   __ bind(taken);
2459   branch(false, false);
2460   __ bind(not_taken);
2461   __ profile_not_taken_branch(rax, true);
2462 }
2463 
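// Illustrative (assumes a Valhalla value class): given
//   value class Point { int x; int y; }
// "new Point(1, 2) == new Point(1, 2)" is true under if_acmpeq, decided by
// the substitutability test below rather than by reference identity.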
2464 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2465                                             Label& is_subst, Label& not_subst) {
2466   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2467   // Result is returned in rax; jump to the requested outcome
2468   __ testl(rax, rax);
2469   __ jcc(Assembler::zero, not_subst);
2470   __ jmp(is_subst);
2471 }
2472 
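// ret returns to a bytecode index previously stored in a local by jsr;
// javac historically emitted jsr/ret pairs for try/finally blocks.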
2473 void TemplateTable::ret() {
2474   transition(vtos, vtos);
2475   locals_index(rbx);
2476   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2477   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2478   __ profile_ret(rbx, rcx);
2479   __ get_method(rax);
2480   __ movptr(rbcp, Address(rax, Method::const_offset()));
2481   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2482                       ConstMethod::codes_offset()));
2483   __ dispatch_next(vtos, 0, true);
2484 }
2485 
2486 void TemplateTable::wide_ret() {
2487   transition(vtos, vtos);
2488   locals_index_wide(rbx);
2489   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2490   __ profile_ret(rbx, rcx);

2720     const Register thread = rdi;
2721     __ get_thread(thread);
2722     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2723 #endif
2724     __ jcc(Assembler::zero, no_safepoint);
2725     __ push(state);
2726     __ push_cont_fastpath();
2727     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2728                                        InterpreterRuntime::at_safepoint));
2729     __ pop_cont_fastpath();
2730     __ pop(state);
2731     __ bind(no_safepoint);
2732   }
2733 
2734   // Narrow result if state is itos but result type is smaller.
2735   // Need to narrow in the return bytecode rather than in generate_return_entry
2736   // since compiled code callers expect the result to already be narrowed.
2737   if (state == itos) {
2738     __ narrow(rax);
2739   }
2740 
2741   __ remove_activation(state, rbcp, true, true, true);
2742 
2743   __ jmp(rbcp);
2744 }
2745 
2746 // ----------------------------------------------------------------------------
2747 // Volatile variables demand their effects be made known to all CPUs
2748 // in order.  Store buffers on most chips allow reads & writes to
2749 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2750 // without some kind of memory barrier (i.e., it's not sufficient that
2751 // the interpreter does not reorder volatile references, the hardware
2752 // also must not reorder them).
2753 //
2754 // According to the new Java Memory Model (JMM):
2755 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2756 //     writes act as acquire & release, so:
2757 // (2) A read cannot let unrelated NON-volatile memory refs that
2758 //     happen after the read float up to before the read.  It's OK for
2759 //     non-volatile memory refs that happen before the volatile read to
2760 //     float down below it.
2761 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

3087     }
3088     // rax,:   object pointer or null
3089     // cache: cache entry pointer
3090     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
3091               rax, cache);
3092 
3093     __ load_field_entry(cache, index);
3094     __ bind(L1);
3095   }
3096 }
3097 
3098 void TemplateTable::pop_and_check_object(Register r) {
3099   __ pop_ptr(r);
3100   __ null_check(r);  // for field access must check obj.
3101   __ verify_oop(r);
3102 }
3103 
3104 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3105   transition(vtos, vtos);
3106 
3107   const Register obj   = LP64_ONLY(r9) NOT_LP64(rcx);
3108   const Register cache = rcx;
3109   const Register index = rdx;
3110   const Register off   = rbx;
3111   const Register tos_state   = rax;
3112   const Register flags = rdx;
3113   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
3114 
3115   resolve_cache_and_index_for_field(byte_no, cache, index);
3116   jvmti_post_field_access(cache, index, is_static, false);
3117   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3118 
3119   const Address field(obj, off, Address::times_1, 0*wordSize);
3120 
3121   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
3122 
3123   // btos == 0 allows tos_state to be tested directly below
3124   assert(btos == 0, "change code, btos != 0");
3125   __ testl(tos_state, tos_state);
3126   __ jcc(Assembler::notZero, notByte);
3127 
3128   // btos
3129   if (!is_static) pop_and_check_object(obj);
3130   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3131   __ push(btos);
3132   // Rewrite bytecode to be faster
3133   if (!is_static && rc == may_rewrite) {
3134     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3135   }
3136   __ jmp(Done);
3137 
3138   __ bind(notByte);
3139   __ cmpl(tos_state, ztos);
3140   __ jcc(Assembler::notEqual, notBool);
3141   // ztos (same code as btos)
3142   if (!is_static) pop_and_check_object(obj);
3143   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3144   __ push(ztos);
3145   // Rewrite bytecode to be faster
3146   if (!is_static && rc == may_rewrite) {
3147     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3148     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3149   }
3150   __ jmp(Done);
3151 
3152   __ bind(notBool);
3153   __ cmpl(tos_state, atos);
3154   __ jcc(Assembler::notEqual, notObj);
3155   // atos
3156   if (!EnableValhalla) {
3157     if (!is_static) pop_and_check_object(obj);
3158     do_oop_load(_masm, field, rax);
3159     __ push(atos);
3160     if (!is_static && rc == may_rewrite) {
3161       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3162     }
3163     __ jmp(Done);
3164   } else {
3165     if (is_static) {
3166       __ load_heap_oop(rax, field);
3167       Label is_null_free_inline_type, uninitialized;
3168   // The static field may not have been initialized yet; handled below
3169       __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3170         // field is not a null free inline type
3171         __ push(atos);
3172         __ jmp(Done);
3173       // field is a null free inline type, must not return null even if uninitialized
3174       __ bind(is_null_free_inline_type);
3175         __ testptr(rax, rax);
3176         __ jcc(Assembler::zero, uninitialized);
3177           __ push(atos);
3178           __ jmp(Done);
3179         __ bind(uninitialized);
3180 #ifdef _LP64
3181           Label slow_case, finish;
3182           __ movptr(rbx, Address(obj, java_lang_Class::klass_offset()));
3183           __ cmpb(Address(rbx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3184           __ jcc(Assembler::notEqual, slow_case);
3185           __ get_default_value_oop(rbx, rscratch1, rax);
3186           __ jmp(finish);
3187           __ bind(slow_case);
3188 #endif // _LP64
3189           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field),
3190                      obj, cache);
3191 #ifdef _LP64
3192           __ bind(finish);
3193 #endif // _LP64
3194         __ verify_oop(rax);
3195         __ push(atos);
3196         __ jmp(Done);
3197     } else {
3198       Label is_flat, nonnull, is_inline_type, rewrite_inline;
3199       __ test_field_is_null_free_inline_type(flags, rscratch1, is_inline_type);
3200       // field is not a null free inline type
3201       pop_and_check_object(obj);
3202       __ load_heap_oop(rax, field);
3203       __ push(atos);
3204       if (rc == may_rewrite) {
3205         patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3206       }
3207       __ jmp(Done);
3208       __ bind(is_inline_type);
3209       __ test_field_is_flat(flags, rscratch1, is_flat);
3210           // field is not flat
3211           pop_and_check_object(obj);
3212           __ load_heap_oop(rax, field);
3213           __ testptr(rax, rax);
3214           __ jcc(Assembler::notZero, nonnull);
3215             __ load_unsigned_short(flags, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3216             __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3217             __ get_inline_type_field_klass(rcx, flags, rbx);
3218             __ get_default_value_oop(rbx, rcx, rax);
3219           __ bind(nonnull);
3220           __ verify_oop(rax);
3221           __ push(atos);
3222           __ jmp(rewrite_inline);
3223         __ bind(is_flat);
3224           pop_and_check_object(rax);
3225           __ load_unsigned_short(flags, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3226           __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3227           __ read_flat_field(rcx, flags, rbx, rax);
3228           __ verify_oop(rax);
3229           __ push(atos);
3230       __ bind(rewrite_inline);
3231       if (rc == may_rewrite) {
3232         patch_bytecode(Bytecodes::_fast_qgetfield, bc, rbx);
3233       }
3234       __ jmp(Done);
3235     }
3236   }
3237 
3238   __ bind(notObj);
3239 
3240   if (!is_static) pop_and_check_object(obj);
3241 
3242   __ cmpl(tos_state, itos);
3243   __ jcc(Assembler::notEqual, notInt);
3244   // itos
3245   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3246   __ push(itos);
3247   // Rewrite bytecode to be faster
3248   if (!is_static && rc == may_rewrite) {
3249     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3250   }
3251   __ jmp(Done);
3252 
3253   __ bind(notInt);
3254   __ cmpl(tos_state, ctos);
3255   __ jcc(Assembler::notEqual, notChar);
3256   // ctos
3257   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3258   __ push(ctos);
3259   // Rewrite bytecode to be faster
3260   if (!is_static && rc == may_rewrite) {
3261     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3321 #endif
3322 
3323   __ bind(Done);
3324   // [jk] not needed currently
3325   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3326   //                                              Assembler::LoadStore));
3327 }
3328 
3329 void TemplateTable::getfield(int byte_no) {
3330   getfield_or_static(byte_no, false);
3331 }
3332 
3333 void TemplateTable::nofast_getfield(int byte_no) {
3334   getfield_or_static(byte_no, false, may_not_rewrite);
3335 }
3336 
3337 void TemplateTable::getstatic(int byte_no) {
3338   getfield_or_static(byte_no, true);
3339 }
3340 
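// withfield (Valhalla): pops a value object and a replacement field value,
// then pushes an updated copy of the object. Illustrative trigger, assuming
// the experimental prototype syntax: "p = __WithField(p.x, 5);".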
3341 void TemplateTable::withfield() {
3342   transition(vtos, atos);
3343 
3344   Register cache = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
3345   Register index = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
3346 
3347   resolve_cache_and_index_for_field(f2_byte, cache, index);
3348 
3349   __ lea(rax, at_tos());
3350   __ call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), cache, rax);
3351   // new value type is returned in rbx
3352   // stack adjustment is returned in rax
3353   __ verify_oop(rbx);
3354   __ addptr(rsp, rax);
3355   __ movptr(rax, rbx);
3356 }
3357 
3358 // The registers cache and index expected to be set before call.
3359 // The function may destroy various registers, just not the cache and index registers.
3360 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3361   // Cache is rcx and index is rdx
3362   const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3363   const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx);   // Object pointer
3364   const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3365 
3366   if (JvmtiExport::can_post_field_modification()) {
3367     // Check to see if a field modification watch has been set before
3368     // we take the time to call into the VM.
3369     Label L1;
3370     assert_different_registers(cache, obj, rax);
3371     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3372     __ testl(rax, rax);
3373     __ jcc(Assembler::zero, L1);
3374 
3375     __ mov(entry, cache);
3376 

3419     // cache: field entry pointer
3420     // value: jvalue object on the stack
3421     __ call_VM(noreg,
3422               CAST_FROM_FN_PTR(address,
3423                               InterpreterRuntime::post_field_modification),
3424               obj, entry, value);
3425     // Reload field entry
3426     __ load_field_entry(cache, index);
3427     __ bind(L1);
3428   }
3429 }
3430 
3431 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3432   transition(vtos, vtos);
3433 
3434   const Register obj = rcx;
3435   const Register cache = rcx;
3436   const Register index = rdx;
3437   const Register tos_state   = rdx;
3438   const Register off   = rbx;
3439   const Register flags = r9;
3440 
3441   resolve_cache_and_index_for_field(byte_no, cache, index);
3442   jvmti_post_field_mod(cache, index, is_static);
3443   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3444 
3445   // [jk] not needed currently
3446   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3447   //                                              Assembler::StoreStore));
3448 
3449   Label notVolatile, Done;
3450 
3451   // Check for volatile store
3452   __ movl(rscratch1, flags);
3453   __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
3454   __ testl(rscratch1, rscratch1);
3455   __ jcc(Assembler::zero, notVolatile);
3456 
3457   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3458   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3459                                                Assembler::StoreStore));
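  // (e.g. a Java store to a field declared "volatile int x" takes this path;
  // the StoreLoad barrier keeps the store from passing a later volatile load)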
3460   __ jmp(Done);
3461   __ bind(notVolatile);
3462 
3463   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3464 
3465   __ bind(Done);
3466 }
3467 
3468 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3469                                               Register obj, Register off, Register tos_state, Register flags) {
3470 
3471   // field addresses
3472   const Address field(obj, off, Address::times_1, 0*wordSize);
3473   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3474 
3475   Label notByte, notBool, notInt, notShort, notChar,
3476         notLong, notFloat, notObj, notInlineType;
3477   Label Done;
3478 
3479   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3480 
3481   // Test TOS state
3482   __ testl(tos_state, tos_state);
3483   __ jcc(Assembler::notZero, notByte);
3484 
3485   // btos
3486   {
3487     __ pop(btos);
3488     if (!is_static) pop_and_check_object(obj);
3489     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3490     if (!is_static && rc == may_rewrite) {
3491       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3492     }
3493     __ jmp(Done);
3494   }
3495 
3496   __ bind(notByte);
3497   __ cmpl(tos_state, ztos);
3498   __ jcc(Assembler::notEqual, notBool);
3499 
3500   // ztos
3501   {
3502     __ pop(ztos);
3503     if (!is_static) pop_and_check_object(obj);
3504     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3505     if (!is_static && rc == may_rewrite) {
3506       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3507     }
3508     __ jmp(Done);
3509   }
3510 
3511   __ bind(notBool);
3512   __ cmpl(tos_state, atos);
3513   __ jcc(Assembler::notEqual, notObj);
3514 
3515   // atos
3516   {
3517     if (!EnableValhalla) {
3518       __ pop(atos);
3519       if (!is_static) pop_and_check_object(obj);
3520       // Store into the field
3521       do_oop_store(_masm, field, rax);
3522       if (!is_static && rc == may_rewrite) {
3523         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3524       }
3525       __ jmp(Done);
3526     } else {
3527       __ pop(atos);
3528       if (is_static) {
3529         Label is_inline_type;
3530         __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
3531         __ null_check(rax);
3532         __ bind(is_inline_type);
3533         do_oop_store(_masm, field, rax);
3534         __ jmp(Done);
3535       } else {
3536         Label is_inline_type, is_flat, rewrite_not_inline, rewrite_inline;
3537         __ test_field_is_null_free_inline_type(flags, rscratch1, is_inline_type);
3538         // Not an inline type
3539         pop_and_check_object(obj);
3540         // Store into the field
3541         do_oop_store(_masm, field, rax);
3542         __ bind(rewrite_not_inline);
3543         if (rc == may_rewrite) {
3544           patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3545         }
3546         __ jmp(Done);
3547         // Implementation of the inline type semantic
3548         __ bind(is_inline_type);
3549         __ null_check(rax);
3550         __ test_field_is_flat(flags, rscratch1, is_flat);
3551         // field is not flat
3552         pop_and_check_object(obj);
3553         // Store into the field
3554         do_oop_store(_masm, field, rax);
3555         __ jmp(rewrite_inline);
3556         __ bind(is_flat);
3557         // field is flat
3558         pop_and_check_object(obj);
3559         assert_different_registers(rax, rdx, obj, off);
3560         __ load_klass(rdx, rax, rscratch1);
3561         __ data_for_oop(rax, rax, rdx);
3562         __ addptr(obj, off);
3563         __ access_value_copy(IN_HEAP, rax, obj, rdx);
3564         __ bind(rewrite_inline);
3565         if (rc == may_rewrite) {
3566           patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3567         }
3568         __ jmp(Done);
3569       }
3570     }
3571   }
3572 
3573   __ bind(notObj);
3574   __ cmpl(tos_state, itos);
3575   __ jcc(Assembler::notEqual, notInt);
3576 
3577   // itos
3578   {
3579     __ pop(itos);
3580     if (!is_static) pop_and_check_object(obj);
3581     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3582     if (!is_static && rc == may_rewrite) {
3583       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3584     }
3585     __ jmp(Done);
3586   }
3587 
3588   __ bind(notInt);
3589   __ cmpl(tos_state, ctos);
3590   __ jcc(Assembler::notEqual, notChar);

3689 }
3690 
3691 void TemplateTable::jvmti_post_fast_field_mod() {
3692 
3693   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3694 
3695   if (JvmtiExport::can_post_field_modification()) {
3696     // Check to see if a field modification watch has been set before
3697     // we take the time to call into the VM.
3698     Label L2;
3699     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3700     __ testl(scratch, scratch);
3701     __ jcc(Assembler::zero, L2);
3702     __ pop_ptr(rbx);                  // copy the object pointer from tos
3703     __ verify_oop(rbx);
3704     __ push_ptr(rbx);                 // put the object pointer back on tos
3705     // Save tos values before call_VM() clobbers them. Since we have
3706     // to do it for every data type, we use the saved values as the
3707     // jvalue object.
3708     switch (bytecode()) {          // load values into the jvalue object
3709     case Bytecodes::_fast_qputfield: // fall through
3710     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3711     case Bytecodes::_fast_bputfield: // fall through
3712     case Bytecodes::_fast_zputfield: // fall through
3713     case Bytecodes::_fast_sputfield: // fall through
3714     case Bytecodes::_fast_cputfield: // fall through
3715     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3716     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3717     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3718     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3719 
3720     default:
3721       ShouldNotReachHere();
3722     }
3723     __ mov(scratch, rsp);             // points to jvalue on the stack
3724     // access constant pool cache entry
3725     LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3726     NOT_LP64(__ load_field_entry(rax, rdx));
3727     __ verify_oop(rbx);
3728     // rbx: object pointer copied above
3729     // c_rarg2: cache entry pointer
3730     // c_rarg3: jvalue object on the stack
3731     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3732     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3733 
3734     switch (bytecode()) {             // restore tos values
3735     case Bytecodes::_fast_qputfield: // fall through
3736     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3737     case Bytecodes::_fast_bputfield: // fall through
3738     case Bytecodes::_fast_zputfield: // fall through
3739     case Bytecodes::_fast_sputfield: // fall through
3740     case Bytecodes::_fast_cputfield: // fall through
3741     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3742     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3743     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3744     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3745     default: break;
3746     }
3747     __ bind(L2);
3748   }
3749 }
3750 
3751 void TemplateTable::fast_storefield(TosState state) {
3752   transition(state, vtos);
3753 
3754   Register cache = rcx;
3755 
3756   Label notVolatile, Done;
3757 
3758   jvmti_post_fast_field_mod();
3759 
3760   __ push(rax);
3761   __ load_field_entry(rcx, rax);
3762   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3763   __ pop(rax);
3764   // RBX: field offset, RCX: field entry, RAX: TOS value, RDX: flags
3765 
3766   // Get object from stack
3767   pop_and_check_object(rcx);
3768 
3769   // field address
3770   const Address field(rcx, rbx, Address::times_1);
3771 
3772   // Check for volatile store
3773   __ movl(rscratch2, rdx);  // saving flags for is_flat test
3774   __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3775   __ testl(rscratch2, rscratch2);
3776   __ jcc(Assembler::zero, notVolatile);
3777 
3778   fast_storefield_helper(field, rax, rdx);
3779   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3780                                                Assembler::StoreStore));
3781   __ jmp(Done);
3782   __ bind(notVolatile);
3783 
3784   fast_storefield_helper(field, rax, rdx);
3785 
3786   __ bind(Done);
3787 }
3788 
3789 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3790 
3791   // access field
3792   switch (bytecode()) {
3793   case Bytecodes::_fast_qputfield:
3794     {
3795       Label is_flat, done;
3796       __ null_check(rax);
3797       __ test_field_is_flat(flags, rscratch1, is_flat);
3798       // field is not flat
3799       do_oop_store(_masm, field, rax);
3800       __ jmp(done);
3801       __ bind(is_flat);
3802       // field is flat
3803       __ load_klass(rdx, rax, rscratch1);
3804       __ data_for_oop(rax, rax, rdx);
3805       __ lea(rcx, field);
3806       __ access_value_copy(IN_HEAP, rax, rcx, rdx);
3807       __ bind(done);
3808     }
3809     break;
3810   case Bytecodes::_fast_aputfield:
3811     {
3812       do_oop_store(_masm, field, rax);
3813     }
3814     break;
3815   case Bytecodes::_fast_lputfield:
3816 #ifdef _LP64
3817     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3818 #else
3819   __ stop("should not be rewritten");
3820 #endif
3821     break;
3822   case Bytecodes::_fast_iputfield:
3823     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3824     break;
3825   case Bytecodes::_fast_zputfield:
3826     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3827     break;
3828   case Bytecodes::_fast_bputfield:
3829     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3830     break;
3831   case Bytecodes::_fast_sputfield:
3832     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3833     break;

3855     Label L1;
3856     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3857     __ testl(rcx, rcx);
3858     __ jcc(Assembler::zero, L1);
3859     // access constant pool cache entry
3860     LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3861     NOT_LP64(__ load_field_entry(rcx, rdx));
3862     __ verify_oop(rax);
3863     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3864     LP64_ONLY(__ mov(c_rarg1, rax));
3865     // c_rarg1: object pointer copied above
3866     // c_rarg2: cache entry pointer
3867     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3868     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3869     __ pop_ptr(rax); // restore object pointer
3870     __ bind(L1);
3871   }
3872 
3873   // access constant pool cache
3874   __ load_field_entry(rcx, rbx);
3875   __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3876 
3877   // rax: object
3878   __ verify_oop(rax);
3879   __ null_check(rax);
3880   Address field(rax, rdx, Address::times_1);
3881 
3882   // access field
3883   switch (bytecode()) {
3884   case Bytecodes::_fast_qgetfield:
3885     {
3886       Label is_flat, nonnull, Done;
3887       __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
3888       __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3889         // field is not flat
3890         __ load_heap_oop(rax, field);
3891         __ testptr(rax, rax);
3892         __ jcc(Assembler::notZero, nonnull);
3893           __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3894           __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3895           __ get_inline_type_field_klass(rcx, rdx, rbx);
3896           __ get_default_value_oop(rbx, rcx, rax);
3897         __ bind(nonnull);
3898         __ verify_oop(rax);
3899         __ jmp(Done);
3900       __ bind(is_flat);
3901       // field is flat
3902         __ push(rdx); // save offset
3903         __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3904         __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3905         __ pop(rbx); // restore offset
3906         __ read_flat_field(rcx, rdx, rbx, rax);
3907       __ bind(Done);
3908       __ verify_oop(rax);
3909     }
3910     break;
3911   case Bytecodes::_fast_agetfield:
3912     do_oop_load(_masm, field, rax);
3913     __ verify_oop(rax);
3914     break;
3915   case Bytecodes::_fast_lgetfield:
3916 #ifdef _LP64
3917     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3918 #else
3919   __ stop("should not be rewritten");
3920 #endif
3921     break;
3922   case Bytecodes::_fast_igetfield:
3923     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3924     break;
3925   case Bytecodes::_fast_bgetfield:
3926     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3927     break;
3928   case Bytecodes::_fast_sgetfield:
3929     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3930     break;

4326 
4327   // Note:  rax_callsite is already pushed
4328 
4329   // %%% should make a type profile for any invokedynamic that takes a ref argument
4330   // profile this call
4331   __ profile_call(rbcp);
4332   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4333 
4334   __ verify_oop(rax_callsite);
4335 
4336   __ jump_from_interpreted(rbx_method, rdx);
4337 }
4338 
4339 //-----------------------------------------------------------------------------
4340 // Allocation
4341 
4342 void TemplateTable::_new() {
4343   transition(vtos, atos);
4344   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4345   Label slow_case;
4346   Label done;
4347   Label is_not_value;
4348 
4349   __ get_cpool_and_tags(rcx, rax);
4350 
4351   // Make sure the class we're about to instantiate has been resolved.
4352   // This is done before loading the InstanceKlass to be consistent with the
4353   // order in which the ConstantPool is updated (see ConstantPool::klass_at_put)
4354   const int tags_offset = Array<u1>::base_offset_in_bytes();
4355   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4356   __ jcc(Assembler::notEqual, slow_case);
4357 
4358   // get InstanceKlass
4359   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4360 
4361   __ cmpb(Address(rcx, InstanceKlass::kind_offset()), Klass::InlineKlassKind);
4362   __ jcc(Assembler::notEqual, is_not_value);
4363 
4364   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_InstantiationError));
4365 
4366   __ bind(is_not_value);
4367 
4368   // make sure klass is initialized & doesn't have finalizer
4369   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4370   __ jcc(Assembler::notEqual, slow_case);
4371 
4372   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
4373   __ jmp(done);
4374 
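  // Illustrative (not in the VM sources): the first execution of "new Foo()"
  // typically lands in the slow path below, because Foo is still unresolved
  // or not yet initialized; later executions allocate inline above.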
4375   // slow case
4376   __ bind(slow_case);
4377 
4378   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4379   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4380 
4381   __ get_constant_pool(rarg1);
4382   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4383   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4384   __ verify_oop(rax);
4385 
4386   // continue
4387   __ bind(done);
4388 }
4389 
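// aconst_init (Valhalla): pushes the default, all-fields-zero instance of an
// inline class. Illustrative trigger, assuming the experimental prototype
// syntax: "Point p = Point.default;".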
4390 void TemplateTable::aconst_init() {
4391   transition(vtos, atos);
4392 
4393   Label slow_case;
4394   Label done;
4395   Label is_value;
4396 
4397   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4398   __ get_cpool_and_tags(rcx, rax);
4399 
4400   // Make sure the class we're about to instantiate has been resolved.
4401   // This is done before loading the InstanceKlass to be consistent with the
4402   // order in which the ConstantPool is updated (see ConstantPool::klass_at_put)
4403   const int tags_offset = Array<u1>::base_offset_in_bytes();
4404   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4405   __ jcc(Assembler::notEqual, slow_case);
4406 
4407   // get InstanceKlass
4408   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4409 
4410   __ cmpb(Address(rcx, InstanceKlass::kind_offset()), Klass::InlineKlassKind);
4411   __ jcc(Assembler::equal, is_value);
4412 
4413   // in the future, aconst_init will just return null instead of throwing an exception
4414   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
4415 
4416   __ bind(is_value);
4417 
4418   // make sure klass is fully initialized
4419   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4420   __ jcc(Assembler::notEqual, slow_case);
4421 
4422   // have a resolved InlineKlass in rcx, return the default value oop from it
4423   __ get_default_value_oop(rcx, rdx, rax);
4424   __ jmp(done);
4425 
4426   __ bind(slow_case);
4427 
4428   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4429   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4430 
4431   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4432   __ get_constant_pool(rarg1);
4433 
4434   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::aconst_init),
4435       rarg1, rarg2);
4436 
4437   __ bind(done);
4438   __ verify_oop(rax);
4439 }
4440 
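// newarray covers primitive arrays only: the operand byte at bcp+1 is the
// BasicType tag and the element count is in rax (itos), e.g. "new int[n]"
// emits newarray with T_INT.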
4441 void TemplateTable::newarray() {
4442   transition(itos, atos);
4443   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4444   __ load_unsigned_byte(rarg1, at_bcp(1));
4445   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4446           rarg1, rax);
4447 }
4448 
4449 void TemplateTable::anewarray() {
4450   transition(itos, atos);
4451 
4452   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4453   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4454 
4455   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4456   __ get_constant_pool(rarg1);
4457   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4458           rarg1, rarg2, rax);
4459 }
4460 
4461 void TemplateTable::arraylength() {
4462   transition(atos, itos);
4463   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4464 }
4465 
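// Illustrative (not in the VM sources): "(String) obj" compiles to checkcast.
// A null obj passes the cast, except that with EnablePrimitiveClasses a cast
// to a Q-descriptor (null-free) type throws NullPointerException, as below.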
4466 void TemplateTable::checkcast() {
4467   transition(atos, atos);
4468   Label done, is_null, ok_is_subtype, quicked, resolved;
4469   __ testptr(rax, rax); // object is in rax
4470   __ jcc(Assembler::zero, is_null);
4471 
4472   // Get cpool & tags index
4473   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4474   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4475   // See if bytecode has already been quicked
4476   __ movzbl(rdx, Address(rdx, rbx,
4477       Address::times_1,
4478       Array<u1>::base_offset_in_bytes()));
4479   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
4480   __ cmpl(rdx, JVM_CONSTANT_Class);
4481   __ jcc(Assembler::equal, quicked);
4482   __ push(atos); // save receiver for result, and for GC
4483   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4484 
4485   // vm_result_2 has metadata result
4486 #ifndef _LP64
4487   // borrow rdi from locals
4488   __ get_thread(rdi);
4489   __ get_vm_result_2(rax, rdi);
4490   __ restore_locals();
4491 #else
4492   __ get_vm_result_2(rax, r15_thread);
4493 #endif
4494 
4495   __ pop_ptr(rdx); // restore receiver
4496   __ jmpb(resolved);
4497 
4498   // Get superklass in rax and subklass in rbx
4499   __ bind(quicked);
4500   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4501   __ load_resolved_klass_at_index(rax, rcx, rbx);
4502 
4503   __ bind(resolved);
4504   __ load_klass(rbx, rdx, rscratch1);
4505 
4506   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4507   // Superklass in rax.  Subklass in rbx.
4508   __ gen_subtype_check(rbx, ok_is_subtype);
4509 
4510   // Come here on failure
4511   __ push_ptr(rdx);
4512   // object is at TOS
4513   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4514 
4515   // Come here on success
4516   __ bind(ok_is_subtype);
4517   __ mov(rax, rdx); // Restore object from rdx
4518   __ jmp(done);
4519 
4520   __ bind(is_null);
4521 
4522   // Collect counts on whether this check-cast sees nulls a lot or not.
4523   if (ProfileInterpreter) {
4524     __ profile_null_seen(rcx);
4525   }
4526 
4527   if (EnablePrimitiveClasses) {
4528     // Get cpool & tags index
4529     __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4530     __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4531     // See if CP entry is a Q-descriptor
4532     __ movzbl(rcx, Address(rdx, rbx,
4533         Address::times_1,
4534         Array<u1>::base_offset_in_bytes()));
4535     __ andl(rcx, JVM_CONSTANT_QDescBit);
4536     __ cmpl(rcx, JVM_CONSTANT_QDescBit);
4537     __ jcc(Assembler::notEqual, done);
4538     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
4539   }
4540 
4541   __ bind(done);
4542 }
4543 
4544 void TemplateTable::instanceof() {
4545   transition(atos, itos);
4546   Label done, is_null, ok_is_subtype, quicked, resolved;
4547   __ testptr(rax, rax);
4548   __ jcc(Assembler::zero, is_null);
4549 
4550   // Get cpool & tags index
4551   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4552   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4553   // See if bytecode has already been quicked
4554   __ movzbl(rdx, Address(rdx, rbx,
4555         Address::times_1,
4556         Array<u1>::base_offset_in_bytes()));
4557   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
4558   __ cmpl(rdx, JVM_CONSTANT_Class);
4559   __ jcc(Assembler::equal, quicked);
4560 
4561   __ push(atos); // save receiver for result, and for GC
4562   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4563   // vm_result_2 has metadata result
4564 
4565 #ifndef _LP64
4566   // borrow rdi from locals
4567   __ get_thread(rdi);
4568   __ get_vm_result_2(rax, rdi);
4569   __ restore_locals();
4570 #else
4571   __ get_vm_result_2(rax, r15_thread);
4572 #endif
4573 
4574   __ pop_ptr(rdx); // restore receiver
4575   __ verify_oop(rdx);
4576   __ load_klass(rdx, rdx, rscratch1);
4577   __ jmpb(resolved);
4578 

4590   // Come here on failure
4591   __ xorl(rax, rax);
4592   __ jmpb(done);
4593   // Come here on success
4594   __ bind(ok_is_subtype);
4595   __ movl(rax, 1);
4596 
4597   // Collect counts on whether this test sees nulls a lot or not.
4598   if (ProfileInterpreter) {
4599     __ jmp(done);
4600     __ bind(is_null);
4601     __ profile_null_seen(rcx);
4602   } else {
4603     __ bind(is_null);   // same as 'done'
4604   }
4605   __ bind(done);
4606   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4607   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4608 }
4609 
4610 //----------------------------------------------------------------------------------------------------
4611 // Breakpoints
4612 void TemplateTable::_breakpoint() {
4613   // Note: We get here even if we are single stepping.
4614   // jbug insists on setting breakpoints at every bytecode
4615   // even if we are in single step mode.
4616 
4617   transition(vtos, vtos);
4618 
4619   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4620 
4621   // get the unpatched byte code
4622   __ get_method(rarg);
4623   __ call_VM(noreg,
4624              CAST_FROM_FN_PTR(address,
4625                               InterpreterRuntime::get_original_bytecode_at),
4626              rarg, rbcp);
4627   __ mov(rbx, rax);  // why?
4628 
4629   // post the breakpoint event

4651 // Note: monitorenter & exit are symmetric routines, which is reflected
4652 //       in the assembly code structure as well
4653 //
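// Illustrative (not in the VM sources): "synchronized (obj) { ... }" brackets
// the block with monitorenter/monitorexit; under Valhalla, inline-type objects
// have no identity, so either bytecode throws IllegalMonitorStateException.
//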
4654 // Stack layout:
4655 //
4656 // [expressions  ] <--- rsp               = expression stack top
4657 // ..
4658 // [expressions  ]
4659 // [monitor entry] <--- monitor block top = expression stack bot
4660 // ..
4661 // [monitor entry]
4662 // [frame data   ] <--- monitor block bot
4663 // ...
4664 // [saved rbp    ] <--- rbp
4665 void TemplateTable::monitorenter() {
4666   transition(atos, vtos);
4667 
4668   // check for null object
4669   __ null_check(rax);
4670 
4671   Label is_inline_type;
4672   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4673   __ test_markword_is_inline_type(rbx, is_inline_type);
4674 
4675   const Address monitor_block_top(
4676         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4677   const Address monitor_block_bot(
4678         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4679   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4680 
4681   Label allocated;
4682 
4683   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4684   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4685   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4686 
4687   // initialize entry pointer
4688   __ xorl(rmon, rmon); // points to free slot or null
4689 
4690   // find a free slot in the monitor block (result in rmon)
4691   {
4692     Label entry, loop, exit;
4693     __ movptr(rtop, monitor_block_top); // derelativize pointer
4694     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

4747   // rmon: points to monitor entry
4748   __ bind(allocated);
4749 
4750   // Increment bcp to point to the next bytecode, so exception
4751   // handling for async. exceptions works correctly.
4752   // The object has already been popped from the stack, so the
4753   // expression stack looks correct.
4754   __ increment(rbcp);
4755 
4756   // store object
4757   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4758   __ lock_object(rmon);
4759 
4760   // check to make sure this monitor doesn't cause stack overflow after locking
4761   __ save_bcp();  // in case of exception
4762   __ generate_stack_overflow_check(0);
4763 
4764   // The bcp has already been incremented. Just need to dispatch to
4765   // next instruction.
4766   __ dispatch_next(vtos);
4767 
4768   __ bind(is_inline_type);
4769   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4770                     InterpreterRuntime::throw_illegal_monitor_state_exception));
4771   __ should_not_reach_here();
4772 }
4773 
4774 void TemplateTable::monitorexit() {
4775   transition(atos, vtos);
4776 
4777   // check for null object
4778   __ null_check(rax);
4779 
4780   const int is_inline_type_mask = markWord::inline_type_pattern;
4781   Label has_identity;
4782   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4783   __ andptr(rbx, is_inline_type_mask);
4784   __ cmpl(rbx, is_inline_type_mask);
4785   __ jcc(Assembler::notEqual, has_identity);
4786   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4787                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4788   __ should_not_reach_here();
4789   __ bind(has_identity);
4790 
4791   const Address monitor_block_top(
4792         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4793   const Address monitor_block_bot(
4794         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4795   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4796 
4797   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4798   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4799 
4800   Label found;
4801 
4802   // find matching slot
4803   {
4804     Label entry, loop;
4805     __ movptr(rtop, monitor_block_top); // derelativize pointer
4806     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4807     // rtop points to current entry, starting with top-most entry
4808 
4809     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4810                                         // of monitor block