src/hotspot/cpu/x86/templateTable_x86.cpp

  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodData.hpp"
  37 #include "oops/objArrayKlass.hpp"
  38 #include "oops/oop.inline.hpp"
  39 #include "oops/resolvedIndyEntry.hpp"
  40 #include "prims/jvmtiExport.hpp"
  41 #include "prims/methodHandles.hpp"
  42 #include "runtime/frame.inline.hpp"
  43 #include "runtime/safepointMechanism.hpp"
  44 #include "runtime/sharedRuntime.hpp"
  45 #include "runtime/stubRoutines.hpp"
  46 #include "runtime/synchronizer.hpp"
  47 #include "utilities/macros.hpp"
  48 
  49 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  50 
  51 // Global Register Names
  52 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  53 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  54 
  55 // Address Computation: local variables
  56 static inline Address iaddress(int n) {
  57   return Address(rlocals, Interpreter::local_offset_in_bytes(n));
  58 }
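
A quick sketch of what these accessors compute, for readers new to the template interpreter: a local variable is simply a fixed byte offset from the rlocals base register. The slot size and the downward direction below are assumptions of this illustration, not the actual Interpreter::local_offset_in_bytes() definition.

    // Illustrative sketch only (not HotSpot code): addressing local slot n
    // relative to a base pointer. Assumption: one machine word per slot,
    // slots laid out toward lower addresses, as on x86 interpreter frames.
    #include <cstdint>

    static inline intptr_t* local_slot_sketch(intptr_t* rlocals_base, int n) {
      return rlocals_base - n;
    }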

 163 static void do_oop_load(InterpreterMacroAssembler* _masm,
 164                         Address src,
 165                         Register dst,
 166                         DecoratorSet decorators = 0) {
 167   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 168 }
 169 
 170 Address TemplateTable::at_bcp(int offset) {
 171   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 172   return Address(rbcp, offset);
 173 }
 174 
 175 
 176 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 177                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 178                                    int byte_no) {
 179   if (!RewriteBytecodes)  return;
 180   Label L_patch_done;
 181 
  182   switch (bc) {
 183   case Bytecodes::_fast_aputfield:
 184   case Bytecodes::_fast_bputfield:
 185   case Bytecodes::_fast_zputfield:
 186   case Bytecodes::_fast_cputfield:
 187   case Bytecodes::_fast_dputfield:
 188   case Bytecodes::_fast_fputfield:
 189   case Bytecodes::_fast_iputfield:
 190   case Bytecodes::_fast_lputfield:
 191   case Bytecodes::_fast_sputfield:
 192     {
 193       // We skip bytecode quickening for putfield instructions when
 194       // the put_code written to the constant pool cache is zero.
 195       // This is required so that every execution of this instruction
 196       // calls out to InterpreterRuntime::resolve_get_put to do
 197       // additional, required work.
 198       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 199       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 200       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 201       __ movl(bc_reg, bc);
 202       __ cmpl(temp_reg, (int) 0);

 355   __ sarl(rax, 16);
 356 }
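
The sarl(rax, 16) above is the tail of the usual idiom for reading a signed, big-endian 16-bit bytecode operand: load the two bytes, bswapl the 32-bit register so the payload lands in the upper half, then arithmetic-shift right by 16 to sign-extend. The same computation in standalone C++, assuming the operand sits at bcp[1..2]:

    // Sketch of the load/bswap/sar idiom. Assumed bcp layout: opcode at
    // bcp[0], big-endian 16-bit operand at bcp[1..2].
    #include <cstdint>

    static int32_t read_s2_at_bcp(const uint8_t* bcp) {
      uint32_t v = (uint32_t(bcp[1]) << 8) | bcp[2]; // big-endian operand bytes
      v <<= 16;                   // payload to the high half (effect of bswapl)
      return int32_t(v) >> 16;    // arithmetic shift sign-extends (the sarl)
    }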
 357 
 358 void TemplateTable::ldc(LdcType type) {
 359   transition(vtos, vtos);
 360   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 361   Label call_ldc, notFloat, notClass, notInt, Done;
 362 
 363   if (is_ldc_wide(type)) {
 364     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 365   } else {
 366     __ load_unsigned_byte(rbx, at_bcp(1));
 367   }
 368 
 369   __ get_cpool_and_tags(rcx, rax);
 370   const int base_offset = ConstantPool::header_size() * wordSize;
 371   const int tags_offset = Array<u1>::base_offset_in_bytes();
 372 
 373   // get type
  374   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));
 375 
 376   // unresolved class - get the resolved class
 377   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 378   __ jccb(Assembler::equal, call_ldc);
 379 
 380   // unresolved class in error state - call into runtime to throw the error
 381   // from the first resolution attempt
 382   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 383   __ jccb(Assembler::equal, call_ldc);
 384 
 385   // resolved class - need to call vm to get java mirror of the class
 386   __ cmpl(rdx, JVM_CONSTANT_Class);
 387   __ jcc(Assembler::notEqual, notClass);
 388 
 389   __ bind(call_ldc);
 390 
 391   __ movl(rarg, is_ldc_wide(type) ? 1 : 0);
 392   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 393 
 394   __ push(atos);

 804                     Address(rdx, rax,
 805                             Address::times_4,
 806                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 807                     noreg, noreg);
 808 }
 809 
 810 void TemplateTable::daload() {
 811   transition(itos, dtos);
 812   // rax: index
 813   // rdx: array
 814   index_check(rdx, rax); // kills rbx
 815   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 816                     Address(rdx, rax,
 817                             Address::times_8,
 818                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 819                     noreg, noreg);
 820 }
 821 
 822 void TemplateTable::aaload() {
 823   transition(itos, atos);
 824   // rax: index
 825   // rdx: array
 826   index_check(rdx, rax); // kills rbx
 827   do_oop_load(_masm,
 828               Address(rdx, rax,
 829                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 830                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 831               rax,
  832               IS_ARRAY);
 833 }
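
All of these array loads use the same scaled-index addressing: element address = array base + array header + index * element size, with the scale dropping to 4 for compressed oops in aaload. The arithmetic that the Address(base, index, scale, disp) operand encodes, as a sketch:

    // Sketch of the address computed by Address(rdx, rax, scale, header).
    // header_bytes and element_size are stand-ins for
    // arrayOopDesc::base_offset_in_bytes(T) and the per-type scale.
    #include <cstddef>

    static void* element_address_sketch(char* array_base, size_t header_bytes,
                                        size_t index, size_t element_size) {
      return array_base + header_bytes + index * element_size;
    }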
 834 
 835 void TemplateTable::baload() {
 836   transition(itos, itos);
 837   // rax: index
 838   // rdx: array
 839   index_check(rdx, rax); // kills rbx
 840   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 841                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 842                     noreg, noreg);
 843 }
 844 
 845 void TemplateTable::caload() {
 846   transition(itos, itos);
 847   // rax: index
 848   // rdx: array
 849   index_check(rdx, rax); // kills rbx
 850   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 851                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 852                     noreg, noreg);

1098   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1099                      Address(rdx, rbx, Address::times_4,
1100                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1101                      noreg /* ftos */, noreg, noreg, noreg);
1102 }
1103 
1104 void TemplateTable::dastore() {
1105   transition(dtos, vtos);
1106   __ pop_i(rbx);
1107   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1108   // rbx:  index
1109   // rdx:  array
1110   index_check(rdx, rbx); // prefer index in rbx
1111   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1112                      Address(rdx, rbx, Address::times_8,
1113                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1114                      noreg /* dtos */, noreg, noreg, noreg);
1115 }
1116 
1117 void TemplateTable::aastore() {
1118   Label is_null, ok_is_subtype, done;
1119   transition(vtos, vtos);
1120   // stack: ..., array, index, value
1121   __ movptr(rax, at_tos());    // value
1122   __ movl(rcx, at_tos_p1()); // index
1123   __ movptr(rdx, at_tos_p2()); // array
1124 
1125   Address element_address(rdx, rcx,
1126                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1127                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1128 
 1129   index_check_without_pop(rdx, rcx);     // kills rbx
1130   __ testptr(rax, rax);
1131   __ jcc(Assembler::zero, is_null);
 1132 
1133   // Move subklass into rbx
1134   __ load_klass(rbx, rax, rscratch1);
1135   // Move superklass into rax
1136   __ load_klass(rax, rdx, rscratch1);
1137   __ movptr(rax, Address(rax,
1138                          ObjArrayKlass::element_klass_offset()));
1139 
1140   // Generate subtype check.  Blows rcx, rdi
1141   // Superklass in rax.  Subklass in rbx.
 1142   __ gen_subtype_check(rbx, ok_is_subtype);
1143 
1144   // Come here on failure
1145   // object is at TOS
1146   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1147 
1148   // Come here on success
1149   __ bind(ok_is_subtype);
1150 
1151   // Get the value we will store
1152   __ movptr(rax, at_tos());
1153   __ movl(rcx, at_tos_p1()); // index
1154   // Now store using the appropriate barrier
1155   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1156   __ jmp(done);
1157 
1158   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1159   __ bind(is_null);
 1160   __ profile_null_seen(rbx);
 1161 
1162   // Store a null
 1163   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
 1164 
1165   // Pop stack arguments
1166   __ bind(done);
1167   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1168 }
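
For reference, the control flow above implements the aastore store check: a null value is stored with no type test, while a non-null value must be a subtype of the array's element klass or ArrayStoreException is thrown. A toy model of those semantics (stand-in types, not HotSpot's Klass API):

    // Toy model of the aastore semantics sketched above.
    #include <stdexcept>

    struct Klass    { const Klass* super; };
    struct Obj      { const Klass* klass; };
    struct ObjArray { const Klass* element_klass; Obj** elems; };

    static bool is_subtype_of(const Klass* sub, const Klass* sup) {
      for (; sub != nullptr; sub = sub->super)
        if (sub == sup) return true;
      return false;
    }

    static void aastore_sketch(ObjArray* a, int index, Obj* value) {
      if (value != nullptr && !is_subtype_of(value->klass, a->element_klass))
        throw std::runtime_error("ArrayStoreException");
      a->elems[index] = value;  // the real store goes through a GC write barrier
    }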
1169 
1170 void TemplateTable::bastore() {
1171   transition(itos, vtos);
1172   __ pop_i(rbx);
1173   // rax: value
1174   // rbx: index
1175   // rdx: array
1176   index_check(rdx, rbx); // prefer index in rbx
1177   // Need to check whether array is boolean or byte
1178   // since both types share the bastore bytecode.
1179   __ load_klass(rcx, rdx, rscratch1);
1180   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1181   int diffbit = Klass::layout_helper_boolean_diffbit();
1182   __ testl(rcx, diffbit);
1183   Label L_skip;
1184   __ jccb(Assembler::zero, L_skip);

2313   __ jcc(j_not(cc), not_taken);
2314   branch(false, false);
2315   __ bind(not_taken);
2316   __ profile_not_taken_branch(rax);
2317 }
2318 
2319 void TemplateTable::if_nullcmp(Condition cc) {
2320   transition(atos, vtos);
2321   // assume branch is more often taken than not (loops use backward branches)
2322   Label not_taken;
2323   __ testptr(rax, rax);
2324   __ jcc(j_not(cc), not_taken);
2325   branch(false, false);
2326   __ bind(not_taken);
2327   __ profile_not_taken_branch(rax);
2328 }
2329 
2330 void TemplateTable::if_acmp(Condition cc) {
2331   transition(atos, vtos);
2332   // assume branch is more often taken than not (loops use backward branches)
2333   Label not_taken;
 2334   __ pop_ptr(rdx);
2335   __ cmpoop(rdx, rax);
 2336   __ jcc(j_not(cc), not_taken);
2337   branch(false, false);
2338   __ bind(not_taken);
 2339   __ profile_not_taken_branch(rax);
2340 }
2341 
2342 void TemplateTable::ret() {
2343   transition(vtos, vtos);
2344   locals_index(rbx);
2345   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2346   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2347   __ profile_ret(rbx, rcx);
2348   __ get_method(rax);
2349   __ movptr(rbcp, Address(rax, Method::const_offset()));
2350   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2351                       ConstMethod::codes_offset()));
2352   __ dispatch_next(vtos, 0, true);
2353 }
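
The lea above folds the whole bcp reconstruction into one instruction: the return bci stored in the local is added to the method's bytecode base (the ConstMethod* plus ConstMethod::codes_offset()). As plain arithmetic:

    // Sketch: rebuilding bcp from a stored bci. const_method and codes_offset
    // are stand-ins for Method::const() and ConstMethod::codes_offset().
    #include <cstddef>
    #include <cstdint>

    static uint8_t* bcp_for_bci_sketch(uint8_t* const_method,
                                       size_t codes_offset, intptr_t bci) {
      return const_method + codes_offset + bci;
    }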
2354 
2355 void TemplateTable::wide_ret() {
2356   transition(vtos, vtos);
2357   locals_index_wide(rbx);
2358   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2359   __ profile_ret(rbx, rcx);

2589     const Register thread = rdi;
2590     __ get_thread(thread);
2591     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2592 #endif
2593     __ jcc(Assembler::zero, no_safepoint);
2594     __ push(state);
2595     __ push_cont_fastpath();
2596     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2597                                        InterpreterRuntime::at_safepoint));
2598     __ pop_cont_fastpath();
2599     __ pop(state);
2600     __ bind(no_safepoint);
2601   }
2602 
2603   // Narrow result if state is itos but result type is smaller.
2604   // Need to narrow in the return bytecode rather than in generate_return_entry
2605   // since compiled code callers expect the result to already be narrowed.
2606   if (state == itos) {
2607     __ narrow(rax);
2608   }
 2609   __ remove_activation(state, rbcp);
2610 
2611   __ jmp(rbcp);
2612 }
2613 
2614 // ----------------------------------------------------------------------------
 2615 // Volatile variables demand their effects be made known to all CPUs
2616 // in order.  Store buffers on most chips allow reads & writes to
2617 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2618 // without some kind of memory barrier (i.e., it's not sufficient that
2619 // the interpreter does not reorder volatile references, the hardware
2620 // also must not reorder them).
2621 //
2622 // According to the new Java Memory Model (JMM):
 2623 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2624 //     writes act as acquire & release, so:
2625 // (2) A read cannot let unrelated NON-volatile memory refs that
2626 //     happen after the read float up to before the read.  It's OK for
2627 //     non-volatile memory refs that happen before the volatile read to
2628 //     float down below it.
 2629 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
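
On x86 the hardware already preserves every ordering except StoreLoad, which is why the volatile paths below only emit a barrier after volatile stores. A sketch of the one ordering that needs help, in portable C++ (std::atomic stand-ins, not interpreter code):

    // Sketch: a volatile write followed by a volatile read needs a StoreLoad
    // fence on x86 (mfence or a lock'ed instruction); the other orderings
    // come for free from the hardware memory model.
    #include <atomic>

    std::atomic<int> data{0}, flag{0};

    int store_then_load_sketch() {
      data.store(1, std::memory_order_release);             // the volatile write
      std::atomic_thread_fence(std::memory_order_seq_cst);  // StoreLoad barrier
      return flag.load(std::memory_order_acquire);          // later volatile read
    }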

2855     __ get_cache_and_index_at_bcp(cache, index, 1);
2856     __ bind(L1);
2857   }
2858 }
2859 
2860 void TemplateTable::pop_and_check_object(Register r) {
2861   __ pop_ptr(r);
2862   __ null_check(r);  // for field access must check obj.
2863   __ verify_oop(r);
2864 }
2865 
2866 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2867   transition(vtos, vtos);
2868 
2869   const Register cache = rcx;
2870   const Register index = rdx;
2871   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2872   const Register off   = rbx;
2873   const Register flags = rax;
 2874   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2875 
2876   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2877   jvmti_post_field_access(cache, index, is_static, false);
2878   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2879 
2880   if (!is_static) pop_and_check_object(obj);
2881 
2882   const Address field(obj, off, Address::times_1, 0*wordSize);
2883 
 2884   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2885 
2886   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
 2887   // Make sure we don't need to mask the flags register after the above shift
2888   assert(btos == 0, "change code, btos != 0");
2889 
2890   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2891 
2892   __ jcc(Assembler::notZero, notByte);
 2893   // btos
2894   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2895   __ push(btos);
2896   // Rewrite bytecode to be faster
2897   if (!is_static && rc == may_rewrite) {
2898     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2899   }
2900   __ jmp(Done);
2901 
 2902   __ bind(notByte);
2903   __ cmpl(flags, ztos);
2904   __ jcc(Assembler::notEqual, notBool);
2905 
2906   // ztos (same code as btos)
2907   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2908   __ push(ztos);
2909   // Rewrite bytecode to be faster
2910   if (!is_static && rc == may_rewrite) {
2911     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2912     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2913   }
2914   __ jmp(Done);
2915 
2916   __ bind(notBool);
2917   __ cmpl(flags, atos);
2918   __ jcc(Assembler::notEqual, notObj);
2919   // atos
2920   do_oop_load(_masm, field, rax);
2921   __ push(atos);
2922   if (!is_static && rc == may_rewrite) {
 2923     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2924   }
2925   __ jmp(Done);
2926 
 2927   __ bind(notObj);
2928   __ cmpl(flags, itos);
2929   __ jcc(Assembler::notEqual, notInt);
2930   // itos
2931   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2932   __ push(itos);
2933   // Rewrite bytecode to be faster
2934   if (!is_static && rc == may_rewrite) {
2935     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2936   }
2937   __ jmp(Done);
2938 
2939   __ bind(notInt);
2940   __ cmpl(flags, ctos);
2941   __ jcc(Assembler::notEqual, notChar);
2942   // ctos
2943   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2944   __ push(ctos);
2945   // Rewrite bytecode to be faster
2946   if (!is_static && rc == may_rewrite) {
2947     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3007 #endif
3008 
3009   __ bind(Done);
3010   // [jk] not needed currently
3011   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3012   //                                              Assembler::LoadStore));
3013 }
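
The branch ladder above dispatches on the tos state packed into the cpCache flags word: shift it down, mask it, and compare against each state in turn; btos == 0 lets the first test collapse into the jcc(notZero, notByte). The extraction, sketched (shift and mask are illustrative stand-ins for the ConstantPoolCacheEntry constants):

    // Sketch of tos-state extraction from the packed flags word.
    enum TosStateSketch { btos = 0, ztos, ctos, stos, itos, ltos, ftos, dtos, atos };

    static TosStateSketch extract_tos_sketch(unsigned flags,
                                             unsigned shift, unsigned mask) {
      return TosStateSketch((flags >> shift) & mask);
    }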
3014 
3015 void TemplateTable::getfield(int byte_no) {
3016   getfield_or_static(byte_no, false);
3017 }
3018 
3019 void TemplateTable::nofast_getfield(int byte_no) {
3020   getfield_or_static(byte_no, false, may_not_rewrite);
3021 }
3022 
3023 void TemplateTable::getstatic(int byte_no) {
3024   getfield_or_static(byte_no, true);
3025 }
 3026 
3027 
3028 // The registers cache and index expected to be set before call.
3029 // The function may destroy various registers, just not the cache and index registers.
3030 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3031 
3032   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3033   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3034   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3035   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3036 
3037   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3038 
3039   if (JvmtiExport::can_post_field_modification()) {
3040     // Check to see if a field modification watch has been set before
3041     // we take the time to call into the VM.
3042     Label L1;
3043     assert_different_registers(cache, index, rax);
3044     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3045     __ testl(rax, rax);
3046     __ jcc(Assembler::zero, L1);

3102     // c_rarg1: object pointer set up above (null if static)
3103     // c_rarg2: cache entry pointer
3104     // c_rarg3: jvalue object on the stack
3105     __ call_VM(noreg,
3106                CAST_FROM_FN_PTR(address,
3107                                 InterpreterRuntime::post_field_modification),
3108                RBX, robj, RCX);
3109     __ get_cache_and_index_at_bcp(cache, index, 1);
3110     __ bind(L1);
3111   }
3112 }
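
Both JVMTI post helpers share the same cheap guard: load a global watch counter and skip the VM call entirely when it is zero, so the common no-agent case costs one load and one branch. The pattern, sketched (the counter is a stand-in for JvmtiExport::get_field_modification_count_addr()):

    // Sketch of the mov32/testl/jcc(zero, L1) guard around the JVMTI post.
    #include <atomic>

    std::atomic<int> field_modification_watch_count{0};  // illustrative stand-in

    static inline bool should_post_field_modification_sketch() {
      return field_modification_watch_count.load(std::memory_order_relaxed) != 0;
    }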
3113 
3114 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3115   transition(vtos, vtos);
3116 
3117   const Register cache = rcx;
3118   const Register index = rdx;
3119   const Register obj   = rcx;
3120   const Register off   = rbx;
 3121   const Register flags = rax;
3122 
3123   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3124   jvmti_post_field_mod(cache, index, is_static);
3125   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3126 
3127   // [jk] not needed currently
3128   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3129   //                                              Assembler::StoreStore));
3130 
3131   Label notVolatile, Done;
3132   __ movl(rdx, flags);
3133   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3134   __ andl(rdx, 0x1);
3135 
3136   // Check for volatile store
 3137   __ testl(rdx, rdx);
3138   __ jcc(Assembler::zero, notVolatile);
3139 
3140   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3141   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3142                                                Assembler::StoreStore));
3143   __ jmp(Done);
3144   __ bind(notVolatile);
3145 
3146   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3147 
3148   __ bind(Done);
3149 }
3150 
3151 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3152                                               Register obj, Register off, Register flags) {
3153 
3154   // field addresses
3155   const Address field(obj, off, Address::times_1, 0*wordSize);
3156   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3157 
3158   Label notByte, notBool, notInt, notShort, notChar,
3159         notLong, notFloat, notObj;
3160   Label Done;
3161 
3162   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3163 
3164   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3165 
3166   assert(btos == 0, "change code, btos != 0");
3167   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3168   __ jcc(Assembler::notZero, notByte);
3169 
3170   // btos
3171   {
3172     __ pop(btos);
3173     if (!is_static) pop_and_check_object(obj);
3174     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3175     if (!is_static && rc == may_rewrite) {
3176       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3177     }
3178     __ jmp(Done);
3179   }

3182   __ cmpl(flags, ztos);
3183   __ jcc(Assembler::notEqual, notBool);
3184 
3185   // ztos
3186   {
3187     __ pop(ztos);
3188     if (!is_static) pop_and_check_object(obj);
3189     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3190     if (!is_static && rc == may_rewrite) {
3191       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3192     }
3193     __ jmp(Done);
3194   }
3195 
3196   __ bind(notBool);
3197   __ cmpl(flags, atos);
3198   __ jcc(Assembler::notEqual, notObj);
3199 
3200   // atos
3201   {
3202     __ pop(atos);
3203     if (!is_static) pop_and_check_object(obj);
3204     // Store into the field
3205     do_oop_store(_masm, field, rax);
3206     if (!is_static && rc == may_rewrite) {
 3207       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3208     }
3209     __ jmp(Done);
3210   }
3211 
3212   __ bind(notObj);
3213   __ cmpl(flags, itos);
3214   __ jcc(Assembler::notEqual, notInt);
3215 
3216   // itos
3217   {
3218     __ pop(itos);
3219     if (!is_static) pop_and_check_object(obj);
3220     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3221     if (!is_static && rc == may_rewrite) {
3222       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3223     }
3224     __ jmp(Done);
3225   }
3226 
3227   __ bind(notInt);
3228   __ cmpl(flags, ctos);
3229   __ jcc(Assembler::notEqual, notChar);

3328 }
3329 
3330 void TemplateTable::jvmti_post_fast_field_mod() {
3331 
3332   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3333 
3334   if (JvmtiExport::can_post_field_modification()) {
3335     // Check to see if a field modification watch has been set before
3336     // we take the time to call into the VM.
3337     Label L2;
3338     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3339     __ testl(scratch, scratch);
3340     __ jcc(Assembler::zero, L2);
3341     __ pop_ptr(rbx);                  // copy the object pointer from tos
3342     __ verify_oop(rbx);
3343     __ push_ptr(rbx);                 // put the object pointer back on tos
3344     // Save tos values before call_VM() clobbers them. Since we have
3345     // to do it for every data type, we use the saved values as the
3346     // jvalue object.
 3347     switch (bytecode()) {          // load values into the jvalue object
3348     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3349     case Bytecodes::_fast_bputfield: // fall through
3350     case Bytecodes::_fast_zputfield: // fall through
3351     case Bytecodes::_fast_sputfield: // fall through
3352     case Bytecodes::_fast_cputfield: // fall through
3353     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3354     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3355     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3356     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3357 
3358     default:
3359       ShouldNotReachHere();
3360     }
3361     __ mov(scratch, rsp);             // points to jvalue on the stack
3362     // access constant pool cache entry
3363     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3364     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3365     __ verify_oop(rbx);
3366     // rbx: object pointer copied above
3367     // c_rarg2: cache entry pointer
3368     // c_rarg3: jvalue object on the stack
3369     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3370     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3371 
 3372     switch (bytecode()) {             // restore tos values
3373     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3374     case Bytecodes::_fast_bputfield: // fall through
3375     case Bytecodes::_fast_zputfield: // fall through
3376     case Bytecodes::_fast_sputfield: // fall through
3377     case Bytecodes::_fast_cputfield: // fall through
3378     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3379     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3380     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3381     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3382     default: break;
3383     }
3384     __ bind(L2);
3385   }
3386 }
3387 
3388 void TemplateTable::fast_storefield(TosState state) {
3389   transition(state, vtos);
3390 
3391   ByteSize base = ConstantPoolCache::base_offset();
3392 
3393   jvmti_post_fast_field_mod();
3394 
3395   // access constant pool cache
3396   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3397 
3398   // test for volatile with rdx but rdx is tos register for lputfield.
3399   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3400                        in_bytes(base +
3401                                 ConstantPoolCacheEntry::flags_offset())));
3402 
3403   // replace index with field offset from cache entry
3404   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3405                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3406 
3407   // [jk] not needed currently
3408   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3409   //                                              Assembler::StoreStore));
3410 
 3411   Label notVolatile, Done;
3412   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3413   __ andl(rdx, 0x1);
3414 
3415   // Get object from stack
3416   pop_and_check_object(rcx);
3417 
3418   // field address
3419   const Address field(rcx, rbx, Address::times_1);
3420 
3421   // Check for volatile store
3422   __ testl(rdx, rdx);
3423   __ jcc(Assembler::zero, notVolatile);
3424 
 3425   fast_storefield_helper(field, rax);
3426   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3427                                                Assembler::StoreStore));
3428   __ jmp(Done);
3429   __ bind(notVolatile);
3430 
 3431   fast_storefield_helper(field, rax);
3432 
3433   __ bind(Done);
3434 }
3435 
3436 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3437 
3438   // access field
 3439   switch (bytecode()) {
3440   case Bytecodes::_fast_aputfield:
 3441     do_oop_store(_masm, field, rax);
3442     break;
3443   case Bytecodes::_fast_lputfield:
3444 #ifdef _LP64
3445     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3446 #else
3447   __ stop("should not be rewritten");
3448 #endif
3449     break;
3450   case Bytecodes::_fast_iputfield:
3451     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3452     break;
3453   case Bytecodes::_fast_zputfield:
3454     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3455     break;
3456   case Bytecodes::_fast_bputfield:
3457     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3458     break;
3459   case Bytecodes::_fast_sputfield:
3460     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3461     break;

3491     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3492     LP64_ONLY(__ mov(c_rarg1, rax));
3493     // c_rarg1: object pointer copied above
3494     // c_rarg2: cache entry pointer
3495     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3496     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3497     __ pop_ptr(rax); // restore object pointer
3498     __ bind(L1);
3499   }
3500 
3501   // access constant pool cache
3502   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3503   // replace index with field offset from cache entry
3504   // [jk] not needed currently
3505   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3506   //                      in_bytes(ConstantPoolCache::base_offset() +
3507   //                               ConstantPoolCacheEntry::flags_offset())));
3508   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3509   // __ andl(rdx, 0x1);
3510   //
3511   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3512                          in_bytes(ConstantPoolCache::base_offset() +
3513                                   ConstantPoolCacheEntry::f2_offset())));
3514 
3515   // rax: object
3516   __ verify_oop(rax);
3517   __ null_check(rax);
3518   Address field(rax, rbx, Address::times_1);
3519 
3520   // access field
 3521   switch (bytecode()) {
3522   case Bytecodes::_fast_agetfield:
3523     do_oop_load(_masm, field, rax);
3524     __ verify_oop(rax);
3525     break;
3526   case Bytecodes::_fast_lgetfield:
3527 #ifdef _LP64
3528     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3529 #else
3530   __ stop("should not be rewritten");
3531 #endif
3532     break;
3533   case Bytecodes::_fast_igetfield:
3534     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3535     break;
3536   case Bytecodes::_fast_bgetfield:
3537     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3538     break;
3539   case Bytecodes::_fast_sgetfield:
3540     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3541     break;

3967 
3968   // Note:  rax_callsite is already pushed by prepare_invoke
3969 
3970   // %%% should make a type profile for any invokedynamic that takes a ref argument
3971   // profile this call
3972   __ profile_call(rbcp);
3973   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3974 
3975   __ verify_oop(rax_callsite);
3976 
3977   __ jump_from_interpreted(rbx_method, rdx);
3978 }
3979 
3980 //-----------------------------------------------------------------------------
3981 // Allocation
3982 
3983 void TemplateTable::_new() {
3984   transition(vtos, atos);
3985   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3986   Label slow_case;
3987   Label slow_case_no_pop;
3988   Label done;
3989   Label initialize_header;
3990 
3991   __ get_cpool_and_tags(rcx, rax);
3992 
3993   // Make sure the class we're about to instantiate has been resolved.
 3994   // This is done before loading the InstanceKlass to be consistent with
 3995   // the order in which the constant pool is updated (see ConstantPool::klass_at_put).
3996   const int tags_offset = Array<u1>::base_offset_in_bytes();
3997   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3998   __ jcc(Assembler::notEqual, slow_case_no_pop);
3999 
4000   // get InstanceKlass
4001   __ load_resolved_klass_at_index(rcx, rcx, rdx);
 4002   __ push(rcx);  // save the klass for initializing the object header
4003 
 4004   // make sure the klass is fully initialized and
 4005   // doesn't have a finalizer
4006   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4007   __ jcc(Assembler::notEqual, slow_case);
4008 
4009   // get instance_size in InstanceKlass (scaled to a count of bytes)
4010   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4011   // test to see if it has a finalizer or is malformed in some way
4012   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4013   __ jcc(Assembler::notZero, slow_case);
4014 
4015   // Allocate the instance:
4016   //  If TLAB is enabled:
4017   //    Try to allocate in the TLAB.
4018   //    If fails, go to the slow path.
4019   //    Initialize the allocation.
4020   //    Exit.
4021   //
4022   //  Go to slow path.
4023 
 4024   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4025 
4026   if (UseTLAB) {
4027     NOT_LP64(__ get_thread(thread);)
4028     __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4029     if (ZeroTLAB) {
4030       // the fields have been already cleared
4031       __ jmp(initialize_header);
4032     }
4033 
4034     // The object is initialized before the header.  If the object size is
4035     // zero, go directly to the header initialization.
4036     __ decrement(rdx, sizeof(oopDesc));
4037     __ jcc(Assembler::zero, initialize_header);
4038 
4039     // Initialize topmost object field, divide rdx by 8, check if odd and
4040     // test if zero.
4041     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
4042     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4043 
4044     // rdx must have been multiple of 8
4045 #ifdef ASSERT
4046     // make sure rdx was multiple of 8
4047     Label L;
4048     // Ignore partial flag stall after shrl() since it is debug VM
4049     __ jcc(Assembler::carryClear, L);
4050     __ stop("object size is not multiple of 2 - adjust this code");
4051     __ bind(L);
4052     // rdx must be > 0, no extra check needed here
4053 #endif
4054 
4055     // initialize remaining object fields: rdx was a multiple of 8
4056     { Label loop;
4057     __ bind(loop);
4058     __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4059     NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4060     __ decrement(rdx);
4061     __ jcc(Assembler::notZero, loop);
4062     }
4063 
4064     // initialize object header only.
4065     __ bind(initialize_header);
4066     __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4067               (intptr_t)markWord::prototype().value()); // header
4068     __ pop(rcx);   // get saved klass back in the register.
4069 #ifdef _LP64
4070     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4071     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4072 #endif
4073     __ store_klass(rax, rcx, rscratch1);  // klass
4074 
4075     {
4076       SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0, rscratch1);
4077       // Trigger dtrace event for fastpath
4078       __ push(atos);
4079       __ call_VM_leaf(
4080            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4081       __ pop(atos);
4082     }
4083 
4084     __ jmp(done);
 4085   }
4086 
4087   // slow case
4088   __ bind(slow_case);
4089   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4090   __ bind(slow_case_no_pop);
4091 
4092   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4093   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4094 
4095   __ get_constant_pool(rarg1);
4096   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4097   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
 4098   __ verify_oop(rax);
4099 
4100   // continue
 4101   __ bind(done);
4102 }
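
The fast path of _new above is a textbook TLAB bump-pointer allocation: carve size bytes out of the thread-local buffer, fall back to the shared slow path on overflow, then zero the fields and write the mark word and klass. The core step, sketched (a stand-in, not the real ThreadLocalAllocBuffer):

    // Sketch of tlab_allocate's fast path; fields are illustrative.
    #include <cstddef>

    struct TlabSketch {
      char* top;   // next free byte in the buffer
      char* end;   // buffer limit

      void* allocate(size_t size_in_bytes) {
        if (size_in_bytes > size_t(end - top)) return nullptr;  // -> slow_case
        void* obj = top;
        top += size_in_bytes;  // bump; caller zeroes fields, writes mark + klass
        return obj;
      }
    };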
4103 
4104 void TemplateTable::newarray() {
4105   transition(itos, atos);
4106   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4107   __ load_unsigned_byte(rarg1, at_bcp(1));
4108   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4109           rarg1, rax);
4110 }
4111 
4112 void TemplateTable::anewarray() {
4113   transition(itos, atos);
4114 
4115   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4116   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4117 
4118   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4119   __ get_constant_pool(rarg1);
4120   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4121           rarg1, rarg2, rax);
4122 }
4123 
4124 void TemplateTable::arraylength() {
4125   transition(atos, itos);
4126   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4127 }
4128 
4129 void TemplateTable::checkcast() {
4130   transition(atos, atos);
4131   Label done, is_null, ok_is_subtype, quicked, resolved;
4132   __ testptr(rax, rax); // object is in rax
4133   __ jcc(Assembler::zero, is_null);
4134 
4135   // Get cpool & tags index
4136   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4137   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4138   // See if bytecode has already been quicked
4139   __ cmpb(Address(rdx, rbx,
4140                   Address::times_1,
4141                   Array<u1>::base_offset_in_bytes()),
 4142           JVM_CONSTANT_Class);
4143   __ jcc(Assembler::equal, quicked);
4144   __ push(atos); // save receiver for result, and for GC
4145   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4146 
4147   // vm_result_2 has metadata result
4148 #ifndef _LP64
4149   // borrow rdi from locals
4150   __ get_thread(rdi);
4151   __ get_vm_result_2(rax, rdi);
4152   __ restore_locals();
4153 #else
4154   __ get_vm_result_2(rax, r15_thread);
4155 #endif
4156 
4157   __ pop_ptr(rdx); // restore receiver
4158   __ jmpb(resolved);
4159 
4160   // Get superklass in rax and subklass in rbx
4161   __ bind(quicked);
4162   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4163   __ load_resolved_klass_at_index(rax, rcx, rbx);
4164 
4165   __ bind(resolved);
4166   __ load_klass(rbx, rdx, rscratch1);
4167 
4168   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4169   // Superklass in rax.  Subklass in rbx.
4170   __ gen_subtype_check(rbx, ok_is_subtype);
4171 
4172   // Come here on failure
4173   __ push_ptr(rdx);
4174   // object is at TOS
4175   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4176 
4177   // Come here on success
4178   __ bind(ok_is_subtype);
 4179   __ mov(rax, rdx); // Restore the object (saved in rdx) back into rax
4180 
4181   // Collect counts on whether this check-cast sees nulls a lot or not.
4182   if (ProfileInterpreter) {
4183     __ jmp(done);
4184     __ bind(is_null);
4185     __ profile_null_seen(rcx);
4186   } else {
4187     __ bind(is_null);   // same as 'done'
 4188   }
4189   __ bind(done);
4190 }
4191 
4192 void TemplateTable::instanceof() {
4193   transition(atos, itos);
4194   Label done, is_null, ok_is_subtype, quicked, resolved;
4195   __ testptr(rax, rax);
4196   __ jcc(Assembler::zero, is_null);
4197 
4198   // Get cpool & tags index
4199   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4200   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4201   // See if bytecode has already been quicked
4202   __ cmpb(Address(rdx, rbx,
4203                   Address::times_1,
4204                   Array<u1>::base_offset_in_bytes()),
 4205           JVM_CONSTANT_Class);
4206   __ jcc(Assembler::equal, quicked);
4207 
4208   __ push(atos); // save receiver for result, and for GC
4209   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4210   // vm_result_2 has metadata result
4211 
4212 #ifndef _LP64
4213   // borrow rdi from locals
4214   __ get_thread(rdi);
4215   __ get_vm_result_2(rax, rdi);
4216   __ restore_locals();
4217 #else
4218   __ get_vm_result_2(rax, r15_thread);
4219 #endif
4220 
4221   __ pop_ptr(rdx); // restore receiver
4222   __ verify_oop(rdx);
4223   __ load_klass(rdx, rdx, rscratch1);
4224   __ jmpb(resolved);
4225 

4237   // Come here on failure
4238   __ xorl(rax, rax);
4239   __ jmpb(done);
4240   // Come here on success
4241   __ bind(ok_is_subtype);
4242   __ movl(rax, 1);
4243 
4244   // Collect counts on whether this test sees nulls a lot or not.
4245   if (ProfileInterpreter) {
4246     __ jmp(done);
4247     __ bind(is_null);
4248     __ profile_null_seen(rcx);
4249   } else {
4250     __ bind(is_null);   // same as 'done'
4251   }
4252   __ bind(done);
4253   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4254   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4255 }
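
checkcast and instanceof share the quickening and subtype machinery but differ on null and on failure: checkcast passes null through unchanged and throws ClassCastException on a failing non-null cast, while instanceof answers 0 for null and 0/1 otherwise (the xorl/movl above). A toy model of the two results (stand-in types as in the aastore sketch earlier; each sketch is self-contained):

    // Toy model of the results of the two bytecodes.
    #include <stdexcept>

    struct Klass { const Klass* super; };
    struct Obj   { const Klass* klass; };

    static bool is_subtype_of(const Klass* s, const Klass* t) {
      for (; s != nullptr; s = s->super)
        if (s == t) return true;
      return false;
    }

    static Obj* checkcast_sketch(Obj* obj, const Klass* target) {
      if (obj != nullptr && !is_subtype_of(obj->klass, target))
        throw std::runtime_error("ClassCastException");
      return obj;                 // null always passes the cast
    }

    static int instanceof_sketch(Obj* obj, const Klass* target) {
      return obj != nullptr && is_subtype_of(obj->klass, target);  // null -> 0
    }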
4256 
4257 
4258 //----------------------------------------------------------------------------------------------------
4259 // Breakpoints
4260 void TemplateTable::_breakpoint() {
 4261   // Note: we get here even when single stepping, because jbug insists
 4262   // on setting breakpoints at every bytecode, including when the
 4263   // program is in single-step mode.
4264 
4265   transition(vtos, vtos);
4266 
4267   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4268 
4269   // get the unpatched byte code
4270   __ get_method(rarg);
4271   __ call_VM(noreg,
4272              CAST_FROM_FN_PTR(address,
4273                               InterpreterRuntime::get_original_bytecode_at),
4274              rarg, rbcp);
4275   __ mov(rbx, rax);  // why?
4276 
4277   // post the breakpoint event

4299 // Note: monitorenter & exit are symmetric routines; which is reflected
4300 //       in the assembly code structure as well
4301 //
4302 // Stack layout:
4303 //
4304 // [expressions  ] <--- rsp               = expression stack top
4305 // ..
4306 // [expressions  ]
4307 // [monitor entry] <--- monitor block top = expression stack bot
4308 // ..
4309 // [monitor entry]
4310 // [frame data   ] <--- monitor block bot
4311 // ...
4312 // [saved rbp    ] <--- rbp
4313 void TemplateTable::monitorenter() {
4314   transition(atos, vtos);
4315 
4316   // check for null object
4317   __ null_check(rax);
 4318 
4319   const Address monitor_block_top(
4320         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4321   const Address monitor_block_bot(
4322         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4323   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4324 
4325   Label allocated;
4326 
4327   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4328   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4329   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4330 
4331   // initialize entry pointer
4332   __ xorl(rmon, rmon); // points to free slot or null
4333 
4334   // find a free slot in the monitor block (result in rmon)
4335   {
4336     Label entry, loop, exit;
4337     __ movptr(rtop, monitor_block_top); // points to current entry,
4338                                         // starting with top-most entry

4388   // rmon: points to monitor entry
4389   __ bind(allocated);
4390 
4391   // Increment bcp to point to the next bytecode, so exception
4392   // handling for async. exceptions work correctly.
4393   // The object has already been popped from the stack, so the
4394   // expression stack looks correct.
4395   __ increment(rbcp);
4396 
4397   // store object
4398   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4399   __ lock_object(rmon);
4400 
4401   // check to make sure this monitor doesn't cause stack overflow after locking
4402   __ save_bcp();  // in case of exception
4403   __ generate_stack_overflow_check(0);
4404 
4405   // The bcp has already been incremented. Just need to dispatch to
4406   // next instruction.
4407   __ dispatch_next(vtos);





4408 }
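
The elided slot search walks the monitor block from monitor_block_top toward monitor_block_bot, remembering an entry whose obj is null and stopping early if the object already owns an entry. Roughly, as a sketch (BasicObjectLock reduced to the one field the scan inspects):

    // Sketch of the free-slot scan in monitorenter.
    struct BasicObjectLockSketch { void* obj; };  // nullptr marks a free slot

    static BasicObjectLockSketch*
    find_monitor_slot_sketch(BasicObjectLockSketch* top,
                             BasicObjectLockSketch* bot, void* lock_obj) {
      BasicObjectLockSketch* free_slot = nullptr;
      for (BasicObjectLockSketch* cur = top; cur < bot; ++cur) {
        if (cur->obj == nullptr)  free_slot = cur;  // remember an unused entry
        if (cur->obj == lock_obj) return cur;       // object already has a slot
      }
      return free_slot;  // nullptr => caller must grow the monitor block
    }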
4409 
4410 void TemplateTable::monitorexit() {
4411   transition(atos, vtos);
4412 
4413   // check for null object
4414   __ null_check(rax);
 4415 
4416   const Address monitor_block_top(
4417         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4418   const Address monitor_block_bot(
4419         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4420   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4421 
4422   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4423   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4424 
4425   Label found;
4426 
4427   // find matching slot
4428   {
4429     Label entry, loop;
4430     __ movptr(rtop, monitor_block_top); // points to current entry,
4431                                         // starting with top-most entry
4432     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4433                                         // of monitor block
4434     __ jmpb(entry);
 4435 

src/hotspot/cpu/x86/templateTable_x86.cpp (new version)

  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodData.hpp"
  37 #include "oops/objArrayKlass.hpp"
  38 #include "oops/oop.inline.hpp"
  39 #include "oops/inlineKlass.hpp"
  40 #include "oops/resolvedIndyEntry.hpp"
  41 #include "prims/jvmtiExport.hpp"
  42 #include "prims/methodHandles.hpp"
  43 #include "runtime/frame.inline.hpp"
  44 #include "runtime/safepointMechanism.hpp"
  45 #include "runtime/sharedRuntime.hpp"
  46 #include "runtime/stubRoutines.hpp"
  47 #include "runtime/synchronizer.hpp"
  48 #include "utilities/macros.hpp"
  49 
  50 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  51 
  52 // Global Register Names
  53 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  54 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  55 
  56 // Address Computation: local variables
  57 static inline Address iaddress(int n) {
  58   return Address(rlocals, Interpreter::local_offset_in_bytes(n));
  59 }

 164 static void do_oop_load(InterpreterMacroAssembler* _masm,
 165                         Address src,
 166                         Register dst,
 167                         DecoratorSet decorators = 0) {
 168   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 169 }
 170 
 171 Address TemplateTable::at_bcp(int offset) {
 172   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 173   return Address(rbcp, offset);
 174 }
 175 
 176 
 177 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 178                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 179                                    int byte_no) {
 180   if (!RewriteBytecodes)  return;
 181   Label L_patch_done;
 182 
 183   switch (bc) {
 184   case Bytecodes::_fast_qputfield:
 185   case Bytecodes::_fast_aputfield:
 186   case Bytecodes::_fast_bputfield:
 187   case Bytecodes::_fast_zputfield:
 188   case Bytecodes::_fast_cputfield:
 189   case Bytecodes::_fast_dputfield:
 190   case Bytecodes::_fast_fputfield:
 191   case Bytecodes::_fast_iputfield:
 192   case Bytecodes::_fast_lputfield:
 193   case Bytecodes::_fast_sputfield:
 194     {
 195       // We skip bytecode quickening for putfield instructions when
 196       // the put_code written to the constant pool cache is zero.
 197       // This is required so that every execution of this instruction
 198       // calls out to InterpreterRuntime::resolve_get_put to do
 199       // additional, required work.
 200       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 201       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 202       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 203       __ movl(bc_reg, bc);
 204       __ cmpl(temp_reg, (int) 0);

 357   __ sarl(rax, 16);
 358 }
 359 
 360 void TemplateTable::ldc(LdcType type) {
 361   transition(vtos, vtos);
 362   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 363   Label call_ldc, notFloat, notClass, notInt, Done;
 364 
 365   if (is_ldc_wide(type)) {
 366     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 367   } else {
 368     __ load_unsigned_byte(rbx, at_bcp(1));
 369   }
 370 
 371   __ get_cpool_and_tags(rcx, rax);
 372   const int base_offset = ConstantPool::header_size() * wordSize;
 373   const int tags_offset = Array<u1>::base_offset_in_bytes();
 374 
 375   // get type
 376   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));
 377   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
 378 
 379   // unresolved class - get the resolved class
 380   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 381   __ jccb(Assembler::equal, call_ldc);
 382 
 383   // unresolved class in error state - call into runtime to throw the error
 384   // from the first resolution attempt
 385   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 386   __ jccb(Assembler::equal, call_ldc);
 387 
 388   // resolved class - need to call vm to get java mirror of the class
 389   __ cmpl(rdx, JVM_CONSTANT_Class);
 390   __ jcc(Assembler::notEqual, notClass);
 391 
 392   __ bind(call_ldc);
 393 
 394   __ movl(rarg, is_ldc_wide(type) ? 1 : 0);
 395   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 396 
 397   __ push(atos);

 807                     Address(rdx, rax,
 808                             Address::times_4,
 809                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 810                     noreg, noreg);
 811 }
 812 
 813 void TemplateTable::daload() {
 814   transition(itos, dtos);
 815   // rax: index
 816   // rdx: array
 817   index_check(rdx, rax); // kills rbx
 818   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 819                     Address(rdx, rax,
 820                             Address::times_8,
 821                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 822                     noreg, noreg);
 823 }
 824 
 825 void TemplateTable::aaload() {
 826   transition(itos, atos);
 827   Register array = rdx;
 828   Register index = rax;
 829 
 830   index_check(array, index); // kills rbx
 831   __ profile_array(rbx, array, rcx);
 832   if (UseFlatArray) {
 833     Label is_flat_array, done;
 834     __ test_flat_array_oop(array, rbx, is_flat_array);
 835     do_oop_load(_masm,
 836                 Address(array, index,
 837                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 838                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 839                 rax,
 840                 IS_ARRAY);
 841     __ jmp(done);
 842     __ bind(is_flat_array);
 843     __ read_flat_element(array, index, rbx, rcx, rax);
 844     __ bind(done);
 845   } else {
 846     do_oop_load(_masm,
 847                 Address(array, index,
 848                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 849                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 850                 rax,
 851                 IS_ARRAY);
 852   }
 853   __ profile_element(rbx, rax, rcx);
 854 }
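
In this Valhalla version, aaload first profiles the array and then branches on its layout: a reference array element is an ordinary (possibly compressed) oop load, while a flat array keeps the value object's fields inline, so read_flat_element has to materialize a heap value from the payload. The shape of that dispatch, sketched with illustrative stand-ins:

    // Sketch of the flat-vs-reference split in the Valhalla aaload above.
    #include <cstddef>

    struct FlatPayload { /* inline field storage */ };

    static void* aaload_sketch(bool is_flat_array, void** ref_elems,
                               FlatPayload* flat_elems, size_t index,
                               void* (*materialize)(const FlatPayload*)) {
      if (is_flat_array)
        return materialize(&flat_elems[index]);  // read_flat_element path
      return ref_elems[index];                   // ordinary oop load path
    }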
 855 
 856 void TemplateTable::baload() {
 857   transition(itos, itos);
 858   // rax: index
 859   // rdx: array
 860   index_check(rdx, rax); // kills rbx
 861   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 862                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 863                     noreg, noreg);
 864 }
 865 
 866 void TemplateTable::caload() {
 867   transition(itos, itos);
 868   // rax: index
 869   // rdx: array
 870   index_check(rdx, rax); // kills rbx
 871   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 872                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 873                     noreg, noreg);

1119   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1120                      Address(rdx, rbx, Address::times_4,
1121                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1122                      noreg /* ftos */, noreg, noreg, noreg);
1123 }
1124 
1125 void TemplateTable::dastore() {
1126   transition(dtos, vtos);
1127   __ pop_i(rbx);
1128   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1129   // rbx:  index
1130   // rdx:  array
1131   index_check(rdx, rbx); // prefer index in rbx
1132   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1133                      Address(rdx, rbx, Address::times_8,
1134                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1135                      noreg /* dtos */, noreg, noreg, noreg);
1136 }
1137 
1138 void TemplateTable::aastore() {
1139   Label is_null, is_flat_array, ok_is_subtype, done;
1140   transition(vtos, vtos);
1141   // stack: ..., array, index, value
1142   __ movptr(rax, at_tos());    // value
1143   __ movl(rcx, at_tos_p1()); // index
1144   __ movptr(rdx, at_tos_p2()); // array
1145 
1146   Address element_address(rdx, rcx,
1147                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1148                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1149 
1150   index_check_without_pop(rdx, rcx);     // kills rbx
1151 
1152   __ profile_array(rdi, rdx, rbx);
1153   __ profile_element(rdi, rax, rbx);
1154 
1155   __ testptr(rax, rax);
1156   __ jcc(Assembler::zero, is_null);
1157 
1158   // Move array class to rdi
1159   __ load_klass(rdi, rdx, rscratch1);
1160   if (UseFlatArray) {
1161     __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1162     __ test_flat_array_layout(rbx, is_flat_array);
1163   }
1164 
1165   // Move subklass into rbx
1166   __ load_klass(rbx, rax, rscratch1);
1167   // Move array element superklass into rax
 1168   __ movptr(rax, Address(rdi,
1169                          ObjArrayKlass::element_klass_offset()));
1170 
1171   // Generate subtype check.  Blows rcx, rdi
1172   // Superklass in rax.  Subklass in rbx.
1173   // is "rbx <: rax" ? (value subclass <: array element superclass)
1174   __ gen_subtype_check(rbx, ok_is_subtype, false);
1175 
1176   // Come here on failure
1177   // object is at TOS
1178   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1179 
1180   // Come here on success
1181   __ bind(ok_is_subtype);
1182 
1183   // Get the value we will store
1184   __ movptr(rax, at_tos());
1185   __ movl(rcx, at_tos_p1()); // index
1186   // Now store using the appropriate barrier
1187   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1188   __ jmp(done);
1189 
1190   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1191   __ bind(is_null);
1192   if (EnablePrimitiveClasses) {
1193     Label is_null_into_value_array_npe, store_null;
1194 
1195     // No way to store null in null-free array
1196     __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1197     __ jmp(store_null);
1198 
1199     __ bind(is_null_into_value_array_npe);
1200     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1201 
1202     __ bind(store_null);
1203   }
1204   // Store a null
1205   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1206   __ jmp(done);
1207 
1208   if (UseFlatArray) {
1209     Label is_type_ok;
1210     __ bind(is_flat_array); // Store non-null value to flat
1211 
1212     // Simplistic type check...
1213 
1214     // Profile the not-null value's klass.
1215     __ load_klass(rbx, rax, rscratch1);
1216     // Move element klass into rax
1217     __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1218     // flat value array needs exact type match
1219     // is "rax == rbx" (value subclass == array element superclass)
1220     __ cmpptr(rax, rbx);
1221     __ jccb(Assembler::equal, is_type_ok);
1222 
1223     __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1224 
1225     __ bind(is_type_ok);
1226     // rbx: value's klass
1227     // rdx: array
1228     // rdi: array klass
1229     __ test_klass_is_empty_inline_type(rbx, rax, done);
1230 
1231     // calc dst for copy
1232     __ movl(rax, at_tos_p1()); // index
1233     __ data_for_value_array_index(rdx, rdi, rax, rax);
1234 
1235     // ...and src for copy
1236     __ movptr(rcx, at_tos());  // value
1237     __ data_for_oop(rcx, rcx, rbx);
1238 
1239     __ access_value_copy(IN_HEAP, rcx, rax, rbx);
1240   }
1241   // Pop stack arguments
1242   __ bind(done);
1243   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1244 }
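// Illustrative sketch (not part of the generated template): the checks
// emitted above implement, in rough C++ terms,
//
//   bool aastore_allowed(oop value, objArrayOop array) {
//     if (value == nullptr)                    // null store: only the
//       return !array_is_null_free(array);     // null-free check applies
//     Klass* element = ObjArrayKlass::cast(array->klass())->element_klass();
//     if (array_is_flat(array))                // flat arrays require an
//       return value->klass() == element;      // exact type match
//     return value->klass()->is_subtype_of(element);
//   }
//
// where array_is_null_free and array_is_flat are stand-ins for the mark-word
// and layout-helper tests done inline; a failing check raises
// ArrayStoreException (or NullPointerException for a null stored into a
// null-free array).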
1245 
1246 void TemplateTable::bastore() {
1247   transition(itos, vtos);
1248   __ pop_i(rbx);
1249   // rax: value
1250   // rbx: index
1251   // rdx: array
1252   index_check(rdx, rbx); // prefer index in rbx
1253   // Need to check whether array is boolean or byte
1254   // since both types share the bastore bytecode.
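  // (The layout helper's diffbit is what tells Z[] and B[] apart: a store
  // into a boolean array must be masked to its lowest bit, so e.g. bastore
  // of the int value 2 into a boolean[] stores 0, not 2.)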
1255   __ load_klass(rcx, rdx, rscratch1);
1256   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1257   int diffbit = Klass::layout_helper_boolean_diffbit();
1258   __ testl(rcx, diffbit);
1259   Label L_skip;
1260   __ jccb(Assembler::zero, L_skip);

2389   __ jcc(j_not(cc), not_taken);
2390   branch(false, false);
2391   __ bind(not_taken);
2392   __ profile_not_taken_branch(rax);
2393 }
2394 
2395 void TemplateTable::if_nullcmp(Condition cc) {
2396   transition(atos, vtos);
2397   // assume branch is more often taken than not (loops use backward branches)
2398   Label not_taken;
2399   __ testptr(rax, rax);
2400   __ jcc(j_not(cc), not_taken);
2401   branch(false, false);
2402   __ bind(not_taken);
2403   __ profile_not_taken_branch(rax);
2404 }
2405 
2406 void TemplateTable::if_acmp(Condition cc) {
2407   transition(atos, vtos);
2408   // assume branch is more often taken than not (loops use backward branches)
2409   Label taken, not_taken;
2410   __ pop_ptr(rdx);
2411 
2412   __ profile_acmp(rbx, rdx, rax, rcx);
2413 
2414   const int is_inline_type_mask = markWord::inline_type_pattern;
2415   if (EnableValhalla) {
2416     __ cmpoop(rdx, rax);
2417     __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2418 
2419     // might be substitutable, test if either rax or rdx is null
2420     __ testptr(rax, rax);
2421     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2422     __ testptr(rdx, rdx);
2423     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2424 
2425     // and both are values ?
2426     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2427     __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2428     __ andptr(rbx, is_inline_type_mask);
2429     __ cmpptr(rbx, is_inline_type_mask);
2430     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2431 
2432     // same value klass ?
2433     __ load_metadata(rbx, rdx);
2434     __ load_metadata(rcx, rax);
2435     __ cmpptr(rbx, rcx);
2436     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2437 
2438     // Know both are the same type, let's test for substitutability...
2439     if (cc == equal) {
2440       invoke_is_substitutable(rax, rdx, taken, not_taken);
2441     } else {
2442       invoke_is_substitutable(rax, rdx, not_taken, taken);
2443     }
2444     __ stop("Not reachable");
2445   }
2446 
2447   __ cmpoop(rdx, rax);
2448   __ jcc(j_not(cc), not_taken);
2449   __ bind(taken);
2450   branch(false, false);
2451   __ bind(not_taken);
2452   __ profile_not_taken_branch(rax, true);
2453 }
2454 
2455 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2456                                             Label& is_subst, Label& not_subst) {
2457   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2458   // rax holds the answer (0 means not substitutable); jump to the matching outcome
2459   __ testl(rax, rax);
2460   __ jcc(Assembler::zero, not_subst);
2461   __ jmp(is_subst);
2462 }
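// Illustrative sketch (hypothetical Java-level view of the acmp path above):
// under Valhalla, == on two non-null objects of the same value klass means
// substitutability rather than address identity, e.g.
//
//   value class Point { int x; int y; /* ... */ }
//   Point a = new Point(1, 2);
//   Point b = new Point(1, 2);
//   // a == b is true via is_substitutable, even if a and b are distinct
//   // heap addresses; a null operand short-circuits in the fast path.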
2463 
2464 void TemplateTable::ret() {
2465   transition(vtos, vtos);
2466   locals_index(rbx);
2467   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2468   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2469   __ profile_ret(rbx, rcx);
2470   __ get_method(rax);
2471   __ movptr(rbcp, Address(rax, Method::const_offset()));
2472   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2473                       ConstMethod::codes_offset()));
2474   __ dispatch_next(vtos, 0, true);
2475 }
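// Illustrative sketch: ret is the return half of the legacy jsr/ret pair
// (forbidden in class files of version 51.0 and above), e.g.
//
//   jsr     L_sub         // pushes the return bci
//   ...
//   L_sub:  astore_1      // subroutine saves it in local 1
//           ...
//           ret 1         // the code above reloads that bci from local 1
//                         // and rebuilds rbcp from the ConstMethod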
2476 
2477 void TemplateTable::wide_ret() {
2478   transition(vtos, vtos);
2479   locals_index_wide(rbx);
2480   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2481   __ profile_ret(rbx, rcx);

2711     const Register thread = rdi;
2712     __ get_thread(thread);
2713     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2714 #endif
2715     __ jcc(Assembler::zero, no_safepoint);
2716     __ push(state);
2717     __ push_cont_fastpath();
2718     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2719                                        InterpreterRuntime::at_safepoint));
2720     __ pop_cont_fastpath();
2721     __ pop(state);
2722     __ bind(no_safepoint);
2723   }
2724 
2725   // Narrow result if state is itos but result type is smaller.
2726   // Need to narrow in the return bytecode rather than in generate_return_entry
2727   // since compiled code callers expect the result to already be narrowed.
2728   if (state == itos) {
2729     __ narrow(rax);
2730   }
2731 
2732   __ remove_activation(state, rbcp, true, true, true);
2733 
2734   __ jmp(rbcp);
2735 }
2736 
2737 // ----------------------------------------------------------------------------
2738 // Volatile variables demand their effects be made known to all CPUs
2739 // in order.  Store buffers on most chips allow reads & writes to
2740 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2741 // without some kind of memory barrier (i.e., it's not sufficient that
2742 // the interpreter does not reorder volatile references, the hardware
2743 // also must not reorder them).
2744 //
2745 // According to the new Java Memory Model (JMM):
2746 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2747 //     writes act as acquire & release, so:
2748 // (2) A read cannot let unrelated NON-volatile memory refs that
2749 //     happen after the read float up to before the read.  It's OK for
2750 //     non-volatile memory refs that happen before the volatile read to
2751 //     float down below it.
2752 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

2978     __ get_cache_and_index_at_bcp(cache, index, 1);
2979     __ bind(L1);
2980   }
2981 }
2982 
2983 void TemplateTable::pop_and_check_object(Register r) {
2984   __ pop_ptr(r);
2985   __ null_check(r);  // for field access must check obj.
2986   __ verify_oop(r);
2987 }
2988 
2989 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2990   transition(vtos, vtos);
2991 
2992   const Register cache = rcx;
2993   const Register index = rdx;
2994   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2995   const Register off   = rbx;
2996   const Register flags = rax;
2997   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2998   const Register flags2 = rdx;
2999 
3000   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3001   jvmti_post_field_access(cache, index, is_static, false);
3002   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3003 
3004   const Address field(obj, off, Address::times_1, 0*wordSize);
3005 
3006   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
3007 
3008   if (!is_static) {
3009     __ movptr(rcx, Address(cache, index, Address::times_ptr,
3010                            in_bytes(ConstantPoolCache::base_offset() +
3011                                     ConstantPoolCacheEntry::f1_offset())));
3012   }
3013 
3014   __ movl(flags2, flags);
3015 
3016   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3017   // Make sure we don't need to mask edx after the above shift
3018   assert(btos == 0, "change code, btos != 0");
3019 
3020   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3021 
3022   __ jcc(Assembler::notZero, notByte);
3023   // btos
3024   if (!is_static) pop_and_check_object(obj);
3025   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3026   __ push(btos);
3027   // Rewrite bytecode to be faster
3028   if (!is_static && rc == may_rewrite) {
3029     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3030   }
3031   __ jmp(Done);
3032 
3033   __ bind(notByte);
3034 
3035   __ cmpl(flags, ztos);
3036   __ jcc(Assembler::notEqual, notBool);
3037   if (!is_static) pop_and_check_object(obj);
3038   // ztos (same code as btos)
3039   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3040   __ push(ztos);
3041   // Rewrite bytecode to be faster
3042   if (!is_static && rc == may_rewrite) {
3043     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3044     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3045   }
3046   __ jmp(Done);
3047 
3048   __ bind(notBool);
3049   __ cmpl(flags, atos);
3050   __ jcc(Assembler::notEqual, notObj);
3051   // atos
3052   if (!EnablePrimitiveClasses) {
3053     if (!is_static) pop_and_check_object(obj);
3054     do_oop_load(_masm, field, rax);
3055     __ push(atos);
3056     if (!is_static && rc == may_rewrite) {
3057       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3058     }
3059     __ jmp(Done);
3060   } else {
3061     if (is_static) {
3062       __ load_heap_oop(rax, field);
3063       Label is_null_free_inline_type, uninitialized;
3064       // The code below handles the case where the static field has not been initialized yet
3065       __ test_field_is_null_free_inline_type(flags2, rscratch1, is_null_free_inline_type);
3066         // field is not a null free inline type
3067         __ push(atos);
3068         __ jmp(Done);
3069       // field is a null free inline type, must not return null even if uninitialized
3070       __ bind(is_null_free_inline_type);
3071         __ testptr(rax, rax);
3072         __ jcc(Assembler::zero, uninitialized);
3073           __ push(atos);
3074           __ jmp(Done);
3075         __ bind(uninitialized);
3076           __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3077 #ifdef _LP64
3078           Label slow_case, finish;
3079           __ movptr(rbx, Address(obj, java_lang_Class::klass_offset()));
3080           __ cmpb(Address(rbx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3081           __ jcc(Assembler::notEqual, slow_case);
3082         __ get_default_value_oop(rbx, rscratch1, rax);
3083         __ jmp(finish);
3084         __ bind(slow_case);
3085 #endif // _LP64
3086           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field),
3087                 obj, flags2);
3088 #ifdef _LP64
3089           __ bind(finish);
3090 #endif // _LP64
3091         __ verify_oop(rax);
3092         __ push(atos);
3093         __ jmp(Done);
3094     } else {
3095       Label is_flat, nonnull, is_inline_type, rewrite_inline;
3096       __ test_field_is_null_free_inline_type(flags2, rscratch1, is_inline_type);
3097       // field is not a null free inline type
3098       pop_and_check_object(obj);
3099       __ load_heap_oop(rax, field);
3100       __ push(atos);
3101       if (rc == may_rewrite) {
3102         patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3103       }
3104       __ jmp(Done);
3105       __ bind(is_inline_type);
3106         __ test_field_is_flat(flags2, rscratch1, is_flat);
3107           // field is not flat
3108           __ movptr(rax, rcx);  // small dance required to preserve the klass_holder somewhere
3109           pop_and_check_object(obj);
3110           __ push(rax);
3111           __ load_heap_oop(rax, field);
3112           __ pop(rcx);
3113           __ testptr(rax, rax);
3114           __ jcc(Assembler::notZero, nonnull);
3115             __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3116             __ get_inline_type_field_klass(rcx, flags2, rbx);
3117             __ get_default_value_oop(rbx, rcx, rax);
3118           __ bind(nonnull);
3119           __ verify_oop(rax);
3120           __ push(atos);
3121           __ jmp(rewrite_inline);
3122         __ bind(is_flat);
3123         // field is flat
3124           __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3125           pop_and_check_object(rax);
3126           __ read_flat_field(rcx, flags2, rbx, rax);
3127           __ verify_oop(rax);
3128           __ push(atos);
3129       __ bind(rewrite_inline);
3130       if (rc == may_rewrite) {
3131         patch_bytecode(Bytecodes::_fast_qgetfield, bc, rbx);
3132       }
3133       __ jmp(Done);
3134     }
3135   }
3136 
3137   __ bind(notObj);
3138 
3139   if (!is_static) pop_and_check_object(obj);
3140 
3141   __ cmpl(flags, itos);
3142   __ jcc(Assembler::notEqual, notInt);
3143   // itos
3144   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3145   __ push(itos);
3146   // Rewrite bytecode to be faster
3147   if (!is_static && rc == may_rewrite) {
3148     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3149   }
3150   __ jmp(Done);
3151 
3152   __ bind(notInt);
3153   __ cmpl(flags, ctos);
3154   __ jcc(Assembler::notEqual, notChar);
3155   // ctos
3156   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3157   __ push(ctos);
3158   // Rewrite bytecode to be faster
3159   if (!is_static && rc == may_rewrite) {
3160     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3220 #endif
3221 
3222   __ bind(Done);
3223   // [jk] not needed currently
3224   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3225   //                                              Assembler::LoadStore));
3226 }
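// Illustrative sketch (C-like model, not the emitted code): the typed
// dispatch above amounts to
//
//   int tos = (flags >> ConstantPoolCacheEntry::tos_state_shift)
//             & ConstantPoolCacheEntry::tos_state_mask;
//   switch (tos) {
//     case btos: /* T_BYTE load, may rewrite to _fast_bgetfield */ break;
//     case ztos: /* T_BOOLEAN load, reuses the btos rewrite */     break;
//     case atos: /* oop load, inline-type aware when
//                   EnablePrimitiveClasses is set */               break;
//     case itos: /* T_INT load */                                  break;
//     /* ... ctos, stos, ltos, ftos, dtos ... */
//   }
//
// btos is tested first with a plain jcc(notZero, notByte) because btos == 0.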
3227 
3228 void TemplateTable::getfield(int byte_no) {
3229   getfield_or_static(byte_no, false);
3230 }
3231 
3232 void TemplateTable::nofast_getfield(int byte_no) {
3233   getfield_or_static(byte_no, false, may_not_rewrite);
3234 }
3235 
3236 void TemplateTable::getstatic(int byte_no) {
3237   getfield_or_static(byte_no, true);
3238 }
3239 
3240 void TemplateTable::withfield() {
3241   transition(vtos, atos);
3242 
3243   Register cache = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
3244   Register index = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
3245 
3246   resolve_cache_and_index(f2_byte, cache, index, sizeof(u2));
3247 
3248   Register cpentry = rbx;
3249 
3250   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3251 
3252   __ lea(cpentry, Address(cache, index, Address::times_ptr,
3253                          in_bytes(cp_base_offset)));
3254   __ lea(rax, at_tos());
3255   __ call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), cpentry, rax);
3256   // new value type is returned in rbx
3257   // stack adjustment is returned in rax
3258   __ verify_oop(rbx);
3259   __ addptr(rsp, rax);
3260   __ movptr(rax, rbx);
3261 }
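// Illustrative sketch: withfield corresponds to (hypothetical early-Valhalla
// surface syntax)
//
//   value class Point { int x; int y; }
//   Point q = __WithField(p.x, 3);   // a new Point with x replaced by 3
//
// The runtime call above returns the new value object in rbx and, in rax,
// the number of bytes to pop: the replaced field's value on the expression
// stack may occupy one or two slots depending on its type.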
3262 
3263 // The registers cache and index expected to be set before call.
3264 // The function may destroy various registers, just not the cache and index registers.
3265 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3266 
3267   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3268   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3269   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3270   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3271 
3272   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3273 
3274   if (JvmtiExport::can_post_field_modification()) {
3275     // Check to see if a field modification watch has been set before
3276     // we take the time to call into the VM.
3277     Label L1;
3278     assert_different_registers(cache, index, rax);
3279     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3280     __ testl(rax, rax);
3281     __ jcc(Assembler::zero, L1);

3337     // c_rarg1: object pointer set up above (null if static)
3338     // c_rarg2: cache entry pointer
3339     // c_rarg3: jvalue object on the stack
3340     __ call_VM(noreg,
3341                CAST_FROM_FN_PTR(address,
3342                                 InterpreterRuntime::post_field_modification),
3343                RBX, robj, RCX);
3344     __ get_cache_and_index_at_bcp(cache, index, 1);
3345     __ bind(L1);
3346   }
3347 }
3348 
3349 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3350   transition(vtos, vtos);
3351 
3352   const Register cache = rcx;
3353   const Register index = rdx;
3354   const Register obj   = rcx;
3355   const Register off   = rbx;
3356   const Register flags = rax;
3357   const Register flags2 = rdx;
3358 
3359   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3360   jvmti_post_field_mod(cache, index, is_static);
3361   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3362 
3363   // [jk] not needed currently
3364   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3365   //                                              Assembler::StoreStore));
3366 
3367   Label notVolatile, Done;
3368   __ movl(rdx, flags);
3369   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3370   __ andl(rdx, 0x1);
3371 
3372   // Check for volatile store
3373   __ testl(rdx, rdx);
3374   __ movl(flags2, flags);
3375   __ jcc(Assembler::zero, notVolatile);
3376 
3377   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags, flags2);
3378   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3379                                                Assembler::StoreStore));
3380   __ jmp(Done);
3381   __ bind(notVolatile);
3382 
3383   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags, flags2);
3384 
3385   __ bind(Done);
3386 }
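// Illustrative sketch (assumes x86-TSO): of the JMM barriers only StoreLoad
// needs a real instruction on x86, so the volatile path above is roughly
//
//   <store the field>
//   lock addl $0, (rsp)   // volatile_barrier(StoreLoad | StoreStore);
//                         // exact encoding is up to the macro assembler
//
// while the notVolatile path performs the bare store with no fence.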
3387 
3388 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3389                                               Register obj, Register off, Register flags, Register flags2) {
3390 
3391   // field addresses
3392   const Address field(obj, off, Address::times_1, 0*wordSize);
3393   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3394 
3395   Label notByte, notBool, notInt, notShort, notChar,
3396         notLong, notFloat, notObj, notInlineType;
3397   Label Done;
3398 
3399   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3400 
3401   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3402 
3403   assert(btos == 0, "change code, btos != 0");
3404   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3405   __ jcc(Assembler::notZero, notByte);
3406 
3407   // btos
3408   {
3409     __ pop(btos);
3410     if (!is_static) pop_and_check_object(obj);
3411     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3412     if (!is_static && rc == may_rewrite) {
3413       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3414     }
3415     __ jmp(Done);
3416   }

3419   __ cmpl(flags, ztos);
3420   __ jcc(Assembler::notEqual, notBool);
3421 
3422   // ztos
3423   {
3424     __ pop(ztos);
3425     if (!is_static) pop_and_check_object(obj);
3426     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3427     if (!is_static && rc == may_rewrite) {
3428       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3429     }
3430     __ jmp(Done);
3431   }
3432 
3433   __ bind(notBool);
3434   __ cmpl(flags, atos);
3435   __ jcc(Assembler::notEqual, notObj);
3436 
3437   // atos
3438   {
3439     if (!EnablePrimitiveClasses) {
3440       __ pop(atos);
3441       if (!is_static) pop_and_check_object(obj);
3442       // Store into the field
3443       do_oop_store(_masm, field, rax);
3444       if (!is_static && rc == may_rewrite) {
3445         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3446       }
3447       __ jmp(Done);
3448     } else {
3449       __ pop(atos);
3450       if (is_static) {
3451         Label is_inline_type;
3452         __ test_field_is_not_null_free_inline_type(flags2, rscratch1, is_inline_type);
3453         __ null_check(rax);
3454         __ bind(is_inline_type);
3455         do_oop_store(_masm, field, rax);
3456         __ jmp(Done);
3457       } else {
3458         Label is_inline_type, is_flat, rewrite_not_inline, rewrite_inline;
3459         __ test_field_is_null_free_inline_type(flags2, rscratch1, is_inline_type);
3460         // Not an inline type
3461         pop_and_check_object(obj);
3462         // Store into the field
3463         do_oop_store(_masm, field, rax);
3464         __ bind(rewrite_not_inline);
3465         if (rc == may_rewrite) {
3466           patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3467         }
3468         __ jmp(Done);
3469         // Implementation of the inline type semantic
3470         __ bind(is_inline_type);
3471         __ null_check(rax);
3472         __ test_field_is_flat(flags2, rscratch1, is_flat);
3473         // field is not flat
3474         pop_and_check_object(obj);
3475         // Store into the field
3476         do_oop_store(_masm, field, rax);
3477         __ jmp(rewrite_inline);
3478         __ bind(is_flat);
3479         // field is flat
3480         pop_and_check_object(obj);
3481         assert_different_registers(rax, rdx, obj, off);
3482         __ load_klass(rdx, rax, rscratch1);
3483         __ data_for_oop(rax, rax, rdx);
3484         __ addptr(obj, off);
3485         __ access_value_copy(IN_HEAP, rax, obj, rdx);
3486         __ bind(rewrite_inline);
3487         if (rc == may_rewrite) {
3488           patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3489         }
3490         __ jmp(Done);
3491       }
3492     }
3493   }
3494 
3495   __ bind(notObj);
3496   __ cmpl(flags, itos);
3497   __ jcc(Assembler::notEqual, notInt);
3498 
3499   // itos
3500   {
3501     __ pop(itos);
3502     if (!is_static) pop_and_check_object(obj);
3503     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3504     if (!is_static && rc == may_rewrite) {
3505       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3506     }
3507     __ jmp(Done);
3508   }
3509 
3510   __ bind(notInt);
3511   __ cmpl(flags, ctos);
3512   __ jcc(Assembler::notEqual, notChar);

3611 }
3612 
3613 void TemplateTable::jvmti_post_fast_field_mod() {
3614 
3615   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3616 
3617   if (JvmtiExport::can_post_field_modification()) {
3618     // Check to see if a field modification watch has been set before
3619     // we take the time to call into the VM.
3620     Label L2;
3621     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3622     __ testl(scratch, scratch);
3623     __ jcc(Assembler::zero, L2);
3624     __ pop_ptr(rbx);                  // copy the object pointer from tos
3625     __ verify_oop(rbx);
3626     __ push_ptr(rbx);                 // put the object pointer back on tos
3627     // Save tos values before call_VM() clobbers them. Since we have
3628     // to do it for every data type, we use the saved values as the
3629     // jvalue object.
3630     switch (bytecode()) {          // load values into the jvalue object
3631     case Bytecodes::_fast_qputfield: // fall through
3632     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3633     case Bytecodes::_fast_bputfield: // fall through
3634     case Bytecodes::_fast_zputfield: // fall through
3635     case Bytecodes::_fast_sputfield: // fall through
3636     case Bytecodes::_fast_cputfield: // fall through
3637     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3638     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3639     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3640     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3641 
3642     default:
3643       ShouldNotReachHere();
3644     }
3645     __ mov(scratch, rsp);             // points to jvalue on the stack
3646     // access constant pool cache entry
3647     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3648     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3649     __ verify_oop(rbx);
3650     // rbx: object pointer copied above
3651     // c_rarg2: cache entry pointer
3652     // c_rarg3: jvalue object on the stack
3653     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3654     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3655 
3656     switch (bytecode()) {             // restore tos values
3657     case Bytecodes::_fast_qputfield: // fall through
3658     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3659     case Bytecodes::_fast_bputfield: // fall through
3660     case Bytecodes::_fast_zputfield: // fall through
3661     case Bytecodes::_fast_sputfield: // fall through
3662     case Bytecodes::_fast_cputfield: // fall through
3663     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3664     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3665     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3666     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3667     default: break;
3668     }
3669     __ bind(L2);
3670   }
3671 }
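// Illustrative note: the push/pop pairs above line the saved tos value up
// with the JNI jvalue union that post_field_modification reads (real type
// from jni.h):
//
//   typedef union jvalue {
//     jboolean z; jbyte b; jchar c; jshort s;
//     jint i; jlong j; jfloat f; jdouble d; jobject l;
//   } jvalue;
//
// so passing rsp as the jvalue pointer needs no further marshalling.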
3672 
3673 void TemplateTable::fast_storefield(TosState state) {
3674   transition(state, vtos);
3675 
3676   ByteSize base = ConstantPoolCache::base_offset();
3677 
3678   jvmti_post_fast_field_mod();
3679 
3680   // access constant pool cache
3681   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3682 
3683   // test for volatile with rdx; note that rdx is the tos register for lputfield.
3684   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3685                        in_bytes(base +
3686                                 ConstantPoolCacheEntry::flags_offset())));
3687 
3688   // replace index with field offset from cache entry
3689   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3690                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3691 
3692   // [jk] not needed currently
3693   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3694   //                                              Assembler::StoreStore));
3695 
3696   Label notVolatile, Done;
3697   if (bytecode() == Bytecodes::_fast_qputfield) {
3698     __ movl(rscratch2, rdx);  // saving flags for is_flat test
3699   }
3700 
3701   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3702   __ andl(rdx, 0x1);
3703 
3704   // Get object from stack
3705   pop_and_check_object(rcx);
3706 
3707   // field address
3708   const Address field(rcx, rbx, Address::times_1);
3709 
3710   // Check for volatile store
3711   __ testl(rdx, rdx);
3712   __ jcc(Assembler::zero, notVolatile);
3713 
3714   if (bytecode() == Bytecodes::_fast_qputfield) {
3715     __ movl(rdx, rscratch2);  // restoring flags for is_flat test
3716   }
3717   fast_storefield_helper(field, rax, rdx);
3718   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3719                                                Assembler::StoreStore));
3720   __ jmp(Done);
3721   __ bind(notVolatile);
3722 
3723   if (bytecode() == Bytecodes::_fast_qputfield) {
3724     __ movl(rdx, rscratch2);  // restoring flags for is_flat test
3725   }
3726   fast_storefield_helper(field, rax, rdx);
3727 
3728   __ bind(Done);
3729 }
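// Illustrative sketch of the flat case handled in the helper below: a flat
// field embeds the value's payload directly in the holder, so instead of an
// oop store the interpreter copies raw field data, roughly
//
//   src = data_for_oop(value)      // payload inside the boxed value
//   dst = &holder + field_offset   // payload slot inside the holder
//   access_value_copy(IN_HEAP, src, dst, value_klass)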
3730 
3731 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3732 
3733   // access field
3734   switch (bytecode()) {
3735   case Bytecodes::_fast_qputfield:
3736     {
3737       Label is_flat, done;
3738       __ null_check(rax);
3739       __ test_field_is_flat(flags, rscratch1, is_flat);
3740       // field is not flat
3741       do_oop_store(_masm, field, rax);
3742       __ jmp(done);
3743       __ bind(is_flat);
3744       // field is flat
3745       __ load_klass(rdx, rax, rscratch1);
3746       __ data_for_oop(rax, rax, rdx);
3747       __ lea(rcx, field);
3748       __ access_value_copy(IN_HEAP, rax, rcx, rdx);
3749       __ bind(done);
3750     }
3751     break;
3752   case Bytecodes::_fast_aputfield:
3753     {
3754       do_oop_store(_masm, field, rax);
3755     }
3756     break;
3757   case Bytecodes::_fast_lputfield:
3758 #ifdef _LP64
3759     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3760 #else
3761     __ stop("should not be rewritten");
3762 #endif
3763     break;
3764   case Bytecodes::_fast_iputfield:
3765     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3766     break;
3767   case Bytecodes::_fast_zputfield:
3768     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3769     break;
3770   case Bytecodes::_fast_bputfield:
3771     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3772     break;
3773   case Bytecodes::_fast_sputfield:
3774     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3775     break;

3805     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3806     LP64_ONLY(__ mov(c_rarg1, rax));
3807     // c_rarg1: object pointer copied above
3808     // c_rarg2: cache entry pointer
3809     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3810     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3811     __ pop_ptr(rax); // restore object pointer
3812     __ bind(L1);
3813   }
3814 
3815   // access constant pool cache
3816   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3817   // replace index with field offset from cache entry
3818   // [jk] not needed currently
3819   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3820   //                      in_bytes(ConstantPoolCache::base_offset() +
3821   //                               ConstantPoolCacheEntry::flags_offset())));
3822   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3823   // __ andl(rdx, 0x1);
3824   //
3825   __ movptr(rdx, Address(rcx, rbx, Address::times_ptr,
3826                          in_bytes(ConstantPoolCache::base_offset() +
3827                                   ConstantPoolCacheEntry::f2_offset())));
3828 
3829   // rax: object
3830   __ verify_oop(rax);
3831   __ null_check(rax);
3832   Address field(rax, rdx, Address::times_1);
3833 
3834   // access field
3835   switch (bytecode()) {
3836   case Bytecodes::_fast_qgetfield:
3837     {
3838       Label is_flat, nonnull, Done;
3839       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3840                                    in_bytes(ConstantPoolCache::base_offset() +
3841                                             ConstantPoolCacheEntry::flags_offset())));
3842       __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3843         // field is not flat
3844         __ load_heap_oop(rax, field);
3845         __ testptr(rax, rax);
3846         __ jcc(Assembler::notZero, nonnull);
3847           __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3848                              in_bytes(ConstantPoolCache::base_offset() +
3849                                       ConstantPoolCacheEntry::flags_offset())));
3850           __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3851           __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3852                                        in_bytes(ConstantPoolCache::base_offset() +
3853                                                 ConstantPoolCacheEntry::f1_offset())));
3854           __ get_inline_type_field_klass(rcx, rdx, rbx);
3855           __ get_default_value_oop(rbx, rcx, rax);
3856         __ bind(nonnull);
3857         __ verify_oop(rax);
3858         __ jmp(Done);
3859       __ bind(is_flat);
3860       // field is flat
3861         __ push(rdx); // save offset
3862         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3863                            in_bytes(ConstantPoolCache::base_offset() +
3864                                     ConstantPoolCacheEntry::flags_offset())));
3865         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3866         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3867                                      in_bytes(ConstantPoolCache::base_offset() +
3868                                               ConstantPoolCacheEntry::f1_offset())));
3869         __ pop(rbx); // restore offset
3870         __ read_flat_field(rcx, rdx, rbx, rax);
3871       __ bind(Done);
3872       __ verify_oop(rax);
3873     }
3874     break;
3875   case Bytecodes::_fast_agetfield:
3876     do_oop_load(_masm, field, rax);
3877     __ verify_oop(rax);
3878     break;
3879   case Bytecodes::_fast_lgetfield:
3880 #ifdef _LP64
3881     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3882 #else
3883     __ stop("should not be rewritten");
3884 #endif
3885     break;
3886   case Bytecodes::_fast_igetfield:
3887     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3888     break;
3889   case Bytecodes::_fast_bgetfield:
3890     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3891     break;
3892   case Bytecodes::_fast_sgetfield:
3893     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3894     break;

4320 
4321   // Note:  rax_callsite is already pushed by prepare_invoke
4322 
4323   // %%% should make a type profile for any invokedynamic that takes a ref argument
4324   // profile this call
4325   __ profile_call(rbcp);
4326   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4327 
4328   __ verify_oop(rax_callsite);
4329 
4330   __ jump_from_interpreted(rbx_method, rdx);
4331 }
4332 
4333 //-----------------------------------------------------------------------------
4334 // Allocation
4335 
4336 void TemplateTable::_new() {
4337   transition(vtos, atos);
4338   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4339   Label slow_case;
4340   Label done;
4341   Label is_not_value;
4342 
4343   __ get_cpool_and_tags(rcx, rax);
4344 
4345   // Make sure the class we're about to instantiate has been resolved.
4346   // This is done before loading InstanceKlass to be consistent with the order
4347   // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
4348   const int tags_offset = Array<u1>::base_offset_in_bytes();
4349   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4350   __ jcc(Assembler::notEqual, slow_case);
4351 
4352   // get InstanceKlass
4353   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4354 
4355   __ cmpb(Address(rcx, InstanceKlass::kind_offset()), Klass::InlineKlassKind);
4356   __ jcc(Assembler::notEqual, is_not_value);
4357 
4358   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_InstantiationError));
4359 
4360   __ bind(is_not_value);
4361 
4362   // make sure klass is initialized & doesn't have finalizer
4363   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4364   __ jcc(Assembler::notEqual, slow_case);
4365 
4366   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
4367   __ jmp(done);
4368 
4369   // slow case
4370   __ bind(slow_case);
4371 
4372   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4373   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4374 
4375   __ get_constant_pool(rarg1);
4376   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4377   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4378   __ verify_oop(rax);
4379 
4380   // continue
4381   __ bind(done);
4382 }
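// Illustrative sketch: the fast path above runs only once the class is
// resolved (its constant-pool tag is JVM_CONSTANT_Class) and fully
// initialized; for a hypothetical
//
//   Foo f = new Foo();
//
// the first execution typically goes through InterpreterRuntime::_new
// (resolution plus <clinit>), and later executions allocate inline via
// allocate_instance.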
4383 
4384 void TemplateTable::aconst_init() {
4385   transition(vtos, atos);
4386 
4387   Label slow_case;
4388   Label done;
4389   Label is_value;
4390 
4391   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4392   __ get_cpool_and_tags(rcx, rax);
4393 
4394   // Make sure the class we're about to instantiate has been resolved.
4395   // This is done before loading InstanceKlass to be consistent with the order
4396   // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
4397   const int tags_offset = Array<u1>::base_offset_in_bytes();
4398   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4399   __ jcc(Assembler::notEqual, slow_case);
4400 
4401   // get InstanceKlass
4402   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4403 
4404   __ cmpb(Address(rcx, InstanceKlass::kind_offset()), Klass::InlineKlassKind);
4405   __ jcc(Assembler::equal, is_value);
4406 
4407   // in the future, aconst_init will just return null instead of throwing an exception
4408   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
4409 
4410   __ bind(is_value);
4411 
4412   // make sure klass is fully initialized
4413   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4414   __ jcc(Assembler::notEqual, slow_case);
4415 
4416   // have a resolved InlineKlass in rcx, return the default value oop from it
4417   __ get_default_value_oop(rcx, rdx, rax);
4418   __ jmp(done);
4419 
4420   __ bind(slow_case);
4421 
4422   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4423   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4424 
4425   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4426   __ get_constant_pool(rarg1);
4427 
4428   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::aconst_init),
4429       rarg1, rarg2);
4430 
4431   __ bind(done);
4432   __ verify_oop(rax);
4433 }
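// Illustrative sketch: aconst_init pushes the default (all-fields-zero)
// instance of an inline klass; for a hypothetical
//
//   value class Point { int x; int y; }
//
// it yields the canonical Point with x == 0 and y == 0, taken from the
// resolved InlineKlass's pre-built default value oop.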
4434 
4435 void TemplateTable::newarray() {
4436   transition(itos, atos);
4437   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4438   __ load_unsigned_byte(rarg1, at_bcp(1));
4439   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4440           rarg1, rax);
4441 }
4442 
4443 void TemplateTable::anewarray() {
4444   transition(itos, atos);
4445 
4446   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4447   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4448 
4449   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4450   __ get_constant_pool(rarg1);
4451   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4452           rarg1, rarg2, rax);
4453 }
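// Illustrative note: newarray takes a one-byte primitive type code operand
// while anewarray takes a two-byte constant-pool index naming the element
// class, which is why the operand loads differ above
// (load_unsigned_byte vs. get_unsigned_2_byte_index_at_bcp).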
4454 
4455 void TemplateTable::arraylength() {
4456   transition(atos, itos);
4457   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4458 }
4459 
4460 void TemplateTable::checkcast() {
4461   transition(atos, atos);
4462   Label done, is_null, ok_is_subtype, quicked, resolved;
4463   __ testptr(rax, rax); // object is in rax
4464   __ jcc(Assembler::zero, is_null);
4465 
4466   // Get cpool & tags index
4467   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4468   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4469   // See if bytecode has already been quicked
4470   __ movzbl(rdx, Address(rdx, rbx,
4471       Address::times_1,
4472       Array<u1>::base_offset_in_bytes()));
4473   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
4474   __ cmpl(rdx, JVM_CONSTANT_Class);
4475   __ jcc(Assembler::equal, quicked);
4476   __ push(atos); // save receiver for result, and for GC
4477   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4478 
4479   // vm_result_2 has metadata result
4480 #ifndef _LP64
4481   // borrow rdi from locals
4482   __ get_thread(rdi);
4483   __ get_vm_result_2(rax, rdi);
4484   __ restore_locals();
4485 #else
4486   __ get_vm_result_2(rax, r15_thread);
4487 #endif
4488 
4489   __ pop_ptr(rdx); // restore receiver
4490   __ jmpb(resolved);
4491 
4492   // Get superklass in rax and subklass in rbx
4493   __ bind(quicked);
4494   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4495   __ load_resolved_klass_at_index(rax, rcx, rbx);
4496 
4497   __ bind(resolved);
4498   __ load_klass(rbx, rdx, rscratch1);
4499 
4500   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4501   // Superklass in rax.  Subklass in rbx.
4502   __ gen_subtype_check(rbx, ok_is_subtype);
4503 
4504   // Come here on failure
4505   __ push_ptr(rdx);
4506   // object is at TOS
4507   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4508 
4509   // Come here on success
4510   __ bind(ok_is_subtype);
4511   __ mov(rax, rdx); // Restore the object from rdx
4512   __ jmp(done);
4513 
4514   __ bind(is_null);
4515 
4516   // Collect counts on whether this check-cast sees nulls a lot or not.
4517   if (ProfileInterpreter) {
4518     __ profile_null_seen(rcx);
4519   }
4520 
4521   if (EnablePrimitiveClasses) {
4522     // Get cpool & tags index
4523     __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4524     __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4525     // See if CP entry is a Q-descriptor
4526     __ movzbl(rcx, Address(rdx, rbx,
4527         Address::times_1,
4528         Array<u1>::base_offset_in_bytes()));
4529     __ andl(rcx, JVM_CONSTANT_QDescBit);
4530     __ cmpl(rcx, JVM_CONSTANT_QDescBit);
4531     __ jcc(Assembler::notEqual, done);
4532     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
4533   }
4534 
4535   __ bind(done);
4536 }
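// Illustrative sketch (hypothetical Java-level view of the Q-descriptor
// check above): a null reference passes an ordinary checkcast but must be
// rejected by a null-free (Q-typed) target, e.g.
//
//   Object o = null;
//   Point.ref r = (Point.ref) o;   // plain checkcast: null passes
//   Point     p = (Point) o;       // Q-descriptor: NullPointerException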
4537 
4538 void TemplateTable::instanceof() {
4539   transition(atos, itos);
4540   Label done, is_null, ok_is_subtype, quicked, resolved;
4541   __ testptr(rax, rax);
4542   __ jcc(Assembler::zero, is_null);
4543 
4544   // Get cpool & tags index
4545   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4546   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4547   // See if bytecode has already been quicked
4548   __ movzbl(rdx, Address(rdx, rbx,
4549         Address::times_1,
4550         Array<u1>::base_offset_in_bytes()));
4551   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
4552   __ cmpl(rdx, JVM_CONSTANT_Class);
4553   __ jcc(Assembler::equal, quicked);
4554 
4555   __ push(atos); // save receiver for result, and for GC
4556   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4557   // vm_result_2 has metadata result
4558 
4559 #ifndef _LP64
4560   // borrow rdi from locals
4561   __ get_thread(rdi);
4562   __ get_vm_result_2(rax, rdi);
4563   __ restore_locals();
4564 #else
4565   __ get_vm_result_2(rax, r15_thread);
4566 #endif
4567 
4568   __ pop_ptr(rdx); // restore receiver
4569   __ verify_oop(rdx);
4570   __ load_klass(rdx, rdx, rscratch1);
4571   __ jmpb(resolved);
4572 

4584   // Come here on failure
4585   __ xorl(rax, rax);
4586   __ jmpb(done);
4587   // Come here on success
4588   __ bind(ok_is_subtype);
4589   __ movl(rax, 1);
4590 
4591   // Collect counts on whether this test sees nulls a lot or not.
4592   if (ProfileInterpreter) {
4593     __ jmp(done);
4594     __ bind(is_null);
4595     __ profile_null_seen(rcx);
4596   } else {
4597     __ bind(is_null);   // same as 'done'
4598   }
4599   __ bind(done);
4600   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4601   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4602 }
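// Illustrative summary of the contract above:
//
//   null          instanceof T  -> 0   (counted by profile_null_seen)
//   new T()       instanceof T  -> 1
//   unrelated obj instanceof T  -> 0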
4603 
4604 //----------------------------------------------------------------------------------------------------
4605 // Breakpoints
4606 void TemplateTable::_breakpoint() {
4607   // Note: We get here even if we are single stepping;
4608   // jbug insists on setting breakpoints at every bytecode
4609   // even if we are in single step mode.
4610 
4611   transition(vtos, vtos);
4612 
4613   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4614 
4615   // get the unpatched byte code
4616   __ get_method(rarg);
4617   __ call_VM(noreg,
4618              CAST_FROM_FN_PTR(address,
4619                               InterpreterRuntime::get_original_bytecode_at),
4620              rarg, rbcp);
4621   __ mov(rbx, rax);  // preserve the original bytecode in rbx across the call_VM below
4622 
4623   // post the breakpoint event

4645 // Note: monitorenter & exit are symmetric routines; which is reflected
4646 //       in the assembly code structure as well
4647 //
4648 // Stack layout:
4649 //
4650 // [expressions  ] <--- rsp               = expression stack top
4651 // ..
4652 // [expressions  ]
4653 // [monitor entry] <--- monitor block top = expression stack bot
4654 // ..
4655 // [monitor entry]
4656 // [frame data   ] <--- monitor block bot
4657 // ...
4658 // [saved rbp    ] <--- rbp
4659 void TemplateTable::monitorenter() {
4660   transition(atos, vtos);
4661 
4662   // check for null object
4663   __ null_check(rax);
4664 
4665   Label is_inline_type;
4666   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4667   __ test_markword_is_inline_type(rbx, is_inline_type);
4668 
4669   const Address monitor_block_top(
4670         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4671   const Address monitor_block_bot(
4672         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4673   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4674 
4675   Label allocated;
4676 
4677   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4678   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4679   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4680 
4681   // initialize entry pointer
4682   __ xorl(rmon, rmon); // points to free slot or null
4683 
4684   // find a free slot in the monitor block (result in rmon)
4685   {
4686     Label entry, loop, exit;
4687     __ movptr(rtop, monitor_block_top); // points to current entry,
4688                                         // starting with top-most entry

4738   // rmon: points to monitor entry
4739   __ bind(allocated);
4740 
4741   // Increment bcp to point to the next bytecode, so exception
4742   // handling for async. exceptions works correctly.
4743   // The object has already been popped from the stack, so the
4744   // expression stack looks correct.
4745   __ increment(rbcp);
4746 
4747   // store object
4748   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4749   __ lock_object(rmon);
4750 
4751   // check to make sure this monitor doesn't cause stack overflow after locking
4752   __ save_bcp();  // in case of exception
4753   __ generate_stack_overflow_check(0);
4754 
4755   // The bcp has already been incremented. Just need to dispatch to
4756   // next instruction.
4757   __ dispatch_next(vtos);
4758 
4759   __ bind(is_inline_type);
4760   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4761                     InterpreterRuntime::throw_illegal_monitor_state_exception));
4762   __ should_not_reach_here();
4763 }
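// Illustrative sketch: inline types have no identity and therefore no
// monitor; in hypothetical Java terms
//
//   value class Point { int x; int y; }
//   synchronized (new Point(1, 2)) { }   // IllegalMonitorStateException
//
// which is why monitorenter above (and monitorexit below) test the mark
// word's inline_type_pattern before touching the monitor block.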
4764 
4765 void TemplateTable::monitorexit() {
4766   transition(atos, vtos);
4767 
4768   // check for null object
4769   __ null_check(rax);
4770 
4771   const int is_inline_type_mask = markWord::inline_type_pattern;
4772   Label has_identity;
4773   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4774   __ andptr(rbx, is_inline_type_mask);
4775   __ cmpl(rbx, is_inline_type_mask);
4776   __ jcc(Assembler::notEqual, has_identity);
4777   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4778                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4779   __ should_not_reach_here();
4780   __ bind(has_identity);
4781 
4782   const Address monitor_block_top(
4783         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4784   const Address monitor_block_bot(
4785         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4786   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4787 
4788   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4789   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4790 
4791   Label found;
4792 
4793   // find matching slot
4794   {
4795     Label entry, loop;
4796     __ movptr(rtop, monitor_block_top); // points to current entry,
4797                                         // starting with top-most entry
4798     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4799                                         // of monitor block
4800     __ jmpb(entry);
4801 