src/hotspot/cpu/x86/templateTable_x86.cpp

 137   case TemplateTable::less_equal   : return Assembler::greater;
 138   case TemplateTable::greater      : return Assembler::lessEqual;
 139   case TemplateTable::greater_equal: return Assembler::less;
 140   }
 141   ShouldNotReachHere();
 142   return Assembler::zero;
 143 }
 144 
 145 
 146 
 147 // Miscellaneous helper routines
 148 // Store an oop (or NULL) at the address described by dst.
 149 // If val == noreg this means store a NULL
 150 
 151 
 152 static void do_oop_store(InterpreterMacroAssembler* _masm,
 153                          Address dst,
 154                          Register val,
 155                          DecoratorSet decorators = 0) {
 156   assert(val == noreg || val == rax, "parameter is just for looks");
 157   __ store_heap_oop(dst, val, rdx, rbx, decorators);
 158 }
 159 
 160 static void do_oop_load(InterpreterMacroAssembler* _masm,
 161                         Address src,
 162                         Register dst,
 163                         DecoratorSet decorators = 0) {
 164   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 165 }
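
These two helpers funnel every interpreter oop access through the GC barrier interface; the DecoratorSet argument (defaulting to 0) lets a call site add hints such as IS_ARRAY that the barrier code interprets. A minimal sketch of the decorator idea, using made-up bit values rather than HotSpot's real definitions:

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins; HotSpot defines the real DecoratorSet bits elsewhere.
    using DecoratorSet = std::uint64_t;
    const DecoratorSet IN_HEAP  = 1u << 0;
    const DecoratorSet IS_ARRAY = 1u << 1;

    static void store_oop(DecoratorSet decorators = 0) {
      // A barrier implementation branches on the bits it understands.
      if (decorators & IS_ARRAY) std::printf("array store barrier path\n");
      else                       std::printf("field store barrier path\n");
    }

    int main() {
      store_oop();                    // plain field store, as in putfield
      store_oop(IN_HEAP | IS_ARRAY);  // array element store, as in aastore
    }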
 166 
 167 Address TemplateTable::at_bcp(int offset) {
 168   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 169   return Address(rbcp, offset);
 170 }
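
at_bcp() builds operands relative to rbcp, the register holding the current bytecode pointer; multi-byte bytecode operands are stored big-endian in the class file. A sketch, in plain C++ rather than the macro assembler, of the fetch that get_unsigned_2_byte_index_at_bcp performs:

    #include <cassert>
    #include <cstdint>

    // Read the unsigned 16-bit operand at bcp[offset]; bytecode operands
    // are big-endian.
    static std::uint16_t u2_at_bcp(const std::uint8_t* bcp, int offset) {
      return static_cast<std::uint16_t>((bcp[offset] << 8) | bcp[offset + 1]);
    }

    int main() {
      const std::uint8_t code[] = { 0x13 /* ldc_w */, 0x01, 0x2A };
      assert(u2_at_bcp(code, 1) == 0x012A);  // constant pool index 298
    }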
 171 
 172 
 173 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 174                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 175                                    int byte_no) {
 176   if (!RewriteBytecodes)  return;
 177   Label L_patch_done;
 178 
 179   switch (bc) {

 180   case Bytecodes::_fast_aputfield:
 181   case Bytecodes::_fast_bputfield:
 182   case Bytecodes::_fast_zputfield:
 183   case Bytecodes::_fast_cputfield:
 184   case Bytecodes::_fast_dputfield:
 185   case Bytecodes::_fast_fputfield:
 186   case Bytecodes::_fast_iputfield:
 187   case Bytecodes::_fast_lputfield:
 188   case Bytecodes::_fast_sputfield:
 189     {
 190       // We skip bytecode quickening for putfield instructions when
 191       // the put_code written to the constant pool cache is zero.
 192       // This is required so that every execution of this instruction
 193       // calls out to InterpreterRuntime::resolve_get_put to do
 194       // additional, required work.
 195       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 196       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 197       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 198       __ movl(bc_reg, bc);
 199       __ cmpl(temp_reg, (int) 0);


 352   __ sarl(rax, 16);
 353 }
 354 
 355 void TemplateTable::ldc(bool wide) {
 356   transition(vtos, vtos);
 357   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 358   Label call_ldc, notFloat, notClass, notInt, Done;
 359 
 360   if (wide) {
 361     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 362   } else {
 363     __ load_unsigned_byte(rbx, at_bcp(1));
 364   }
 365 
 366   __ get_cpool_and_tags(rcx, rax);
 367   const int base_offset = ConstantPool::header_size() * wordSize;
 368   const int tags_offset = Array<u1>::base_offset_in_bytes();
 369 
 370   // get type
 371   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));

 372 
 373   // unresolved class - get the resolved class
 374   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 375   __ jccb(Assembler::equal, call_ldc);
 376 
 377   // unresolved class in error state - call into runtime to throw the error
 378   // from the first resolution attempt
 379   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 380   __ jccb(Assembler::equal, call_ldc);
 381 
 382   // resolved class - need to call vm to get java mirror of the class
 383   __ cmpl(rdx, JVM_CONSTANT_Class);
 384   __ jcc(Assembler::notEqual, notClass);
 385 
 386   __ bind(call_ldc);
 387 
 388   __ movl(rarg, wide);
 389   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 390 
 391   __ push(atos);


 802                     Address(rdx, rax,
 803                             Address::times_4,
 804                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 805                     noreg, noreg);
 806 }
 807 
 808 void TemplateTable::daload() {
 809   transition(itos, dtos);
 810   // rax: index
 811   // rdx: array
 812   index_check(rdx, rax); // kills rbx
 813   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 814                     Address(rdx, rax,
 815                             Address::times_8,
 816                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 817                     noreg, noreg);
 818 }
 819 
 820 void TemplateTable::aaload() {
 821   transition(itos, atos);
 822   // rax: index
 823   // rdx: array
 824   index_check(rdx, rax); // kills rbx
 825   do_oop_load(_masm,
 826               Address(rdx, rax,
 827                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 828                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 829               rax,
 830               IS_ARRAY);
 831 }
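
aaload scales the index by times_4 under compressed oops and times_ptr otherwise, because an object-array element is either a 4-byte narrow oop or a full-width pointer. A sketch of the effective-address arithmetic behind that Address expression (the base offset value is illustrative):

    #include <cstddef>
    #include <cstdio>

    // Model of Address(rdx, rax, scale, base): offset = base + index * scale.
    static std::size_t element_offset(bool use_compressed_oops, std::size_t index) {
      const std::size_t base  = 16;  // stand-in for arrayOopDesc::base_offset_in_bytes(T_OBJECT)
      const std::size_t scale = use_compressed_oops ? 4 : sizeof(void*);
      return base + index * scale;
    }

    int main() {
      std::printf("compressed:   a[3] at +%zu bytes\n", element_offset(true, 3));
      std::printf("uncompressed: a[3] at +%zu bytes\n", element_offset(false, 3));
    }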
 832 
 833 void TemplateTable::baload() {
 834   transition(itos, itos);
 835   // rax: index
 836   // rdx: array
 837   index_check(rdx, rax); // kills rbx
 838   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 839                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 840                     noreg, noreg);
 841 }
 842 
 843 void TemplateTable::caload() {
 844   transition(itos, itos);
 845   // rax: index
 846   // rdx: array
 847   index_check(rdx, rax); // kills rbx
 848   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 849                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 850                     noreg, noreg);


1096   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1097                      Address(rdx, rbx, Address::times_4,
1098                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1099                      noreg /* ftos */, noreg, noreg);
1100 }
1101 
1102 void TemplateTable::dastore() {
1103   transition(dtos, vtos);
1104   __ pop_i(rbx);
1105   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1106   // rbx:  index
1107   // rdx:  array
1108   index_check(rdx, rbx); // prefer index in rbx
1109   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1110                      Address(rdx, rbx, Address::times_8,
1111                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1112                      noreg /* dtos */, noreg, noreg);
1113 }
1114 
1115 void TemplateTable::aastore() {
1116   Label is_null, ok_is_subtype, done;
1117   transition(vtos, vtos);
1118   // stack: ..., array, index, value
1119   __ movptr(rax, at_tos());    // value
1120   __ movl(rcx, at_tos_p1()); // index
1121   __ movptr(rdx, at_tos_p2()); // array
1122 
1123   Address element_address(rdx, rcx,
1124                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1125                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1126 
1127   index_check_without_pop(rdx, rcx);     // kills rbx

1128   __ testptr(rax, rax);
1129   __ jcc(Assembler::zero, is_null);
1130 
1131   // Move subklass into rbx
1132   __ load_klass(rbx, rax);
1133   // Move superklass into rax
1134   __ load_klass(rax, rdx);
1135   __ movptr(rax, Address(rax,
1136                          ObjArrayKlass::element_klass_offset()));
1137 
1138   // Generate subtype check.  Blows rcx, rdi
1139   // Superklass in rax.  Subklass in rbx.

1140   __ gen_subtype_check(rbx, ok_is_subtype);
1141 
1142   // Come here on failure
1143   // object is at TOS
1144   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1145 
1146   // Come here on success
1147   __ bind(ok_is_subtype);
1148 
1149   // Get the value we will store
1150   __ movptr(rax, at_tos());
1151   __ movl(rcx, at_tos_p1()); // index
1152   // Now store using the appropriate barrier
1153   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1154   __ jmp(done);
1155 
1156   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1157   __ bind(is_null);
1158   __ profile_null_seen(rbx);
1159 
1160   // Store a NULL
1161   do_oop_store(_masm, element_address, noreg, IS_ARRAY);

1162 
1163   // Pop stack arguments
1164   __ bind(done);
1165   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1166 }
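
The control flow above follows the bytecode's specified semantics: a null value stores without any type check, while a non-null value must be a subtype of the array's element klass or ArrayStoreException is thrown. A hedged C++ restatement of that decision tree (flat single-inheritance model, not the real Klass layout):

    #include <stdexcept>

    struct Klass    { const Klass* super = nullptr; };
    struct Obj      { const Klass* klass; };
    struct ObjArray { const Klass* element_klass; Obj** data; };

    static bool is_subtype(const Klass* sub, const Klass* sup) {
      for (const Klass* k = sub; k != nullptr; k = k->super)
        if (k == sup) return true;
      return false;
    }

    // What aastore does once the index check has passed.
    static void aastore(ObjArray* a, int i, Obj* v) {
      if (v == nullptr) { a->data[i] = nullptr; return; }  // nulls always store
      if (!is_subtype(v->klass, a->element_klass))
        throw std::runtime_error("ArrayStoreException");
      a->data[i] = v;  // the real store goes through do_oop_store for the GC barrier
    }

    int main() {
      Klass object_k, string_k{&object_k};
      Obj s{&string_k};
      Obj* slot[1] = {};
      ObjArray arr{&object_k, slot};
      aastore(&arr, 0, &s);       // ok: subtype of element klass
      aastore(&arr, 0, nullptr);  // ok: null always stores
    }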
1167 
1168 void TemplateTable::bastore() {
1169   transition(itos, vtos);
1170   __ pop_i(rbx);
1171   // rax: value
1172   // rbx: index
1173   // rdx: array
1174   index_check(rdx, rbx); // prefer index in rbx
1175   // Need to check whether array is boolean or byte
1176   // since both types share the bastore bytecode.
1177   __ load_klass(rcx, rdx);
1178   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1179   int diffbit = Klass::layout_helper_boolean_diffbit();
1180   __ testl(rcx, diffbit);
1181   Label L_skip;
1182   __ jccb(Assembler::zero, L_skip);
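
bastore serves both byte[] and boolean[], and the layout-helper diffbit is how the two are told apart: when the bit is set (boolean array), the value is masked down to its lowest bit before the store; the zero branch to L_skip bypasses the mask for byte arrays. A sketch of that rule, with an invented diffbit position:

    #include <cassert>
    #include <cstdint>

    // Stand-in for Klass::layout_helper_boolean_diffbit(): the one layout_helper
    // bit that differs between the boolean[] and byte[] layouts.
    const std::uint32_t kBooleanDiffbit = 1u << 25;  // made-up position

    static std::int8_t bastore_value(std::uint32_t layout_helper, int value) {
      if (layout_helper & kBooleanDiffbit)
        value &= 1;  // JVMS: boolean arrays keep only the lowest bit
      return static_cast<std::int8_t>(value);
    }

    int main() {
      assert(bastore_value(kBooleanDiffbit, 0x0102) == 0);  // boolean[]: & 1
      assert(bastore_value(0, 0x7F) == 0x7F);               // byte[]: plain truncation
    }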


2388   __ jcc(j_not(cc), not_taken);
2389   branch(false, false);
2390   __ bind(not_taken);
2391   __ profile_not_taken_branch(rax);
2392 }
2393 
2394 void TemplateTable::if_nullcmp(Condition cc) {
2395   transition(atos, vtos);
2396   // assume branch is more often taken than not (loops use backward branches)
2397   Label not_taken;
2398   __ testptr(rax, rax);
2399   __ jcc(j_not(cc), not_taken);
2400   branch(false, false);
2401   __ bind(not_taken);
2402   __ profile_not_taken_branch(rax);
2403 }
2404 
2405 void TemplateTable::if_acmp(Condition cc) {
2406   transition(atos, vtos);
2407   // assume branch is more often taken than not (loops use backward branches)
2408   Label not_taken;
2409   __ pop_ptr(rdx);
2410   __ cmpoop(rdx, rax);
2411   __ jcc(j_not(cc), not_taken);

2412   branch(false, false);
2413   __ bind(not_taken);
2414   __ profile_not_taken_branch(rax);
2415 }
2416 
2417 void TemplateTable::ret() {
2418   transition(vtos, vtos);
2419   locals_index(rbx);
2420   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2421   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2422   __ profile_ret(rbx, rcx);
2423   __ get_method(rax);
2424   __ movptr(rbcp, Address(rax, Method::const_offset()));
2425   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2426                       ConstMethod::codes_offset()));
2427   __ dispatch_next(vtos, 0, true);
2428 }
2429 
2430 void TemplateTable::wide_ret() {
2431   transition(vtos, vtos);
2432   locals_index_wide(rbx);
2433   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2434   __ profile_ret(rbx, rcx);
2435   __ get_method(rax);
2436   __ movptr(rbcp, Address(rax, Method::const_offset()));


2662     __ testb(Address(r15_thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2663 #else
2664     const Register thread = rdi;
2665     __ get_thread(thread);
2666     __ testb(Address(thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2667 #endif
2668     __ jcc(Assembler::zero, no_safepoint);
2669     __ push(state);
2670     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2671                                     InterpreterRuntime::at_safepoint));
2672     __ pop(state);
2673     __ bind(no_safepoint);
2674   }
2675 
2676   // Narrow result if state is itos but result type is smaller.
2677   // Need to narrow in the return bytecode rather than in generate_return_entry
2678   // since compiled code callers expect the result to already be narrowed.
2679   if (state == itos) {
2680     __ narrow(rax);
2681   }
2682   __ remove_activation(state, rbcp);

2683 
2684   __ jmp(rbcp);
2685 }
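
The narrowing step matters because a method declared to return boolean, byte, char, or short still produces its result in rax as an int; compiled callers assume the high bits already conform. A plain C++ model of what narrow(rax) does per declared type (the real dispatch lives in the interpreter macro assembler's narrow()):

    #include <cassert>
    #include <cstdint>

    enum ReturnType { ret_boolean, ret_byte, ret_char, ret_short, ret_int };

    static std::int32_t narrow(ReturnType t, std::int32_t v) {
      switch (t) {
        case ret_boolean: return v & 1;                         // keep the t/f bit
        case ret_byte:    return static_cast<std::int8_t>(v);   // sign-extend low 8
        case ret_char:    return static_cast<std::uint16_t>(v); // zero-extend low 16
        case ret_short:   return static_cast<std::int16_t>(v);  // sign-extend low 16
        default:          return v;
      }
    }

    int main() {
      assert(narrow(ret_byte, 0x1FF) == -1);   // 0xFF sign-extends to -1
      assert(narrow(ret_char, -1) == 0xFFFF);  // chars are unsigned
    }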
2686 
2687 // ----------------------------------------------------------------------------
2688 // Volatile variables demand their effects be made known to all CPUs
2689 // in order.  Store buffers on most chips allow reads & writes to
2690 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2691 // without some kind of memory barrier (i.e., it's not sufficient that
2692 // the interpreter does not reorder volatile references, the hardware
2693 // also must not reorder them).
2694 //
2695 // According to the new Java Memory Model (JMM):
2696 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2697 //     writes act as acquire & release, so:
2698 // (2) A read cannot let unrelated NON-volatile memory refs that
2699 //     happen after the read float up to before the read.  It's OK for
2700 //     non-volatile memory refs that happen before the volatile read to
2701 //     float down below it.
2702 // (3) Similarly, a volatile write cannot let unrelated NON-volatile


2860     __ get_cache_and_index_at_bcp(cache, index, 1);
2861     __ bind(L1);
2862   }
2863 }
2864 
2865 void TemplateTable::pop_and_check_object(Register r) {
2866   __ pop_ptr(r);
2867   __ null_check(r);  // for field access must check obj.
2868   __ verify_oop(r);
2869 }
2870 
2871 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2872   transition(vtos, vtos);
2873 
2874   const Register cache = rcx;
2875   const Register index = rdx;
2876   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2877   const Register off   = rbx;
2878   const Register flags = rax;
2879   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them

2880 
2881   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2882   jvmti_post_field_access(cache, index, is_static, false);
2883   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2884 
2885   if (!is_static) pop_and_check_object(obj);
2886 
2887   const Address field(obj, off, Address::times_1, 0*wordSize);
2888 
2889   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2890 
2891   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2892   // Make sure we don't need to mask edx after the above shift
2893   assert(btos == 0, "change code, btos != 0");
2894 
2895   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2896 
2897   __ jcc(Assembler::notZero, notByte);
2898   // btos

2899   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2900   __ push(btos);
2901   // Rewrite bytecode to be faster
2902   if (!is_static && rc == may_rewrite) {
2903     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2904   }
2905   __ jmp(Done);
2906 
2907   __ bind(notByte);

2908   __ cmpl(flags, ztos);
2909   __ jcc(Assembler::notEqual, notBool);
2910 
2911   // ztos (same code as btos)
2912   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2913   __ push(ztos);
2914   // Rewrite bytecode to be faster
2915   if (!is_static && rc == may_rewrite) {
2916     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2917     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2918   }
2919   __ jmp(Done);
2920 
2921   __ bind(notBool);
2922   __ cmpl(flags, atos);
2923   __ jcc(Assembler::notEqual, notObj);
2924   // atos
2925   do_oop_load(_masm, field, rax);
2926   __ push(atos);
2927   if (!is_static && rc == may_rewrite) {
2928     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2929   }
2930   __ jmp(Done);
2931 
2932   __ bind(notObj);



2933   __ cmpl(flags, itos);
2934   __ jcc(Assembler::notEqual, notInt);
2935   // itos
2936   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2937   __ push(itos);
2938   // Rewrite bytecode to be faster
2939   if (!is_static && rc == may_rewrite) {
2940     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2941   }
2942   __ jmp(Done);
2943 
2944   __ bind(notInt);
2945   __ cmpl(flags, ctos);
2946   __ jcc(Assembler::notEqual, notChar);
2947   // ctos
2948   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2949   __ push(ctos);
2950   // Rewrite bytecode to be faster
2951   if (!is_static && rc == may_rewrite) {
2952     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);


3012 #endif
3013 
3014   __ bind(Done);
3015   // [jk] not needed currently
3016   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3017   //                                              Assembler::LoadStore));
3018 }
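
getfield_or_static extracts the field's type from the cache-entry flags word: shift right by tos_state_shift, mask with tos_state_mask, then compare against the TosState constants; btos is tested first with a single notZero branch precisely because it is zero, as the assert states. A sketch of that decode; the shift value and enum ordering here should be read as illustrative:

    #include <cassert>
    #include <cstdint>

    const int           kTosStateShift = 28;   // stand-in for ConstantPoolCacheEntry::tos_state_shift
    const std::uint32_t kTosStateMask  = 0xF;  // stand-in for ...::tos_state_mask

    enum TosState { btos = 0, ztos, ctos, stos, itos, ltos, ftos, dtos, atos };

    static TosState tos_state(std::uint32_t flags) {
      return static_cast<TosState>((flags >> kTosStateShift) & kTosStateMask);
    }

    int main() {
      std::uint32_t flags = (static_cast<std::uint32_t>(atos) << kTosStateShift) | 0x123;
      assert(tos_state(flags) == atos);
      assert(tos_state(0x123) == btos);  // btos == 0: only low flag bits set
    }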
3019 
3020 void TemplateTable::getfield(int byte_no) {
3021   getfield_or_static(byte_no, false);
3022 }
3023 
3024 void TemplateTable::nofast_getfield(int byte_no) {
3025   getfield_or_static(byte_no, false, may_not_rewrite);
3026 }
3027 
3028 void TemplateTable::getstatic(int byte_no) {
3029   getfield_or_static(byte_no, true);
3030 }
3031 
3032 
3033 // The registers cache and index expected to be set before call.
3034 // The function may destroy various registers, just not the cache and index registers.
3035 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3036 
3037   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3038   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3039   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3040   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3041 
3042   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3043 
3044   if (JvmtiExport::can_post_field_modification()) {
3045     // Check to see if a field modification watch has been set before
3046     // we take the time to call into the VM.
3047     Label L1;
3048     assert_different_registers(cache, index, rax);
3049     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3050     __ testl(rax, rax);
3051     __ jcc(Assembler::zero, L1);


3107     // c_rarg1: object pointer set up above (NULL if static)
3108     // c_rarg2: cache entry pointer
3109     // c_rarg3: jvalue object on the stack
3110     __ call_VM(noreg,
3111                CAST_FROM_FN_PTR(address,
3112                                 InterpreterRuntime::post_field_modification),
3113                RBX, robj, RCX);
3114     __ get_cache_and_index_at_bcp(cache, index, 1);
3115     __ bind(L1);
3116   }
3117 }
3118 
3119 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3120   transition(vtos, vtos);
3121 
3122   const Register cache = rcx;
3123   const Register index = rdx;
3124   const Register obj   = rcx;
3125   const Register off   = rbx;
3126   const Register flags = rax;

3127 
3128   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3129   jvmti_post_field_mod(cache, index, is_static);
3130   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3131 
3132   // [jk] not needed currently
3133   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3134   //                                              Assembler::StoreStore));
3135 
3136   Label notVolatile, Done;
3137   __ movl(rdx, flags);
3138   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3139   __ andl(rdx, 0x1);
3140 
3141   // Check for volatile store
3142   __ testl(rdx, rdx);

3143   __ jcc(Assembler::zero, notVolatile);
3144 
3145   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3146   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3147                                                Assembler::StoreStore));
3148   __ jmp(Done);
3149   __ bind(notVolatile);
3150 
3151   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3152 
3153   __ bind(Done);
3154 }
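
For a volatile field, the store is followed by a StoreLoad|StoreStore barrier so no later access can float above it; the non-volatile path skips the fence entirely, which is why the helper is emitted twice around the branch. A rough analogue using standard C++ atomics, with a seq_cst fence standing in for volatile_barrier:

    #include <atomic>

    static int field;

    static void putfield(int v, bool is_volatile) {
      field = v;  // the store itself (putfield_or_static_helper)
      if (is_volatile)
        std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ StoreLoad|StoreStore
    }

    int main() {
      putfield(42, /*is_volatile=*/true);   // store + fence
      putfield(7,  /*is_volatile=*/false);  // store only
    }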
3155 
3156 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3157                                               Register obj, Register off, Register flags) {
3158 
3159   // field addresses
3160   const Address field(obj, off, Address::times_1, 0*wordSize);
3161   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3162 
3163   Label notByte, notBool, notInt, notShort, notChar,
3164         notLong, notFloat, notObj;
3165   Label Done;
3166 
3167   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3168 
3169   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3170 
3171   assert(btos == 0, "change code, btos != 0");
3172   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3173   __ jcc(Assembler::notZero, notByte);
3174 
3175   // btos
3176   {
3177     __ pop(btos);
3178     if (!is_static) pop_and_check_object(obj);
3179     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3180     if (!is_static && rc == may_rewrite) {
3181       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3182     }
3183     __ jmp(Done);
3184   }


3187   __ cmpl(flags, ztos);
3188   __ jcc(Assembler::notEqual, notBool);
3189 
3190   // ztos
3191   {
3192     __ pop(ztos);
3193     if (!is_static) pop_and_check_object(obj);
3194     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3195     if (!is_static && rc == may_rewrite) {
3196       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3197     }
3198     __ jmp(Done);
3199   }
3200 
3201   __ bind(notBool);
3202   __ cmpl(flags, atos);
3203   __ jcc(Assembler::notEqual, notObj);
3204 
3205   // atos
3206   {
3207     __ pop(atos);
3208     if (!is_static) pop_and_check_object(obj);
3209     // Store into the field
3210     do_oop_store(_masm, field, rax);
3211     if (!is_static && rc == may_rewrite) {
3212       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3213     }
3214     __ jmp(Done);
3215   }
3216 
3217   __ bind(notObj);
3218   __ cmpl(flags, itos);
3219   __ jcc(Assembler::notEqual, notInt);
3220 
3221   // itos
3222   {
3223     __ pop(itos);
3224     if (!is_static) pop_and_check_object(obj);
3225     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3226     if (!is_static && rc == may_rewrite) {
3227       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3228     }
3229     __ jmp(Done);
3230   }
3231 
3232   __ bind(notInt);
3233   __ cmpl(flags, ctos);
3234   __ jcc(Assembler::notEqual, notChar);


3333 }
3334 
3335 void TemplateTable::jvmti_post_fast_field_mod() {
3336 
3337   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3338 
3339   if (JvmtiExport::can_post_field_modification()) {
3340     // Check to see if a field modification watch has been set before
3341     // we take the time to call into the VM.
3342     Label L2;
3343     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3344     __ testl(scratch, scratch);
3345     __ jcc(Assembler::zero, L2);
3346     __ pop_ptr(rbx);                  // copy the object pointer from tos
3347     __ verify_oop(rbx);
3348     __ push_ptr(rbx);                 // put the object pointer back on tos
3349     // Save tos values before call_VM() clobbers them. Since we have
3350     // to do it for every data type, we use the saved values as the
3351     // jvalue object.
3352     switch (bytecode()) {          // load values into the jvalue object

3353     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3354     case Bytecodes::_fast_bputfield: // fall through
3355     case Bytecodes::_fast_zputfield: // fall through
3356     case Bytecodes::_fast_sputfield: // fall through
3357     case Bytecodes::_fast_cputfield: // fall through
3358     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3359     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3360     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3361     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3362 
3363     default:
3364       ShouldNotReachHere();
3365     }
3366     __ mov(scratch, rsp);             // points to jvalue on the stack
3367     // access constant pool cache entry
3368     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3369     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3370     __ verify_oop(rbx);
3371     // rbx: object pointer copied above
3372     // c_rarg2: cache entry pointer
3373     // c_rarg3: jvalue object on the stack
3374     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3375     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3376 
3377     switch (bytecode()) {             // restore tos values

3378     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3379     case Bytecodes::_fast_bputfield: // fall through
3380     case Bytecodes::_fast_zputfield: // fall through
3381     case Bytecodes::_fast_sputfield: // fall through
3382     case Bytecodes::_fast_cputfield: // fall through
3383     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3384     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3385     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3386     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3387     default: break;
3388     }
3389     __ bind(L2);
3390   }
3391 }
3392 
3393 void TemplateTable::fast_storefield(TosState state) {
3394   transition(state, vtos);
3395 
3396   ByteSize base = ConstantPoolCache::base_offset();
3397 
3398   jvmti_post_fast_field_mod();
3399 
3400   // access constant pool cache
3401   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3402 
3403   // test for volatile with rdx but rdx is tos register for lputfield.
3404   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3405                        in_bytes(base +
3406                                 ConstantPoolCacheEntry::flags_offset())));
3407 
3408   // replace index with field offset from cache entry
3409   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3410                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3411 
3412   // [jk] not needed currently
3413   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3414   //                                              Assembler::StoreStore));
3415 
3416   Label notVolatile, Done;




3417   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3418   __ andl(rdx, 0x1);
3419 
3420   // Get object from stack
3421   pop_and_check_object(rcx);
3422 
3423   // field address
3424   const Address field(rcx, rbx, Address::times_1);
3425 
3426   // Check for volatile store
3427   __ testl(rdx, rdx);
3428   __ jcc(Assembler::zero, notVolatile);
3429 
3430   fast_storefield_helper(field, rax);
3431   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3432                                                Assembler::StoreStore));
3433   __ jmp(Done);
3434   __ bind(notVolatile);
3435 
3436   fast_storefield_helper(field, rax);
3437 
3438   __ bind(Done);
3439 }
3440 
3441 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3442 
3443   // access field
3444   switch (bytecode()) {
3445   case Bytecodes::_fast_aputfield:
3446     do_oop_store(_masm, field, rax);


3447     break;
3448   case Bytecodes::_fast_lputfield:
3449 #ifdef _LP64
3450     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3451 #else
3452   __ stop("should not be rewritten");
3453 #endif
3454     break;
3455   case Bytecodes::_fast_iputfield:
3456     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3457     break;
3458   case Bytecodes::_fast_zputfield:
3459     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3460     break;
3461   case Bytecodes::_fast_bputfield:
3462     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3463     break;
3464   case Bytecodes::_fast_sputfield:
3465     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3466     break;


3496     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3497     LP64_ONLY(__ mov(c_rarg1, rax));
3498     // c_rarg1: object pointer copied above
3499     // c_rarg2: cache entry pointer
3500     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3501     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3502     __ pop_ptr(rax); // restore object pointer
3503     __ bind(L1);
3504   }
3505 
3506   // access constant pool cache
3507   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3508   // replace index with field offset from cache entry
3509   // [jk] not needed currently
3510   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3511   //                      in_bytes(ConstantPoolCache::base_offset() +
3512   //                               ConstantPoolCacheEntry::flags_offset())));
3513   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3514   // __ andl(rdx, 0x1);
3515   //
3516   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3517                          in_bytes(ConstantPoolCache::base_offset() +
3518                                   ConstantPoolCacheEntry::f2_offset())));
3519 
3520   // rax: object
3521   __ verify_oop(rax);
3522   __ null_check(rax);
3523   Address field(rax, rbx, Address::times_1);
3524 
3525   // access field
3526   switch (bytecode()) {
3527   case Bytecodes::_fast_agetfield:
3528     do_oop_load(_masm, field, rax);
3529     __ verify_oop(rax);
3530     break;
3531   case Bytecodes::_fast_lgetfield:
3532 #ifdef _LP64
3533     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3534 #else
3535   __ stop("should not be rewritten");
3536 #endif
3537     break;
3538   case Bytecodes::_fast_igetfield:
3539     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3540     break;
3541   case Bytecodes::_fast_bgetfield:
3542     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3543     break;
3544   case Bytecodes::_fast_sgetfield:
3545     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3546     break;


4129     __ jmp(done);
4130   }
4131 
4132   // slow case
4133   __ bind(slow_case);
4134   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4135   __ bind(slow_case_no_pop);
4136 
4137   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4138   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4139 
4140   __ get_constant_pool(rarg1);
4141   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4142   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4143    __ verify_oop(rax);
4144 
4145   // continue
4146   __ bind(done);
4147 }
4148 
4149 void TemplateTable::newarray() {
4150   transition(itos, atos);
4151   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4152   __ load_unsigned_byte(rarg1, at_bcp(1));
4153   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4154           rarg1, rax);
4155 }
4156 
4157 void TemplateTable::anewarray() {
4158   transition(itos, atos);
4159 
4160   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4161   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4162 
4163   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4164   __ get_constant_pool(rarg1);
4165   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4166           rarg1, rarg2, rax);
4167 }
4168 
4169 void TemplateTable::arraylength() {
4170   transition(atos, itos);
4171   __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
4172   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4173 }
4174 
4175 void TemplateTable::checkcast() {
4176   transition(atos, atos);
4177   Label done, is_null, ok_is_subtype, quicked, resolved;
4178   __ testptr(rax, rax); // object is in rax
4179   __ jcc(Assembler::zero, is_null);
4180 
4181   // Get cpool & tags index
4182   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4183   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4184   // See if bytecode has already been quicked
4185   __ cmpb(Address(rdx, rbx,
4186                   Address::times_1,
4187                   Array<u1>::base_offset_in_bytes()),
4188           JVM_CONSTANT_Class);

4189   __ jcc(Assembler::equal, quicked);
4190   __ push(atos); // save receiver for result, and for GC
4191   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4192 
4193   // vm_result_2 has metadata result
4194 #ifndef _LP64
4195   // borrow rdi from locals
4196   __ get_thread(rdi);
4197   __ get_vm_result_2(rax, rdi);
4198   __ restore_locals();
4199 #else
4200   __ get_vm_result_2(rax, r15_thread);
4201 #endif
4202 
4203   __ pop_ptr(rdx); // restore receiver
4204   __ jmpb(resolved);
4205 
4206   // Get superklass in rax and subklass in rbx
4207   __ bind(quicked);
4208   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4209   __ load_resolved_klass_at_index(rax, rcx, rbx);
4210 
4211   __ bind(resolved);
4212   __ load_klass(rbx, rdx);
4213 
4214   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4215   // Superklass in rax.  Subklass in rbx.
4216   __ gen_subtype_check(rbx, ok_is_subtype);
4217 
4218   // Come here on failure
4219   __ push_ptr(rdx);
4220   // object is at TOS
4221   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4222 
4223   // Come here on success
4224   __ bind(ok_is_subtype);
4225   __ mov(rax, rdx); // Restore object in rdx



4226 
4227   // Collect counts on whether this check-cast sees NULLs a lot or not.
4228   if (ProfileInterpreter) {
4229     __ jmp(done);
4230     __ bind(is_null);
4231     __ profile_null_seen(rcx);
4232   } else {
4233     __ bind(is_null);   // same as 'done'
4234   }
4235   __ bind(done);
4236 }
4237 
4238 void TemplateTable::instanceof() {
4239   transition(atos, itos);
4240   Label done, is_null, ok_is_subtype, quicked, resolved;
4241   __ testptr(rax, rax);
4242   __ jcc(Assembler::zero, is_null);
4243 
4244   // Get cpool & tags index
4245   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4246   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4247   // See if bytecode has already been quicked
4248   __ cmpb(Address(rdx, rbx,
4249                   Address::times_1,
4250                   Array<u1>::base_offset_in_bytes()),
4251           JVM_CONSTANT_Class);

4252   __ jcc(Assembler::equal, quicked);
4253 
4254   __ push(atos); // save receiver for result, and for GC
4255   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4256   // vm_result_2 has metadata result
4257 
4258 #ifndef _LP64
4259   // borrow rdi from locals
4260   __ get_thread(rdi);
4261   __ get_vm_result_2(rax, rdi);
4262   __ restore_locals();
4263 #else
4264   __ get_vm_result_2(rax, r15_thread);
4265 #endif
4266 
4267   __ pop_ptr(rdx); // restore receiver
4268   __ verify_oop(rdx);
4269   __ load_klass(rdx, rdx);
4270   __ jmpb(resolved);
4271 


4283   // Come here on failure
4284   __ xorl(rax, rax);
4285   __ jmpb(done);
4286   // Come here on success
4287   __ bind(ok_is_subtype);
4288   __ movl(rax, 1);
4289 
4290   // Collect counts on whether this test sees NULLs a lot or not.
4291   if (ProfileInterpreter) {
4292     __ jmp(done);
4293     __ bind(is_null);
4294     __ profile_null_seen(rcx);
4295   } else {
4296     __ bind(is_null);   // same as 'done'
4297   }
4298   __ bind(done);
4299   // rax = 0: obj == NULL or  obj is not an instanceof the specified klass
4300   // rax = 1: obj != NULL and obj is     an instanceof the specified klass
4301 }
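
checkcast and instanceof share the quickening and subtype-check machinery but differ in the two edge outcomes: checkcast lets null through and throws on failure, while instanceof answers 0 for null or a failed check and 1 otherwise, exactly as the comment at 4299-4300 states. In sketch form (flat single-inheritance model again):

    #include <stdexcept>

    struct Klass { const Klass* super = nullptr; };
    struct Obj   { const Klass* klass; };

    static bool is_subtype(const Klass* sub, const Klass* sup) {
      for (const Klass* k = sub; k != nullptr; k = k->super)
        if (k == sup) return true;
      return false;
    }

    static Obj* checkcast(Obj* o, const Klass* k) {
      if (o != nullptr && !is_subtype(o->klass, k))
        throw std::runtime_error("ClassCastException");
      return o;  // null passes through unchanged
    }

    static int instanceof_(Obj* o, const Klass* k) {
      return o != nullptr && is_subtype(o->klass, k);  // rax := 0 or 1
    }

    int main() {
      Klass base, derived{&base};
      Obj d{&derived};
      checkcast(&d, &base);                // ok
      checkcast(nullptr, &base);           // ok: null always passes
      return instanceof_(nullptr, &base);  // 0
    }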
4302 
4303 
4304 //----------------------------------------------------------------------------------------------------
4305 // Breakpoints
4306 void TemplateTable::_breakpoint() {
4307   // Note: We get here even if we are single stepping..
4308   // jbug insists on setting breakpoints at every bytecode
4309   // even if we are in single step mode.
4310 
4311   transition(vtos, vtos);
4312 
4313   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4314 
4315   // get the unpatched byte code
4316   __ get_method(rarg);
4317   __ call_VM(noreg,
4318              CAST_FROM_FN_PTR(address,
4319                               InterpreterRuntime::get_original_bytecode_at),
4320              rarg, rbcp);
4321   __ mov(rbx, rax);  // why?
4322 
4323   // post the breakpoint event


4347 //
4348 // Stack layout:
4349 //
4350 // [expressions  ] <--- rsp               = expression stack top
4351 // ..
4352 // [expressions  ]
4353 // [monitor entry] <--- monitor block top = expression stack bot
4354 // ..
4355 // [monitor entry]
4356 // [frame data   ] <--- monitor block bot
4357 // ...
4358 // [saved rbp    ] <--- rbp
4359 void TemplateTable::monitorenter() {
4360   transition(atos, vtos);
4361 
4362   // check for NULL object
4363   __ null_check(rax);
4364 
4365   __ resolve(IS_NOT_NULL, rax);
4366 
4367   const Address monitor_block_top(
4368         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4369   const Address monitor_block_bot(
4370         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4371   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4372 
4373   Label allocated;
4374 
4375   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4376   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4377   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4378 
4379   // initialize entry pointer
4380   __ xorl(rmon, rmon); // points to free slot or NULL
4381 
4382   // find a free slot in the monitor block (result in rmon)
4383   {
4384     Label entry, loop, exit;
4385     __ movptr(rtop, monitor_block_top); // points to current entry,
4386                                         // starting with top-most entry


4445   // store object
4446   __ movptr(Address(rmon, BasicObjectLock::obj_offset_in_bytes()), rax);
4447   __ lock_object(rmon);
4448 
4449   // check to make sure this monitor doesn't cause stack overflow after locking
4450   __ save_bcp();  // in case of exception
4451   __ generate_stack_overflow_check(0);
4452 
4453   // The bcp has already been incremented. Just need to dispatch to
4454   // next instruction.
4455   __ dispatch_next(vtos);
4456 }
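
The free-slot search walks the monitor block from the top-most entry down, looking for an entry whose obj slot is NULL (the real loop also notes whether the object being locked is already present in the frame). A simplified walk over the same layout, with BasicObjectLock reduced to its obj field:

    #include <cstdio>

    struct BasicObjectLock { void* obj; };  // simplified: just the obj slot

    // Scan [top, bot) for a free slot, as monitorenter's loop does.
    static BasicObjectLock* find_free_slot(BasicObjectLock* top, BasicObjectLock* bot) {
      for (BasicObjectLock* e = top; e < bot; ++e)
        if (e->obj == nullptr) return e;
      return nullptr;  // none free: a new entry must be allocated
    }

    int main() {
      int dummy;
      BasicObjectLock block[3] = { {&dummy}, {nullptr}, {&dummy} };
      BasicObjectLock* slot = find_free_slot(block, block + 3);
      std::printf("free slot at index %td\n", slot - block);  // prints 1
    }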
4457 
4458 void TemplateTable::monitorexit() {
4459   transition(atos, vtos);
4460 
4461   // check for NULL object
4462   __ null_check(rax);
4463 
4464   __ resolve(IS_NOT_NULL, rax);
4465 
4466   const Address monitor_block_top(
4467         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4468   const Address monitor_block_bot(
4469         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4470   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4471 
4472   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4473   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4474 
4475   Label found;
4476 
4477   // find matching slot
4478   {
4479     Label entry, loop;
4480     __ movptr(rtop, monitor_block_top); // points to current entry,
4481                                         // starting with top-most entry
4482     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4483                                         // of monitor block
4484     __ jmpb(entry);

src/hotspot/cpu/x86/templateTable_x86.cpp (new)

 137   case TemplateTable::less_equal   : return Assembler::greater;
 138   case TemplateTable::greater      : return Assembler::lessEqual;
 139   case TemplateTable::greater_equal: return Assembler::less;
 140   }
 141   ShouldNotReachHere();
 142   return Assembler::zero;
 143 }
 144 
 145 
 146 
 147 // Miscellaneous helper routines
 148 // Store an oop (or NULL) at the address described by dst.
 149 // If val == noreg this means store a NULL
 150 
 151 
 152 static void do_oop_store(InterpreterMacroAssembler* _masm,
 153                          Address dst,
 154                          Register val,
 155                          DecoratorSet decorators = 0) {
 156   assert(val == noreg || val == rax, "parameter is just for looks");
 157   __ store_heap_oop(dst, val, rdx, rbx, noreg, decorators);
 158 }
 159 
 160 static void do_oop_load(InterpreterMacroAssembler* _masm,
 161                         Address src,
 162                         Register dst,
 163                         DecoratorSet decorators = 0) {
 164   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 165 }
 166 
 167 Address TemplateTable::at_bcp(int offset) {
 168   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 169   return Address(rbcp, offset);
 170 }
 171 
 172 
 173 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 174                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 175                                    int byte_no) {
 176   if (!RewriteBytecodes)  return;
 177   Label L_patch_done;
 178 
 179   switch (bc) {
 180   case Bytecodes::_fast_qputfield:
 181   case Bytecodes::_fast_aputfield:
 182   case Bytecodes::_fast_bputfield:
 183   case Bytecodes::_fast_zputfield:
 184   case Bytecodes::_fast_cputfield:
 185   case Bytecodes::_fast_dputfield:
 186   case Bytecodes::_fast_fputfield:
 187   case Bytecodes::_fast_iputfield:
 188   case Bytecodes::_fast_lputfield:
 189   case Bytecodes::_fast_sputfield:
 190     {
 191       // We skip bytecode quickening for putfield instructions when
 192       // the put_code written to the constant pool cache is zero.
 193       // This is required so that every execution of this instruction
 194       // calls out to InterpreterRuntime::resolve_get_put to do
 195       // additional, required work.
 196       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 197       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 198       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 199       __ movl(bc_reg, bc);
 200       __ cmpl(temp_reg, (int) 0);


 353   __ sarl(rax, 16);
 354 }
 355 
 356 void TemplateTable::ldc(bool wide) {
 357   transition(vtos, vtos);
 358   Register rarg = NOT_LP64(rcx) LP64_ONLY(c_rarg1);
 359   Label call_ldc, notFloat, notClass, notInt, Done;
 360 
 361   if (wide) {
 362     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 363   } else {
 364     __ load_unsigned_byte(rbx, at_bcp(1));
 365   }
 366 
 367   __ get_cpool_and_tags(rcx, rax);
 368   const int base_offset = ConstantPool::header_size() * wordSize;
 369   const int tags_offset = Array<u1>::base_offset_in_bytes();
 370 
 371   // get type
 372   __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));
 373   __ andl(rdx, ~JVM_CONSTANT_QDescBit);
 374 
 375   // unresolved class - get the resolved class
 376   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
 377   __ jccb(Assembler::equal, call_ldc);
 378 
 379   // unresolved class in error state - call into runtime to throw the error
 380   // from the first resolution attempt
 381   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClassInError);
 382   __ jccb(Assembler::equal, call_ldc);
 383 
 384   // resolved class - need to call vm to get java mirror of the class
 385   __ cmpl(rdx, JVM_CONSTANT_Class);
 386   __ jcc(Assembler::notEqual, notClass);
 387 
 388   __ bind(call_ldc);
 389 
 390   __ movl(rarg, wide);
 391   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), rarg);
 392 
 393   __ push(atos);


 804                     Address(rdx, rax,
 805                             Address::times_4,
 806                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 807                     noreg, noreg);
 808 }
 809 
 810 void TemplateTable::daload() {
 811   transition(itos, dtos);
 812   // rax: index
 813   // rdx: array
 814   index_check(rdx, rax); // kills rbx
 815   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 816                     Address(rdx, rax,
 817                             Address::times_8,
 818                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 819                     noreg, noreg);
 820 }
 821 
 822 void TemplateTable::aaload() {
 823   transition(itos, atos);
 824 
 825   Register array = rcx;
 826   Register index = rax;
 827 
 828   index_check(array, index); // kills rbx
 829   if (ValueArrayFlatten) {
 830     Label is_flat_array, done;
 831     __ test_flattened_array_oop(array, rbx, is_flat_array);
 832     do_oop_load(_masm,
 833                 Address(array, index,
 834                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 835                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 836                 rax,
 837                 IS_ARRAY);
 838     __ jmp(done);
 839     __ bind(is_flat_array);
 840     __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_load), array, index);
 841     __ bind(done);
 842   } else {
 843     do_oop_load(_masm,
 844                 Address(array, index,
 845                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 846                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 847                 rax,
 848                 IS_ARRAY);
 849   }
 850 }
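
In the Valhalla version, a flattened value array keeps its elements inline instead of as references, so aaload cannot just load a pointer: the flat branch calls InterpreterRuntime::value_array_load to buffer the element as a heap object. A conceptual sketch of that split; the types and the buffering step are stand-ins, not the real runtime:

    #include <cstring>

    struct Obj { char payload[8]; };  // stand-in for a buffered value object

    struct ValueArray {
      bool  is_flattened;  // stand-in for test_flattened_array_oop
      char* payload;       // inline element storage when flattened
      Obj** refs;          // ordinary oop slots otherwise
    };

    // Stand-in for the value_array_load runtime call: box the flat element.
    static Obj* buffer_element(ValueArray* a, int i) {
      Obj* boxed = new Obj;
      std::memcpy(boxed->payload, a->payload + i * sizeof(Obj::payload),
                  sizeof(Obj::payload));
      return boxed;
    }

    static Obj* aaload(ValueArray* a, int i) {
      return a->is_flattened ? buffer_element(a, i)  // slow path: runtime call
                             : a->refs[i];           // fast path: plain oop load
    }

    int main() {
      Obj o{};
      Obj* slot = &o;
      ValueArray a{false, nullptr, &slot};
      return aaload(&a, 0) == &o ? 0 : 1;
    }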
 851 
 852 void TemplateTable::baload() {
 853   transition(itos, itos);
 854   // rax: index
 855   // rdx: array
 856   index_check(rdx, rax); // kills rbx
 857   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 858                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 859                     noreg, noreg);
 860 }
 861 
 862 void TemplateTable::caload() {
 863   transition(itos, itos);
 864   // rax: index
 865   // rdx: array
 866   index_check(rdx, rax); // kills rbx
 867   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 868                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 869                     noreg, noreg);


1115   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1116                      Address(rdx, rbx, Address::times_4,
1117                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1118                      noreg /* ftos */, noreg, noreg);
1119 }
1120 
1121 void TemplateTable::dastore() {
1122   transition(dtos, vtos);
1123   __ pop_i(rbx);
1124   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1125   // rbx:  index
1126   // rdx:  array
1127   index_check(rdx, rbx); // prefer index in rbx
1128   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1129                      Address(rdx, rbx, Address::times_8,
1130                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1131                      noreg /* dtos */, noreg, noreg);
1132 }
1133 
1134 void TemplateTable::aastore() {
1135   Label is_null, is_flat_array, ok_is_subtype, done;
1136   transition(vtos, vtos);
1137   // stack: ..., array, index, value
1138   __ movptr(rax, at_tos());    // value
1139   __ movl(rcx, at_tos_p1()); // index
1140   __ movptr(rdx, at_tos_p2()); // array
1141 
1142   Address element_address(rdx, rcx,
1143                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1144                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1145 
1146   index_check_without_pop(rdx, rcx);     // kills rbx
1147 
1148   __ testptr(rax, rax);
1149   __ jcc(Assembler::zero, is_null);
1150 
1151   // Move array class to rdi
1152   __ load_klass(rdi, rdx);
1153   if (ValueArrayFlatten) {
1154     __ test_flattened_array_oop(rdx, rbx, is_flat_array);
1155   }
1156 
1157   // Move subklass into rbx
1158   __ load_klass(rbx, rax);
1159   // Move array element superklass into rax
1160   __ movptr(rax, Address(rdi,

1161                          ObjArrayKlass::element_klass_offset()));
1162 
1163   // Generate subtype check.  Blows rcx, rdi
1164   // Superklass in rax.  Subklass in rbx.
1165   // is "rbx <: rax" ? (value subclass <: array element superclass)
1166   __ gen_subtype_check(rbx, ok_is_subtype);
1167 
1168   // Come here on failure
1169   // object is at TOS
1170   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1171 
1172   // Come here on success
1173   __ bind(ok_is_subtype);
1174 
1175   // Get the value we will store
1176   __ movptr(rax, at_tos());
1177   __ movl(rcx, at_tos_p1()); // index
1178   // Now store using the appropriate barrier
1179   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1180   __ jmp(done);
1181 
1182   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
1183   __ bind(is_null);
1184   __ profile_null_seen(rbx);
1185   if (EnableValhalla) {
1186     Label is_null_into_value_array_npe, store_null;
1187 
1188     // No way to store null in null-free array
1189     __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1190     __ jmp(store_null);
1191 
1192     __ bind(is_null_into_value_array_npe);
1193     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1194 
1195     __ bind(store_null);
1196   }
1197   // Store a NULL
1198   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1199   __ jmp(done);
1200 
1201   if (EnableValhalla) {
1202     Label is_type_ok;
1203     __ bind(is_flat_array); // Store non-null value to flat
1204 
1205     // Simplistic type check...
1206 
1207     // Profile the not-null value's klass.
1208     __ load_klass(rbx, rax);
1209     __ profile_typecheck(rcx, rbx, rax); // blows rcx, and rax
1210     // Move element klass into rax
1211     __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1212     // flat value array needs exact type match
1213     // is "rax == rbx" (value subclass == array element superclass)
1214     __ cmpptr(rax, rbx);
1215     __ jccb(Assembler::equal, is_type_ok);
1216 
1217     __ profile_typecheck_failed(rcx);
1218     __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1219 
1220     __ bind(is_type_ok);
1221     __ movptr(rax, at_tos());  // value
1222     __ movl(rcx, at_tos_p1()); // index
1223     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), rax, rdx, rcx);
1224   }
1225   // Pop stack arguments
1226   __ bind(done);
1227   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1228 }
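
The Valhalla aastore layers two rules on top of the classic subtype check: storing null into a null-free value array raises NullPointerException, and a flattened array demands an exact element-type match before value_array_store copies the fields in; covariant subtyping applies only on the non-flat path. Condensed:

    #include <stdexcept>

    struct Klass { const Klass* super = nullptr; };
    struct Obj   { const Klass* klass; };

    struct Array {
      const Klass* element_klass;
      bool null_free;  // value arrays cannot hold null
      bool flattened;  // elements stored inline
    };

    static void aastore_valhalla(Array* a, Obj* v) {
      if (v == nullptr) {
        if (a->null_free) throw std::runtime_error("NullPointerException");
        return;  // store the null
      }
      if (a->flattened) {
        if (v->klass != a->element_klass)  // exact match, no subtyping
          throw std::runtime_error("ArrayStoreException");
        return;  // runtime flat-copies the fields (value_array_store)
      }
      // otherwise: the usual covariant subtype check, as in the classic path
    }

    int main() {
      Klass k;
      Array flat{&k, /*null_free=*/true, /*flattened=*/true};
      Obj v{&k};
      aastore_valhalla(&flat, &v);  // ok: exact type match
    }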
1229 
1230 void TemplateTable::bastore() {
1231   transition(itos, vtos);
1232   __ pop_i(rbx);
1233   // rax: value
1234   // rbx: index
1235   // rdx: array
1236   index_check(rdx, rbx); // prefer index in rbx
1237   // Need to check whether array is boolean or byte
1238   // since both types share the bastore bytecode.
1239   __ load_klass(rcx, rdx);
1240   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1241   int diffbit = Klass::layout_helper_boolean_diffbit();
1242   __ testl(rcx, diffbit);
1243   Label L_skip;
1244   __ jccb(Assembler::zero, L_skip);


2450   __ jcc(j_not(cc), not_taken);
2451   branch(false, false);
2452   __ bind(not_taken);
2453   __ profile_not_taken_branch(rax);
2454 }
2455 
2456 void TemplateTable::if_nullcmp(Condition cc) {
2457   transition(atos, vtos);
2458   // assume branch is more often taken than not (loops use backward branches)
2459   Label not_taken;
2460   __ testptr(rax, rax);
2461   __ jcc(j_not(cc), not_taken);
2462   branch(false, false);
2463   __ bind(not_taken);
2464   __ profile_not_taken_branch(rax);
2465 }
2466 
2467 void TemplateTable::if_acmp(Condition cc) {
2468   transition(atos, vtos);
2469   // assume branch is more often taken than not (loops use backward branches)
2470   Label taken, not_taken;
2471   __ pop_ptr(rdx);
2472 
2473   const int is_value_mask = markOopDesc::always_locked_pattern;
2474   if (EnableValhalla) {
2475     __ cmpoop(rdx, rax);
2476     __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2477 
2478     // might be substitutable, test if either rax or rdx is null
2479     __ movptr(rbx, rdx);
2480     __ andptr(rbx, rax);
2481     __ testptr(rbx, rbx);
2482     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2483 
2484     // and both are values ?
2485     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2486     __ andptr(rbx, is_value_mask);
2487     __ movptr(rcx, Address(rax, oopDesc::mark_offset_in_bytes()));
2488     __ andptr(rcx, is_value_mask);
2489     __ andptr(rbx, rcx);
2490     __ cmpl(rbx, is_value_mask);
2491     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2492 
2493     // same value klass ?
2494     __ load_metadata(rbx, rdx);
2495     __ load_metadata(rcx, rax);
2496     __ cmpptr(rbx, rcx);
2497     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2498 
2499     // Know both are the same type, let's test for substitutability...
2500     if (cc == equal) {
2501       invoke_is_substitutable(rax, rdx, taken, not_taken);
2502     } else {
2503       invoke_is_substitutable(rax, rdx, not_taken, taken);
2504     }
2505     __ stop("Not reachable");
2506   }
2507 
2508   __ cmpoop(rdx, rax);
2509   __ jcc(j_not(cc), not_taken);
2510   __ bind(taken);
2511   branch(false, false);
2512   __ bind(not_taken);
2513   __ profile_not_taken_branch(rax);
2514 }
2515 
2516 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2517                                             Label& is_subst, Label& not_subst) {
2518   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2519   // Restored...rax answer, jmp to outcome...
2520   __ testl(rax, rax);
2521   __ jcc(Assembler::zero, not_subst);
2522   __ jmp(is_subst);
2523 }
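
Under Valhalla, acmp stops being a plain pointer compare: identical references answer immediately, a null on either side falls back to reference identity, and only two non-null values of the same klass reach the is_substitutable runtime call. The decision ladder above, restated; the is_value flag stands in for the always_locked_pattern test on the mark word:

    struct Obj { bool is_value; const void* klass; };

    // Stand-in for InterpreterRuntime::is_substitutable; the real test
    // compares the two values' fields recursively.
    static bool is_substitutable(const Obj*, const Obj*) { return true; }

    static bool acmp_eq(const Obj* a, const Obj* b) {
      if (a == b) return true;                         // same reference
      if (a == nullptr || b == nullptr) return false;  // one side null
      if (!a->is_value || !b->is_value) return false;  // identity objects differ
      if (a->klass != b->klass) return false;          // different value types
      return is_substitutable(a, b);                   // same-type values
    }

    int main() {
      int k;
      Obj a{true, &k}, b{true, &k};
      return acmp_eq(&a, &b) ? 0 : 1;  // 0: substitutable in this sketch
    }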
2524 
2525 void TemplateTable::ret() {
2526   transition(vtos, vtos);
2527   locals_index(rbx);
2528   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2529   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2530   __ profile_ret(rbx, rcx);
2531   __ get_method(rax);
2532   __ movptr(rbcp, Address(rax, Method::const_offset()));
2533   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2534                       ConstMethod::codes_offset()));
2535   __ dispatch_next(vtos, 0, true);
2536 }
2537 
2538 void TemplateTable::wide_ret() {
2539   transition(vtos, vtos);
2540   locals_index_wide(rbx);
2541   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2542   __ profile_ret(rbx, rcx);
2543   __ get_method(rax);
2544   __ movptr(rbcp, Address(rax, Method::const_offset()));


2770     __ testb(Address(r15_thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2771 #else
2772     const Register thread = rdi;
2773     __ get_thread(thread);
2774     __ testb(Address(thread, Thread::polling_page_offset()), SafepointMechanism::poll_bit());
2775 #endif
2776     __ jcc(Assembler::zero, no_safepoint);
2777     __ push(state);
2778     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2779                                     InterpreterRuntime::at_safepoint));
2780     __ pop(state);
2781     __ bind(no_safepoint);
2782   }
2783 
2784   // Narrow result if state is itos but result type is smaller.
2785   // Need to narrow in the return bytecode rather than in generate_return_entry
2786   // since compiled code callers expect the result to already be narrowed.
2787   if (state == itos) {
2788     __ narrow(rax);
2789   }
2790 
2791   __ remove_activation(state, rbcp, true, true, true);
2792 
2793   __ jmp(rbcp);
2794 }
2795 
2796 // ----------------------------------------------------------------------------
2797 // Volatile variables demand their effects be made known to all CPUs
2798 // in order.  Store buffers on most chips allow reads & writes to
2799 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2800 // without some kind of memory barrier (i.e., it's not sufficient that
2801 // the interpreter does not reorder volatile references, the hardware
2802 // also must not reorder them).
2803 //
2804 // According to the new Java Memory Model (JMM):
2805 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2806 //     writes act as acquire & release, so:
2807 // (2) A read cannot let unrelated NON-volatile memory refs that
2808 //     happen after the read float up to before the read.  It's OK for
2809 //     non-volatile memory refs that happen before the volatile read to
2810 //     float down below it.
2811 // (3) Similarly, a volatile write cannot let unrelated NON-volatile


2969     __ get_cache_and_index_at_bcp(cache, index, 1);
2970     __ bind(L1);
2971   }
2972 }
2973 
2974 void TemplateTable::pop_and_check_object(Register r) {
2975   __ pop_ptr(r);
2976   __ null_check(r);  // for field access must check obj.
2977   __ verify_oop(r);
2978 }
2979 
2980 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2981   transition(vtos, vtos);
2982 
2983   const Register cache = rcx;
2984   const Register index = rdx;
2985   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2986   const Register off   = rbx;
2987   const Register flags = rax;
2988   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2989   const Register flags2 = rdx;
2990 
2991   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2992   jvmti_post_field_access(cache, index, is_static, false);
2993   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2994 


2995   const Address field(obj, off, Address::times_1, 0*wordSize);
2996 
2997   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notValueType;
2998 
2999   if (!is_static) {
3000     __ movptr(rcx, Address(cache, index, Address::times_ptr,
3001                            in_bytes(ConstantPoolCache::base_offset() +
3002                                     ConstantPoolCacheEntry::f1_offset())));
3003   }
3004 
3005   __ movl(flags2, flags);
3006 
3007   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3008   // Make sure we don't need to mask flags (rax) after the above shift
3009   assert(btos == 0, "change code, btos != 0");
3010 
3011   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
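  // Sketch of the word being decoded here (simplified view of the cp cache
  // entry flags; the authoritative bit layout is in cpCache.hpp):
  //
  //   [ tos_state | is_final | is_volatile | ... | field_index ]
  //
  // After the shift and mask, 'flags' holds only the tos_state; btos == 0 (as
  // asserted above) lets the byte case be tested with a plain notZero branch.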
3012 
3013   __ jcc(Assembler::notZero, notByte);
3014   // btos
3015   if (!is_static) pop_and_check_object(obj);
3016   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3017   __ push(btos);
3018   // Rewrite bytecode to be faster
3019   if (!is_static && rc == may_rewrite) {
3020     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3021   }
3022   __ jmp(Done);
3023 
3024   __ bind(notByte);
3025 
3026   __ cmpl(flags, ztos);
3027   __ jcc(Assembler::notEqual, notBool);
3028   if (!is_static) pop_and_check_object(obj);
3029   // ztos (same code as btos)
3030   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3031   __ push(ztos);
3032   // Rewrite bytecode to be faster
3033   if (!is_static && rc == may_rewrite) {
3034     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3035     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3036   }
3037   __ jmp(Done);
3038 
3039   __ bind(notBool);
3040   __ cmpl(flags, atos);
3041   __ jcc(Assembler::notEqual, notObj);
3042   // atos
3043   if (!EnableValhalla) {
3044     if (!is_static) pop_and_check_object(obj);
3045     do_oop_load(_masm, field, rax);
3046     __ push(atos);
3047     if (!is_static && rc == may_rewrite) {
3048       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3049     }
3050     __ jmp(Done);
3051   } else {
3052     if (is_static) {
3053       __ load_heap_oop(rax, field);
3054       Label isFlattenable, uninitialized;
3055       // The code below handles the case where the static field has not been initialized yet
3056       __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3057         // Not flattenable case
3058         __ push(atos);
3059         __ jmp(Done);
3060       // Flattenable case, must not return null even if uninitialized
3061       __ bind(isFlattenable);
3062         __ testptr(rax, rax);
3063         __ jcc(Assembler::zero, uninitialized);
3064           __ push(atos);
3065           __ jmp(Done);
3066         __ bind(uninitialized);
3067           __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3068           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_value_field),
3069                  obj, flags2);
3070           __ verify_oop(rax);
3071           __ push(atos);
3072           __ jmp(Done);
3073     } else {
3074       Label isFlattened, nonnull, isFlattenable, rewriteFlattenable;
3075       __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3076         // Non-flattenable field case, also covers the object case
3077         pop_and_check_object(obj);
3078         __ load_heap_oop(rax, field);
3079         __ push(atos);
3080         if (rc == may_rewrite) {
3081           patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3082         }
3083         __ jmp(Done);
3084       __ bind(isFlattenable);
3085         __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3086           // Non-flattened field case
3087           pop_and_check_object(obj);
3088           __ load_heap_oop(rax, field);
3089           __ testptr(rax, rax);
3090           __ jcc(Assembler::notZero, nonnull);
3091             __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3092             __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3093                        obj, flags2);
3094           __ bind(nonnull);
3095           __ verify_oop(rax);
3096           __ push(atos);
3097           __ jmp(rewriteFlattenable);
3098         __ bind(isFlattened);
3099           __ andl(flags2, ConstantPoolCacheEntry::field_index_mask);
3100           pop_and_check_object(rbx);
3101           call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field),
3102                   rbx, flags2, rcx);
3103           __ verify_oop(rax);
3104           __ push(atos);
3105       __ bind(rewriteFlattenable);
3106       if (rc == may_rewrite) {
3107         patch_bytecode(Bytecodes::_fast_qgetfield, bc, rbx);
3108       }
3109       __ jmp(Done);
3110     }
3111   }

3112 
3113   __ bind(notObj);
3114 
3115   if (!is_static) pop_and_check_object(obj);
3116 
3117   __ cmpl(flags, itos);
3118   __ jcc(Assembler::notEqual, notInt);
3119   // itos
3120   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3121   __ push(itos);
3122   // Rewrite bytecode to be faster
3123   if (!is_static && rc == may_rewrite) {
3124     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3125   }
3126   __ jmp(Done);
3127 
3128   __ bind(notInt);
3129   __ cmpl(flags, ctos);
3130   __ jcc(Assembler::notEqual, notChar);
3131   // ctos
3132   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3133   __ push(ctos);
3134   // Rewrite bytecode to be faster
3135   if (!is_static && rc == may_rewrite) {
3136     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);


3196 #endif
3197 
3198   __ bind(Done);
3199   // [jk] not needed currently
3200   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3201   //                                              Assembler::LoadStore));
3202 }
3203 
3204 void TemplateTable::getfield(int byte_no) {
3205   getfield_or_static(byte_no, false);
3206 }
3207 
3208 void TemplateTable::nofast_getfield(int byte_no) {
3209   getfield_or_static(byte_no, false, may_not_rewrite);
3210 }
3211 
3212 void TemplateTable::getstatic(int byte_no) {
3213   getfield_or_static(byte_no, true);
3214 }
3215 
3216 void TemplateTable::withfield() {
3217   transition(vtos, atos);
3218 
3219   Register cache = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
3220   Register index = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
3221 
3222   resolve_cache_and_index(f2_byte, cache, index, sizeof(u2));
3223 
3224   call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::withfield), cache);
3225   // new value type is returned in rbx
3226   // stack adjustment is returned in rax
3227   __ verify_oop(rbx);
3228   __ addptr(rsp, rax);
3229   __ movptr(rax, rbx);
3230 }
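// Net effect (sketch): the runtime builds the updated value instance, the
// operands consumed by withfield (new field value plus receiver) are popped by
// bumping rsp by the byte count the runtime reports, and the result oop ends
// up in rax to satisfy the atos out-state.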
3231 
3232 // The registers cache and index are expected to be set before the call.
3233 // The function may destroy various registers, just not the cache and index registers.
3234 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3235 
3236   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
3237   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
3238   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
3239   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
3240 
3241   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
3242 
3243   if (JvmtiExport::can_post_field_modification()) {
3244     // Check to see if a field modification watch has been set before
3245     // we take the time to call into the VM.
3246     Label L1;
3247     assert_different_registers(cache, index, rax);
3248     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3249     __ testl(rax, rax);
3250     __ jcc(Assembler::zero, L1);


3306     // c_rarg1: object pointer set up above (NULL if static)
3307     // c_rarg2: cache entry pointer
3308     // c_rarg3: jvalue object on the stack
3309     __ call_VM(noreg,
3310                CAST_FROM_FN_PTR(address,
3311                                 InterpreterRuntime::post_field_modification),
3312                RBX, robj, RCX);
3313     __ get_cache_and_index_at_bcp(cache, index, 1);
3314     __ bind(L1);
3315   }
3316 }
3317 
3318 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3319   transition(vtos, vtos);
3320 
3321   const Register cache = rcx;
3322   const Register index = rdx;
3323   const Register obj   = rcx;
3324   const Register off   = rbx;
3325   const Register flags = rax;
3326   const Register flags2 = rdx;
3327 
3328   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
3329   jvmti_post_field_mod(cache, index, is_static);
3330   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3331 
3332   // [jk] not needed currently
3333   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3334   //                                              Assembler::StoreStore));
3335 
3336   Label notVolatile, Done;
3337   __ movl(rdx, flags);
3338   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3339   __ andl(rdx, 0x1);
3340 
3341   // Check for volatile store
3342   __ testl(rdx, rdx);
3343   __ movl(flags2, flags);
3344   __ jcc(Assembler::zero, notVolatile);
3345 
3346   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags, flags2);
3347   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3348                                                Assembler::StoreStore));
3349   __ jmp(Done);
3350   __ bind(notVolatile);
3351 
3352   putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags, flags2);
3353 
3354   __ bind(Done);
3355 }
3356 
3357 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3358                                               Register obj, Register off, Register flags, Register flags2) {
3359 
3360   // field addresses
3361   const Address field(obj, off, Address::times_1, 0*wordSize);
3362   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3363 
3364   Label notByte, notBool, notInt, notShort, notChar,
3365         notLong, notFloat, notObj, notValueType;
3366   Label Done;
3367 
3368   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3369 
3370   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3371 
3372   assert(btos == 0, "change code, btos != 0");
3373   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
3374   __ jcc(Assembler::notZero, notByte);
3375 
3376   // btos
3377   {
3378     __ pop(btos);
3379     if (!is_static) pop_and_check_object(obj);
3380     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3381     if (!is_static && rc == may_rewrite) {
3382       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3383     }
3384     __ jmp(Done);
3385   }


3388   __ cmpl(flags, ztos);
3389   __ jcc(Assembler::notEqual, notBool);
3390 
3391   // ztos
3392   {
3393     __ pop(ztos);
3394     if (!is_static) pop_and_check_object(obj);
3395     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3396     if (!is_static && rc == may_rewrite) {
3397       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3398     }
3399     __ jmp(Done);
3400   }
3401 
3402   __ bind(notBool);
3403   __ cmpl(flags, atos);
3404   __ jcc(Assembler::notEqual, notObj);
3405 
3406   // atos
3407   {
3408     if (!EnableValhalla) {
3409       __ pop(atos);
3410       if (!is_static) pop_and_check_object(obj);
3411       // Store into the field
3412       do_oop_store(_masm, field, rax);
3413       if (!is_static && rc == may_rewrite) {
3414         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3415       }
3416       __ jmp(Done);
3417     } else {
3418       __ pop(atos);
3419       if (is_static) {
3420         Label notFlattenable;
3421         __ test_field_is_not_flattenable(flags2, rscratch1, notFlattenable);
3422         __ null_check(rax);
3423         __ bind(notFlattenable);
3424         do_oop_store(_masm, field, rax);
3425         __ jmp(Done);
3426       } else {
3427         Label isFlattenable, isFlattened, rewriteNotFlattenable, rewriteFlattenable;
3428         __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3429         // Not flattenable case, covers non-flattenable values and objects
3430         pop_and_check_object(obj);
3431         // Store into the field
3432         do_oop_store(_masm, field, rax);
3433         __ bind(rewriteNotFlattenable);
3434         if (rc == may_rewrite) {
3435           patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3436         }
3437         __ jmp(Done);
3438         // Implementation of the flattenable semantic
3439         __ bind(isFlattenable);
3440         __ null_check(rax);
3441         __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3442         // Not flattened case
3443         pop_and_check_object(obj);
3444         // Store into the field
3445         do_oop_store(_masm, field, rax);
3446         __ jmp(rewriteFlattenable);
3447         __ bind(isFlattened);
3448         pop_and_check_object(obj);
3449         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3450                 rax, off, obj);
3451         __ bind(rewriteFlattenable);
3452         if (rc == may_rewrite) {
3453           patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3454         }
3455         __ jmp(Done);
3456       }
3457     }

3458   }
3459 
3460   __ bind(notObj);
3461   __ cmpl(flags, itos);
3462   __ jcc(Assembler::notEqual, notInt);
3463 
3464   // itos
3465   {
3466     __ pop(itos);
3467     if (!is_static) pop_and_check_object(obj);
3468     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3469     if (!is_static && rc == may_rewrite) {
3470       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3471     }
3472     __ jmp(Done);
3473   }
3474 
3475   __ bind(notInt);
3476   __ cmpl(flags, ctos);
3477   __ jcc(Assembler::notEqual, notChar);


3576 }
3577 
3578 void TemplateTable::jvmti_post_fast_field_mod() {
3579 
3580   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3581 
3582   if (JvmtiExport::can_post_field_modification()) {
3583     // Check to see if a field modification watch has been set before
3584     // we take the time to call into the VM.
3585     Label L2;
3586     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3587     __ testl(scratch, scratch);
3588     __ jcc(Assembler::zero, L2);
3589     __ pop_ptr(rbx);                  // copy the object pointer from tos
3590     __ verify_oop(rbx);
3591     __ push_ptr(rbx);                 // put the object pointer back on tos
3592     // Save tos values before call_VM() clobbers them. Since we have
3593     // to do it for every data type, we use the saved values as the
3594     // jvalue object.
3595     switch (bytecode()) {          // load values into the jvalue object
3596     case Bytecodes::_fast_qputfield: //fall through
3597     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3598     case Bytecodes::_fast_bputfield: // fall through
3599     case Bytecodes::_fast_zputfield: // fall through
3600     case Bytecodes::_fast_sputfield: // fall through
3601     case Bytecodes::_fast_cputfield: // fall through
3602     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3603     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3604     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3605     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3606 
3607     default:
3608       ShouldNotReachHere();
3609     }
3610     __ mov(scratch, rsp);             // points to jvalue on the stack
3611     // access constant pool cache entry
3612     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3613     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3614     __ verify_oop(rbx);
3615     // rbx: object pointer copied above
3616     // c_rarg2: cache entry pointer
3617     // c_rarg3: jvalue object on the stack
3618     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3619     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3620 
3621     switch (bytecode()) {             // restore tos values
3622     case Bytecodes::_fast_qputfield: // fall through
3623     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3624     case Bytecodes::_fast_bputfield: // fall through
3625     case Bytecodes::_fast_zputfield: // fall through
3626     case Bytecodes::_fast_sputfield: // fall through
3627     case Bytecodes::_fast_cputfield: // fall through
3628     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3629     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3630     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3631     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3632     default: break;
3633     }
3634     __ bind(L2);
3635   }
3636 }
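// Why pushing the tos value doubles as the jvalue (sketch): jvalue in jni.h is
// a union of every primitive type plus jobject, so the slot(s) just pushed on
// the expression stack already have a compatible shape; `__ mov(scratch, rsp)`
// above therefore hands the VM a usable jvalue*.
//
//   typedef union jvalue {
//     jboolean z; jbyte b; jchar c; jshort s;
//     jint i; jlong j; jfloat f; jdouble d; jobject l;
//   } jvalue;   // as declared in jni.h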
3637 
3638 void TemplateTable::fast_storefield(TosState state) {
3639   transition(state, vtos);
3640 
3641   ByteSize base = ConstantPoolCache::base_offset();
3642 
3643   jvmti_post_fast_field_mod();
3644 
3645   // access constant pool cache
3646   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3647 
3648   // Test for volatile with rdx; note that rdx is the tos register for lputfield.
3649   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3650                        in_bytes(base +
3651                                 ConstantPoolCacheEntry::flags_offset())));
3652 
3653   // replace index with field offset from cache entry
3654   __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3655                          in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3656 
3657   // [jk] not needed currently
3658   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3659   //                                              Assembler::StoreStore));
3660 
3661   Label notVolatile, Done;
3662   if (bytecode() == Bytecodes::_fast_qputfield) {
3663     __ movl(rscratch2, rdx);
3664   }
3665 
3666   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3667   __ andl(rdx, 0x1);
3668 
3669   // Get object from stack
3670   pop_and_check_object(rcx);
3671 
3672   // field address
3673   const Address field(rcx, rbx, Address::times_1);
3674 
3675   // Check for volatile store
3676   __ testl(rdx, rdx);
3677   __ jcc(Assembler::zero, notVolatile);
3678 
3679   fast_storefield_helper(field, rax);
3680   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3681                                                Assembler::StoreStore));
3682   __ jmp(Done);
3683   __ bind(notVolatile);
3684 
3685   fast_storefield_helper(field, rax);
3686 
3687   __ bind(Done);
3688 }
3689 
3690 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3691 
3692   // access field
3693   switch (bytecode()) {
3694   case Bytecodes::_fast_qputfield:
3695     {
3696       Label isFlattened, done;
3697       __ null_check(rax);
3698       __ test_field_is_flattened(rscratch2, rscratch1, isFlattened);
3699       // Not flattened case
3700       do_oop_store(_masm, field, rax);
3701       __ jmp(done);
3702       __ bind(isFlattened);
3703       call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3704           rax, rbx, rcx);
3705       __ bind(done);
3706     }
3707     break;
3708   case Bytecodes::_fast_aputfield:
3709     {
3710       do_oop_store(_masm, field, rax);
3711     }
3712     break;
3713   case Bytecodes::_fast_lputfield:
3714 #ifdef _LP64
3715     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3716 #else
3717   __ stop("should not be rewritten");
3718 #endif
3719     break;
3720   case Bytecodes::_fast_iputfield:
3721     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3722     break;
3723   case Bytecodes::_fast_zputfield:
3724     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3725     break;
3726   case Bytecodes::_fast_bputfield:
3727     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3728     break;
3729   case Bytecodes::_fast_sputfield:
3730     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3731     break;


3761     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3762     LP64_ONLY(__ mov(c_rarg1, rax));
3763     // c_rarg1: object pointer copied above
3764     // c_rarg2: cache entry pointer
3765     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3766     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3767     __ pop_ptr(rax); // restore object pointer
3768     __ bind(L1);
3769   }
3770 
3771   // access constant pool cache
3772   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3773   // replace index with field offset from cache entry
3774   // [jk] not needed currently
3775   // __ movl(rdx, Address(rcx, rbx, Address::times_8,
3776   //                      in_bytes(ConstantPoolCache::base_offset() +
3777   //                               ConstantPoolCacheEntry::flags_offset())));
3778   // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3779   // __ andl(rdx, 0x1);
3780   //
3781   __ movptr(rdx, Address(rcx, rbx, Address::times_ptr,
3782                          in_bytes(ConstantPoolCache::base_offset() +
3783                                   ConstantPoolCacheEntry::f2_offset())));
3784 
3785   // rax: object
3786   __ verify_oop(rax);
3787   __ null_check(rax);
3788   Address field(rax, rdx, Address::times_1);
3789 
3790   // access field
3791   switch (bytecode()) {
3792   case Bytecodes::_fast_qgetfield:
3793     {
3794       Label isFlattened, nonnull, Done;
3795       __ movptr(rscratch1, Address(rcx, rbx, Address::times_ptr,
3796                                    in_bytes(ConstantPoolCache::base_offset() +
3797                                             ConstantPoolCacheEntry::flags_offset())));
3798       __ test_field_is_flattened(rscratch1, rscratch2, isFlattened);
3799         // Non-flattened field case
3800         __ movptr(rscratch1, rax);
3801         __ load_heap_oop(rax, field);
3802         __ testptr(rax, rax);
3803         __ jcc(Assembler::notZero, nonnull);
3804           __ movptr(rax, rscratch1);
3805           __ movl(rcx, Address(rcx, rbx, Address::times_ptr,
3806                              in_bytes(ConstantPoolCache::base_offset() +
3807                                       ConstantPoolCacheEntry::flags_offset())));
3808           __ andl(rcx, ConstantPoolCacheEntry::field_index_mask);
3809           __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field),
3810                      rax, rcx);
3811         __ bind(nonnull);
3812         __ verify_oop(rax);
3813         __ jmp(Done);
3814       __ bind(isFlattened);
3815         __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3816                            in_bytes(ConstantPoolCache::base_offset() +
3817                                     ConstantPoolCacheEntry::flags_offset())));
3818         __ andl(rdx, ConstantPoolCacheEntry::field_index_mask);
3819         __ movptr(rcx, Address(rcx, rbx, Address::times_ptr,
3820                                      in_bytes(ConstantPoolCache::base_offset() +
3821                                               ConstantPoolCacheEntry::f1_offset())));
3822         call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field),
3823                 rax, rdx, rcx);
3824         __ verify_oop(rax);
3825       __ bind(Done);
3826     }
3827     break;
3828   case Bytecodes::_fast_agetfield:
3829     do_oop_load(_masm, field, rax);
3830     __ verify_oop(rax);
3831     break;
3832   case Bytecodes::_fast_lgetfield:
3833 #ifdef _LP64
3834     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3835 #else
3836   __ stop("should not be rewritten");
3837 #endif
3838     break;
3839   case Bytecodes::_fast_igetfield:
3840     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3841     break;
3842   case Bytecodes::_fast_bgetfield:
3843     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3844     break;
3845   case Bytecodes::_fast_sgetfield:
3846     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3847     break;


4430     __ jmp(done);
4431   }
4432 
4433   // slow case
4434   __ bind(slow_case);
4435   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4436   __ bind(slow_case_no_pop);
4437 
4438   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4439   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4440 
4441   __ get_constant_pool(rarg1);
4442   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4443   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4444   __ verify_oop(rax);
4445 
4446   // continue
4447   __ bind(done);
4448 }
4449 
4450 void TemplateTable::defaultvalue() {
4451   transition(vtos, atos);
4452 
4453   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4454   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4455 
4456   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4457   __ get_constant_pool(rarg1);
4458 
4459   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::defaultvalue),
4460       rarg1, rarg2);
4461   __ verify_oop(rax);
4462 }
4463 
4464 void TemplateTable::newarray() {
4465   transition(itos, atos);
4466   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4467   __ load_unsigned_byte(rarg1, at_bcp(1));
4468   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4469           rarg1, rax);
4470 }
4471 
4472 void TemplateTable::anewarray() {
4473   transition(itos, atos);
4474 
4475   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4476   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4477 
4478   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4479   __ get_constant_pool(rarg1);
4480   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4481           rarg1, rarg2, rax);
4482 }
4483 
4484 void TemplateTable::arraylength() {
4485   transition(atos, itos);
4486   __ null_check(rax, arrayOopDesc::length_offset_in_bytes());
4487   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4488 }
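// Note: null_check with an offset argument may emit no code here; when the
// offset is small enough to fault in the protected page, the movl above simply
// traps on a NULL array and the signal handler maps the trap to the implicit
// NullPointerException (see MacroAssembler::null_check for the exact policy).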
4489 
4490 void TemplateTable::checkcast() {
4491   transition(atos, atos);
4492   Label done, is_null, ok_is_subtype, quicked, resolved;
4493   __ testptr(rax, rax); // object is in rax
4494   __ jcc(Assembler::zero, is_null);
4495 
4496   // Get cpool & tags index
4497   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4498   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4499   // See if bytecode has already been quicked
4500   __ movzbl(rdx, Address(rdx, rbx,
4501       Address::times_1,
4502       Array<u1>::base_offset_in_bytes()));
4503   __ andl (rdx, ~JVM_CONSTANT_QDescBit);
4504   __ cmpl(rdx, JVM_CONSTANT_Class);
4505   __ jcc(Assembler::equal, quicked);
4506   __ push(atos); // save receiver for result, and for GC
4507   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4508 
4509   // vm_result_2 has metadata result
4510 #ifndef _LP64
4511   // borrow rdi from locals
4512   __ get_thread(rdi);
4513   __ get_vm_result_2(rax, rdi);
4514   __ restore_locals();
4515 #else
4516   __ get_vm_result_2(rax, r15_thread);
4517 #endif
4518 
4519   __ pop_ptr(rdx); // restore receiver
4520   __ jmpb(resolved);
4521 
4522   // Get superklass in rax and subklass in rbx
4523   __ bind(quicked);
4524   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4525   __ load_resolved_klass_at_index(rax, rcx, rbx);
4526 
4527   __ bind(resolved);
4528   __ load_klass(rbx, rdx);
4529 
4530   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4531   // Superklass in rax.  Subklass in rbx.
4532   __ gen_subtype_check(rbx, ok_is_subtype);
4533 
4534   // Come here on failure
4535   __ push_ptr(rdx);
4536   // object is at TOS
4537   __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4538 
4539   // Come here on success
4540   __ bind(ok_is_subtype);
4541   __ mov(rax, rdx); // Restore object from rdx
4542   __ jmp(done);
4543 
4544   __ bind(is_null);
4545 
4546   // Collect counts on whether this check-cast sees NULLs a lot or not.
4547   if (ProfileInterpreter) {


4548     __ profile_null_seen(rcx);


4549   }
4550 
4551   if (EnableValhalla) {
4552     // Get cpool & tags index
4553     __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4554     __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4555     // See if CP entry is a Q-descriptor
4556     __ movzbl(rcx, Address(rdx, rbx,
4557         Address::times_1,
4558         Array<u1>::base_offset_in_bytes()));
4559     __ andl (rcx, JVM_CONSTANT_QDescBit);
4560     __ cmpl(rcx, JVM_CONSTANT_QDescBit);
4561     __ jcc(Assembler::notEqual, done);
4562     __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
4563   }
4564 
4565   __ bind(done);
4566 }
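// Null-path summary (assuming Q-descriptors mark inline/value class entries):
// `checkcast LFoo;` lets NULL through unchanged, while `checkcast QFoo;`
// throws NullPointerException, since a value type slot can never hold NULL.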
4567 
4568 void TemplateTable::instanceof() {
4569   transition(atos, itos);
4570   Label done, is_null, ok_is_subtype, quicked, resolved;
4571   __ testptr(rax, rax);
4572   __ jcc(Assembler::zero, is_null);
4573 
4574   // Get cpool & tags index
4575   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4576   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4577   // See if bytecode has already been quicked
4578   __ movzbl(rdx, Address(rdx, rbx,
4579         Address::times_1,
4580         Array<u1>::base_offset_in_bytes()));
4581   __ andl (rdx, ~JVM_CONSTANT_QDescBit);
4582   __ cmpl(rdx, JVM_CONSTANT_Class);
4583   __ jcc(Assembler::equal, quicked);
4584 
4585   __ push(atos); // save receiver for result, and for GC
4586   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4587   // vm_result_2 has metadata result
4588 
4589 #ifndef _LP64
4590   // borrow rdi from locals
4591   __ get_thread(rdi);
4592   __ get_vm_result_2(rax, rdi);
4593   __ restore_locals();
4594 #else
4595   __ get_vm_result_2(rax, r15_thread);
4596 #endif
4597 
4598   __ pop_ptr(rdx); // restore receiver
4599   __ verify_oop(rdx);
4600   __ load_klass(rdx, rdx);
4601   __ jmpb(resolved);
4602 


4614   // Come here on failure
4615   __ xorl(rax, rax);
4616   __ jmpb(done);
4617   // Come here on success
4618   __ bind(ok_is_subtype);
4619   __ movl(rax, 1);
4620 
4621   // Collect counts on whether this test sees NULLs a lot or not.
4622   if (ProfileInterpreter) {
4623     __ jmp(done);
4624     __ bind(is_null);
4625     __ profile_null_seen(rcx);
4626   } else {
4627     __ bind(is_null);   // same as 'done'
4628   }
4629   __ bind(done);
4630   // rax = 0: obj == NULL or  obj is not an instanceof the specified klass
4631   // rax = 1: obj != NULL and obj is     an instanceof the specified klass
4632 }
4633 

4634 //----------------------------------------------------------------------------------------------------
4635 // Breakpoints
4636 void TemplateTable::_breakpoint() {
4637   // Note: We get here even if we are single stepping.
4638   // jbug insists on setting breakpoints at every bytecode
4639   // even if we are in single step mode.
4640 
4641   transition(vtos, vtos);
4642 
4643   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4644 
4645   // get the unpatched byte code
4646   __ get_method(rarg);
4647   __ call_VM(noreg,
4648              CAST_FROM_FN_PTR(address,
4649                               InterpreterRuntime::get_original_bytecode_at),
4650              rarg, rbcp);
4651   __ mov(rbx, rax);  // save the original bytecode; the call_VM below clobbers rax
4652 
4653   // post the breakpoint event


4677 //
4678 // Stack layout:
4679 //
4680 // [expressions  ] <--- rsp               = expression stack top
4681 // ..
4682 // [expressions  ]
4683 // [monitor entry] <--- monitor block top = expression stack bot
4684 // ..
4685 // [monitor entry]
4686 // [frame data   ] <--- monitor block bot
4687 // ...
4688 // [saved rbp    ] <--- rbp
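// Each "[monitor entry]" row is one BasicObjectLock; roughly (simplified
// shapes, see runtime/basicLock.hpp for the real declarations):
//
//   class BasicLock       { markOop   _displaced_header; };
//   class BasicObjectLock { BasicLock _lock; oop _obj; };  // == entry_size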
4689 void TemplateTable::monitorenter() {
4690   transition(atos, vtos);
4691 
4692   // check for NULL object
4693   __ null_check(rax);
4694 
4695   __ resolve(IS_NOT_NULL, rax);
4696 
4697   const int is_value_mask = markOopDesc::always_locked_pattern;
4698   Label has_identity;
4699   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4700   __ andptr(rbx, is_value_mask);
4701   __ cmpl(rbx, is_value_mask);
4702   __ jcc(Assembler::notEqual, has_identity);
4703   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4704                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4705   __ should_not_reach_here();
4706   __ bind(has_identity);
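  // The test above relies on inline/value instances having the low bits of
  // their mark word permanently set to always_locked_pattern (sketch; see
  // markOop.hpp for the authoritative encoding):
  //
  //   (mark & always_locked_pattern) == always_locked_pattern  ==>  value type
  //
  // so synchronizing on such an object must raise IllegalMonitorStateException.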
4707 
4708   const Address monitor_block_top(
4709         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4710   const Address monitor_block_bot(
4711         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4712   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4713 
4714   Label allocated;
4715 
4716   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4717   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4718   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4719 
4720   // initialize entry pointer
4721   __ xorl(rmon, rmon); // points to free slot or NULL
4722 
4723   // find a free slot in the monitor block (result in rmon)
4724   {
4725     Label entry, loop, exit;
4726     __ movptr(rtop, monitor_block_top); // points to current entry,
4727                                         // starting with top-most entry


4786   // store object
4787   __ movptr(Address(rmon, BasicObjectLock::obj_offset_in_bytes()), rax);
4788   __ lock_object(rmon);
4789 
4790   // check to make sure this monitor doesn't cause stack overflow after locking
4791   __ save_bcp();  // in case of exception
4792   __ generate_stack_overflow_check(0);
4793 
4794   // The bcp has already been incremented. Just need to dispatch to
4795   // next instruction.
4796   __ dispatch_next(vtos);
4797 }
4798 
4799 void TemplateTable::monitorexit() {
4800   transition(atos, vtos);
4801 
4802   // check for NULL object
4803   __ null_check(rax);
4804 
4805   __ resolve(IS_NOT_NULL, rax);
4806 
4807   const int is_value_mask = markOopDesc::always_locked_pattern;
4808   Label has_identity;
4809   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4810   __ andptr(rbx, is_value_mask);
4811   __ cmpl(rbx, is_value_mask);
4812   __ jcc(Assembler::notEqual, has_identity);
4813   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4814                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4815   __ should_not_reach_here();
4816   __ bind(has_identity);
4817 
4818   const Address monitor_block_top(
4819         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4820   const Address monitor_block_bot(
4821         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4822   const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
4823 
4824   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4825   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4826 
4827   Label found;
4828 
4829   // find matching slot
4830   {
4831     Label entry, loop;
4832     __ movptr(rtop, monitor_block_top); // points to current entry,
4833                                         // starting with top-most entry
4834     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4835                                         // of monitor block
4836     __ jmpb(entry);

