src/hotspot/cpu/x86/templateTable_x86.cpp

  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodCounters.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/oop.inline.hpp"

  40 #include "oops/resolvedFieldEntry.hpp"
  41 #include "oops/resolvedIndyEntry.hpp"
  42 #include "oops/resolvedMethodEntry.hpp"
  43 #include "prims/jvmtiExport.hpp"
  44 #include "prims/methodHandles.hpp"
  45 #include "runtime/frame.inline.hpp"
  46 #include "runtime/safepointMechanism.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubRoutines.hpp"
  49 #include "runtime/synchronizer.hpp"
  50 #include "utilities/macros.hpp"
  51 
  52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  53 
  54 // Global Register Names
  55 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  56 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  57 
  58 // Address Computation: local variables
  59 static inline Address iaddress(int n) {

 166 static void do_oop_load(InterpreterMacroAssembler* _masm,
 167                         Address src,
 168                         Register dst,
 169                         DecoratorSet decorators = 0) {
 170   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 171 }
 172 
 173 Address TemplateTable::at_bcp(int offset) {
 174   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 175   return Address(rbcp, offset);
 176 }
 177 
 178 
 179 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 180                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 181                                    int byte_no) {
 182   if (!RewriteBytecodes)  return;
 183   Label L_patch_done;
 184 
 185   switch (bc) {

 186   case Bytecodes::_fast_aputfield:
 187   case Bytecodes::_fast_bputfield:
 188   case Bytecodes::_fast_zputfield:
 189   case Bytecodes::_fast_cputfield:
 190   case Bytecodes::_fast_dputfield:
 191   case Bytecodes::_fast_fputfield:
 192   case Bytecodes::_fast_iputfield:
 193   case Bytecodes::_fast_lputfield:
 194   case Bytecodes::_fast_sputfield:
 195     {
 196       // We skip bytecode quickening for putfield instructions when
 197       // the put_code written to the constant pool cache is zero.
 198       // This is required so that every execution of this instruction
 199       // calls out to InterpreterRuntime::resolve_get_put to do
 200       // additional, required work.
 201       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 202       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 203       __ load_field_entry(temp_reg, bc_reg);
 204       if (byte_no == f1_byte) {
 205         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 813                     Address(rdx, rax,
 814                             Address::times_4,
 815                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 816                     noreg, noreg);
 817 }
 818 
 819 void TemplateTable::daload() {
 820   transition(itos, dtos);
 821   // rax: index
 822   // rdx: array
 823   index_check(rdx, rax); // kills rbx
 824   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 825                     Address(rdx, rax,
 826                             Address::times_8,
 827                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 828                     noreg, noreg);
 829 }
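
These array loads fold the entire element-address computation into a single scaled-index x86 operand. A minimal C++ sketch of the arithmetic that Address(rdx, rax, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)) encodes (illustrative only; the helper name is not HotSpot's):

    #include <cstddef>
    // element address = array base + array header + index * element size
    // (rdx = array oop, rax = index, times_8 because T_DOUBLE elements are 8 bytes)
    static inline char* element_addr(char* array_base, size_t header_bytes,
                                     size_t index, size_t elem_size) {
      return array_base + header_bytes + index * elem_size;
    }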
 830 
 831 void TemplateTable::aaload() {
 832   transition(itos, atos);
 833   // rax: index
 834   // rdx: array
 835   index_check(rdx, rax); // kills rbx
 836   do_oop_load(_masm,
 837               Address(rdx, rax,
 838                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 839                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 840               rax,
 841               IS_ARRAY);
 842 }
 843 
 844 void TemplateTable::baload() {
 845   transition(itos, itos);
 846   // rax: index
 847   // rdx: array
 848   index_check(rdx, rax); // kills rbx
 849   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 850                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 851                     noreg, noreg);
 852 }
 853 
 854 void TemplateTable::caload() {
 855   transition(itos, itos);
 856   // rax: index
 857   // rdx: array
 858   index_check(rdx, rax); // kills rbx
 859   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 860                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 861                     noreg, noreg);

1107   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1108                      Address(rdx, rbx, Address::times_4,
1109                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1110                      noreg /* ftos */, noreg, noreg, noreg);
1111 }
1112 
1113 void TemplateTable::dastore() {
1114   transition(dtos, vtos);
1115   __ pop_i(rbx);
1116   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1117   // rbx:  index
1118   // rdx:  array
1119   index_check(rdx, rbx); // prefer index in rbx
1120   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1121                      Address(rdx, rbx, Address::times_8,
1122                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1123                      noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125 
1126 void TemplateTable::aastore() {
1127   Label is_null, ok_is_subtype, done;
1128   transition(vtos, vtos);
1129   // stack: ..., array, index, value
1130   __ movptr(rax, at_tos());    // value
1131   __ movl(rcx, at_tos_p1()); // index
1132   __ movptr(rdx, at_tos_p2()); // array
1133 
1134   Address element_address(rdx, rcx,
1135                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1136                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1137 
1138   index_check_without_pop(rdx, rcx);     // kills rbx
1139   __ testptr(rax, rax);
1140   __ jcc(Assembler::zero, is_null);
1141 
1142   // Move subklass into rbx
1143   __ load_klass(rbx, rax, rscratch1);
1144   // Move superklass into rax
1145   __ load_klass(rax, rdx, rscratch1);
1146   __ movptr(rax, Address(rax,
1147                          ObjArrayKlass::element_klass_offset()));
1148 
1149   // Generate subtype check.  Blows rcx, rdi
1150   // Superklass in rax.  Subklass in rbx.
1151   __ gen_subtype_check(rbx, ok_is_subtype);
1152 
1153   // Come here on failure
1154   // object is at TOS
1155   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1156 
1157   // Come here on success
1158   __ bind(ok_is_subtype);
1159 
1160   // Get the value we will store
1161   __ movptr(rax, at_tos());
1162   __ movl(rcx, at_tos_p1()); // index
1163   // Now store using the appropriate barrier
1164   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1165   __ jmp(done);
1166 
1167   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1168   __ bind(is_null);
1169   __ profile_null_seen(rbx);
1170 
1171   // Store a null
1172   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1173 
1174   // Pop stack arguments
1175   __ bind(done);
1176   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1177 }
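
For reference, a rough C++ model of the checks the aastore template above generates; the helper names are hypothetical, not HotSpot API:

    // Null stores skip the subtype check entirely (see is_null above).
    void aastore_model(objArrayOop array, int index, oop value) {
      if (value != nullptr) {
        Klass* element = klass_of(array)->element_klass();   // array's element superklass
        if (!klass_of(value)->is_subtype_of(element))        // gen_subtype_check
          throw_ArrayStoreException();                       // object is at TOS
      }
      oop_store_with_barrier(element_addr(array, index), value);  // do_oop_store
    }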
1178 
1179 void TemplateTable::bastore() {
1180   transition(itos, vtos);
1181   __ pop_i(rbx);
1182   // rax: value
1183   // rbx: index
1184   // rdx: array
1185   index_check(rdx, rbx); // prefer index in rbx
1186   // Need to check whether array is boolean or byte
1187   // since both types share the bastore bytecode.
1188   __ load_klass(rcx, rdx, rscratch1);
1189   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1190   int diffbit = Klass::layout_helper_boolean_diffbit();
1191   __ testl(rcx, diffbit);
1192   Label L_skip;
1193   __ jccb(Assembler::zero, L_skip);
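
The diffbit test above separates boolean from byte arrays, which share the bastore bytecode. A sketch of what the masking guarded by L_skip amounts to (illustrative; the variable names are hypothetical):

    // T_BOOLEAN arrays have the layout-helper diffbit set: narrow the value
    // to its lowest bit before the store. T_BYTE arrays store it unchanged.
    int to_store = is_boolean_array ? (value & 1) : value;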

2322   __ jcc(j_not(cc), not_taken);
2323   branch(false, false);
2324   __ bind(not_taken);
2325   __ profile_not_taken_branch(rax);
2326 }
2327 
2328 void TemplateTable::if_nullcmp(Condition cc) {
2329   transition(atos, vtos);
2330   // assume branch is more often taken than not (loops use backward branches)
2331   Label not_taken;
2332   __ testptr(rax, rax);
2333   __ jcc(j_not(cc), not_taken);
2334   branch(false, false);
2335   __ bind(not_taken);
2336   __ profile_not_taken_branch(rax);
2337 }
2338 
2339 void TemplateTable::if_acmp(Condition cc) {
2340   transition(atos, vtos);
2341   // assume branch is more often taken than not (loops use backward branches)
2342   Label not_taken;
2343   __ pop_ptr(rdx);
2344   __ cmpoop(rdx, rax);
2345   __ jcc(j_not(cc), not_taken);
2346   branch(false, false);
2347   __ bind(not_taken);
2348   __ profile_not_taken_branch(rax);
2349 }
2350 
2351 void TemplateTable::ret() {
2352   transition(vtos, vtos);
2353   locals_index(rbx);
2354   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2355   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2356   __ profile_ret(rbx, rcx);
2357   __ get_method(rax);
2358   __ movptr(rbcp, Address(rax, Method::const_offset()));
2359   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2360                       ConstMethod::codes_offset()));
2361   __ dispatch_next(vtos, 0, true);
2362 }
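
A sketch of the bcp reconstruction ret performs above (assuming the usual Method/ConstMethod layout; names abbreviated): the return bci saved in the local variable is rebased onto the method's bytecode array.

    // rbcp = method->constMethod() + ConstMethod::codes_offset() + return_bci
    address new_bcp = (address)const_method
                    + in_bytes(ConstMethod::codes_offset())
                    + return_bci;   // return_bci was loaded from the local (rbx)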
2363 
2364 void TemplateTable::wide_ret() {
2365   transition(vtos, vtos);
2366   locals_index_wide(rbx);
2367   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2368   __ profile_ret(rbx, rcx);

2597     const Register thread = rdi;
2598     __ get_thread(thread);
2599     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2600 #endif
2601     __ jcc(Assembler::zero, no_safepoint);
2602     __ push(state);
2603     __ push_cont_fastpath();
2604     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2605                                        InterpreterRuntime::at_safepoint));
2606     __ pop_cont_fastpath();
2607     __ pop(state);
2608     __ bind(no_safepoint);
2609   }
2610 
2611   // Narrow result if state is itos but result type is smaller.
2612   // Need to narrow in the return bytecode rather than in generate_return_entry
2613   // since compiled code callers expect the result to already be narrowed.
2614   if (state == itos) {
2615     __ narrow(rax);
2616   }
2617   __ remove_activation(state, rbcp);
2618 
2619   __ jmp(rbcp);
2620 }
2621 
2622 // ----------------------------------------------------------------------------
2623 // Volatile variables demand their effects be made known to all CPUs
2624 // in order.  Store buffers on most chips allow reads & writes to
2625 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2626 // without some kind of memory barrier (i.e., it's not sufficient that
2627 // the interpreter does not reorder volatile references, the hardware
2628 // also must not reorder them).
2629 //
2630 // According to the new Java Memory Model (JMM):
2631 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2632 //     writes act as acquire & release, so:
2633 // (2) A read cannot let unrelated NON-volatile memory refs that
2634 //     happen after the read float up to before the read.  It's OK for
2635 //     non-volatile memory refs that happen before the volatile read to
2636 //     float down below it.
2637 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

2963     }
2964     // rax:    object pointer or null
2965     // cache: cache entry pointer
2966     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2967               rax, cache);
2968 
2969     __ load_field_entry(cache, index);
2970     __ bind(L1);
2971   }
2972 }
2973 
2974 void TemplateTable::pop_and_check_object(Register r) {
2975   __ pop_ptr(r);
2976   __ null_check(r);  // for field access must check obj.
2977   __ verify_oop(r);
2978 }
2979 
2980 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2981   transition(vtos, vtos);
2982 
2983   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2984   const Register cache = rcx;
2985   const Register index = rdx;
2986   const Register off   = rbx;
2987   const Register tos_state   = rax;
2988   const Register flags = rdx;
2989   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2990 
2991   resolve_cache_and_index_for_field(byte_no, cache, index);
2992   jvmti_post_field_access(cache, index, is_static, false);
2993   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2994 
2995   if (!is_static) pop_and_check_object(obj);
2996 
2997   const Address field(obj, off, Address::times_1, 0*wordSize);
2998 
2999   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
3000 
3001   // btos is encoded as zero (asserted below), so a plain register test suffices
3002   assert(btos == 0, "change code, btos != 0");
3003   __ testl(tos_state, tos_state);
3004   __ jcc(Assembler::notZero, notByte);
3005 
3006   // btos
3007   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3008   __ push(btos);
3009   // Rewrite bytecode to be faster
3010   if (!is_static && rc == may_rewrite) {
3011     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3012   }
3013   __ jmp(Done);
3014 
3015   __ bind(notByte);
3016   __ cmpl(tos_state, ztos);
3017   __ jcc(Assembler::notEqual, notBool);
3018 
3019   // ztos (same code as btos)
3020   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3021   __ push(ztos);
3022   // Rewrite bytecode to be faster
3023   if (!is_static && rc == may_rewrite) {
3024     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3025     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3026   }
3027   __ jmp(Done);
3028 
3029   __ bind(notBool);
3030   __ cmpl(tos_state, atos);
3031   __ jcc(Assembler::notEqual, notObj);
3032   // atos
3033   do_oop_load(_masm, field, rax);
3034   __ push(atos);
3035   if (!is_static && rc == may_rewrite) {
3036     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3037   }
3038   __ jmp(Done);
3039 
3040   __ bind(notObj);
3041   __ cmpl(tos_state, itos);
3042   __ jcc(Assembler::notEqual, notInt);
3043   // itos
3044   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3045   __ push(itos);
3046   // Rewrite bytecode to be faster
3047   if (!is_static && rc == may_rewrite) {
3048     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3049   }
3050   __ jmp(Done);
3051 
3052   __ bind(notInt);
3053   __ cmpl(tos_state, ctos);
3054   __ jcc(Assembler::notEqual, notChar);
3055   // ctos
3056   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3057   __ push(ctos);
3058   // Rewrite bytecode to be faster
3059   if (!is_static && rc == may_rewrite) {
3060     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3120 #endif
3121 
3122   __ bind(Done);
3123   // [jk] not needed currently
3124   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3125   //                                              Assembler::LoadStore));
3126 }
3127 
3128 void TemplateTable::getfield(int byte_no) {
3129   getfield_or_static(byte_no, false);
3130 }
3131 
3132 void TemplateTable::nofast_getfield(int byte_no) {
3133   getfield_or_static(byte_no, false, may_not_rewrite);
3134 }
3135 
3136 void TemplateTable::getstatic(int byte_no) {
3137   getfield_or_static(byte_no, true);
3138 }
3139 
3140 
3141 // The registers cache and index are expected to be set before the call.
3142 // The function may destroy various registers, just not the cache and index registers.
3143 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3144   // Cache is rcx and index is rdx
3145   const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3146   const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx);   // Object pointer
3147   const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3148 
3149   if (JvmtiExport::can_post_field_modification()) {
3150     // Check to see if a field modification watch has been set before
3151     // we take the time to call into the VM.
3152     Label L1;
3153     assert_different_registers(cache, obj, rax);
3154     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3155     __ testl(rax, rax);
3156     __ jcc(Assembler::zero, L1);
3157 
3158     __ mov(entry, cache);
3159 
3160     if (is_static) {

3202     // cache: field entry pointer
3203     // value: jvalue object on the stack
3204     __ call_VM(noreg,
3205               CAST_FROM_FN_PTR(address,
3206                               InterpreterRuntime::post_field_modification),
3207               obj, entry, value);
3208     // Reload field entry
3209     __ load_field_entry(cache, index);
3210     __ bind(L1);
3211   }
3212 }
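
The shape of the JVMTI guard generated above, in sketch form (hypothetical helpers): a global watch counter is tested first so the expensive VM transition is only taken when a field-modification watch is actually set.

    if (*(int*)JvmtiExport::get_field_modification_count_addr() != 0) {
      post_field_modification(obj, entry, jvalue_on_stack);  // call_VM
      reload_field_entry();   // the VM call may have clobbered cache/index
    }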
3213 
3214 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3215   transition(vtos, vtos);
3216 
3217   const Register obj = rcx;
3218   const Register cache = rcx;
3219   const Register index = rdx;
3220   const Register tos_state   = rdx;
3221   const Register off   = rbx;
3222   const Register flags = rax;
3223 
3224   resolve_cache_and_index_for_field(byte_no, cache, index);
3225   jvmti_post_field_mod(cache, index, is_static);
3226   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3227 
3228   // [jk] not needed currently
3229   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3230   //                                              Assembler::StoreStore));
3231 
3232   Label notVolatile, Done;
3233 
3234   // Check for volatile store
3235   __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
3236   __ testl(flags, flags);
3237   __ jcc(Assembler::zero, notVolatile);
3238 
3239   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3240   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3241                                                Assembler::StoreStore));
3242   __ jmp(Done);
3243   __ bind(notVolatile);
3244 
3245   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3246 
3247   __ bind(Done);
3248 }
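
A sketch of the control flow above (illustrative): both paths emit the same store sequence; only the volatile path is followed by the StoreLoad|StoreStore barrier demanded by the JMM rules described earlier.

    if (field_is_volatile) {
      do_store();                       // putfield_or_static_helper
      storeload_storestore_fence();     // volatile_barrier(...)
    } else {
      do_store();                       // same helper, no trailing fence
    }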
3249 
3250 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3251                                               Register obj, Register off, Register tos_state) {
3252 
3253   // field addresses
3254   const Address field(obj, off, Address::times_1, 0*wordSize);
3255   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3256 
3257   Label notByte, notBool, notInt, notShort, notChar,
3258         notLong, notFloat, notObj;
3259   Label Done;
3260 
3261   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3262 
3263   // Test TOS state
3264   __ testl(tos_state, tos_state);
3265   __ jcc(Assembler::notZero, notByte);
3266 
3267   // btos
3268   {
3269     __ pop(btos);
3270     if (!is_static) pop_and_check_object(obj);
3271     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3272     if (!is_static && rc == may_rewrite) {
3273       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3274     }
3275     __ jmp(Done);
3276   }
3277 
3278   __ bind(notByte);
3279   __ cmpl(tos_state, ztos);
3280   __ jcc(Assembler::notEqual, notBool);
3281 
3282   // ztos
3283   {
3284     __ pop(ztos);
3285     if (!is_static) pop_and_check_object(obj);
3286     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3287     if (!is_static && rc == may_rewrite) {
3288       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3289     }
3290     __ jmp(Done);
3291   }
3292 
3293   __ bind(notBool);
3294   __ cmpl(tos_state, atos);
3295   __ jcc(Assembler::notEqual, notObj);
3296 
3297   // atos
3298   {
3299     __ pop(atos);
3300     if (!is_static) pop_and_check_object(obj);
3301     // Store into the field
3302     do_oop_store(_masm, field, rax);
3303     if (!is_static && rc == may_rewrite) {
3304       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3305     }
3306     __ jmp(Done);
3307   }
3308 
3309   __ bind(notObj);
3310   __ cmpl(tos_state, itos);
3311   __ jcc(Assembler::notEqual, notInt);
3312 
3313   // itos
3314   {
3315     __ pop(itos);
3316     if (!is_static) pop_and_check_object(obj);
3317     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3318     if (!is_static && rc == may_rewrite) {
3319       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3320     }
3321     __ jmp(Done);
3322   }
3323 
3324   __ bind(notInt);
3325   __ cmpl(tos_state, ctos);
3326   __ jcc(Assembler::notEqual, notChar);

3425 }
3426 
3427 void TemplateTable::jvmti_post_fast_field_mod() {
3428 
3429   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3430 
3431   if (JvmtiExport::can_post_field_modification()) {
3432     // Check to see if a field modification watch has been set before
3433     // we take the time to call into the VM.
3434     Label L2;
3435     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3436     __ testl(scratch, scratch);
3437     __ jcc(Assembler::zero, L2);
3438     __ pop_ptr(rbx);                  // copy the object pointer from tos
3439     __ verify_oop(rbx);
3440     __ push_ptr(rbx);                 // put the object pointer back on tos
3441     // Save tos values before call_VM() clobbers them. Since we have
3442     // to do it for every data type, we use the saved values as the
3443     // jvalue object.
3444     switch (bytecode()) {          // load values into the jvalue object
3445     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3446     case Bytecodes::_fast_bputfield: // fall through
3447     case Bytecodes::_fast_zputfield: // fall through
3448     case Bytecodes::_fast_sputfield: // fall through
3449     case Bytecodes::_fast_cputfield: // fall through
3450     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3451     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3452     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3453     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3454 
3455     default:
3456       ShouldNotReachHere();
3457     }
3458     __ mov(scratch, rsp);             // points to jvalue on the stack
3459     // access constant pool cache entry
3460     LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3461     NOT_LP64(__ load_field_entry(rax, rdx));
3462     __ verify_oop(rbx);
3463     // rbx: object pointer copied above
3464     // c_rarg2: cache entry pointer
3465     // c_rarg3: jvalue object on the stack
3466     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3467     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3468 
3469     switch (bytecode()) {             // restore tos values
3470     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3471     case Bytecodes::_fast_bputfield: // fall through
3472     case Bytecodes::_fast_zputfield: // fall through
3473     case Bytecodes::_fast_sputfield: // fall through
3474     case Bytecodes::_fast_cputfield: // fall through
3475     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3476     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3477     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3478     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3479     default: break;
3480     }
3481     __ bind(L2);
3482   }
3483 }
3484 
3485 void TemplateTable::fast_storefield(TosState state) {
3486   transition(state, vtos);
3487 
3488   Register cache = rcx;
3489 
3490   Label notVolatile, Done;
3491 
3492   jvmti_post_fast_field_mod();
3493 
3494   __ push(rax);
3495   __ load_field_entry(rcx, rax);
3496   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3497   // RBX: field offset, RAX: TOS, RDX: flags
3498   __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3499   __ pop(rax);
3500 
3501   // Get object from stack
3502   pop_and_check_object(rcx);
3503 
3504   // field address
3505   const Address field(rcx, rbx, Address::times_1);
3506 
3507   // Check for volatile store
3508   __ testl(rdx, rdx);
3509   __ jcc(Assembler::zero, notVolatile);
3510 
3511   fast_storefield_helper(field, rax);
3512   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3513                                                Assembler::StoreStore));
3514   __ jmp(Done);
3515   __ bind(notVolatile);
3516 
3517   fast_storefield_helper(field, rax);
3518 
3519   __ bind(Done);
3520 }
3521 
3522 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3523 
3524   // access field
3525   switch (bytecode()) {
3526   case Bytecodes::_fast_aputfield:
3527     do_oop_store(_masm, field, rax);
3528     break;
3529   case Bytecodes::_fast_lputfield:
3530 #ifdef _LP64
3531     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3532 #else
3533   __ stop("should not be rewritten");
3534 #endif
3535     break;
3536   case Bytecodes::_fast_iputfield:
3537     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3538     break;
3539   case Bytecodes::_fast_zputfield:
3540     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3541     break;
3542   case Bytecodes::_fast_bputfield:
3543     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3544     break;
3545   case Bytecodes::_fast_sputfield:
3546     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3547     break;

3569     Label L1;
3570     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3571     __ testl(rcx, rcx);
3572     __ jcc(Assembler::zero, L1);
3573     // access constant pool cache entry
3574     LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3575     NOT_LP64(__ load_field_entry(rcx, rdx));
3576     __ verify_oop(rax);
3577     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3578     LP64_ONLY(__ mov(c_rarg1, rax));
3579     // c_rarg1: object pointer copied above
3580     // c_rarg2: cache entry pointer
3581     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3582     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3583     __ pop_ptr(rax); // restore object pointer
3584     __ bind(L1);
3585   }
3586 
3587   // access constant pool cache
3588   __ load_field_entry(rcx, rbx);
3589   __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3590 
3591   // rax: object
3592   __ verify_oop(rax);
3593   __ null_check(rax);
3594   Address field(rax, rbx, Address::times_1);
3595 
3596   // access field
3597   switch (bytecode()) {
3598   case Bytecodes::_fast_agetfield:
3599     do_oop_load(_masm, field, rax);
3600     __ verify_oop(rax);
3601     break;
3602   case Bytecodes::_fast_lgetfield:
3603 #ifdef _LP64
3604     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3605 #else
3606   __ stop("should not be rewritten");
3607 #endif
3608     break;
3609   case Bytecodes::_fast_igetfield:
3610     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3611     break;
3612   case Bytecodes::_fast_bgetfield:
3613     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3614     break;
3615   case Bytecodes::_fast_sgetfield:
3616     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3617     break;

4013 
4014   // Note:  rax_callsite is already pushed
4015 
4016   // %%% should make a type profile for any invokedynamic that takes a ref argument
4017   // profile this call
4018   __ profile_call(rbcp);
4019   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4020 
4021   __ verify_oop(rax_callsite);
4022 
4023   __ jump_from_interpreted(rbx_method, rdx);
4024 }
4025 
4026 //-----------------------------------------------------------------------------
4027 // Allocation
4028 
4029 void TemplateTable::_new() {
4030   transition(vtos, atos);
4031   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4032   Label slow_case;
4033   Label slow_case_no_pop;
4034   Label done;
4035   Label initialize_header;
4036 
4037   __ get_cpool_and_tags(rcx, rax);
4038 
4039   // Make sure the class we're about to instantiate has been resolved.
4040   // This is done before loading InstanceKlass to be consistent with the order
4041 // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
4042   const int tags_offset = Array<u1>::base_offset_in_bytes();
4043   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4044   __ jcc(Assembler::notEqual, slow_case_no_pop);
4045 
4046   // get InstanceKlass
4047   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4048   __ push(rcx);  // save the klass for initializing the header
4049 
4050   // make sure klass is initialized
4051 #ifdef _LP64
4052   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4053   __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4054 #else
4055   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4056   __ jcc(Assembler::notEqual, slow_case);
4057 #endif
4058 
4059   // get instance_size in InstanceKlass (scaled to a count of bytes)
4060   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4061   // test to see if it is malformed in some way
4062   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4063   __ jcc(Assembler::notZero, slow_case);
4064 
4065   // Allocate the instance:
4066   //  If TLAB is enabled:
4067   //    Try to allocate in the TLAB.
4068   //    If fails, go to the slow path.
4069   //    Initialize the allocation.
4070   //    Exit.
4071   //
4072   //  Go to slow path.
4073 
4074   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4075 
4076   if (UseTLAB) {
4077     NOT_LP64(__ get_thread(thread);)
4078     __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4079     if (ZeroTLAB) {
4080       // the fields have been already cleared
4081       __ jmp(initialize_header);
4082     }
4083 
4084     // The object is initialized before the header.  If the object size is
4085     // zero, go directly to the header initialization.
4086     __ decrement(rdx, sizeof(oopDesc));
4087     __ jcc(Assembler::zero, initialize_header);
4088 
4089     // Initialize topmost object field, divide rdx by 8, check if odd and
4090     // test if zero.
4091     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
4092     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4093 
4094     // rdx must have been a multiple of 8
4095 #ifdef ASSERT
4096     // make sure rdx was a multiple of 8
4097     Label L;
4098     // Ignore partial flag stall after shrl() since it is debug VM
4099     __ jcc(Assembler::carryClear, L);
4100     __ stop("object size is not multiple of 2 - adjust this code");
4101     __ bind(L);
4102     // rdx must be > 0, no extra check needed here
4103 #endif
4104 
4105     // initialize remaining object fields: rdx was a multiple of 8
4106     { Label loop;
4107     __ bind(loop);
4108     __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4109     NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4110     __ decrement(rdx);
4111     __ jcc(Assembler::notZero, loop);
4112     }
4113 
4114     // initialize object header only.
4115     __ bind(initialize_header);
4116     __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4117               (intptr_t)markWord::prototype().value()); // header
4118     __ pop(rcx);   // get saved klass back in the register.
4119 #ifdef _LP64
4120     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4121     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4122 #endif
4123     __ store_klass(rax, rcx, rscratch1);  // klass
4124 
4125     if (DTraceAllocProbes) {
4126       // Trigger dtrace event for fastpath
4127       __ push(atos);
4128       __ call_VM_leaf(
4129            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4130       __ pop(atos);
4131     }
4132 
4133     __ jmp(done);
4134   }
4135 
4136   // slow case
4137   __ bind(slow_case);
4138   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4139   __ bind(slow_case_no_pop);
4140 
4141   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4142   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4143 
4144   __ get_constant_pool(rarg1);
4145   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4146   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4147   __ verify_oop(rax);
4148 
4149   // continue
4150   __ bind(done);
4151 }
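
A simplified sketch of the TLAB fast path generated above (hypothetical field names; the real code additionally zeroes the body and writes the mark word and klass):

    struct TlabSketch { char* top; char* end; };   // per-thread allocation buffer
    char* tlab_allocate_sketch(TlabSketch& tlab, size_t size_in_bytes) {
      char* obj = tlab.top;
      if (obj + size_in_bytes > tlab.end) return nullptr;  // jump to slow_case
      tlab.top = obj + size_in_bytes;                      // bump the pointer
      return obj;
    }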
4152 
4153 void TemplateTable::newarray() {
4154   transition(itos, atos);
4155   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4156   __ load_unsigned_byte(rarg1, at_bcp(1));
4157   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4158           rarg1, rax);
4159 }

4168   __ get_constant_pool(rarg1);
4169   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4170           rarg1, rarg2, rax);
4171 }
4172 
4173 void TemplateTable::arraylength() {
4174   transition(atos, itos);
4175   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4176 }
4177 
4178 void TemplateTable::checkcast() {
4179   transition(atos, atos);
4180   Label done, is_null, ok_is_subtype, quicked, resolved;
4181   __ testptr(rax, rax); // object is in rax
4182   __ jcc(Assembler::zero, is_null);
4183 
4184   // Get cpool & tags index
4185   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4186   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4187   // See if bytecode has already been quicked
4188   __ cmpb(Address(rdx, rbx,
4189                   Address::times_1,
4190                   Array<u1>::base_offset_in_bytes()),
4191           JVM_CONSTANT_Class);
4192   __ jcc(Assembler::equal, quicked);
4193   __ push(atos); // save receiver for result, and for GC
4194   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4195 
4196   // vm_result_2 has metadata result
4197 #ifndef _LP64
4198   // borrow rdi from locals
4199   __ get_thread(rdi);
4200   __ get_vm_result_2(rax, rdi);
4201   __ restore_locals();
4202 #else
4203   __ get_vm_result_2(rax, r15_thread);
4204 #endif
4205 
4206   __ pop_ptr(rdx); // restore receiver
4207   __ jmpb(resolved);
4208 
4209   // Get superklass in rax and subklass in rbx
4210   __ bind(quicked);
4211   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4212   __ load_resolved_klass_at_index(rax, rcx, rbx);
4213 
4214   __ bind(resolved);
4215   __ load_klass(rbx, rdx, rscratch1);
4216 
4217   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4218   // Superklass in rax.  Subklass in rbx.
4219   __ gen_subtype_check(rbx, ok_is_subtype);
4220 
4221   // Come here on failure
4222   __ push_ptr(rdx);
4223   // object is at TOS
4224   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
4225 
4226   // Come here on success
4227   __ bind(ok_is_subtype);
4228   __ mov(rax, rdx); // Restore object from rdx
4229 
4230   // Collect counts on whether this check-cast sees nulls a lot or not.
4231   if (ProfileInterpreter) {
4232     __ jmp(done);
4233     __ bind(is_null);
4234     __ profile_null_seen(rcx);
4235   } else {
4236     __ bind(is_null);   // same as 'done'
4237   }
4238   __ bind(done);
4239 }
4240 
4241 void TemplateTable::instanceof() {
4242   transition(atos, itos);
4243   Label done, is_null, ok_is_subtype, quicked, resolved;
4244   __ testptr(rax, rax);
4245   __ jcc(Assembler::zero, is_null);
4246 
4247   // Get cpool & tags index
4248   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4249   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4250   // See if bytecode has already been quicked
4251   __ cmpb(Address(rdx, rbx,
4252                   Address::times_1,
4253                   Array<u1>::base_offset_in_bytes()),
4254           JVM_CONSTANT_Class);
4255   __ jcc(Assembler::equal, quicked);
4256 
4257   __ push(atos); // save receiver for result, and for GC
4258   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4259   // vm_result_2 has metadata result
4260 
4261 #ifndef _LP64
4262   // borrow rdi from locals
4263   __ get_thread(rdi);
4264   __ get_vm_result_2(rax, rdi);
4265   __ restore_locals();
4266 #else
4267   __ get_vm_result_2(rax, r15_thread);
4268 #endif
4269 
4270   __ pop_ptr(rdx); // restore receiver
4271   __ verify_oop(rdx);
4272   __ load_klass(rdx, rdx, rscratch1);
4273   __ jmpb(resolved);
4274 

4286   // Come here on failure
4287   __ xorl(rax, rax);
4288   __ jmpb(done);
4289   // Come here on success
4290   __ bind(ok_is_subtype);
4291   __ movl(rax, 1);
4292 
4293   // Collect counts on whether this test sees nulls a lot or not.
4294   if (ProfileInterpreter) {
4295     __ jmp(done);
4296     __ bind(is_null);
4297     __ profile_null_seen(rcx);
4298   } else {
4299     __ bind(is_null);   // same as 'done'
4300   }
4301   __ bind(done);
4302   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4303   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4304 }
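
A compact model of the two bytecodes implemented above (illustrative helpers): checkcast leaves the operand on the stack and throws on failure, while instanceof consumes it and pushes 0 or 1; both pass null through without a subtype check.

    oop checkcast_model(oop obj, Klass* k) {
      if (obj != nullptr && !klass_of(obj)->is_subtype_of(k))
        throw_ClassCastException();
      return obj;                      // null always passes checkcast
    }
    int instanceof_model(oop obj, Klass* k) {
      return obj != nullptr && klass_of(obj)->is_subtype_of(k);   // null -> 0
    }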
4305 
4306 
4307 //----------------------------------------------------------------------------------------------------
4308 // Breakpoints
4309 void TemplateTable::_breakpoint() {
4310   // Note: We get here even if we are single stepping.
4311   // jbug insists on setting breakpoints at every bytecode
4312   // even if we are in single step mode.
4313 
4314   transition(vtos, vtos);
4315 
4316   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4317 
4318   // get the unpatched byte code
4319   __ get_method(rarg);
4320   __ call_VM(noreg,
4321              CAST_FROM_FN_PTR(address,
4322                               InterpreterRuntime::get_original_bytecode_at),
4323              rarg, rbcp);
4324   __ mov(rbx, rax);  // why?
4325 
4326   // post the breakpoint event

4348 // Note: monitorenter & exit are symmetric routines; which is reflected
4349 //       in the assembly code structure as well
4350 //
4351 // Stack layout:
4352 //
4353 // [expressions  ] <--- rsp               = expression stack top
4354 // ..
4355 // [expressions  ]
4356 // [monitor entry] <--- monitor block top = expression stack bot
4357 // ..
4358 // [monitor entry]
4359 // [frame data   ] <--- monitor block bot
4360 // ...
4361 // [saved rbp    ] <--- rbp
4362 void TemplateTable::monitorenter() {
4363   transition(atos, vtos);
4364 
4365   // check for null object
4366   __ null_check(rax);
4367 
4368   const Address monitor_block_top(
4369         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4370   const Address monitor_block_bot(
4371         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4372   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4373 
4374   Label allocated;
4375 
4376   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4377   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4378   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4379 
4380   // initialize entry pointer
4381   __ xorl(rmon, rmon); // points to free slot or null
4382 
4383   // find a free slot in the monitor block (result in rmon)
4384   {
4385     Label entry, loop, exit;
4386     __ movptr(rtop, monitor_block_top); // derelativize pointer
4387     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

4440   // rmon: points to monitor entry
4441   __ bind(allocated);
4442 
4443   // Increment bcp to point to the next bytecode, so exception
4444   // handling for async. exceptions works correctly.
4445   // The object has already been popped from the stack, so the
4446   // expression stack looks correct.
4447   __ increment(rbcp);
4448 
4449   // store object
4450   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4451   __ lock_object(rmon);
4452 
4453   // check to make sure this monitor doesn't cause stack overflow after locking
4454   __ save_bcp();  // in case of exception
4455   __ generate_stack_overflow_check(0);
4456 
4457   // The bcp has already been incremented. Just need to dispatch to
4458   // next instruction.
4459   __ dispatch_next(vtos);
4460 }
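
A sketch of the scan referenced above ("find a free slot in the monitor block"), simplified from the generated loop: remember free entries while walking the block, and stop early if an entry already holds the object being locked.

    BasicObjectLock* free_slot = nullptr;
    for (BasicObjectLock* m = top; m < bottom; m++) {
      if (m->obj() == nullptr)  free_slot = m;   // candidate free slot
      if (m->obj() == obj_to_lock) break;        // same object: stop searching
    }
    // if free_slot is still null, monitorenter grows the monitor block instead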
4461 
4462 void TemplateTable::monitorexit() {
4463   transition(atos, vtos);
4464 
4465   // check for null object
4466   __ null_check(rax);
4467 
4468   const Address monitor_block_top(
4469         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4470   const Address monitor_block_bot(
4471         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4472   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4473 
4474   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4475   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4476 
4477   Label found;
4478 
4479   // find matching slot
4480   {
4481     Label entry, loop;
4482     __ movptr(rtop, monitor_block_top); // derelativize pointer
4483     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4484     // rtop points to current entry, starting with top-most entry
4485 
4486     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4487                                         // of monitor block

  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodCounters.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/oop.inline.hpp"
  40 #include "oops/inlineKlass.hpp"
  41 #include "oops/resolvedFieldEntry.hpp"
  42 #include "oops/resolvedIndyEntry.hpp"
  43 #include "oops/resolvedMethodEntry.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/frame.inline.hpp"
  47 #include "runtime/safepointMechanism.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "runtime/stubRoutines.hpp"
  50 #include "runtime/synchronizer.hpp"
  51 #include "utilities/macros.hpp"
  52 
  53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  54 
  55 // Global Register Names
  56 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  57 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  58 
  59 // Address Computation: local variables
  60 static inline Address iaddress(int n) {

 167 static void do_oop_load(InterpreterMacroAssembler* _masm,
 168                         Address src,
 169                         Register dst,
 170                         DecoratorSet decorators = 0) {
 171   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 172 }
 173 
 174 Address TemplateTable::at_bcp(int offset) {
 175   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 176   return Address(rbcp, offset);
 177 }
 178 
 179 
 180 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 181                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 182                                    int byte_no) {
 183   if (!RewriteBytecodes)  return;
 184   Label L_patch_done;
 185 
 186   switch (bc) {
 187   case Bytecodes::_fast_vputfield:
 188   case Bytecodes::_fast_aputfield:
 189   case Bytecodes::_fast_bputfield:
 190   case Bytecodes::_fast_zputfield:
 191   case Bytecodes::_fast_cputfield:
 192   case Bytecodes::_fast_dputfield:
 193   case Bytecodes::_fast_fputfield:
 194   case Bytecodes::_fast_iputfield:
 195   case Bytecodes::_fast_lputfield:
 196   case Bytecodes::_fast_sputfield:
 197     {
 198       // We skip bytecode quickening for putfield instructions when
 199       // the put_code written to the constant pool cache is zero.
 200       // This is required so that every execution of this instruction
 201       // calls out to InterpreterRuntime::resolve_get_put to do
 202       // additional, required work.
 203       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 204       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 205       __ load_field_entry(temp_reg, bc_reg);
 206       if (byte_no == f1_byte) {
 207         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 815                     Address(rdx, rax,
 816                             Address::times_4,
 817                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 818                     noreg, noreg);
 819 }
 820 
 821 void TemplateTable::daload() {
 822   transition(itos, dtos);
 823   // rax: index
 824   // rdx: array
 825   index_check(rdx, rax); // kills rbx
 826   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 827                     Address(rdx, rax,
 828                             Address::times_8,
 829                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 830                     noreg, noreg);
 831 }
 832 
 833 void TemplateTable::aaload() {
 834   transition(itos, atos);
 835   Register array = rdx;
 836   Register index = rax;
 837 
 838   index_check(array, index); // kills rbx
 839   __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
 840   if (UseFlatArray) {
 841     Label is_flat_array, done;
 842     __ test_flat_array_oop(array, rbx, is_flat_array);
 843     do_oop_load(_masm,
 844                 Address(array, index,
 845                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 846                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 847                 rax,
 848                 IS_ARRAY);
 849     __ jmp(done);
 850     __ bind(is_flat_array);
 851     __ read_flat_element(array, index, rbx, rcx, rax);
 852     __ bind(done);
 853   } else {
 854     do_oop_load(_masm,
 855                 Address(array, index,
 856                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 857                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 858                 rax,
 859                 IS_ARRAY);
 860   }
 861   __ profile_element_type(rbx, rax, rcx);
 862 }
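
The Valhalla-aware aaload above branches on the array layout; in sketch form (hypothetical helpers): flat arrays store their elements inline, so the element must be materialized rather than loaded as an oop.

    oop aaload_model(arrayOop array, int index) {
      if (UseFlatArray && is_flat_array(array))
        return read_flat_element(array, index);   // buffers the inline value
      return load_oop_element(array, index);      // ordinary barrier-aware load
    }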
 863 
 864 void TemplateTable::baload() {
 865   transition(itos, itos);
 866   // rax: index
 867   // rdx: array
 868   index_check(rdx, rax); // kills rbx
 869   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 870                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 871                     noreg, noreg);
 872 }
 873 
 874 void TemplateTable::caload() {
 875   transition(itos, itos);
 876   // rax: index
 877   // rdx: array
 878   index_check(rdx, rax); // kills rbx
 879   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 880                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 881                     noreg, noreg);

1127   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1128                      Address(rdx, rbx, Address::times_4,
1129                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1130                      noreg /* ftos */, noreg, noreg, noreg);
1131 }
1132 
1133 void TemplateTable::dastore() {
1134   transition(dtos, vtos);
1135   __ pop_i(rbx);
1136   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1137   // rbx:  index
1138   // rdx:  array
1139   index_check(rdx, rbx); // prefer index in rbx
1140   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1141                      Address(rdx, rbx, Address::times_8,
1142                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1143                      noreg /* dtos */, noreg, noreg, noreg);
1144 }
1145 
1146 void TemplateTable::aastore() {
1147   Label is_null, is_flat_array, ok_is_subtype, done;
1148   transition(vtos, vtos);
1149   // stack: ..., array, index, value
1150   __ movptr(rax, at_tos());    // value
1151   __ movl(rcx, at_tos_p1()); // index
1152   __ movptr(rdx, at_tos_p2()); // array
1153 
1154   Address element_address(rdx, rcx,
1155                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1156                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1157 
1158   index_check_without_pop(rdx, rcx);     // kills rbx
1159 
1160   __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1161   __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1162 
1163   __ testptr(rax, rax);
1164   __ jcc(Assembler::zero, is_null);
1165 
1166   // Move array class to rdi
1167   __ load_klass(rdi, rdx, rscratch1);
1168   if (UseFlatArray) {
1169     __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1170     __ test_flat_array_layout(rbx, is_flat_array);
1171   }
1172 
1173   // Move subklass into rbx
1174   __ load_klass(rbx, rax, rscratch1);
1175   // Move array element superklass into rax
1176   __ movptr(rax, Address(rdi,
1177                          ObjArrayKlass::element_klass_offset()));
1178 
1179   // Generate subtype check.  Blows rcx, rdi
1180   // Superklass in rax.  Subklass in rbx.
1181   // is "rbx <: rax" ? (value subclass <: array element superclass)
1182   __ gen_subtype_check(rbx, ok_is_subtype, false);
1183 
1184   // Come here on failure
1185   // object is at TOS
1186   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1187 
1188   // Come here on success
1189   __ bind(ok_is_subtype);
1190 
1191   // Get the value we will store
1192   __ movptr(rax, at_tos());
1193   __ movl(rcx, at_tos_p1()); // index
1194   // Now store using the appropriate barrier
1195   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1196   __ jmp(done);
1197 
1198   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1199   __ bind(is_null);
1200   if (EnableValhalla) {
1201     Label is_null_into_value_array_npe, store_null;
1202 
1203     // No way to store null in null-free array
1204     __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1205     __ jmp(store_null);
1206 
1207     __ bind(is_null_into_value_array_npe);
1208     __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));
1209 
1210     __ bind(store_null);
1211   }
1212   // Store a null
1213   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1214   __ jmp(done);
1215 
1216   if (UseFlatArray) {
1217     Label is_type_ok;
1218     __ bind(is_flat_array); // Store non-null value to flat
1219 
1220     // Simplistic type check...
1221 
1222     // Profile the not-null value's klass.
1223     __ load_klass(rbx, rax, rscratch1);
1224     // Move element klass into rax
1225     __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1226     // flat value array needs exact type match
1227     // is "rax == rbx" (value subclass == array element superclass)
1228     __ cmpptr(rax, rbx);
1229     __ jccb(Assembler::equal, is_type_ok);
1230 
1231     __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1232 
1233     __ bind(is_type_ok);
1234     // rbx: value's klass
1235     // rdx: array
1236     // rdi: array klass
1237     __ test_klass_is_empty_inline_type(rbx, rax, done);
1238 
1239     // calc dst for copy
1240     __ movl(rax, at_tos_p1()); // index
1241     __ data_for_value_array_index(rdx, rdi, rax, rax);
1242 
1243     // ...and src for copy
1244     __ movptr(rcx, at_tos());  // value
1245     __ data_for_oop(rcx, rcx, rbx);
1246 
1247     __ access_value_copy(IN_HEAP, rcx, rax, rbx);
1248   }
1249   // Pop stack arguments
1250   __ bind(done);
1251   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1252 }
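
A condensed model of the Valhalla additions to aastore (hypothetical helpers): null stores into null-free arrays throw NullPointerException, and flat arrays demand an exact element-type match followed by a field-wise copy.

    void aastore_model(arrayOop array, int index, oop value) {
      if (value == nullptr) {
        if (is_null_free_array(array)) throw_NullPointerException();
        store_oop_element(array, index, nullptr);
      } else if (is_flat_array(array)) {
        if (klass_of(value) != element_klass(array)) throw_ArrayStoreException();
        copy_value_fields(value, flat_element_addr(array, index)); // access_value_copy
      } else {
        subtype_check_and_store(array, index, value);  // the oop path above
      }
    }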
1253 
1254 void TemplateTable::bastore() {
1255   transition(itos, vtos);
1256   __ pop_i(rbx);
1257   // rax: value
1258   // rbx: index
1259   // rdx: array
1260   index_check(rdx, rbx); // prefer index in rbx
1261   // Need to check whether array is boolean or byte
1262   // since both types share the bastore bytecode.
1263   __ load_klass(rcx, rdx, rscratch1);
1264   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1265   int diffbit = Klass::layout_helper_boolean_diffbit();
1266   __ testl(rcx, diffbit);
1267   Label L_skip;
1268   __ jccb(Assembler::zero, L_skip);

2397   __ jcc(j_not(cc), not_taken);
2398   branch(false, false);
2399   __ bind(not_taken);
2400   __ profile_not_taken_branch(rax);
2401 }
2402 
2403 void TemplateTable::if_nullcmp(Condition cc) {
2404   transition(atos, vtos);
2405   // assume branch is more often taken than not (loops use backward branches)
2406   Label not_taken;
2407   __ testptr(rax, rax);
2408   __ jcc(j_not(cc), not_taken);
2409   branch(false, false);
2410   __ bind(not_taken);
2411   __ profile_not_taken_branch(rax);
2412 }
2413 
2414 void TemplateTable::if_acmp(Condition cc) {
2415   transition(atos, vtos);
2416   // assume branch is more often taken than not (loops use backward branches)
2417   Label taken, not_taken;
2418   __ pop_ptr(rdx);
2419 
2420   __ profile_acmp(rbx, rdx, rax, rcx);
2421 
2422   const int is_inline_type_mask = markWord::inline_type_pattern;
2423   if (EnableValhalla) {
2424     __ cmpoop(rdx, rax);
2425     __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2426 
2427     // might be substitutable, test if either rax or rdx is null
2428     __ testptr(rax, rax);
2429     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2430     __ testptr(rdx, rdx);
2431     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2432 
2433     // and are both inline types?
2434     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2435     __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2436     __ andptr(rbx, is_inline_type_mask);
2437     __ cmpptr(rbx, is_inline_type_mask);
2438     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2439 
2440     // same value klass ?
2441     __ load_metadata(rbx, rdx);
2442     __ load_metadata(rcx, rax);
2443     __ cmpptr(rbx, rcx);
2444     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2445 
2446     // Know both are the same type, let's test for substitutability...
2447     if (cc == equal) {
2448       invoke_is_substitutable(rax, rdx, taken, not_taken);
2449     } else {
2450       invoke_is_substitutable(rax, rdx, not_taken, taken);
2451     }
2452     __ stop("Not reachable");
2453   }
2454 
2455   __ cmpoop(rdx, rax);
2456   __ jcc(j_not(cc), not_taken);
2457   __ bind(taken);
2458   branch(false, false);
2459   __ bind(not_taken);
2460   __ profile_not_taken_branch(rax, true);
2461 }
2462 
2463 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2464                                             Label& is_subst, Label& not_subst) {
2465   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2466   // The answer is returned in rax; jump to the corresponding outcome label.
2467   __ testl(rax, rax);
2468   __ jcc(Assembler::zero, not_subst);
2469   __ jmp(is_subst);
2470 }
2471 
2472 void TemplateTable::ret() {
2473   transition(vtos, vtos);
2474   locals_index(rbx);
2475   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2476   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2477   __ profile_ret(rbx, rcx);
2478   __ get_method(rax);
2479   __ movptr(rbcp, Address(rax, Method::const_offset()));
2480   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2481                       ConstMethod::codes_offset()));
2482   __ dispatch_next(vtos, 0, true);
2483 }
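// Hedged sketch of the bcp computed by the lea above (illustrative):
//   rbcp = (address)method->constMethod() + ConstMethod::codes_offset() + return_bci
// i.e. the interpreter resumes at the bytecode whose bci was stored by jsr.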
2484 
2485 void TemplateTable::wide_ret() {
2486   transition(vtos, vtos);
2487   locals_index_wide(rbx);
2488   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2489   __ profile_ret(rbx, rcx);

2718     const Register thread = rdi;
2719     __ get_thread(thread);
2720     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2721 #endif
2722     __ jcc(Assembler::zero, no_safepoint);
2723     __ push(state);
2724     __ push_cont_fastpath();
2725     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2726                                        InterpreterRuntime::at_safepoint));
2727     __ pop_cont_fastpath();
2728     __ pop(state);
2729     __ bind(no_safepoint);
2730   }
2731 
2732   // Narrow result if state is itos but result type is smaller.
2733   // Need to narrow in the return bytecode rather than in generate_return_entry
2734   // since compiled code callers expect the result to already be narrowed.
2735   if (state == itos) {
2736     __ narrow(rax);
2737   }
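  // Hedged sketch of what narrow(rax) does, based on the method's return
  // type (illustrative):
  //   boolean: rax &= 1;            // keep only the t/f bit
  //   byte:    rax = (jint)(jbyte)rax;   // sign-extend low byte
  //   char:    rax = (jint)(jchar)rax;   // zero-extend low 16 bits
  //   short:   rax = (jint)(jshort)rax;  // sign-extend low 16 bits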
2738 
2739   __ remove_activation(state, rbcp, true, true, true);
2740 
2741   __ jmp(rbcp);
2742 }
2743 
2744 // ----------------------------------------------------------------------------
2745 // Volatile variables demand their effects be made known to all CPUs
2746 // in order.  Store buffers on most chips allow reads & writes to
2747 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2748 // without some kind of memory barrier (i.e., it's not sufficient that
2749 // the interpreter does not reorder volatile references, the hardware
2750 // also must not reorder them).
2751 //
2752 // According to the new Java Memory Model (JMM):
2753 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2754 //     writes act as acquire & release, so:
2755 // (2) A read cannot let unrelated NON-volatile memory refs that
2756 //     happen after the read float up to before the read.  It's OK for
2757 //     non-volatile memory refs that happen before the volatile read to
2758 //     float down below it.
2759 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

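// Hedged Java-level illustration of rules (2)/(3) above (not part of this
// file): given "volatile int v; int a, b, r;"
//   a = 1;    // non-volatile write before the volatile read: may sink below it
//   r = v;    // volatile read acts as acquire
//   b = 2;    // non-volatile write after the read: must NOT float above it
// The interpreter relies on volatile_barrier() to enforce this on hardware
// that would otherwise reorder the accesses.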
3085     }
3086     // rax,:   object pointer or null
3087     // cache: cache entry pointer
3088     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
3089               rax, cache);
3090 
3091     __ load_field_entry(cache, index);
3092     __ bind(L1);
3093   }
3094 }
3095 
3096 void TemplateTable::pop_and_check_object(Register r) {
3097   __ pop_ptr(r);
3098   __ null_check(r);  // for field access must check obj.
3099   __ verify_oop(r);
3100 }
3101 
3102 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3103   transition(vtos, vtos);
3104 
3105   const Register obj   = LP64_ONLY(r9) NOT_LP64(rcx);
3106   const Register cache = rcx;
3107   const Register index = rdx;
3108   const Register off   = rbx;
3109   const Register tos_state   = rax;
3110   const Register flags = rdx;
3111   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
3112 
3113   resolve_cache_and_index_for_field(byte_no, cache, index);
3114   jvmti_post_field_access(cache, index, is_static, false);
3115   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3116 
3117   const Address field(obj, off, Address::times_1, 0*wordSize);
3118 
3119   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
3120 
3121   // btos must be zero so the plain zero test on tos_state below dispatches it first
3122   assert(btos == 0, "change code, btos != 0");
3123   __ testl(tos_state, tos_state);
3124   __ jcc(Assembler::notZero, notByte);
3125 
3126   // btos
3127   if (!is_static) pop_and_check_object(obj);
3128   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3129   __ push(btos);
3130   // Rewrite bytecode to be faster
3131   if (!is_static && rc == may_rewrite) {
3132     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3133   }
3134   __ jmp(Done);
3135 
3136   __ bind(notByte);
3137   __ cmpl(tos_state, ztos);
3138   __ jcc(Assembler::notEqual, notBool);
3139   // ztos (same code as btos)
3140   if (!is_static) pop_and_check_object(obj);
3141   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3142   __ push(ztos);
3143   // Rewrite bytecode to be faster
3144   if (!is_static && rc == may_rewrite) {
3145     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3146     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3147   }
3148   __ jmp(Done);
3149 
3150   __ bind(notBool);
3151   __ cmpl(tos_state, atos);
3152   __ jcc(Assembler::notEqual, notObj);
3153   // atos
3154   if (!EnableValhalla) {
3155     if (!is_static) pop_and_check_object(obj);
3156     do_oop_load(_masm, field, rax);
3157     __ push(atos);
3158     if (!is_static && rc == may_rewrite) {
3159       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3160     }
3161     __ jmp(Done);
3162   } else {
3163     if (is_static) {
3164       __ load_heap_oop(rax, field);
3165       Label is_null_free_inline_type, uninitialized;
3166       // The branches below handle the case where the static field has not been initialized yet
3167       __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3168       // field is not a null free inline type
3169       __ push(atos);
3170       __ jmp(Done);
3171       // field is a null free inline type, must not return null even if uninitialized
3172       __ bind(is_null_free_inline_type);
3173       __ testptr(rax, rax);
3174       __ jcc(Assembler::zero, uninitialized);
3175       __ push(atos);
3176       __ jmp(Done);
3177       __ bind(uninitialized);
3178 #ifdef _LP64
3179       Label slow_case, finish;
3180       __ movptr(rbx, Address(obj, java_lang_Class::klass_offset()));
3181       __ cmpb(Address(rbx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3182       __ jcc(Assembler::notEqual, slow_case);
3183       __ get_default_value_oop(rbx, rscratch1, rax);
3184       __ jmp(finish);
3185       __ bind(slow_case);
3186 #endif // _LP64
3187       __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field),
3188                  obj, cache);
3189 #ifdef _LP64
3190       __ bind(finish);
3191 #endif // _LP64
3192       __ verify_oop(rax);
3193       __ push(atos);
3194       __ jmp(Done);
3195     } else {
3196       Label is_flat, nonnull, is_inline_type, rewrite_inline, has_null_marker;
3197       __ test_field_is_null_free_inline_type(flags, rscratch1, is_inline_type);
3198       __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3199       // field is not a null free inline type
3200       pop_and_check_object(obj);
3201       __ load_heap_oop(rax, field);
3202       __ push(atos);
3203       if (rc == may_rewrite) {
3204         patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3205       }
3206       __ jmp(Done);
3207       __ bind(is_inline_type);
3208       __ test_field_is_flat(flags, rscratch1, is_flat);
3209           // field is not flat
3210           pop_and_check_object(obj);
3211           __ load_heap_oop(rax, field);
3212           __ testptr(rax, rax);
3213           __ jcc(Assembler::notZero, nonnull);
3214             __ load_unsigned_short(flags, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3215             __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3216             __ get_inline_type_field_klass(rcx, flags, rbx);
3217             __ get_default_value_oop(rbx, rcx, rax);
3218           __ bind(nonnull);
3219           __ verify_oop(rax);
3220           __ push(atos);
3221           __ jmp(rewrite_inline);
3222         __ bind(is_flat);
3223           pop_and_check_object(rax);
3224           __ load_unsigned_short(rdx, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3225           __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3226           __ read_flat_field(rcx, rdx, rbx, rax);
3227           __ verify_oop(rax);
3228           __ push(atos);
3229           __ jmp(rewrite_inline);
3230       __ bind(has_null_marker);
3231         pop_and_check_object(rax);
3232         __ load_field_entry(rcx, rbx);
3233         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3234         __ get_vm_result(rax, r15_thread);
3235         __ push(atos);
3236       __ bind(rewrite_inline);
3237       if (rc == may_rewrite) {
3238         patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
3239       }
3240       __ jmp(Done);
3241     }
3242   }
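  // Hedged summary of the Valhalla atos load paths above (illustrative):
  //   static field:   null-free + null value -> default value or runtime init
  //   instance field: has null marker -> InterpreterRuntime::read_nullable_flat_field
  //                   null-free, flat -> read_flat_field
  //                   null-free, not flat -> oop load, default value if null
  //                   otherwise -> plain oop load (+ _fast_agetfield rewrite)
  // The inline-type paths rewrite to _fast_vgetfield instead.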
3243 
3244   __ bind(notObj);
3245 
3246   if (!is_static) pop_and_check_object(obj);
3247 
3248   __ cmpl(tos_state, itos);
3249   __ jcc(Assembler::notEqual, notInt);
3250   // itos
3251   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3252   __ push(itos);
3253   // Rewrite bytecode to be faster
3254   if (!is_static && rc == may_rewrite) {
3255     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3256   }
3257   __ jmp(Done);
3258 
3259   __ bind(notInt);
3260   __ cmpl(tos_state, ctos);
3261   __ jcc(Assembler::notEqual, notChar);
3262   // ctos
3263   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3264   __ push(ctos);
3265   // Rewrite bytecode to be faster
3266   if (!is_static && rc == may_rewrite) {
3267     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3327 #endif
3328 
3329   __ bind(Done);
3330   // [jk] not needed currently
3331   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3332   //                                              Assembler::LoadStore));
3333 }
3334 
3335 void TemplateTable::getfield(int byte_no) {
3336   getfield_or_static(byte_no, false);
3337 }
3338 
3339 void TemplateTable::nofast_getfield(int byte_no) {
3340   getfield_or_static(byte_no, false, may_not_rewrite);
3341 }
3342 
3343 void TemplateTable::getstatic(int byte_no) {
3344   getfield_or_static(byte_no, true);
3345 }
3346 
3347 // The registers cache and index are expected to be set before the call.
3348 // The function may destroy various registers, just not the cache and index registers.
3349 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3350   // Cache is rcx and index is rdx
3351   const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3352   const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx);   // Object pointer
3353   const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3354 
3355   if (JvmtiExport::can_post_field_modification()) {
3356     // Check to see if a field modification watch has been set before
3357     // we take the time to call into the VM.
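    // Hedged C-level sketch of this guard (illustrative): only enter the VM
    // when an agent has actually armed a field-modification watch, i.e.
    //   if (*JvmtiExport::get_field_modification_count_addr() == 0) goto L1;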
3358     Label L1;
3359     assert_different_registers(cache, obj, rax);
3360     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3361     __ testl(rax, rax);
3362     __ jcc(Assembler::zero, L1);
3363 
3364     __ mov(entry, cache);
3365 
3366     if (is_static) {

3408     // cache: field entry pointer
3409     // value: jvalue object on the stack
3410     __ call_VM(noreg,
3411               CAST_FROM_FN_PTR(address,
3412                               InterpreterRuntime::post_field_modification),
3413               obj, entry, value);
3414     // Reload field entry
3415     __ load_field_entry(cache, index);
3416     __ bind(L1);
3417   }
3418 }
3419 
3420 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3421   transition(vtos, vtos);
3422 
3423   const Register obj = rcx;
3424   const Register cache = rcx;
3425   const Register index = rdx;
3426   const Register tos_state   = rdx;
3427   const Register off   = rbx;
3428   const Register flags = r9;
3429 
3430   resolve_cache_and_index_for_field(byte_no, cache, index);
3431   jvmti_post_field_mod(cache, index, is_static);
3432   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3433 
3434   // [jk] not needed currently
3435   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3436   //                                              Assembler::StoreStore));
3437 
3438   Label notVolatile, Done;
3439 
3440   // Check for volatile store
3441   __ movl(rscratch1, flags);
3442   __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
3443   __ testl(rscratch1, rscratch1);
3444   __ jcc(Assembler::zero, notVolatile);
3445 
3446   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3447   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3448                                                Assembler::StoreStore));
3449   __ jmp(Done);
3450   __ bind(notVolatile);
3451 
3452   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3453 
3454   __ bind(Done);
3455 }
3456 
3457 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3458                                               Register obj, Register off, Register tos_state, Register flags) {
3459 
3460   // field addresses
3461   const Address field(obj, off, Address::times_1, 0*wordSize);
3462   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3463 
3464   Label notByte, notBool, notInt, notShort, notChar,
3465         notLong, notFloat, notObj, notInlineType;
3466   Label Done;
3467 
3468   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3469 
3470   // Test TOS state
3471   __ testl(tos_state, tos_state);
3472   __ jcc(Assembler::notZero, notByte);
3473 
3474   // btos
3475   {
3476     __ pop(btos);
3477     if (!is_static) pop_and_check_object(obj);
3478     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3479     if (!is_static && rc == may_rewrite) {
3480       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3481     }
3482     __ jmp(Done);
3483   }
3484 
3485   __ bind(notByte);
3486   __ cmpl(tos_state, ztos);
3487   __ jcc(Assembler::notEqual, notBool);
3488 
3489   // ztos
3490   {
3491     __ pop(ztos);
3492     if (!is_static) pop_and_check_object(obj);
3493     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3494     if (!is_static && rc == may_rewrite) {
3495       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3496     }
3497     __ jmp(Done);
3498   }
3499 
3500   __ bind(notBool);
3501   __ cmpl(tos_state, atos);
3502   __ jcc(Assembler::notEqual, notObj);
3503 
3504   // atos
3505   {
3506     if (!EnableValhalla) {
3507       __ pop(atos);
3508       if (!is_static) pop_and_check_object(obj);
3509       // Store into the field
3510       do_oop_store(_masm, field, rax);
3511       if (!is_static && rc == may_rewrite) {
3512         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3513       }
3514       __ jmp(Done);
3515     } else {
3516       __ pop(atos);
3517       if (is_static) {
3518         Label is_inline_type;
3519         __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
3520         __ null_check(rax);
3521         __ bind(is_inline_type);
3522         do_oop_store(_masm, field, rax);
3523         __ jmp(Done);
3524       } else {
3525         Label is_null_free_inline_type, is_flat, has_null_marker,
3526               write_null, rewrite_not_inline, rewrite_inline;
3527         __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3528         __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3529           // Not an inline type
3530           pop_and_check_object(obj);
3531           // Store into the field
3532           do_oop_store(_masm, field, rax);
3533           __ bind(rewrite_not_inline);
3534           if (rc == may_rewrite) {
3535             patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3536           }
3537           __ jmp(Done);
3538         // Implementation of the inline type semantic
3539         __ bind(is_null_free_inline_type);
3540           __ null_check(rax);
3541           __ test_field_is_flat(flags, rscratch1, is_flat);
3542             // field is not flat
3543             pop_and_check_object(obj);
3544             // Store into the field
3545             do_oop_store(_masm, field, rax);
3546           __ jmp(rewrite_inline);
3547           __ bind(is_flat);
3548             // field is flat
3549             pop_and_check_object(obj);
3550             assert_different_registers(rax, rdx, obj, off);
3551             __ load_klass(rdx, rax, rscratch1);
3552             __ data_for_oop(rax, rax, rdx);
3553             __ addptr(obj, off);
3554             __ access_value_copy(IN_HEAP, rax, obj, rdx);
3555             __ jmp(rewrite_inline);
3556         __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3557           pop_and_check_object(rbx);
3558           __ load_field_entry(rcx, rdx);
3559           call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3560         __ bind(rewrite_inline);
3561         if (rc == may_rewrite) {
3562           patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
3563         }
3564         __ jmp(Done);
3565       }
3566     }
3567   }
3568 
3569   __ bind(notObj);
3570   __ cmpl(tos_state, itos);
3571   __ jcc(Assembler::notEqual, notInt);
3572 
3573   // itos
3574   {
3575     __ pop(itos);
3576     if (!is_static) pop_and_check_object(obj);
3577     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3578     if (!is_static && rc == may_rewrite) {
3579       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3580     }
3581     __ jmp(Done);
3582   }
3583 
3584   __ bind(notInt);
3585   __ cmpl(tos_state, ctos);
3586   __ jcc(Assembler::notEqual, notChar);

3685 }
3686 
3687 void TemplateTable::jvmti_post_fast_field_mod() {
3688 
3689   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3690 
3691   if (JvmtiExport::can_post_field_modification()) {
3692     // Check to see if a field modification watch has been set before
3693     // we take the time to call into the VM.
3694     Label L2;
3695     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3696     __ testl(scratch, scratch);
3697     __ jcc(Assembler::zero, L2);
3698     __ pop_ptr(rbx);                  // copy the object pointer from tos
3699     __ verify_oop(rbx);
3700     __ push_ptr(rbx);                 // put the object pointer back on tos
3701     // Save tos values before call_VM() clobbers them. Since we have
3702     // to do it for every data type, we use the saved values as the
3703     // jvalue object.
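    // Hedged note: the saved tos slot is reinterpreted by the VM as a jvalue,
    // the JNI union from jni.h (illustrative excerpt):
    //   typedef union jvalue { jboolean z; jbyte b; jchar c; jshort s;
    //                          jint i; jlong j; jfloat f; jdouble d; jobject l; } jvalue;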
3704     switch (bytecode()) {          // load values into the jvalue object
3705     case Bytecodes::_fast_vputfield: // fall through
3706     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3707     case Bytecodes::_fast_bputfield: // fall through
3708     case Bytecodes::_fast_zputfield: // fall through
3709     case Bytecodes::_fast_sputfield: // fall through
3710     case Bytecodes::_fast_cputfield: // fall through
3711     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3712     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3713     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3714     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3715 
3716     default:
3717       ShouldNotReachHere();
3718     }
3719     __ mov(scratch, rsp);             // points to jvalue on the stack
3720     // access constant pool cache entry
3721     LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3722     NOT_LP64(__ load_field_entry(rax, rdx));
3723     __ verify_oop(rbx);
3724     // rbx: object pointer copied above
3725     // c_rarg2: cache entry pointer
3726     // c_rarg3: jvalue object on the stack
3727     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3728     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3729 
3730     switch (bytecode()) {             // restore tos values
3731     case Bytecodes::_fast_vputfield: // fall through
3732     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3733     case Bytecodes::_fast_bputfield: // fall through
3734     case Bytecodes::_fast_zputfield: // fall through
3735     case Bytecodes::_fast_sputfield: // fall through
3736     case Bytecodes::_fast_cputfield: // fall through
3737     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3738     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3739     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3740     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3741     default: break;
3742     }
3743     __ bind(L2);
3744   }
3745 }
3746 
3747 void TemplateTable::fast_storefield(TosState state) {
3748   transition(state, vtos);
3749 
3750   Label notVolatile, Done;
3751 
3752   jvmti_post_fast_field_mod();
3753 
3754   __ push(rax);
3755   __ load_field_entry(rcx, rax);
3756   load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
3757   __ pop(rax);
3758   // RBX: field offset, RAX: TOS, RDX: flags
3759 
3760   // Get object from stack
3761   pop_and_check_object(rcx);
3762 
3763   // field address
3764   const Address field(rcx, rbx, Address::times_1);
3765 
3766   // Check for volatile store
3767   __ movl(rscratch2, rdx);  // saving flags for is_flat test
3768   __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3769   __ testl(rscratch2, rscratch2);
3770   __ jcc(Assembler::zero, notVolatile);
3771 
3772   fast_storefield_helper(field, rax, rdx);
3773   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3774                                                Assembler::StoreStore));
3775   __ jmp(Done);
3776   __ bind(notVolatile);
3777 
3778   fast_storefield_helper(field, rax, rdx);
3779 
3780   __ bind(Done);
3781 }
3782 
3783 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3784 
3785   // DANGER: 'field' argument depends on rcx and rbx
3786 
3787   // access field
3788   switch (bytecode()) {
3789   case Bytecodes::_fast_vputfield:
3790     {
3791       Label is_flat, has_null_marker, write_null, done;
3792       __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3793       // Null free field cases: flat or not flat
3794       __ null_check(rax);
3795       __ test_field_is_flat(flags, rscratch1, is_flat);
3796         // field is not flat
3797         do_oop_store(_masm, field, rax);
3798         __ jmp(done);
3799       __ bind(is_flat);
3800         // field is flat
3801         __ load_klass(rdx, rax, rscratch1);
3802         __ data_for_oop(rax, rax, rdx);
3803         __ lea(rcx, field);
3804         __ access_value_copy(IN_HEAP, rax, rcx, rdx);
3805         __ jmp(done);
3806       __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3807         __ movptr(rbx, rcx);
3808         __ load_field_entry(rcx, rdx);
3809         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3810       __ bind(done);
3811     }
3812     break;
3813   case Bytecodes::_fast_aputfield:
3814     {
3815       do_oop_store(_masm, field, rax);
3816     }
3817     break;
3818   case Bytecodes::_fast_lputfield:
3819 #ifdef _LP64
3820     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3821 #else
3822   __ stop("should not be rewritten");
3823 #endif
3824     break;
3825   case Bytecodes::_fast_iputfield:
3826     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3827     break;
3828   case Bytecodes::_fast_zputfield:
3829     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3830     break;
3831   case Bytecodes::_fast_bputfield:
3832     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3833     break;
3834   case Bytecodes::_fast_sputfield:
3835     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3836     break;

3858     Label L1;
3859     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3860     __ testl(rcx, rcx);
3861     __ jcc(Assembler::zero, L1);
3862     // access constant pool cache entry
3863     LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3864     NOT_LP64(__ load_field_entry(rcx, rdx));
3865     __ verify_oop(rax);
3866     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3867     LP64_ONLY(__ mov(c_rarg1, rax));
3868     // c_rarg1: object pointer copied above
3869     // c_rarg2: cache entry pointer
3870     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3871     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3872     __ pop_ptr(rax); // restore object pointer
3873     __ bind(L1);
3874   }
3875 
3876   // access constant pool cache
3877   __ load_field_entry(rcx, rbx);
3878   __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3879 
3880   // rax: object
3881   __ verify_oop(rax);
3882   __ null_check(rax);
3883   Address field(rax, rdx, Address::times_1);
3884 
3885   // access field
3886   switch (bytecode()) {
3887   case Bytecodes::_fast_vgetfield:
3888     {
3889       Label is_flat, nonnull, Done, has_null_marker;
3890       __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
3891       __ test_field_has_null_marker(rscratch1, rscratch2, has_null_marker);
3892       __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3893         // field is not flat
3894         __ load_heap_oop(rax, field);
3895         __ testptr(rax, rax);
3896         __ jcc(Assembler::notZero, nonnull);
3897           __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3898           __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3899           __ get_inline_type_field_klass(rcx, rdx, rbx);
3900           __ get_default_value_oop(rbx, rcx, rax);
3901         __ bind(nonnull);
3902         __ verify_oop(rax);
3903         __ jmp(Done);
3904       __ bind(is_flat);
3905       // field is flat
3906         __ push(rdx); // save offset
3907         __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3908         __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3909         __ pop(rbx); // restore offset
3910         __ read_flat_field(rcx, rdx, rbx, rax);
3911         __ jmp(Done);
3912       __ bind(has_null_marker);
3913         // rax = instance, rcx = resolved entry
3914         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3915         __ get_vm_result(rax, r15_thread);
3916       __ bind(Done);
3917       __ verify_oop(rax);
3918     }
3919     break;
3920   case Bytecodes::_fast_agetfield:
3921     do_oop_load(_masm, field, rax);
3922     __ verify_oop(rax);
3923     break;
3924   case Bytecodes::_fast_lgetfield:
3925 #ifdef _LP64
3926     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3927 #else
3928   __ stop("should not be rewritten");
3929 #endif
3930     break;
3931   case Bytecodes::_fast_igetfield:
3932     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3933     break;
3934   case Bytecodes::_fast_bgetfield:
3935     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3936     break;
3937   case Bytecodes::_fast_sgetfield:
3938     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3939     break;

4335 
4336   // Note:  rax_callsite is already pushed
4337 
4338   // %%% should make a type profile for any invokedynamic that takes a ref argument
4339   // profile this call
4340   __ profile_call(rbcp);
4341   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4342 
4343   __ verify_oop(rax_callsite);
4344 
4345   __ jump_from_interpreted(rbx_method, rdx);
4346 }
4347 
4348 //-----------------------------------------------------------------------------
4349 // Allocation
4350 
4351 void TemplateTable::_new() {
4352   transition(vtos, atos);
4353   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4354   Label slow_case;
4355   Label done;
4356 
4357   __ get_cpool_and_tags(rcx, rax);
4358 
4359   // Make sure the class we're about to instantiate has been resolved.
4360   // This is done before loading InstanceKlass to be consistent with the order
4361   // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
4362   const int tags_offset = Array<u1>::base_offset_in_bytes();
4363   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4364   __ jcc(Assembler::notEqual, slow_case);
4365 
4366   // get InstanceKlass
4367   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4368 
4369   // make sure klass is initialized
4370 #ifdef _LP64
4371   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4372   __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4373 #else
4374   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4375   __ jcc(Assembler::notEqual, slow_case);
4376 #endif
4377 
4378   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
4379   if (DTraceAllocProbes) {
4380     // Trigger dtrace event for fastpath
4381     __ push(atos);
4382     __ call_VM_leaf(
4383          CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4384     __ pop(atos);
4385   }
4386   __ jmp(done);
4387 
4388   // slow case
4389   __ bind(slow_case);
4390 
4391   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4392   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4393 
4394   __ get_constant_pool(rarg1);
4395   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4396   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4397   __ verify_oop(rax);
4398 
4399   // continue
4400   __ bind(done);
4401 }
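// Hedged sketch of the TLAB fast path hidden inside allocate_instance()
// above (illustrative; assumes a bump-pointer TLAB, not the exact emitted code):
//   new_top = thread->tlab().top() + instance_size;
//   if (new_top > thread->tlab().end()) goto slow_case;  // fall back to the runtime
//   thread->tlab().set_top(new_top);                     // bump the pointer
//   initialize the header (mark word + klass) and zero the instance fields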
4402 
4403 void TemplateTable::newarray() {
4404   transition(itos, atos);
4405   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4406   __ load_unsigned_byte(rarg1, at_bcp(1));
4407   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4408           rarg1, rax);
4409 }

4418   __ get_constant_pool(rarg1);
4419   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4420           rarg1, rarg2, rax);
4421 }
4422 
4423 void TemplateTable::arraylength() {
4424   transition(atos, itos);
4425   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4426 }
4427 
4428 void TemplateTable::checkcast() {
4429   transition(atos, atos);
4430   Label done, is_null, ok_is_subtype, quicked, resolved;
4431   __ testptr(rax, rax); // object is in rax
4432   __ jcc(Assembler::zero, is_null);
4433 
4434   // Get cpool & tags index
4435   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4436   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4437   // See if bytecode has already been quicked
4438   __ movzbl(rdx, Address(rdx, rbx,
4439       Address::times_1,
4440       Array<u1>::base_offset_in_bytes()));
4441   __ cmpl(rdx, JVM_CONSTANT_Class);
4442   __ jcc(Assembler::equal, quicked);
4443   __ push(atos); // save receiver for result, and for GC
4444   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4445 
4446   // vm_result_2 has metadata result
4447 #ifndef _LP64
4448   // borrow rdi from locals
4449   __ get_thread(rdi);
4450   __ get_vm_result_2(rax, rdi);
4451   __ restore_locals();
4452 #else
4453   __ get_vm_result_2(rax, r15_thread);
4454 #endif
4455 
4456   __ pop_ptr(rdx); // restore receiver
4457   __ jmpb(resolved);
4458 
4459   // Get superklass in rax and subklass in rbx
4460   __ bind(quicked);
4461   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4462   __ load_resolved_klass_at_index(rax, rcx, rbx);
4463 
4464   __ bind(resolved);
4465   __ load_klass(rbx, rdx, rscratch1);
4466 
4467   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4468   // Superklass in rax.  Subklass in rbx.
4469   __ gen_subtype_check(rbx, ok_is_subtype);
4470 
4471   // Come here on failure
4472   __ push_ptr(rdx);
4473   // object is at TOS
4474   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
4475 
4476   // Come here on success
4477   __ bind(ok_is_subtype);
4478   __ mov(rax, rdx); // Restore object from rdx
4479   __ jmp(done);
4480 
4481   __ bind(is_null);
4482 
4483   // Collect counts on whether this check-cast sees nulls a lot or not.
4484   if (ProfileInterpreter) {
4485     __ profile_null_seen(rcx);
4486   }
4487 
4488   __ bind(done);
4489 }
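// Hedged Java-level sketch of the checkcast semantics implemented above
// (illustrative):
//   if (obj == null) return obj;                   // null always passes (profiled)
//   Klass* k = resolved CP klass (quickened) or via quicken_io_cc;
//   if (!obj->klass()->is_subtype_of(k)) throw ClassCastException;
//   return obj;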
4490 
4491 void TemplateTable::instanceof() {
4492   transition(atos, itos);
4493   Label done, is_null, ok_is_subtype, quicked, resolved;
4494   __ testptr(rax, rax);
4495   __ jcc(Assembler::zero, is_null);
4496 
4497   // Get cpool & tags index
4498   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4499   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4500   // See if bytecode has already been quicked
4501   __ movzbl(rdx, Address(rdx, rbx,
4502         Address::times_1,
4503         Array<u1>::base_offset_in_bytes()));
4504   __ cmpl(rdx, JVM_CONSTANT_Class);
4505   __ jcc(Assembler::equal, quicked);
4506 
4507   __ push(atos); // save receiver for result, and for GC
4508   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4509   // vm_result_2 has metadata result
4510 
4511 #ifndef _LP64
4512   // borrow rdi from locals
4513   __ get_thread(rdi);
4514   __ get_vm_result_2(rax, rdi);
4515   __ restore_locals();
4516 #else
4517   __ get_vm_result_2(rax, r15_thread);
4518 #endif
4519 
4520   __ pop_ptr(rdx); // restore receiver
4521   __ verify_oop(rdx);
4522   __ load_klass(rdx, rdx, rscratch1);
4523   __ jmpb(resolved);
4524 

4536   // Come here on failure
4537   __ xorl(rax, rax);
4538   __ jmpb(done);
4539   // Come here on success
4540   __ bind(ok_is_subtype);
4541   __ movl(rax, 1);
4542 
4543   // Collect counts on whether this test sees nulls a lot or not.
4544   if (ProfileInterpreter) {
4545     __ jmp(done);
4546     __ bind(is_null);
4547     __ profile_null_seen(rcx);
4548   } else {
4549     __ bind(is_null);   // same as 'done'
4550   }
4551   __ bind(done);
4552   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4553   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4554 }
4555 

4556 //----------------------------------------------------------------------------------------------------
4557 // Breakpoints
4558 void TemplateTable::_breakpoint() {
4559   // Note: We get here even if we are single stepping:
4560   // jbug insists on setting breakpoints at every bytecode
4561   // even in single-step mode.
4562 
4563   transition(vtos, vtos);
4564 
4565   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4566 
4567   // get the unpatched byte code
4568   __ get_method(rarg);
4569   __ call_VM(noreg,
4570              CAST_FROM_FN_PTR(address,
4571                               InterpreterRuntime::get_original_bytecode_at),
4572              rarg, rbcp);
4573   __ mov(rbx, rax);  // save the original bytecode; the VM call below clobbers rax
4574 
4575   // post the breakpoint event

4597 // Note: monitorenter & exit are symmetric routines; which is reflected
4598 //       in the assembly code structure as well
4599 //
4600 // Stack layout:
4601 //
4602 // [expressions  ] <--- rsp               = expression stack top
4603 // ..
4604 // [expressions  ]
4605 // [monitor entry] <--- monitor block top = expression stack bot
4606 // ..
4607 // [monitor entry]
4608 // [frame data   ] <--- monitor block bot
4609 // ...
4610 // [saved rbp    ] <--- rbp
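// Hedged C-style sketch of the free-slot scan performed in monitorenter
// below (illustrative only):
//   BasicObjectLock* free = nullptr;
//   for (BasicObjectLock* e = monitor_block_top; e != monitor_block_bot; e++) {
//     if (e->obj() == nullptr) free = e;   // remember a free slot
//     if (e->obj() == lock_obj) break;     // existing entry for this object
//   }
//   if (free == nullptr) grow the monitor block; then store lock_obj and lock.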
4611 void TemplateTable::monitorenter() {
4612   transition(atos, vtos);
4613 
4614   // check for null object
4615   __ null_check(rax);
4616 
4617   Label is_inline_type;
4618   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4619   __ test_markword_is_inline_type(rbx, is_inline_type);
4620 
4621   const Address monitor_block_top(
4622         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4623   const Address monitor_block_bot(
4624         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4625   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4626 
4627   Label allocated;
4628 
4629   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4630   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4631   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4632 
4633   // initialize entry pointer
4634   __ xorl(rmon, rmon); // points to free slot or null
4635 
4636   // find a free slot in the monitor block (result in rmon)
4637   {
4638     Label entry, loop, exit;
4639     __ movptr(rtop, monitor_block_top); // derelativize pointer
4640     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

4693   // rmon: points to monitor entry
4694   __ bind(allocated);
4695 
4696   // Increment bcp to point to the next bytecode, so exception
4697   // handling for async. exceptions works correctly.
4698   // The object has already been popped from the stack, so the
4699   // expression stack looks correct.
4700   __ increment(rbcp);
4701 
4702   // store object
4703   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4704   __ lock_object(rmon);
4705 
4706   // check to make sure this monitor doesn't cause stack overflow after locking
4707   __ save_bcp();  // in case of exception
4708   __ generate_stack_overflow_check(0);
4709 
4710   // The bcp has already been incremented. Just need to dispatch to
4711   // next instruction.
4712   __ dispatch_next(vtos);
4713 
4714   __ bind(is_inline_type);
4715   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4716                     InterpreterRuntime::throw_identity_exception), rax);
4717   __ should_not_reach_here();
4718 }
4719 
4720 void TemplateTable::monitorexit() {
4721   transition(atos, vtos);
4722 
4723   // check for null object
4724   __ null_check(rax);
4725 
4726   const int is_inline_type_mask = markWord::inline_type_pattern;
4727   Label has_identity;
4728   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4729   __ andptr(rbx, is_inline_type_mask);
4730   __ cmpl(rbx, is_inline_type_mask);
4731   __ jcc(Assembler::notEqual, has_identity);
4732   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4733                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4734   __ should_not_reach_here();
4735   __ bind(has_identity);
4736 
4737   const Address monitor_block_top(
4738         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4739   const Address monitor_block_bot(
4740         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4741   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4742 
4743   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4744   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4745 
4746   Label found;
4747 
4748   // find matching slot
4749   {
4750     Label entry, loop;
4751     __ movptr(rtop, monitor_block_top); // derelativize pointer
4752     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4753     // rtop points to current entry, starting with top-most entry
4754 
4755     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4756                                         // of monitor block