src/hotspot/cpu/x86/templateTable_x86.cpp

   1 /*
   2  * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodCounters.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/oop.inline.hpp"
  40 #include "oops/resolvedFieldEntry.hpp"
  41 #include "oops/resolvedIndyEntry.hpp"
  42 #include "oops/resolvedMethodEntry.hpp"
  43 #include "prims/jvmtiExport.hpp"
  44 #include "prims/methodHandles.hpp"
  45 #include "runtime/frame.inline.hpp"
  46 #include "runtime/safepointMechanism.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubRoutines.hpp"
  49 #include "runtime/synchronizer.hpp"
  50 #include "utilities/macros.hpp"
  51 
  52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  53 
  54 // Global Register Names
  55 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  56 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  57 
  58 // Address Computation: local variables
  59 static inline Address iaddress(int n) {

 166 static void do_oop_load(InterpreterMacroAssembler* _masm,
 167                         Address src,
 168                         Register dst,
 169                         DecoratorSet decorators = 0) {
 170   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 171 }
 172 
 173 Address TemplateTable::at_bcp(int offset) {
 174   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 175   return Address(rbcp, offset);
 176 }
 177 
 178 
 179 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 180                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 181                                    int byte_no) {
 182   if (!RewriteBytecodes)  return;
 183   Label L_patch_done;
 184 
 185   switch (bc) {
 186   case Bytecodes::_fast_aputfield:
 187   case Bytecodes::_fast_bputfield:
 188   case Bytecodes::_fast_zputfield:
 189   case Bytecodes::_fast_cputfield:
 190   case Bytecodes::_fast_dputfield:
 191   case Bytecodes::_fast_fputfield:
 192   case Bytecodes::_fast_iputfield:
 193   case Bytecodes::_fast_lputfield:
 194   case Bytecodes::_fast_sputfield:
 195     {
 196       // We skip bytecode quickening for putfield instructions when
 197       // the put_code written to the constant pool cache is zero.
 198       // This is required so that every execution of this instruction
 199       // calls out to InterpreterRuntime::resolve_get_put to do
 200       // additional, required work.
 201       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 202       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 203       __ load_field_entry(temp_reg, bc_reg);
 204       if (byte_no == f1_byte) {
 205         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

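For context: quickening rewrites a just-resolved bytecode into its _fast_ variant so later executions skip the resolution path. A minimal sketch of the guard generated above, in hypothetical C++ rather than the emitted assembly (entry and fast_bc are illustrative names):

    // Sketch only: patch unless the field entry is still unresolved.
    u1 code = (byte_no == f1_byte) ? entry->get_code() : entry->put_code();
    if (code == 0) goto L_patch_done;   // keep calling resolve_get_put
    *bcp = fast_bc;                     // rewrite to the _fast_ bytecode
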
 813                     Address(rdx, rax,
 814                             Address::times_4,
 815                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 816                     noreg, noreg);
 817 }
 818 
 819 void TemplateTable::daload() {
 820   transition(itos, dtos);
 821   // rax: index
 822   // rdx: array
 823   index_check(rdx, rax); // kills rbx
 824   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 825                     Address(rdx, rax,
 826                             Address::times_8,
 827                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 828                     noreg, noreg);
 829 }
 830 
 831 void TemplateTable::aaload() {
 832   transition(itos, atos);
 833   // rax: index
 834   // rdx: array
 835   index_check(rdx, rax); // kills rbx
 836   do_oop_load(_masm,
 837               Address(rdx, rax,
 838                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 839                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 840               rax,
 841               IS_ARRAY);
 842 }
 843 
 844 void TemplateTable::baload() {
 845   transition(itos, itos);
 846   // rax: index
 847   // rdx: array
 848   index_check(rdx, rax); // kills rbx
 849   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 850                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 851                     noreg, noreg);
 852 }
 853 
 854 void TemplateTable::caload() {
 855   transition(itos, itos);
 856   // rax: index
 857   // rdx: array
 858   index_check(rdx, rax); // kills rbx
 859   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 860                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 861                     noreg, noreg);

1107   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1108                      Address(rdx, rbx, Address::times_4,
1109                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1110                      noreg /* ftos */, noreg, noreg, noreg);
1111 }
1112 
1113 void TemplateTable::dastore() {
1114   transition(dtos, vtos);
1115   __ pop_i(rbx);
1116   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1117   // rbx:  index
1118   // rdx:  array
1119   index_check(rdx, rbx); // prefer index in rbx
1120   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1121                      Address(rdx, rbx, Address::times_8,
1122                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1123                      noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125 
1126 void TemplateTable::aastore() {
1127   Label is_null, ok_is_subtype, done;
1128   transition(vtos, vtos);
1129   // stack: ..., array, index, value
1130   __ movptr(rax, at_tos());    // value
1131   __ movl(rcx, at_tos_p1()); // index
1132   __ movptr(rdx, at_tos_p2()); // array
1133 
1134   Address element_address(rdx, rcx,
1135                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1136                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1137 
1138   index_check_without_pop(rdx, rcx);     // kills rbx
1139   __ testptr(rax, rax);
1140   __ jcc(Assembler::zero, is_null);
1141 
1142   // Move subklass into rbx
1143   __ load_klass(rbx, rax, rscratch1);
1144   // Move superklass into rax
1145   __ load_klass(rax, rdx, rscratch1);
1146   __ movptr(rax, Address(rax,
1147                          ObjArrayKlass::element_klass_offset()));
1148 
1149   // Generate subtype check.  Blows rcx, rdi
1150   // Superklass in rax.  Subklass in rbx.
1151   __ gen_subtype_check(rbx, ok_is_subtype);
1152 
1153   // Come here on failure
1154   // object is at TOS
1155   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1156 
1157   // Come here on success
1158   __ bind(ok_is_subtype);
1159 
1160   // Get the value we will store
1161   __ movptr(rax, at_tos());
1162   __ movl(rcx, at_tos_p1()); // index
1163   // Now store using the appropriate barrier
1164   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1165   __ jmp(done);
1166 
1167   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1168   __ bind(is_null);
1169   __ profile_null_seen(rbx);
1170 
1171   // Store a null
1172   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1173 
1174   // Pop stack arguments
1175   __ bind(done);
1176   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1177 }
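
Taken together, the sequence above implements the aastore semantics. A hedged C-style summary (helper names illustrative, not HotSpot API):

    oop  value = tos[0];                            // at_tos()
    jint index = tos[1];                            // at_tos_p1()
    oop  array = tos[2];                            // at_tos_p2()
    if (value != nullptr) {
      Klass* elem = ObjArrayKlass::cast(array->klass())->element_klass();
      if (!value->klass()->is_subtype_of(elem))     // gen_subtype_check
        throw ArrayStoreException;
    }
    store_with_barrier(&array[index], value);       // do_oop_store, IS_ARRAY
    sp += 3;                                        // pop array, index, value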
1178 
1179 void TemplateTable::bastore() {
1180   transition(itos, vtos);
1181   __ pop_i(rbx);
1182   // rax: value
1183   // rbx: index
1184   // rdx: array
1185   index_check(rdx, rbx); // prefer index in rbx
1186   // Need to check whether array is boolean or byte
1187   // since both types share the bastore bytecode.
1188   __ load_klass(rcx, rdx, rscratch1);
1189   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1190   int diffbit = Klass::layout_helper_boolean_diffbit();
1191   __ testl(rcx, diffbit);
1192   Label L_skip;
1193   __ jccb(Assembler::zero, L_skip);
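
The diffbit test distinguishes boolean[] from byte[], which share the bastore bytecode. When the bit is set, the elided tail presumably masks the value down to its low bit before the one-byte store; a sketch under that assumption:

    // Sketch: normalize boolean stores, then fall through to L_skip.
    if (layout_helper & Klass::layout_helper_boolean_diffbit())
      value &= 1;                        // boolean[] keeps only 0 or 1
    // L_skip: the single-byte store follows for both array types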

2322   __ jcc(j_not(cc), not_taken);
2323   branch(false, false);
2324   __ bind(not_taken);
2325   __ profile_not_taken_branch(rax);
2326 }
2327 
2328 void TemplateTable::if_nullcmp(Condition cc) {
2329   transition(atos, vtos);
2330   // assume branch is more often taken than not (loops use backward branches)
2331   Label not_taken;
2332   __ testptr(rax, rax);
2333   __ jcc(j_not(cc), not_taken);
2334   branch(false, false);
2335   __ bind(not_taken);
2336   __ profile_not_taken_branch(rax);
2337 }
2338 
2339 void TemplateTable::if_acmp(Condition cc) {
2340   transition(atos, vtos);
2341   // assume branch is more often taken than not (loops use backward branches)
2342   Label not_taken;
2343   __ pop_ptr(rdx);
2344   __ cmpoop(rdx, rax);
2345   __ jcc(j_not(cc), not_taken);
2346   branch(false, false);
2347   __ bind(not_taken);
2348   __ profile_not_taken_branch(rax);
2349 }
2350 
2351 void TemplateTable::ret() {
2352   transition(vtos, vtos);
2353   locals_index(rbx);
2354   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2355   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2356   __ profile_ret(rbx, rcx);
2357   __ get_method(rax);
2358   __ movptr(rbcp, Address(rax, Method::const_offset()));
2359   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2360                       ConstMethod::codes_offset()));
2361   __ dispatch_next(vtos, 0, true);
2362 }
2363 
2364 void TemplateTable::wide_ret() {
2365   transition(vtos, vtos);
2366   locals_index_wide(rbx);
2367   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2368   __ profile_ret(rbx, rcx);

2597     const Register thread = rdi;
2598     __ get_thread(thread);
2599     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2600 #endif
2601     __ jcc(Assembler::zero, no_safepoint);
2602     __ push(state);
2603     __ push_cont_fastpath();
2604     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2605                                        InterpreterRuntime::at_safepoint));
2606     __ pop_cont_fastpath();
2607     __ pop(state);
2608     __ bind(no_safepoint);
2609   }
2610 
2611   // Narrow result if state is itos but result type is smaller.
2612   // Need to narrow in the return bytecode rather than in generate_return_entry
2613   // since compiled code callers expect the result to already be narrowed.
2614   if (state == itos) {
2615     __ narrow(rax);
2616   }
2617   __ remove_activation(state, rbcp);
2618 
2619   __ jmp(rbcp);
2620 }
2621 
2622 // ----------------------------------------------------------------------------
2623 // Volatile variables demand their effects be made known to all CPUs
2624 // in order.  Store buffers on most chips allow reads & writes to
2625 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2626 // without some kind of memory barrier (i.e., it's not sufficient that
2627 // the interpreter does not reorder volatile references, the hardware
2628 // also must not reorder them).
2629 //
2630 // According to the new Java Memory Model (JMM):
2631 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2632 //     writes act as acquire & release, so:
2633 // (2) A read cannot let unrelated NON-volatile memory refs that
2634 //     happen after the read float up to before the read.  It's OK for
2635 //     non-volatile memory refs that happen before the volatile read to
2636 //     float down below it.
2637 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

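On x86's TSO model the acquire/release halves come for free, so only a trailing StoreLoad after a volatile store needs an explicit instruction. A sketch of the shape the put paths below emit (assuming volatile_barrier degenerates to a locked read-modify-write such as lock addl, rather than mfence):

    // volatile putfield, schematically:
    do_store(field, value);                      // the store itself
    volatile_barrier(StoreLoad | StoreStore);    // drain the store buffer
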
2963     }
2964     // rax,:   object pointer or null
2965     // cache: cache entry pointer
2966     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2967               rax, cache);
2968 
2969     __ load_field_entry(cache, index);
2970     __ bind(L1);
2971   }
2972 }
2973 
2974 void TemplateTable::pop_and_check_object(Register r) {
2975   __ pop_ptr(r);
2976   __ null_check(r);  // for field access must check obj.
2977   __ verify_oop(r);
2978 }
2979 
2980 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2981   transition(vtos, vtos);
2982 
2983   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2984   const Register cache = rcx;
2985   const Register index = rdx;
2986   const Register off   = rbx;
2987   const Register tos_state   = rax;
2988   const Register flags = rdx;
2989   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2990 
2991   resolve_cache_and_index_for_field(byte_no, cache, index);
2992   jvmti_post_field_access(cache, index, is_static, false);
2993   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2994 
2995   if (!is_static) pop_and_check_object(obj);
2996 
2997   const Address field(obj, off, Address::times_1, 0*wordSize);
2998 
2999   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
3000 
3001   // Make sure we don't need to mask edx after the above shift
3002   assert(btos == 0, "change code, btos != 0");
3003   __ testl(tos_state, tos_state);
3004   __ jcc(Assembler::notZero, notByte);
3005 
3006   // btos
3007   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3008   __ push(btos);
3009   // Rewrite bytecode to be faster
3010   if (!is_static && rc == may_rewrite) {
3011     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3012   }
3013   __ jmp(Done);
3014 
3015   __ bind(notByte);
3016   __ cmpl(tos_state, ztos);
3017   __ jcc(Assembler::notEqual, notBool);
3018 
3019   // ztos (same code as btos)
3020   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3021   __ push(ztos);
3022   // Rewrite bytecode to be faster
3023   if (!is_static && rc == may_rewrite) {
3024     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3025     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3026   }
3027   __ jmp(Done);
3028 
3029   __ bind(notBool);
3030   __ cmpl(tos_state, atos);
3031   __ jcc(Assembler::notEqual, notObj);
3032   // atos
3033   do_oop_load(_masm, field, rax);
3034   __ push(atos);
3035   if (!is_static && rc == may_rewrite) {
3036     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3037   }
3038   __ jmp(Done);
3039 
3040   __ bind(notObj);
3041   __ cmpl(tos_state, itos);
3042   __ jcc(Assembler::notEqual, notInt);
3043   // itos
3044   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3045   __ push(itos);
3046   // Rewrite bytecode to be faster
3047   if (!is_static && rc == may_rewrite) {
3048     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3049   }
3050   __ jmp(Done);
3051 
3052   __ bind(notInt);
3053   __ cmpl(tos_state, ctos);
3054   __ jcc(Assembler::notEqual, notChar);
3055   // ctos
3056   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3057   __ push(ctos);
3058   // Rewrite bytecode to be faster
3059   if (!is_static && rc == may_rewrite) {
3060     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3120 #endif
3121 
3122   __ bind(Done);
3123   // [jk] not needed currently
3124   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3125   //                                              Assembler::LoadStore));
3126 }
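
The chain of tos_state compares above amounts to a switch over the type tag cached at resolve time; a condensed, illustrative sketch:

    switch (tos_state) {                 // from the ResolvedFieldEntry
      case btos: push_b(field); maybe_patch(_fast_bgetfield); break;
      case ztos: push_z(field); maybe_patch(_fast_bgetfield); break; // reuses btos
      case atos: push_a(field); maybe_patch(_fast_agetfield); break;
      case itos: push_i(field); maybe_patch(_fast_igetfield); break;
      // ctos, stos, ltos, ftos, dtos follow the same pattern, then Done
    }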
3127 
3128 void TemplateTable::getfield(int byte_no) {
3129   getfield_or_static(byte_no, false);
3130 }
3131 
3132 void TemplateTable::nofast_getfield(int byte_no) {
3133   getfield_or_static(byte_no, false, may_not_rewrite);
3134 }
3135 
3136 void TemplateTable::getstatic(int byte_no) {
3137   getfield_or_static(byte_no, true);
3138 }
3139 
3140 
3141 // The registers cache and index expected to be set before call.
3142 // The function may destroy various registers, just not the cache and index registers.
3143 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3144   // Cache is rcx and index is rdx
3145   const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3146   const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx);   // Object pointer
3147   const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3148 
3149   if (JvmtiExport::can_post_field_modification()) {
3150     // Check to see if a field modification watch has been set before
3151     // we take the time to call into the VM.
3152     Label L1;
3153     assert_different_registers(cache, obj, rax);
3154     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3155     __ testl(rax, rax);
3156     __ jcc(Assembler::zero, L1);
3157 
3158     __ mov(entry, cache);
3159 
3160     if (is_static) {

3202     // cache: field entry pointer
3203     // value: jvalue object on the stack
3204     __ call_VM(noreg,
3205               CAST_FROM_FN_PTR(address,
3206                               InterpreterRuntime::post_field_modification),
3207               obj, entry, value);
3208     // Reload field entry
3209     __ load_field_entry(cache, index);
3210     __ bind(L1);
3211   }
3212 }
3213 
3214 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3215   transition(vtos, vtos);
3216 
3217   const Register obj = rcx;
3218   const Register cache = rcx;
3219   const Register index = rdx;
3220   const Register tos_state   = rdx;
3221   const Register off   = rbx;
3222   const Register flags = rax;
3223 
3224   resolve_cache_and_index_for_field(byte_no, cache, index);
3225   jvmti_post_field_mod(cache, index, is_static);
3226   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3227 
3228   // [jk] not needed currently
3229   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3230   //                                              Assembler::StoreStore));
3231 
3232   Label notVolatile, Done;
3233 
3234   // Check for volatile store
3235   __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
3236   __ testl(flags, flags);
3237   __ jcc(Assembler::zero, notVolatile);
3238 
3239   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3240   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3241                                                Assembler::StoreStore));
3242   __ jmp(Done);
3243   __ bind(notVolatile);
3244 
3245   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3246 
3247   __ bind(Done);
3248 }
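
Note that the helper is emitted twice so the common non-volatile path carries no fence at all. Schematically (the flag name is from ResolvedFieldEntry, the rest is illustrative):

    if (flags & (1 << ResolvedFieldEntry::is_volatile_shift)) {
      do_put();                                    // the store
      volatile_barrier(StoreLoad | StoreStore);    // fence only on this path
    } else {
      do_put();                                    // no fence
    }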
3249 
3250 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3251                                               Register obj, Register off, Register tos_state) {
3252 
3253   // field addresses
3254   const Address field(obj, off, Address::times_1, 0*wordSize);
3255   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3256 
3257   Label notByte, notBool, notInt, notShort, notChar,
3258         notLong, notFloat, notObj;
3259   Label Done;
3260 
3261   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3262 
3263   // Test TOS state
3264   __ testl(tos_state, tos_state);
3265   __ jcc(Assembler::notZero, notByte);
3266 
3267   // btos
3268   {
3269     __ pop(btos);
3270     if (!is_static) pop_and_check_object(obj);
3271     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3272     if (!is_static && rc == may_rewrite) {
3273       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3274     }
3275     __ jmp(Done);
3276   }
3277 
3278   __ bind(notByte);
3279   __ cmpl(tos_state, ztos);
3280   __ jcc(Assembler::notEqual, notBool);
3281 
3282   // ztos
3283   {
3284     __ pop(ztos);
3285     if (!is_static) pop_and_check_object(obj);
3286     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3287     if (!is_static && rc == may_rewrite) {
3288       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3289     }
3290     __ jmp(Done);
3291   }
3292 
3293   __ bind(notBool);
3294   __ cmpl(tos_state, atos);
3295   __ jcc(Assembler::notEqual, notObj);
3296 
3297   // atos
3298   {
3299     __ pop(atos);
3300     if (!is_static) pop_and_check_object(obj);
3301     // Store into the field
3302     do_oop_store(_masm, field, rax);
3303     if (!is_static && rc == may_rewrite) {
3304       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3305     }
3306     __ jmp(Done);
3307   }
3308 
3309   __ bind(notObj);
3310   __ cmpl(tos_state, itos);
3311   __ jcc(Assembler::notEqual, notInt);
3312 
3313   // itos
3314   {
3315     __ pop(itos);
3316     if (!is_static) pop_and_check_object(obj);
3317     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3318     if (!is_static && rc == may_rewrite) {
3319       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3320     }
3321     __ jmp(Done);
3322   }
3323 
3324   __ bind(notInt);
3325   __ cmpl(tos_state, ctos);
3326   __ jcc(Assembler::notEqual, notChar);

3425 }
3426 
3427 void TemplateTable::jvmti_post_fast_field_mod() {
3428 
3429   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3430 
3431   if (JvmtiExport::can_post_field_modification()) {
3432     // Check to see if a field modification watch has been set before
3433     // we take the time to call into the VM.
3434     Label L2;
3435     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3436     __ testl(scratch, scratch);
3437     __ jcc(Assembler::zero, L2);
3438     __ pop_ptr(rbx);                  // copy the object pointer from tos
3439     __ verify_oop(rbx);
3440     __ push_ptr(rbx);                 // put the object pointer back on tos
3441     // Save tos values before call_VM() clobbers them. Since we have
3442     // to do it for every data type, we use the saved values as the
3443     // jvalue object.
3444     switch (bytecode()) {          // load values into the jvalue object
3445     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3446     case Bytecodes::_fast_bputfield: // fall through
3447     case Bytecodes::_fast_zputfield: // fall through
3448     case Bytecodes::_fast_sputfield: // fall through
3449     case Bytecodes::_fast_cputfield: // fall through
3450     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3451     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3452     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3453     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3454 
3455     default:
3456       ShouldNotReachHere();
3457     }
3458     __ mov(scratch, rsp);             // points to jvalue on the stack
3459     // access constant pool cache entry
3460     LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3461     NOT_LP64(__ load_field_entry(rax, rdx));
3462     __ verify_oop(rbx);
3463     // rbx: object pointer copied above
3464     // c_rarg2: cache entry pointer
3465     // c_rarg3: jvalue object on the stack
3466     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3467     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3468 
3469     switch (bytecode()) {             // restore tos values
3470     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3471     case Bytecodes::_fast_bputfield: // fall through
3472     case Bytecodes::_fast_zputfield: // fall through
3473     case Bytecodes::_fast_sputfield: // fall through
3474     case Bytecodes::_fast_cputfield: // fall through
3475     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3476     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3477     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3478     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3479     default: break;
3480     }
3481     __ bind(L2);
3482   }
3483 }
3484 
3485 void TemplateTable::fast_storefield(TosState state) {
3486   transition(state, vtos);
3487 
3488   Register cache = rcx;
3489 
3490   Label notVolatile, Done;
3491 
3492   jvmti_post_fast_field_mod();
3493 
3494   __ push(rax);
3495   __ load_field_entry(rcx, rax);
3496   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3497   // RBX: field offset, RAX: TOS, RDX: flags
3498   __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3499   __ pop(rax);
3500 
3501   // Get object from stack
3502   pop_and_check_object(rcx);
3503 
3504   // field address
3505   const Address field(rcx, rbx, Address::times_1);
3506 
3507   // Check for volatile store
3508   __ testl(rdx, rdx);
3509   __ jcc(Assembler::zero, notVolatile);
3510 
3511   fast_storefield_helper(field, rax);
3512   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3513                                                Assembler::StoreStore));
3514   __ jmp(Done);
3515   __ bind(notVolatile);
3516 
3517   fast_storefield_helper(field, rax);
3518 
3519   __ bind(Done);
3520 }
3521 
3522 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3523 
3524   // access field
3525   switch (bytecode()) {
3526   case Bytecodes::_fast_aputfield:
3527     do_oop_store(_masm, field, rax);
3528     break;
3529   case Bytecodes::_fast_lputfield:
3530 #ifdef _LP64
3531     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3532 #else
3533   __ stop("should not be rewritten");
3534 #endif
3535     break;
3536   case Bytecodes::_fast_iputfield:
3537     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3538     break;
3539   case Bytecodes::_fast_zputfield:
3540     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3541     break;
3542   case Bytecodes::_fast_bputfield:
3543     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3544     break;
3545   case Bytecodes::_fast_sputfield:
3546     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3547     break;

3569     Label L1;
3570     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3571     __ testl(rcx, rcx);
3572     __ jcc(Assembler::zero, L1);
3573     // access constant pool cache entry
3574     LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3575     NOT_LP64(__ load_field_entry(rcx, rdx));
3576     __ verify_oop(rax);
3577     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3578     LP64_ONLY(__ mov(c_rarg1, rax));
3579     // c_rarg1: object pointer copied above
3580     // c_rarg2: cache entry pointer
3581     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3582     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3583     __ pop_ptr(rax); // restore object pointer
3584     __ bind(L1);
3585   }
3586 
3587   // access constant pool cache
3588   __ load_field_entry(rcx, rbx);
3589   __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3590 
3591   // rax: object
3592   __ verify_oop(rax);
3593   __ null_check(rax);
3594   Address field(rax, rbx, Address::times_1);
3595 
3596   // access field
3597   switch (bytecode()) {
3598   case Bytecodes::_fast_agetfield:
3599     do_oop_load(_masm, field, rax);
3600     __ verify_oop(rax);
3601     break;
3602   case Bytecodes::_fast_lgetfield:
3603 #ifdef _LP64
3604     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3605 #else
3606   __ stop("should not be rewritten");
3607 #endif
3608     break;
3609   case Bytecodes::_fast_igetfield:
3610     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3611     break;
3612   case Bytecodes::_fast_bgetfield:
3613     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3614     break;
3615   case Bytecodes::_fast_sgetfield:
3616     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3617     break;

4013 
4014   // Note:  rax_callsite is already pushed
4015 
4016   // %%% should make a type profile for any invokedynamic that takes a ref argument
4017   // profile this call
4018   __ profile_call(rbcp);
4019   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4020 
4021   __ verify_oop(rax_callsite);
4022 
4023   __ jump_from_interpreted(rbx_method, rdx);
4024 }
4025 
4026 //-----------------------------------------------------------------------------
4027 // Allocation
4028 
4029 void TemplateTable::_new() {
4030   transition(vtos, atos);
4031   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4032   Label slow_case;
4033   Label slow_case_no_pop;
4034   Label done;
4035   Label initialize_header;
4036 
4037   __ get_cpool_and_tags(rcx, rax);
4038 
4039   // Make sure the class we're about to instantiate has been resolved.
4040   // This is done before loading InstanceKlass to be consistent with the order
4041   // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
4042   const int tags_offset = Array<u1>::base_offset_in_bytes();
4043   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4044   __ jcc(Assembler::notEqual, slow_case_no_pop);
4045 
4046   // get InstanceKlass
4047   __ load_resolved_klass_at_index(rcx, rcx, rdx);
4048   __ push(rcx);  // save the contents of klass for initializing the header
4049 
4050   // make sure klass is initialized
4051   // init_state needs acquire, but x86 is TSO, and so we are already good.
4052 #ifdef _LP64
4053   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4054   __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4055 #else
4056   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4057   __ jcc(Assembler::notEqual, slow_case);
4058 #endif
4059 
4060   // get instance_size in InstanceKlass (scaled to a count of bytes)
4061   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4062   // test to see if it is malformed in some way
4063   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4064   __ jcc(Assembler::notZero, slow_case);
4065 
4066   // Allocate the instance:
4067   //  If TLAB is enabled:
4068   //    Try to allocate in the TLAB.
4069   //    If fails, go to the slow path.
4070   //    Initialize the allocation.
4071   //    Exit.
4072   //
4073   //  Go to slow path.
4074 
4075   const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4076 
4077   if (UseTLAB) {
4078     NOT_LP64(__ get_thread(thread);)
4079     __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4080     if (ZeroTLAB) {
4081       // the fields have been already cleared
4082       __ jmp(initialize_header);
4083     }
4084 
4085     // The object is initialized before the header.  If the object size is
4086     // zero, go directly to the header initialization.
4087     __ decrement(rdx, sizeof(oopDesc));
4088     __ jcc(Assembler::zero, initialize_header);
4089 
4090     // Initialize topmost object field, divide rdx by 8, check if odd and
4091     // test if zero.
4092     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
4093     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4094 
4095     // rdx must have been a multiple of 8
4096 #ifdef ASSERT
4097     // make sure rdx was a multiple of 8
4098     Label L;
4099     // Ignore partial flag stall after shrl() since it is debug VM
4100     __ jcc(Assembler::carryClear, L);
4101     __ stop("object size is not multiple of 2 - adjust this code");
4102     __ bind(L);
4103     // rdx must be > 0, no extra check needed here
4104 #endif
4105 
4106     // initialize remaining object fields: rdx was a multiple of 8
4107     { Label loop;
4108     __ bind(loop);
4109     __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4110     NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4111     __ decrement(rdx);
4112     __ jcc(Assembler::notZero, loop);
4113     }
4114 
4115     // initialize object header only.
4116     __ bind(initialize_header);
4117     __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4118               (intptr_t)markWord::prototype().value()); // header
4119     __ pop(rcx);   // get saved klass back in the register.
4120 #ifdef _LP64
4121     __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4122     __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
4123 #endif
4124     __ store_klass(rax, rcx, rscratch1);  // klass
4125 
4126     if (DTraceAllocProbes) {
4127       // Trigger dtrace event for fastpath
4128       __ push(atos);
4129       __ call_VM_leaf(
4130            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4131       __ pop(atos);
4132     }
4133 
4134     __ jmp(done);
4135   }
4136 
4137   // slow case
4138   __ bind(slow_case);
4139   __ pop(rcx);   // restore stack pointer to what it was when we came in.
4140   __ bind(slow_case_no_pop);
4141 
4142   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4143   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4144 
4145   __ get_constant_pool(rarg1);
4146   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4147   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4148   __ verify_oop(rax);
4149 
4150   // continue
4151   __ bind(done);
4152 }
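
A hedged C-level summary of the fast path above (names illustrative; layout-helper details simplified):

    // _new fast path, sketch:
    if (tags[index] != JVM_CONSTANT_Class) goto slow;  // class unresolved
    InstanceKlass* ik = cpool->resolved_klass_at(index);
    if (!ik->is_initialized()) goto slow;              // clinit barrier
    int size = ik->layout_helper();                    // bytes; slow-path bit checked
    HeapWord* obj = tlab_allocate(size);               // branches to slow on failure
    if (!ZeroTLAB) zero_body(obj, size);               // cleared 8 bytes at a time
    obj->set_mark(markWord::prototype());              // header
    obj->set_klass(ik);                                // plus klass gap on LP64
    // slow: InterpreterRuntime::_new(cpool, index)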
4153 
4154 void TemplateTable::newarray() {
4155   transition(itos, atos);
4156   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4157   __ load_unsigned_byte(rarg1, at_bcp(1));
4158   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4159           rarg1, rax);
4160 }

4169   __ get_constant_pool(rarg1);
4170   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4171           rarg1, rarg2, rax);
4172 }
4173 
4174 void TemplateTable::arraylength() {
4175   transition(atos, itos);
4176   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4177 }
4178 
4179 void TemplateTable::checkcast() {
4180   transition(atos, atos);
4181   Label done, is_null, ok_is_subtype, quicked, resolved;
4182   __ testptr(rax, rax); // object is in rax
4183   __ jcc(Assembler::zero, is_null);
4184 
4185   // Get cpool & tags index
4186   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4187   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4188   // See if bytecode has already been quicked
4189   __ cmpb(Address(rdx, rbx,
4190                   Address::times_1,
4191                   Array<u1>::base_offset_in_bytes()),
4192           JVM_CONSTANT_Class);
4193   __ jcc(Assembler::equal, quicked);
4194   __ push(atos); // save receiver for result, and for GC
4195   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4196 
4197   // vm_result_2 has metadata result
4198 #ifndef _LP64
4199   // borrow rdi from locals
4200   __ get_thread(rdi);
4201   __ get_vm_result_2(rax, rdi);
4202   __ restore_locals();
4203 #else
4204   __ get_vm_result_2(rax, r15_thread);
4205 #endif
4206 
4207   __ pop_ptr(rdx); // restore receiver
4208   __ jmpb(resolved);
4209 
4210   // Get superklass in rax and subklass in rbx
4211   __ bind(quicked);
4212   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4213   __ load_resolved_klass_at_index(rax, rcx, rbx);
4214 
4215   __ bind(resolved);
4216   __ load_klass(rbx, rdx, rscratch1);
4217 
4218   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4219   // Superklass in rax.  Subklass in rbx.
4220   __ gen_subtype_check(rbx, ok_is_subtype);
4221 
4222   // Come here on failure
4223   __ push_ptr(rdx);
4224   // object is at TOS
4225   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
4226 
4227   // Come here on success
4228   __ bind(ok_is_subtype);
4229   __ mov(rax, rdx); // Restore object in rdx
4230 
4231   // Collect counts on whether this check-cast sees nulls a lot or not.
4232   if (ProfileInterpreter) {
4233     __ jmp(done);
4234     __ bind(is_null);
4235     __ profile_null_seen(rcx);
4236   } else {
4237     __ bind(is_null);   // same as 'done'
4238   }
4239   __ bind(done);
4240 }
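
checkcast in one line: null always passes, otherwise the object's klass must be a subtype of the resolved class, and the object stays on TOS either way. Sketch:

    if (obj != nullptr && !obj->klass()->is_subtype_of(target_klass))
      throw ClassCastException;    // via _throw_ClassCastException_entry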
4241 
4242 void TemplateTable::instanceof() {
4243   transition(atos, itos);
4244   Label done, is_null, ok_is_subtype, quicked, resolved;
4245   __ testptr(rax, rax);
4246   __ jcc(Assembler::zero, is_null);
4247 
4248   // Get cpool & tags index
4249   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4250   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4251   // See if bytecode has already been quicked
4252   __ cmpb(Address(rdx, rbx,
4253                   Address::times_1,
4254                   Array<u1>::base_offset_in_bytes()),
4255           JVM_CONSTANT_Class);
4256   __ jcc(Assembler::equal, quicked);
4257 
4258   __ push(atos); // save receiver for result, and for GC
4259   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4260   // vm_result_2 has metadata result
4261 
4262 #ifndef _LP64
4263   // borrow rdi from locals
4264   __ get_thread(rdi);
4265   __ get_vm_result_2(rax, rdi);
4266   __ restore_locals();
4267 #else
4268   __ get_vm_result_2(rax, r15_thread);
4269 #endif
4270 
4271   __ pop_ptr(rdx); // restore receiver
4272   __ verify_oop(rdx);
4273   __ load_klass(rdx, rdx, rscratch1);
4274   __ jmpb(resolved);
4275 

4287   // Come here on failure
4288   __ xorl(rax, rax);
4289   __ jmpb(done);
4290   // Come here on success
4291   __ bind(ok_is_subtype);
4292   __ movl(rax, 1);
4293 
4294   // Collect counts on whether this test sees nulls a lot or not.
4295   if (ProfileInterpreter) {
4296     __ jmp(done);
4297     __ bind(is_null);
4298     __ profile_null_seen(rcx);
4299   } else {
4300     __ bind(is_null);   // same as 'done'
4301   }
4302   __ bind(done);
4303   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4304   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4305 }
4306 
4307 
4308 //----------------------------------------------------------------------------------------------------
4309 // Breakpoints
4310 void TemplateTable::_breakpoint() {
4311   // Note: We get here even if we are single stepping...
4312   // jbug insists on setting breakpoints at every bytecode
4313   // even if we are in single step mode.
4314 
4315   transition(vtos, vtos);
4316 
4317   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4318 
4319   // get the unpatched byte code
4320   __ get_method(rarg);
4321   __ call_VM(noreg,
4322              CAST_FROM_FN_PTR(address,
4323                               InterpreterRuntime::get_original_bytecode_at),
4324              rarg, rbcp);
4325   __ mov(rbx, rax);  // why?
4326 
4327   // post the breakpoint event

4349 // Note: monitorenter & exit are symmetric routines; which is reflected
4350 //       in the assembly code structure as well
4351 //
4352 // Stack layout:
4353 //
4354 // [expressions  ] <--- rsp               = expression stack top
4355 // ..
4356 // [expressions  ]
4357 // [monitor entry] <--- monitor block top = expression stack bot
4358 // ..
4359 // [monitor entry]
4360 // [frame data   ] <--- monitor block bot
4361 // ...
4362 // [saved rbp    ] <--- rbp
4363 void TemplateTable::monitorenter() {
4364   transition(atos, vtos);
4365 
4366   // check for null object
4367   __ null_check(rax);
4368 
4369   const Address monitor_block_top(
4370         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4371   const Address monitor_block_bot(
4372         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4373   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4374 
4375   Label allocated;
4376 
4377   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4378   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4379   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4380 
4381   // initialize entry pointer
4382   __ xorl(rmon, rmon); // points to free slot or null
4383 
4384   // find a free slot in the monitor block (result in rmon)
4385   {
4386     Label entry, loop, exit;
4387     __ movptr(rtop, monitor_block_top); // derelativize pointer
4388     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

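The elided search walks the monitor block from top to bottom, remembering any entry whose obj() is null as a free slot and stopping early if the same object is already present; the no-free-slot case grows the block. A sketch under those assumptions:

    // find a free slot, schematically:
    for (BasicObjectLock* e = top; e < bot; e++) {
      if (e->obj() == nullptr) rmon = e;   // remember unused entry
      if (e->obj() == lockee)  break;      // same object: stop searching
    }
    if (rmon == nullptr) { /* grow the monitor block, then use the new slot */ }
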
4441   // rmon: points to monitor entry
4442   __ bind(allocated);
4443 
4444   // Increment bcp to point to the next bytecode, so exception
4445   // handling for async. exceptions works correctly.
4446   // The object has already been popped from the stack, so the
4447   // expression stack looks correct.
4448   __ increment(rbcp);
4449 
4450   // store object
4451   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4452   __ lock_object(rmon);
4453 
4454   // check to make sure this monitor doesn't cause stack overflow after locking
4455   __ save_bcp();  // in case of exception
4456   __ generate_stack_overflow_check(0);
4457 
4458   // The bcp has already been incremented. Just need to dispatch to
4459   // next instruction.
4460   __ dispatch_next(vtos);
4461 }
4462 
4463 void TemplateTable::monitorexit() {
4464   transition(atos, vtos);
4465 
4466   // check for null object
4467   __ null_check(rax);
4468 
4469   const Address monitor_block_top(
4470         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4471   const Address monitor_block_bot(
4472         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4473   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4474 
4475   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4476   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4477 
4478   Label found;
4479 
4480   // find matching slot
4481   {
4482     Label entry, loop;
4483     __ movptr(rtop, monitor_block_top); // derelativize pointer
4484     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4485     // rtop points to current entry, starting with top-most entry
4486 
4487     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4488                                         // of monitor block

   1 /*
   2  * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "compiler/disassembler.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/gc_globals.hpp"
  30 #include "gc/shared/tlab_globals.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "interpreter/interpreterRuntime.hpp"
  33 #include "interpreter/interp_masm.hpp"
  34 #include "interpreter/templateTable.hpp"
  35 #include "memory/universe.hpp"
  36 #include "oops/methodCounters.hpp"
  37 #include "oops/methodData.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/oop.inline.hpp"
  40 #include "oops/inlineKlass.hpp"
  41 #include "oops/resolvedFieldEntry.hpp"
  42 #include "oops/resolvedIndyEntry.hpp"
  43 #include "oops/resolvedMethodEntry.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/methodHandles.hpp"
  46 #include "runtime/frame.inline.hpp"
  47 #include "runtime/safepointMechanism.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "runtime/stubRoutines.hpp"
  50 #include "runtime/synchronizer.hpp"
  51 #include "utilities/macros.hpp"
  52 
  53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  54 
  55 // Global Register Names
  56 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  57 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  58 
  59 // Address Computation: local variables
  60 static inline Address iaddress(int n) {

 167 static void do_oop_load(InterpreterMacroAssembler* _masm,
 168                         Address src,
 169                         Register dst,
 170                         DecoratorSet decorators = 0) {
 171   __ load_heap_oop(dst, src, rdx, rbx, decorators);
 172 }
 173 
 174 Address TemplateTable::at_bcp(int offset) {
 175   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 176   return Address(rbcp, offset);
 177 }
 178 
 179 
 180 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 181                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 182                                    int byte_no) {
 183   if (!RewriteBytecodes)  return;
 184   Label L_patch_done;
 185 
 186   switch (bc) {
 187   case Bytecodes::_fast_vputfield:
 188   case Bytecodes::_fast_aputfield:
 189   case Bytecodes::_fast_bputfield:
 190   case Bytecodes::_fast_zputfield:
 191   case Bytecodes::_fast_cputfield:
 192   case Bytecodes::_fast_dputfield:
 193   case Bytecodes::_fast_fputfield:
 194   case Bytecodes::_fast_iputfield:
 195   case Bytecodes::_fast_lputfield:
 196   case Bytecodes::_fast_sputfield:
 197     {
 198       // We skip bytecode quickening for putfield instructions when
 199       // the put_code written to the constant pool cache is zero.
 200       // This is required so that every execution of this instruction
 201       // calls out to InterpreterRuntime::resolve_get_put to do
 202       // additional, required work.
 203       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 204       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 205       __ load_field_entry(temp_reg, bc_reg);
 206       if (byte_no == f1_byte) {
 207         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 815                     Address(rdx, rax,
 816                             Address::times_4,
 817                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 818                     noreg, noreg);
 819 }
 820 
 821 void TemplateTable::daload() {
 822   transition(itos, dtos);
 823   // rax: index
 824   // rdx: array
 825   index_check(rdx, rax); // kills rbx
 826   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 827                     Address(rdx, rax,
 828                             Address::times_8,
 829                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 830                     noreg, noreg);
 831 }
 832 
 833 void TemplateTable::aaload() {
 834   transition(itos, atos);
 835   Register array = rdx;
 836   Register index = rax;
 837 
 838   index_check(array, index); // kills rbx
 839   __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
 840   if (UseFlatArray) {
 841     Label is_flat_array, done;
 842     __ test_flat_array_oop(array, rbx, is_flat_array);
 843     do_oop_load(_masm,
 844                 Address(array, index,
 845                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 846                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 847                 rax,
 848                 IS_ARRAY);
 849     __ jmp(done);
 850     __ bind(is_flat_array);
 851     __ movptr(rcx, array);
 852     call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), rcx, index);
 853     __ bind(done);
 854   } else {
 855     do_oop_load(_masm,
 856                 Address(array, index,
 857                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 858                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 859                 rax,
 860                 IS_ARRAY);
 861   }
 862   __ profile_element_type(rbx, rax, rcx);
 863 }
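
A flat (Valhalla) array stores inline objects by value, so an element load cannot simply read an oop; the flat branch calls into the runtime, which buffers the element as a heap object. Schematically:

    if (is_flat_array(array))
      result = InterpreterRuntime::flat_array_load(array, index);  // buffer element
    else
      result = load_heap_oop(&array[index]);                       // plain oop load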
 864 
 865 void TemplateTable::baload() {
 866   transition(itos, itos);
 867   // rax: index
 868   // rdx: array
 869   index_check(rdx, rax); // kills rbx
 870   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 871                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 872                     noreg, noreg);
 873 }
 874 
 875 void TemplateTable::caload() {
 876   transition(itos, itos);
 877   // rax: index
 878   // rdx: array
 879   index_check(rdx, rax); // kills rbx
 880   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 881                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 882                     noreg, noreg);

1128   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1129                      Address(rdx, rbx, Address::times_4,
1130                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1131                      noreg /* ftos */, noreg, noreg, noreg);
1132 }
1133 
1134 void TemplateTable::dastore() {
1135   transition(dtos, vtos);
1136   __ pop_i(rbx);
1137   // value is in UseSSE >= 2 ? xmm0 : ST(0)
1138   // rbx:  index
1139   // rdx:  array
1140   index_check(rdx, rbx); // prefer index in rbx
1141   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1142                      Address(rdx, rbx, Address::times_8,
1143                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1144                      noreg /* dtos */, noreg, noreg, noreg);
1145 }
1146 
1147 void TemplateTable::aastore() {
1148   Label is_null, is_flat_array, ok_is_subtype, done;
1149   transition(vtos, vtos);
1150   // stack: ..., array, index, value
1151   __ movptr(rax, at_tos());    // value
1152   __ movl(rcx, at_tos_p1()); // index
1153   __ movptr(rdx, at_tos_p2()); // array
1154 
1155   Address element_address(rdx, rcx,
1156                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1157                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1158 
1159   index_check_without_pop(rdx, rcx);     // kills rbx
1160 
1161   __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1162   __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1163 
1164   __ testptr(rax, rax);
1165   __ jcc(Assembler::zero, is_null);
1166 
1167   // Move array class to rdi
1168   __ load_klass(rdi, rdx, rscratch1);
1169   if (UseFlatArray) {
1170     __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1171     __ test_flat_array_layout(rbx, is_flat_array);
1172   }
1173 
1174   // Move subklass into rbx
1175   __ load_klass(rbx, rax, rscratch1);
1176   // Move array element superklass into rax
1177   __ movptr(rax, Address(rdi,
1178                          ObjArrayKlass::element_klass_offset()));
1179 
1180   // Generate subtype check.  Blows rcx, rdi
1181   // Superklass in rax.  Subklass in rbx.
1182   // is "rbx <: rax" ? (value subclass <: array element superclass)
1183   __ gen_subtype_check(rbx, ok_is_subtype, false);
1184 
1185   // Come here on failure
1186   // object is at TOS
1187   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1188 
1189   // Come here on success
1190   __ bind(ok_is_subtype);
1191 
1192   // Get the value we will store
1193   __ movptr(rax, at_tos());
1194   __ movl(rcx, at_tos_p1()); // index
1195   // Now store using the appropriate barrier
1196   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1197   __ jmp(done);
1198 
1199   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1200   __ bind(is_null);
1201   if (EnableValhalla) {
1202     Label is_null_into_value_array_npe, store_null;
1203 
1204     // Move array class to rdi
1205     __ load_klass(rdi, rdx, rscratch1);
1206     if (UseFlatArray) {
1207       __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1208       __ test_flat_array_layout(rbx, is_flat_array);
1209     }
1210 
1211     // No way to store null in null-free array
1212     __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1213     __ jmp(store_null);
1214 
1215     __ bind(is_null_into_value_array_npe);
1216     __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));
1217 
1218     __ bind(store_null);
1219   }
1220   // Store a null
1221   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1222   __ jmp(done);
1223 
1224   if (UseFlatArray) {
1225     Label is_type_ok;
1226     __ bind(is_flat_array); // Store non-null value to flat
1227 
1228     __ movptr(rax, at_tos());
1229     __ movl(rcx, at_tos_p1()); // index
1230     __ movptr(rdx, at_tos_p2()); // array
1231 
1232     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), rax, rdx, rcx);
1233   }
1234   // Pop stack arguments
1235   __ bind(done);
1236   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1237 }
1238 
1239 void TemplateTable::bastore() {
1240   transition(itos, vtos);
1241   __ pop_i(rbx);
1242   // rax: value
1243   // rbx: index
1244   // rdx: array
1245   index_check(rdx, rbx); // prefer index in rbx
1246   // Need to check whether array is boolean or byte
1247   // since both types share the bastore bytecode.
1248   __ load_klass(rcx, rdx, rscratch1);
1249   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1250   int diffbit = Klass::layout_helper_boolean_diffbit();
1251   __ testl(rcx, diffbit);
1252   Label L_skip;
1253   __ jccb(Assembler::zero, L_skip);

2382   __ jcc(j_not(cc), not_taken);
2383   branch(false, false);
2384   __ bind(not_taken);
2385   __ profile_not_taken_branch(rax);
2386 }
2387 
2388 void TemplateTable::if_nullcmp(Condition cc) {
2389   transition(atos, vtos);
2390   // assume branch is more often taken than not (loops use backward branches)
2391   Label not_taken;
2392   __ testptr(rax, rax);
2393   __ jcc(j_not(cc), not_taken);
2394   branch(false, false);
2395   __ bind(not_taken);
2396   __ profile_not_taken_branch(rax);
2397 }
2398 
2399 void TemplateTable::if_acmp(Condition cc) {
2400   transition(atos, vtos);
2401   // assume branch is more often taken than not (loops use backward branches)
2402   Label taken, not_taken;
2403   __ pop_ptr(rdx);
2404 
2405   __ profile_acmp(rbx, rdx, rax, rcx);
2406 
2407   const int is_inline_type_mask = markWord::inline_type_pattern;
2408   if (EnableValhalla) {
2409     __ cmpoop(rdx, rax);
2410     __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2411 
2412     // might be substitutable, test if either rax or rdx is null
2413     __ testptr(rax, rax);
2414     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2415     __ testptr(rdx, rdx);
2416     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2417 
2418     // and both are value objects?
2419     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2420     __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2421     __ andptr(rbx, is_inline_type_mask);
2422     __ cmpptr(rbx, is_inline_type_mask);
2423     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2424 
2425     // same value klass?
2426     __ load_metadata(rbx, rdx);
2427     __ load_metadata(rcx, rax);
2428     __ cmpptr(rbx, rcx);
2429     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2430 
2431     // Both are known to be the same value type; test for substitutability...
2432     if (cc == equal) {
2433       invoke_is_substitutable(rax, rdx, taken, not_taken);
2434     } else {
2435       invoke_is_substitutable(rax, rdx, not_taken, taken);
2436     }
2437     __ stop("Not reachable");
2438   }
2439 
2440   __ cmpoop(rdx, rax);
2441   __ jcc(j_not(cc), not_taken);
2442   __ bind(taken);
2443   branch(false, false);
2444   __ bind(not_taken);
2445   __ profile_not_taken_branch(rax, true);
2446 }
2447 
2448 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2449                                             Label& is_subst, Label& not_subst) {
2450   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2451   // Registers restored by call_VM; rax holds the answer, jump to the outcome
2452   __ testl(rax, rax);
2453   __ jcc(Assembler::zero, not_subst);
2454   __ jmp(is_subst);
2455 }
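       // (Illustrative note: under Valhalla, acmp on two value objects of the
       //  same class is not plain reference equality; e.g. two Point(1, 2)
       //  instances of a hypothetical value class Point compare equal. The
       //  upcall above returns that verdict in rax: nonzero means substitutable.)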
2456 
2457 void TemplateTable::ret() {
2458   transition(vtos, vtos);
2459   locals_index(rbx);
2460   LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2461   NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2462   __ profile_ret(rbx, rcx);
2463   __ get_method(rax);
2464   __ movptr(rbcp, Address(rax, Method::const_offset()));
2465   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2466                       ConstMethod::codes_offset()));
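       // (Sketch of the address just formed: rbcp = Method::const() +
       //  ConstMethod::codes_offset() + return_bci, i.e. the bcp corresponding
       //  to the bci saved by the matching jsr.)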
2467   __ dispatch_next(vtos, 0, true);
2468 }
2469 
2470 void TemplateTable::wide_ret() {
2471   transition(vtos, vtos);
2472   locals_index_wide(rbx);
2473   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2474   __ profile_ret(rbx, rcx);

2703     const Register thread = rdi;
2704     __ get_thread(thread);
2705     __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2706 #endif
2707     __ jcc(Assembler::zero, no_safepoint);
2708     __ push(state);
2709     __ push_cont_fastpath();
2710     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2711                                        InterpreterRuntime::at_safepoint));
2712     __ pop_cont_fastpath();
2713     __ pop(state);
2714     __ bind(no_safepoint);
2715   }
2716 
2717   // Narrow result if state is itos but result type is smaller.
2718   // Need to narrow in the return bytecode rather than in generate_return_entry
2719   // since compiled code callers expect the result to already be narrowed.
2720   if (state == itos) {
2721     __ narrow(rax);
2722   }
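       // (Sketch: narrow() is assumed to re-truncate rax according to the
       //  method's declared return type, e.g. sign-extending the low 8 bits for
       //  a byte-returning method, so callers always see the canonical narrow
       //  value.)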
2723 
2724   __ remove_activation(state, rbcp, true, true, true);
2725 
2726   __ jmp(rbcp);
2727 }
2728 
2729 // ----------------------------------------------------------------------------
2730 // Volatile variables demand their effects be made known to all CPUs
2731 // in order.  Store buffers on most chips allow reads & writes to
2732 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2733 // without some kind of memory barrier (i.e., it's not sufficient that
2734 // the interpreter does not reorder volatile references; the hardware
2735 // also must not reorder them).
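     // (Hedged example, the classic store-buffer case: with volatile ints a and
     //  b, thread 1 runs `a = 1; r1 = b;` while thread 2 runs `b = 1; r2 = a;`.
     //  Without a StoreLoad barrier after each volatile store, each load can
     //  execute before the other CPU's store has drained from its store buffer,
     //  so both threads may see 0, an outcome the JMM forbids for volatiles.)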
2736 //
2737 // According to the new Java Memory Model (JMM):
2738 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2739 //     writes act as acquire & release, so:
2740 // (2) A read cannot let unrelated NON-volatile memory refs that
2741 //     happen after the read float up to before the read.  It's OK for
2742 //     non-volatile memory refs that happen before the volatile read to
2743 //     float down below it.
2744 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

3070     }
3071     // rax,:   object pointer or null
3072     // cache: cache entry pointer
3073     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
3074               rax, cache);
3075 
3076     __ load_field_entry(cache, index);
3077     __ bind(L1);
3078   }
3079 }
3080 
3081 void TemplateTable::pop_and_check_object(Register r) {
3082   __ pop_ptr(r);
3083   __ null_check(r);  // for field access must check obj.
3084   __ verify_oop(r);
3085 }
3086 
3087 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3088   transition(vtos, vtos);
3089 
3090   const Register obj   = LP64_ONLY(r9) NOT_LP64(rcx);
3091   const Register cache = rcx;
3092   const Register index = rdx;
3093   const Register off   = rbx;
3094   const Register tos_state   = rax;
3095   const Register flags = rdx;
3096   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
3097 
3098   resolve_cache_and_index_for_field(byte_no, cache, index);
3099   jvmti_post_field_access(cache, index, is_static, false);
3100   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3101 


3102   const Address field(obj, off, Address::times_1, 0*wordSize);
3103 
3104   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
3105 
3106   // Dispatch on tos_state: btos == 0, so a plain test suffices for the first case
3107   assert(btos == 0, "change code, btos != 0");
3108   __ testl(tos_state, tos_state);
3109   __ jcc(Assembler::notZero, notByte);
3110 
3111   // btos
3112   if (!is_static) pop_and_check_object(obj);
3113   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3114   __ push(btos);
3115   // Rewrite bytecode to be faster
3116   if (!is_static && rc == may_rewrite) {
3117     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3118   }
3119   __ jmp(Done);
3120 
3121   __ bind(notByte);
3122   __ cmpl(tos_state, ztos);
3123   __ jcc(Assembler::notEqual, notBool);
3124   if (!is_static) pop_and_check_object(obj);
3125   // ztos (same code as btos)
3126   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3127   __ push(ztos);
3128   // Rewrite bytecode to be faster
3129   if (!is_static && rc == may_rewrite) {
3130     // use btos rewriting, no truncating to t/f bit is needed for getfield.
3131     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3132   }
3133   __ jmp(Done);
3134 
3135   __ bind(notBool);
3136   __ cmpl(tos_state, atos);
3137   __ jcc(Assembler::notEqual, notObj);
3138   // atos
3139   if (!EnableValhalla) {
3140     if (!is_static) pop_and_check_object(obj);
3141     do_oop_load(_masm, field, rax);
3142     __ push(atos);
3143     if (!is_static && rc == may_rewrite) {
3144       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3145     }
3146     __ jmp(Done);
3147   } else {
3148     if (is_static) {
3149       __ load_heap_oop(rax, field);
3150       Label is_null_free_inline_type, uninitialized;
3151       // Issue below if the static field has not been initialized yet
3152       __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3153         // field is not a null free inline type
3154         __ push(atos);
3155         __ jmp(Done);
3156       // field is a null free inline type, must not return null even if uninitialized
3157       __ bind(is_null_free_inline_type);
3158         __ testptr(rax, rax);
3159         __ jcc(Assembler::zero, uninitialized);
3160         __ push(atos);
3161         __ jmp(Done);
3162       __ bind(uninitialized);
3163 #ifdef _LP64
3164         Label slow_case, finish;
3165         __ movptr(rbx, Address(obj, java_lang_Class::klass_offset()));
3166         __ cmpb(Address(rbx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3167         __ jcc(Assembler::notEqual, slow_case);
3168         __ get_default_value_oop(rbx, rscratch1, rax);
3169         __ jmp(finish);
3170         __ bind(slow_case);
3171 #endif // _LP64
3172         __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field),
3173                    obj, cache);
3174 #ifdef _LP64
3175         __ bind(finish);
3176 #endif // _LP64
3177         __ verify_oop(rax);
3178         __ push(atos);
3179         __ jmp(Done);
3180     } else {
3181       Label is_flat, nonnull, is_inline_type, rewrite_inline, has_null_marker;
3182       __ test_field_is_null_free_inline_type(flags, rscratch1, is_inline_type);
3183       __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3184       // field is not a null free inline type
3185       pop_and_check_object(obj);
3186       __ load_heap_oop(rax, field);
3187       __ push(atos);
3188       if (rc == may_rewrite) {
3189         patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3190       }
3191       __ jmp(Done);
3192       __ bind(is_inline_type);
3193       __ test_field_is_flat(flags, rscratch1, is_flat);
3194           // field is not flat
3195           pop_and_check_object(obj);
3196           __ load_heap_oop(rax, field);
3197           __ testptr(rax, rax);
3198           __ jcc(Assembler::notZero, nonnull);
3199             __ load_unsigned_short(flags, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3200             __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3201             __ get_inline_type_field_klass(rcx, flags, rbx);
3202             __ get_default_value_oop(rbx, rcx, rax);
3203           __ bind(nonnull);
3204           __ verify_oop(rax);
3205           __ push(atos);
3206           __ jmp(rewrite_inline);
3207         __ bind(is_flat);
3208           pop_and_check_object(rax);
3209           __ read_flat_field(rcx, rdx, rbx, rax);
3210           __ verify_oop(rax);
3211           __ push(atos);
3212           __ jmp(rewrite_inline);
3213       __ bind(has_null_marker);
3214         pop_and_check_object(rax);
3215         __ load_field_entry(rcx, rbx);
3216         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3217         __ get_vm_result(rax, r15_thread);
3218         __ push(atos);
3219       __ bind(rewrite_inline);
3220       if (rc == may_rewrite) {
3221         patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
3222       }
3223       __ jmp(Done);
3224     }
3225   }

3226 
3227   __ bind(notObj);
3228 
3229   if (!is_static) pop_and_check_object(obj);
3230 
3231   __ cmpl(tos_state, itos);
3232   __ jcc(Assembler::notEqual, notInt);
3233   // itos
3234   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3235   __ push(itos);
3236   // Rewrite bytecode to be faster
3237   if (!is_static && rc == may_rewrite) {
3238     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3239   }
3240   __ jmp(Done);
3241 
3242   __ bind(notInt);
3243   __ cmpl(tos_state, ctos);
3244   __ jcc(Assembler::notEqual, notChar);
3245   // ctos
3246   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3247   __ push(ctos);
3248   // Rewrite bytecode to be faster
3249   if (!is_static && rc == may_rewrite) {
3250     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

3310 #endif
3311 
3312   __ bind(Done);
3313   // [jk] not needed currently
3314   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3315   //                                              Assembler::LoadStore));
3316 }
3317 
3318 void TemplateTable::getfield(int byte_no) {
3319   getfield_or_static(byte_no, false);
3320 }
3321 
3322 void TemplateTable::nofast_getfield(int byte_no) {
3323   getfield_or_static(byte_no, false, may_not_rewrite);
3324 }
3325 
3326 void TemplateTable::getstatic(int byte_no) {
3327   getfield_or_static(byte_no, true);
3328 }
3329 

3330 // The registers cache and index are expected to be set before the call.
3331 // The function may destroy various registers, just not the cache and index registers.
3332 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3333   // Cache is rcx and index is rdx
3334   const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3335   const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx);   // Object pointer
3336   const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3337 
3338   if (JvmtiExport::can_post_field_modification()) {
3339     // Check to see if a field modification watch has been set before
3340     // we take the time to call into the VM.
3341     Label L1;
3342     assert_different_registers(cache, obj, rax);
3343     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3344     __ testl(rax, rax);
3345     __ jcc(Assembler::zero, L1);
3346 
3347     __ mov(entry, cache);
3348 
3349     if (is_static) {

3391     // cache: field entry pointer
3392     // value: jvalue object on the stack
3393     __ call_VM(noreg,
3394               CAST_FROM_FN_PTR(address,
3395                               InterpreterRuntime::post_field_modification),
3396               obj, entry, value);
3397     // Reload field entry
3398     __ load_field_entry(cache, index);
3399     __ bind(L1);
3400   }
3401 }
3402 
3403 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3404   transition(vtos, vtos);
3405 
3406   const Register obj = rcx;
3407   const Register cache = rcx;
3408   const Register index = rdx;
3409   const Register tos_state   = rdx;
3410   const Register off   = rbx;
3411   const Register flags = r9;
3412 
3413   resolve_cache_and_index_for_field(byte_no, cache, index);
3414   jvmti_post_field_mod(cache, index, is_static);
3415   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3416 
3417   // [jk] not needed currently
3418   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3419   //                                              Assembler::StoreStore));
3420 
3421   Label notVolatile, Done;
3422 
3423   // Check for volatile store
3424   __ movl(rscratch1, flags);
3425   __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
3426   __ testl(rscratch1, rscratch1);
3427   __ jcc(Assembler::zero, notVolatile);
3428 
3429   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3430   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3431                                                Assembler::StoreStore));
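       // (Hedged note: on x86's TSO model only StoreLoad reordering can occur,
       //  so this fence after the volatile store is the only barrier that
       //  actually costs anything; volatile loads get their acquire semantics
       //  for free on this architecture.)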
3432   __ jmp(Done);
3433   __ bind(notVolatile);
3434 
3435   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3436 
3437   __ bind(Done);
3438 }
3439 
3440 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3441                                               Register obj, Register off, Register tos_state, Register flags) {
3442 
3443   // field addresses
3444   const Address field(obj, off, Address::times_1, 0*wordSize);
3445   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3446 
3447   Label notByte, notBool, notInt, notShort, notChar,
3448         notLong, notFloat, notObj, notInlineType;
3449   Label Done;
3450 
3451   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3452 
3453   // Test TOS state
3454   __ testl(tos_state, tos_state);
3455   __ jcc(Assembler::notZero, notByte);
3456 
3457   // btos
3458   {
3459     __ pop(btos);
3460     if (!is_static) pop_and_check_object(obj);
3461     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3462     if (!is_static && rc == may_rewrite) {
3463       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3464     }
3465     __ jmp(Done);
3466   }
3467 
3468   __ bind(notByte);
3469   __ cmpl(tos_state, ztos);
3470   __ jcc(Assembler::notEqual, notBool);
3471 
3472   // ztos
3473   {
3474     __ pop(ztos);
3475     if (!is_static) pop_and_check_object(obj);
3476     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3477     if (!is_static && rc == may_rewrite) {
3478       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3479     }
3480     __ jmp(Done);
3481   }
3482 
3483   __ bind(notBool);
3484   __ cmpl(tos_state, atos);
3485   __ jcc(Assembler::notEqual, notObj);
3486 
3487   // atos
3488   {
3489     if (!EnableValhalla) {
3490       __ pop(atos);
3491       if (!is_static) pop_and_check_object(obj);
3492       // Store into the field
3493       do_oop_store(_masm, field, rax);
3494       if (!is_static && rc == may_rewrite) {
3495         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3496       }
3497       __ jmp(Done);
3498     } else {
3499       __ pop(atos);
3500       if (is_static) {
3501         Label is_inline_type;
3502         __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
3503         __ null_check(rax);
3504         __ bind(is_inline_type);
3505         do_oop_store(_masm, field, rax);
3506         __ jmp(Done);
3507       } else {
3508         Label is_null_free_inline_type, is_flat, has_null_marker,
3509               write_null, rewrite_not_inline, rewrite_inline;
3510         __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3511         __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3512           // Not an inline type
3513           pop_and_check_object(obj);
3514           // Store into the field
3515           do_oop_store(_masm, field, rax);
3516           __ bind(rewrite_not_inline);
3517           if (rc == may_rewrite) {
3518             patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3519           }
3520           __ jmp(Done);
3521         // Implementation of the inline type semantic
3522         __ bind(is_null_free_inline_type);
3523           __ null_check(rax);
3524           __ test_field_is_flat(flags, rscratch1, is_flat);
3525             // field is not flat
3526             pop_and_check_object(obj);
3527             // Store into the field
3528             do_oop_store(_masm, field, rax);
3529           __ jmp(rewrite_inline);
3530           __ bind(is_flat);
3531             // field is flat
3532             __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3533             __ movptr(r9, Address(rcx, in_bytes(ResolvedFieldEntry::field_holder_offset())));
3534             pop_and_check_object(obj);  // obj = rcx
3535             __ load_klass(r8, rax, rscratch1);
3536             __ data_for_oop(rax, rax, r8);
3537             __ addptr(obj, off);
3538             __ inline_layout_info(r9, rdx, rbx);
3539             // because we use InlineLayoutInfo, we need value access code specialized for fields (arrays will need a different API)
3540             __ flat_field_copy(IN_HEAP, rax, obj, rbx);
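                 // (Sketch of the copy just issued: rax points at the source
                 //  value's payload (data_for_oop), obj + off at the destination
                 //  slot, and rbx carries the InlineLayoutInfo that sizes the
                 //  flat copy.)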
3541             __ jmp(rewrite_inline);
3542         __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3543           pop_and_check_object(rbx);
3544           __ load_field_entry(rcx, rdx);
3545           call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3546         __ bind(rewrite_inline);
3547         if (rc == may_rewrite) {
3548           patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
3549         }
3550         __ jmp(Done);
3551       }
3552     }

3553   }
3554 
3555   __ bind(notObj);
3556   __ cmpl(tos_state, itos);
3557   __ jcc(Assembler::notEqual, notInt);
3558 
3559   // itos
3560   {
3561     __ pop(itos);
3562     if (!is_static) pop_and_check_object(obj);
3563     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3564     if (!is_static && rc == may_rewrite) {
3565       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3566     }
3567     __ jmp(Done);
3568   }
3569 
3570   __ bind(notInt);
3571   __ cmpl(tos_state, ctos);
3572   __ jcc(Assembler::notEqual, notChar);

3671 }
3672 
3673 void TemplateTable::jvmti_post_fast_field_mod() {
3674 
3675   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3676 
3677   if (JvmtiExport::can_post_field_modification()) {
3678     // Check to see if a field modification watch has been set before
3679     // we take the time to call into the VM.
3680     Label L2;
3681     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3682     __ testl(scratch, scratch);
3683     __ jcc(Assembler::zero, L2);
3684     __ pop_ptr(rbx);                  // copy the object pointer from tos
3685     __ verify_oop(rbx);
3686     __ push_ptr(rbx);                 // put the object pointer back on tos
3687     // Save tos values before call_VM() clobbers them. Since we have
3688     // to do it for every data type, we use the saved values as the
3689     // jvalue object.
3690     switch (bytecode()) {          // load values into the jvalue object
3691     case Bytecodes::_fast_vputfield: //fall through
3692     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3693     case Bytecodes::_fast_bputfield: // fall through
3694     case Bytecodes::_fast_zputfield: // fall through
3695     case Bytecodes::_fast_sputfield: // fall through
3696     case Bytecodes::_fast_cputfield: // fall through
3697     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3698     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3699     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3700     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3701 
3702     default:
3703       ShouldNotReachHere();
3704     }
3705     __ mov(scratch, rsp);             // points to jvalue on the stack
3706     // access constant pool cache entry
3707     LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3708     NOT_LP64(__ load_field_entry(rax, rdx));
3709     __ verify_oop(rbx);
3710     // rbx: object pointer copied above
3711     // c_rarg2: cache entry pointer
3712     // c_rarg3: jvalue object on the stack
3713     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3714     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3715 
3716     switch (bytecode()) {             // restore tos values
3717     case Bytecodes::_fast_vputfield: // fall through
3718     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3719     case Bytecodes::_fast_bputfield: // fall through
3720     case Bytecodes::_fast_zputfield: // fall through
3721     case Bytecodes::_fast_sputfield: // fall through
3722     case Bytecodes::_fast_cputfield: // fall through
3723     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3724     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3725     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3726     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3727     default: break;
3728     }
3729     __ bind(L2);
3730   }
3731 }
3732 
3733 void TemplateTable::fast_storefield(TosState state) {
3734   transition(state, vtos);
3735 


3736   Label notVolatile, Done;
3737 
3738   jvmti_post_fast_field_mod();
3739 
3740   __ push(rax);
3741   __ load_field_entry(rcx, rax);
3742   load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);


3743   __ pop(rax);
3744   // RBX: field offset, RCX: field entry, RAX: TOS value, RDX: flags
3745 
3746   // Get object from stack
3747   pop_and_check_object(rcx);
3748 
3749   // field address
3750   const Address field(rcx, rbx, Address::times_1);
3751 
3752   // Check for volatile store
3753   __ movl(rscratch2, rdx);  // saving flags for is_flat test
3754   __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3755   __ testl(rscratch2, rscratch2);
3756   __ jcc(Assembler::zero, notVolatile);
3757 
3758   fast_storefield_helper(field, rax, rdx);
3759   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3760                                                Assembler::StoreStore));
3761   __ jmp(Done);
3762   __ bind(notVolatile);
3763 
3764   fast_storefield_helper(field, rax, rdx);
3765 
3766   __ bind(Done);
3767 }
3768 
3769 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3770 
3771   // DANGER: 'field' argument depends on rcx and rbx
3772 
3773   // access field
3774   switch (bytecode()) {
3775   case Bytecodes::_fast_vputfield:
3776     {
3777       Label is_flat, has_null_marker, write_null, done;
3778       __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3779       // Null free field cases: flat or not flat
3780       __ null_check(rax);
3781       __ test_field_is_flat(flags, rscratch1, is_flat);
3782         // field is not flat
3783         do_oop_store(_masm, field, rax);
3784         __ jmp(done);
3785       __ bind(is_flat);
3786         __ load_field_entry(r8, r9);
3787         __ load_unsigned_short(r9, Address(r8, in_bytes(ResolvedFieldEntry::field_index_offset())));
3788         __ movptr(r8, Address(r8, in_bytes(ResolvedFieldEntry::field_holder_offset())));
3789         __ inline_layout_info(r8, r9, r8);
3790         __ load_klass(rdx, rax, rscratch1);
3791         __ data_for_oop(rax, rax, rdx);
3792         __ lea(rcx, field);
3793         __ flat_field_copy(IN_HEAP, rax, rcx, r8);
3794         __ jmp(done);
3795       __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3796         __ movptr(rbx, rcx);
3797         __ load_field_entry(rcx, rdx);
3798         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3799       __ bind(done);
3800     }
3801     break;
3802   case Bytecodes::_fast_aputfield:
3803     {
3804       do_oop_store(_masm, field, rax);
3805     }
3806     break;
3807   case Bytecodes::_fast_lputfield:
3808 #ifdef _LP64
3809     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3810 #else
3811   __ stop("should not be rewritten");
3812 #endif
3813     break;
3814   case Bytecodes::_fast_iputfield:
3815     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3816     break;
3817   case Bytecodes::_fast_zputfield:
3818     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3819     break;
3820   case Bytecodes::_fast_bputfield:
3821     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3822     break;
3823   case Bytecodes::_fast_sputfield:
3824     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3825     break;

3847     Label L1;
3848     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3849     __ testl(rcx, rcx);
3850     __ jcc(Assembler::zero, L1);
3851     // access constant pool cache entry
3852     LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3853     NOT_LP64(__ load_field_entry(rcx, rdx));
3854     __ verify_oop(rax);
3855     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3856     LP64_ONLY(__ mov(c_rarg1, rax));
3857     // c_rarg1: object pointer copied above
3858     // c_rarg2: cache entry pointer
3859     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3860     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3861     __ pop_ptr(rax); // restore object pointer
3862     __ bind(L1);
3863   }
3864 
3865   // access constant pool cache
3866   __ load_field_entry(rcx, rbx);
3867   __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3868 
3869   // rax: object
3870   __ verify_oop(rax);
3871   __ null_check(rax);
3872   Address field(rax, rdx, Address::times_1);
3873 
3874   // access field
3875   switch (bytecode()) {
3876   case Bytecodes::_fast_vgetfield:
3877     {
3878       Label is_flat, nonnull, Done, has_null_marker;
3879       __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
3880       __ test_field_has_null_marker(rscratch1, rscratch2, has_null_marker);
3881       __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3882         // field is not flat
3883         __ load_heap_oop(rax, field);
3884         __ testptr(rax, rax);
3885         __ jcc(Assembler::notZero, nonnull);
3886           __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3887           __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3888           __ get_inline_type_field_klass(rcx, rdx, rbx);
3889           __ get_default_value_oop(rbx, rcx, rax);
3890         __ bind(nonnull);
3891         __ verify_oop(rax);
3892         __ jmp(Done);
3893       __ bind(is_flat);
3894       // field is flat
3895         __ read_flat_field(rcx, rdx, rbx, rax);
3896         __ jmp(Done);
3897       __ bind(has_null_marker);
3898         // rax = instance, rcx = resolved entry
3899         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3900         __ get_vm_result(rax, r15_thread);
3901       __ bind(Done);
3902       __ verify_oop(rax);
3903     }
3904     break;
3905   case Bytecodes::_fast_agetfield:
3906     do_oop_load(_masm, field, rax);
3907     __ verify_oop(rax);
3908     break;
3909   case Bytecodes::_fast_lgetfield:
3910 #ifdef _LP64
3911     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3912 #else
3913   __ stop("should not be rewritten");
3914 #endif
3915     break;
3916   case Bytecodes::_fast_igetfield:
3917     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3918     break;
3919   case Bytecodes::_fast_bgetfield:
3920     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3921     break;
3922   case Bytecodes::_fast_sgetfield:
3923     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3924     break;

4320 
4321   // Note:  rax_callsite is already pushed
4322 
4323   // %%% should make a type profile for any invokedynamic that takes a ref argument
4324   // profile this call
4325   __ profile_call(rbcp);
4326   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4327 
4328   __ verify_oop(rax_callsite);
4329 
4330   __ jump_from_interpreted(rbx_method, rdx);
4331 }
4332 
4333 //-----------------------------------------------------------------------------
4334 // Allocation
4335 
4336 void TemplateTable::_new() {
4337   transition(vtos, atos);
4338   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4339   Label slow_case;

4340   Label done;

4341 
4342   __ get_cpool_and_tags(rcx, rax);
4343 
4344   // Make sure the class we're about to instantiate has been resolved.
4345   // This is done before loading InstanceKlass to be consistent with the order
4346   // in which the constant pool is updated (see ConstantPool::klass_at_put)
4347   const int tags_offset = Array<u1>::base_offset_in_bytes();
4348   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4349   __ jcc(Assembler::notEqual, slow_case);
4350 
4351   // get InstanceKlass
4352   __ load_resolved_klass_at_index(rcx, rcx, rdx);

4353 
4354   // make sure klass is initialized
4355   // init_state needs acquire, but x86 is TSO, and so we are already good.
4356 #ifdef _LP64
4357   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4358   __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4359 #else
4360   __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4361   __ jcc(Assembler::notEqual, slow_case);
4362 #endif
4363 
4364   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
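       // (Hedged note: allocate_instance is expected to attempt the inline fast
       //  paths, e.g. TLAB bump allocation, and to branch to slow_case when it
       //  cannot, leaving the new zeroed instance in rax.)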

4365   if (DTraceAllocProbes) {
4366     // Trigger dtrace event for fastpath
4367     __ push(atos);
4368     __ call_VM_leaf(
4369          CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4370     __ pop(atos);
4371   }
4372   __ jmp(done);


4373 
4374   // slow case
4375   __ bind(slow_case);


4376 
4377   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4378   Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4379 
4380   __ get_constant_pool(rarg1);
4381   __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4382   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4383   __ verify_oop(rax);
4384 
4385   // continue
4386   __ bind(done);
4387 }
4388 
4389 void TemplateTable::newarray() {
4390   transition(itos, atos);
4391   Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4392   __ load_unsigned_byte(rarg1, at_bcp(1));
4393   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4394           rarg1, rax);
4395 }

4404   __ get_constant_pool(rarg1);
4405   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4406           rarg1, rarg2, rax);
4407 }
4408 
4409 void TemplateTable::arraylength() {
4410   transition(atos, itos);
4411   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4412 }
4413 
4414 void TemplateTable::checkcast() {
4415   transition(atos, atos);
4416   Label done, is_null, ok_is_subtype, quicked, resolved;
4417   __ testptr(rax, rax); // object is in rax
4418   __ jcc(Assembler::zero, is_null);
4419 
4420   // Get cpool & tags index
4421   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4422   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4423   // See if bytecode has already been quicked
4424   __ movzbl(rdx, Address(rdx, rbx,
4425       Address::times_1,
4426       Array<u1>::base_offset_in_bytes()));
4427   __ cmpl(rdx, JVM_CONSTANT_Class);
4428   __ jcc(Assembler::equal, quicked);
4429   __ push(atos); // save receiver for result, and for GC
4430   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4431 
4432   // vm_result_2 has metadata result
4433 #ifndef _LP64
4434   // borrow rdi from locals
4435   __ get_thread(rdi);
4436   __ get_vm_result_2(rax, rdi);
4437   __ restore_locals();
4438 #else
4439   __ get_vm_result_2(rax, r15_thread);
4440 #endif
4441 
4442   __ pop_ptr(rdx); // restore receiver
4443   __ jmpb(resolved);
4444 
4445   // Get superklass in rax and subklass in rbx
4446   __ bind(quicked);
4447   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4448   __ load_resolved_klass_at_index(rax, rcx, rbx);
4449 
4450   __ bind(resolved);
4451   __ load_klass(rbx, rdx, rscratch1);
4452 
4453   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
4454   // Superklass in rax.  Subklass in rbx.
4455   __ gen_subtype_check(rbx, ok_is_subtype);
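       // (Illustrative note: this is the success/failure split of e.g.
       //  `(String) obj` at the Java level; the branch to ok_is_subtype is the
       //  passing cast, while falling through raises ClassCastException below.)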
4456 
4457   // Come here on failure
4458   __ push_ptr(rdx);
4459   // object is at TOS
4460   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
4461 
4462   // Come here on success
4463   __ bind(ok_is_subtype);
4464   __ mov(rax, rdx); // Restore object in rdx
4465   __ jmp(done);
4466 
4467   __ bind(is_null);
4468 
4469   // Collect counts on whether this check-cast sees nulls a lot or not.
4470   if (ProfileInterpreter) {


4471     __ profile_null_seen(rcx);


4472   }
4473 
4474   __ bind(done);
4475 }
4476 
4477 void TemplateTable::instanceof() {
4478   transition(atos, itos);
4479   Label done, is_null, ok_is_subtype, quicked, resolved;
4480   __ testptr(rax, rax);
4481   __ jcc(Assembler::zero, is_null);
4482 
4483   // Get cpool & tags index
4484   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4485   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4486   // See if bytecode has already been quicked
4487   __ movzbl(rdx, Address(rdx, rbx,
4488         Address::times_1,
4489         Array<u1>::base_offset_in_bytes()));
4490   __ cmpl(rdx, JVM_CONSTANT_Class);
4491   __ jcc(Assembler::equal, quicked);
4492 
4493   __ push(atos); // save receiver for result, and for GC
4494   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4495   // vm_result_2 has metadata result
4496 
4497 #ifndef _LP64
4498   // borrow rdi from locals
4499   __ get_thread(rdi);
4500   __ get_vm_result_2(rax, rdi);
4501   __ restore_locals();
4502 #else
4503   __ get_vm_result_2(rax, r15_thread);
4504 #endif
4505 
4506   __ pop_ptr(rdx); // restore receiver
4507   __ verify_oop(rdx);
4508   __ load_klass(rdx, rdx, rscratch1);
4509   __ jmpb(resolved);
4510 

4522   // Come here on failure
4523   __ xorl(rax, rax);
4524   __ jmpb(done);
4525   // Come here on success
4526   __ bind(ok_is_subtype);
4527   __ movl(rax, 1);
4528 
4529   // Collect counts on whether this test sees nulls a lot or not.
4530   if (ProfileInterpreter) {
4531     __ jmp(done);
4532     __ bind(is_null);
4533     __ profile_null_seen(rcx);
4534   } else {
4535     __ bind(is_null);   // same as 'done'
4536   }
4537   __ bind(done);
4538   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
4539   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
4540 }
4541 

4542 //----------------------------------------------------------------------------------------------------
4543 // Breakpoints
4544 void TemplateTable::_breakpoint() {
4545   // Note: We get here even if we are single stepping...
4546   // jbug insists on setting breakpoints at every bytecode
4547   // even if we are in single step mode.
4548 
4549   transition(vtos, vtos);
4550 
4551   Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4552 
4553   // get the unpatched byte code
4554   __ get_method(rarg);
4555   __ call_VM(noreg,
4556              CAST_FROM_FN_PTR(address,
4557                               InterpreterRuntime::get_original_bytecode_at),
4558              rarg, rbcp);
4559   __ mov(rbx, rax);  // why?
4560 
4561   // post the breakpoint event

4583 // Note: monitorenter & exit are symmetric routines; which is reflected
4584 //       in the assembly code structure as well
4585 //
4586 // Stack layout:
4587 //
4588 // [expressions  ] <--- rsp               = expression stack top
4589 // ..
4590 // [expressions  ]
4591 // [monitor entry] <--- monitor block top = expression stack bot
4592 // ..
4593 // [monitor entry]
4594 // [frame data   ] <--- monitor block bot
4595 // ...
4596 // [saved rbp    ] <--- rbp
4597 void TemplateTable::monitorenter() {
4598   transition(atos, vtos);
4599 
4600   // check for null object
4601   __ null_check(rax);
4602 
4603   Label is_inline_type;
4604   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4605   __ test_markword_is_inline_type(rbx, is_inline_type);
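       // (Note, hedged: value objects have no identity and hence no monitor to
       //  lock; the is_inline_type path below ends in throw_identity_exception
       //  instead of locking.)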
4606 
4607   const Address monitor_block_top(
4608         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4609   const Address monitor_block_bot(
4610         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4611   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4612 
4613   Label allocated;
4614 
4615   Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4616   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4617   Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4618 
4619   // initialize entry pointer
4620   __ xorl(rmon, rmon); // points to free slot or null
4621 
4622   // find a free slot in the monitor block (result in rmon)
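       // (Sketch of the search: walk entries from monitor_block_top toward
       //  monitor_block_bot; an entry whose obj field is null is free and is
       //  presumably remembered in rmon, initialized to null above, so an
       //  existing slot can be reused without growing the block.)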
4623   {
4624     Label entry, loop, exit;
4625     __ movptr(rtop, monitor_block_top); // derelativize pointer
4626     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

4679   // rmon: points to monitor entry
4680   __ bind(allocated);
4681 
4682   // Increment bcp to point to the next bytecode, so exception
4683   // handling for asynchronous exceptions works correctly.
4684   // The object has already been popped from the stack, so the
4685   // expression stack looks correct.
4686   __ increment(rbcp);
4687 
4688   // store object
4689   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4690   __ lock_object(rmon);
4691 
4692   // check to make sure this monitor doesn't cause stack overflow after locking
4693   __ save_bcp();  // in case of exception
4694   __ generate_stack_overflow_check(0);
4695 
4696   // The bcp has already been incremented. Just need to dispatch to
4697   // next instruction.
4698   __ dispatch_next(vtos);
4699 
4700   __ bind(is_inline_type);
4701   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4702                     InterpreterRuntime::throw_identity_exception), rax);
4703   __ should_not_reach_here();
4704 }
4705 
4706 void TemplateTable::monitorexit() {
4707   transition(atos, vtos);
4708 
4709   // check for null object
4710   __ null_check(rax);
4711 
4712   const int is_inline_type_mask = markWord::inline_type_pattern;
4713   Label has_identity;
4714   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4715   __ andptr(rbx, is_inline_type_mask);
4716   __ cmpl(rbx, is_inline_type_mask);
4717   __ jcc(Assembler::notEqual, has_identity);
4718   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4719                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4720   __ should_not_reach_here();
4721   __ bind(has_identity);
4722 
4723   const Address monitor_block_top(
4724         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4725   const Address monitor_block_bot(
4726         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4727   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4728 
4729   Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4730   Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4731 
4732   Label found;
4733 
4734   // find matching slot
4735   {
4736     Label entry, loop;
4737     __ movptr(rtop, monitor_block_top); // derelativize pointer
4738     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4739     // rtop points to current entry, starting with top-most entry
4740 
4741     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4742                                         // of monitor block
< prev index next >