src/hotspot/cpu/x86/templateTable_x86.cpp
--- original version (the patched version follows further below) ---

  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/macroAssembler.hpp"
  26 #include "compiler/disassembler.hpp"
  27 #include "gc/shared/collectedHeap.hpp"
  28 #include "gc/shared/gc_globals.hpp"
  29 #include "gc/shared/tlab_globals.hpp"
  30 #include "interpreter/interpreter.hpp"
  31 #include "interpreter/interpreterRuntime.hpp"
  32 #include "interpreter/interp_masm.hpp"
  33 #include "interpreter/templateTable.hpp"
  34 #include "memory/universe.hpp"
  35 #include "oops/methodCounters.hpp"
  36 #include "oops/methodData.hpp"
  37 #include "oops/objArrayKlass.hpp"
  38 #include "oops/oop.inline.hpp"
  39 #include "oops/resolvedFieldEntry.hpp"
  40 #include "oops/resolvedIndyEntry.hpp"
  41 #include "oops/resolvedMethodEntry.hpp"
  42 #include "prims/jvmtiExport.hpp"
  43 #include "prims/methodHandles.hpp"
  44 #include "runtime/frame.inline.hpp"
  45 #include "runtime/safepointMechanism.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 #include "runtime/stubRoutines.hpp"
  48 #include "runtime/synchronizer.hpp"
  49 #include "utilities/macros.hpp"
  50 
  51 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  52 
  53 // Global Register Names
  54 static const Register rbcp     = r13;
  55 static const Register rlocals  = r14;
  56 
  57 // Address Computation: local variables
  58 static inline Address iaddress(int n) {

 150 static void do_oop_load(InterpreterMacroAssembler* _masm,
 151                         Address src,
 152                         Register dst,
 153                         DecoratorSet decorators = 0) {
 154   __ load_heap_oop(dst, src, rdx, decorators);
 155 }
 156 
 157 Address TemplateTable::at_bcp(int offset) {
 158   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 159   return Address(rbcp, offset);
 160 }
 161 
 162 
 163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 164                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 165                                    int byte_no) {
 166   if (!RewriteBytecodes)  return;
 167   Label L_patch_done;
 168 
 169   switch (bc) {
 170   case Bytecodes::_fast_aputfield:
 171   case Bytecodes::_fast_bputfield:
 172   case Bytecodes::_fast_zputfield:
 173   case Bytecodes::_fast_cputfield:
 174   case Bytecodes::_fast_dputfield:
 175   case Bytecodes::_fast_fputfield:
 176   case Bytecodes::_fast_iputfield:
 177   case Bytecodes::_fast_lputfield:
 178   case Bytecodes::_fast_sputfield:
 179     {
 180       // We skip bytecode quickening for putfield instructions when
 181       // the put_code written to the constant pool cache is zero.
 182       // This is required so that every execution of this instruction
 183       // calls out to InterpreterRuntime::resolve_get_put to do
 184       // additional, required work.
 185       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 186       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 187       __ load_field_entry(temp_reg, bc_reg);
 188       if (byte_no == f1_byte) {
 189         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
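
A sketch of the elided continuation, assuming the stock HotSpot pattern: the loaded put_code is tested, and patching is skipped while it is still zero so that resolution keeps being retried.

        __ testl(temp_reg, temp_reg);
        __ jcc(Assembler::zero, L_patch_done);  // put_code == 0: don't patch yet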

 758                     Address(rdx, rax,
 759                             Address::times_4,
 760                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 761                     noreg);
 762 }
 763 
 764 void TemplateTable::daload() {
 765   transition(itos, dtos);
 766   // rax: index
 767   // rdx: array
 768   index_check(rdx, rax); // kills rbx
 769   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 770                     Address(rdx, rax,
 771                             Address::times_8,
 772                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 773                     noreg);
 774 }
 775 
 776 void TemplateTable::aaload() {
 777   transition(itos, atos);
 778   // rax: index
 779   // rdx: array
 780   index_check(rdx, rax); // kills rbx
 781   do_oop_load(_masm,
 782               Address(rdx, rax,
 783                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
 784                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 785               rax,
 786               IS_ARRAY);

 787 }
 788 
 789 void TemplateTable::baload() {
 790   transition(itos, itos);
 791   // rax: index
 792   // rdx: array
 793   index_check(rdx, rax); // kills rbx
 794   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 795                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 796                     noreg);
 797 }
 798 
 799 void TemplateTable::caload() {
 800   transition(itos, itos);
 801   // rax: index
 802   // rdx: array
 803   index_check(rdx, rax); // kills rbx
 804   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 805                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 806                     noreg);

1040   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1041                      Address(rdx, rbx, Address::times_4,
1042                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1043                      noreg /* ftos */, noreg, noreg, noreg);
1044 }
1045 
1046 void TemplateTable::dastore() {
1047   transition(dtos, vtos);
1048   __ pop_i(rbx);
1049   // value is in xmm0
1050   // rbx:  index
1051   // rdx:  array
1052   index_check(rdx, rbx); // prefer index in rbx
1053   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1054                      Address(rdx, rbx, Address::times_8,
1055                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1056                      noreg /* dtos */, noreg, noreg, noreg);
1057 }
1058 
1059 void TemplateTable::aastore() {
1060   Label is_null, ok_is_subtype, done;
1061   transition(vtos, vtos);
1062   // stack: ..., array, index, value
1063   __ movptr(rax, at_tos());    // value
1064   __ movl(rcx, at_tos_p1()); // index
1065   __ movptr(rdx, at_tos_p2()); // array
1066 
1067   Address element_address(rdx, rcx,
1068                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1069                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1070 
1071   index_check_without_pop(rdx, rcx);     // kills rbx
1072   __ testptr(rax, rax);
1073   __ jcc(Assembler::zero, is_null);
1074 
1075   // Move subklass into rbx
1076   __ load_klass(rbx, rax, rscratch1);
1077   // Move superklass into rax
1078   __ load_klass(rax, rdx, rscratch1);
1079   __ movptr(rax, Address(rax,
1080                          ObjArrayKlass::element_klass_offset()));
1081 
1082   // Generate subtype check.  Blows rcx, rdi
1083   // Superklass in rax.  Subklass in rbx.
1084   __ gen_subtype_check(rbx, ok_is_subtype);
1085 
1086   // Come here on failure
1087   // object is at TOS
1088   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1089 
1090   // Come here on success
1091   __ bind(ok_is_subtype);
1092 
1093   // Get the value we will store
1094   __ movptr(rax, at_tos());
1095   __ movl(rcx, at_tos_p1()); // index
1096   // Now store using the appropriate barrier
1097   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1098   __ jmp(done);
1099 
1100   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1101   __ bind(is_null);
1102   __ profile_null_seen(rbx);
1103 
1104   // Store a null
1105   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1106 
1107   // Pop stack arguments
1108   __ bind(done);
1109   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1110 }
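
The sequence above is the classic aastore store check; conceptually (Java-level semantics, not generated code):

    // if (value != null && !value->klass()->is_subtype_of(element_klass))
    //   throw ArrayStoreException;
    // array[index] = value;   // via do_oop_store, so GC barriers are applied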
1111 
1112 void TemplateTable::bastore() {
1113   transition(itos, vtos);
1114   __ pop_i(rbx);
1115   // rax: value
1116   // rbx: index
1117   // rdx: array
1118   index_check(rdx, rbx); // prefer index in rbx
1119   // Need to check whether array is boolean or byte
1120   // since both types share the bastore bytecode.
1121   __ load_klass(rcx, rdx, rscratch1);
1122   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1123   int diffbit = Klass::layout_helper_boolean_diffbit();
1124   __ testl(rcx, diffbit);
1125   Label L_skip;
1126   __ jccb(Assembler::zero, L_skip);
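
A sketch of the elided continuation, assuming the stock HotSpot pattern: when the diffbit is set the array is a boolean[], whose elements may only hold the t/f bit, so the value is masked before the store.

    __ andl(rax, 1);  // boolean array: keep only bit 0
    __ bind(L_skip);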

1876   __ jcc(j_not(cc), not_taken);
1877   branch(false, false);
1878   __ bind(not_taken);
1879   __ profile_not_taken_branch(rax);
1880 }
1881 
1882 void TemplateTable::if_nullcmp(Condition cc) {
1883   transition(atos, vtos);
1884   // assume branch is more often taken than not (loops use backward branches)
1885   Label not_taken;
1886   __ testptr(rax, rax);
1887   __ jcc(j_not(cc), not_taken);
1888   branch(false, false);
1889   __ bind(not_taken);
1890   __ profile_not_taken_branch(rax);
1891 }
1892 
1893 void TemplateTable::if_acmp(Condition cc) {
1894   transition(atos, vtos);
1895   // assume branch is more often taken than not (loops use backward branches)
1896   Label not_taken;
1897   __ pop_ptr(rdx);
1898   __ cmpoop(rdx, rax);
1899   __ jcc(j_not(cc), not_taken);
1900   branch(false, false);
1901   __ bind(not_taken);
1902   __ profile_not_taken_branch(rax);
1903 }
1904 
1905 void TemplateTable::ret() {
1906   transition(vtos, vtos);
1907   locals_index(rbx);
1908   __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1909   __ profile_ret(rbx, rcx);
1910   __ get_method(rax);
1911   __ movptr(rbcp, Address(rax, Method::const_offset()));
1912   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
1913                       ConstMethod::codes_offset()));
1914   __ dispatch_next(vtos, 0, true);
1915 }
1916 
1917 void TemplateTable::wide_ret() {
1918   transition(vtos, vtos);
1919   locals_index_wide(rbx);
1920   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
1921   __ profile_ret(rbx, rcx);
1922   __ get_method(rax);

2136   if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2137     Label no_safepoint;
2138     NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2139     __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2140     __ jcc(Assembler::zero, no_safepoint);
2141     __ push(state);
2142     __ push_cont_fastpath();
2143     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2144                                        InterpreterRuntime::at_safepoint));
2145     __ pop_cont_fastpath();
2146     __ pop(state);
2147     __ bind(no_safepoint);
2148   }
2149 
2150   // Narrow result if state is itos but result type is smaller.
2151   // Need to narrow in the return bytecode rather than in generate_return_entry
2152   // since compiled code callers expect the result to already be narrowed.
2153   if (state == itos) {
2154     __ narrow(rax);
2155   }
2156   __ remove_activation(state, rbcp);
2157 
2158   __ jmp(rbcp);
2159 }
2160 
2161 // ----------------------------------------------------------------------------
2162 // Volatile variables demand their effects be made known to all CPUs
2163 // in order.  Store buffers on most chips allow reads & writes to
2164 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2165 // without some kind of memory barrier (i.e., it's not sufficient that
2166 // the interpreter does not reorder volatile references, the hardware
2167 // also must not reorder them).
2168 //
2169 // According to the new Java Memory Model (JMM):
2170 // (1) All volatiles are serialized wrt each other.  ALSO reads &
2171 //     writes act as acquire & release, so:
2172 // (2) A read cannot let unrelated NON-volatile memory refs that
2173 //     happen after the read float up to before the read.  It's OK for
2174 //     non-volatile memory refs that happen before the volatile read to
2175 //     float down below it.
2176 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

2496     }
2497     // rax,:   object pointer or null
2498     // cache: cache entry pointer
2499     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2500               rax, cache);
2501 
2502     __ load_field_entry(cache, index);
2503     __ bind(L1);
2504   }
2505 }
2506 
2507 void TemplateTable::pop_and_check_object(Register r) {
2508   __ pop_ptr(r);
2509   __ null_check(r);  // for field access must check obj.
2510   __ verify_oop(r);
2511 }
2512 
2513 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2514   transition(vtos, vtos);
2515 
2516   const Register obj   = c_rarg3;
2517   const Register cache = rcx;
2518   const Register index = rdx;
2519   const Register off   = rbx;
2520   const Register tos_state   = rax;
2521   const Register flags = rdx;
2522   const Register bc    = c_rarg3; // uses same reg as obj, so don't mix them
2523 
2524   resolve_cache_and_index_for_field(byte_no, cache, index);
2525   jvmti_post_field_access(cache, index, is_static, false);
2526   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2527 
2528   if (!is_static) pop_and_check_object(obj);
2529 
2530   const Address field(obj, off, Address::times_1, 0*wordSize);
2531 
2532   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2533 
2534   // Make sure we don't need to mask edx after the above shift
2535   assert(btos == 0, "change code, btos != 0");
2536   __ testl(tos_state, tos_state);
2537   __ jcc(Assembler::notZero, notByte);
2538 
2539   // btos
2540   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
2541   __ push(btos);
2542   // Rewrite bytecode to be faster
2543   if (!is_static && rc == may_rewrite) {
2544     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2545   }
2546   __ jmp(Done);
2547 
2548   __ bind(notByte);
2549   __ cmpl(tos_state, ztos);
2550   __ jcc(Assembler::notEqual, notBool);
2551 
2552   // ztos (same code as btos)
2553   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
2554   __ push(ztos);
2555   // Rewrite bytecode to be faster
2556   if (!is_static && rc == may_rewrite) {
2557     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2558     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2559   }
2560   __ jmp(Done);
2561 
2562   __ bind(notBool);
2563   __ cmpl(tos_state, atos);
2564   __ jcc(Assembler::notEqual, notObj);
2565   // atos
2566   do_oop_load(_masm, field, rax);
2567   __ push(atos);
2568   if (!is_static && rc == may_rewrite) {
2569     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2570   }
2571   __ jmp(Done);
2572 
2573   __ bind(notObj);
2574   __ cmpl(tos_state, itos);
2575   __ jcc(Assembler::notEqual, notInt);
2576   // itos
2577   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
2578   __ push(itos);
2579   // Rewrite bytecode to be faster
2580   if (!is_static && rc == may_rewrite) {
2581     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2582   }
2583   __ jmp(Done);
2584 
2585   __ bind(notInt);
2586   __ cmpl(tos_state, ctos);
2587   __ jcc(Assembler::notEqual, notChar);
2588   // ctos
2589   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
2590   __ push(ctos);
2591   // Rewrite bytecode to be faster
2592   if (!is_static && rc == may_rewrite) {
2593     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

2653 #endif
2654 
2655   __ bind(Done);
2656   // [jk] not needed currently
2657   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2658   //                                              Assembler::LoadStore));
2659 }
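
The cmpl/jcc chain above is a hand-rolled dispatch over the cached TosState; as a conceptual summary (not generated code):

    // switch (tos_state) {
    //   case btos: load T_BYTE;    maybe patch to _fast_bgetfield; break;
    //   case ztos: load T_BOOLEAN; reuses the btos rewrite;        break;
    //   case atos: do_oop_load;    maybe patch to _fast_agetfield; break;
    //   case itos: load T_INT;     maybe patch to _fast_igetfield; break;
    //   ...        // ctos, stos, ltos, ftos, dtos handled the same way
    // }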
2660 
2661 void TemplateTable::getfield(int byte_no) {
2662   getfield_or_static(byte_no, false);
2663 }
2664 
2665 void TemplateTable::nofast_getfield(int byte_no) {
2666   getfield_or_static(byte_no, false, may_not_rewrite);
2667 }
2668 
2669 void TemplateTable::getstatic(int byte_no) {
2670   getfield_or_static(byte_no, true);
2671 }
2672 
2673 
2674 // The registers cache and index are expected to be set before the call.
2675 // The function may destroy various registers, just not the cache and index registers.
2676 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2677   // Cache is rcx and index is rdx
2678   const Register entry = c_rarg2; // ResolvedFieldEntry
2679   const Register obj = c_rarg1;   // Object pointer
2680   const Register value = c_rarg3; // JValue object
2681 
2682   if (JvmtiExport::can_post_field_modification()) {
2683     // Check to see if a field modification watch has been set before
2684     // we take the time to call into the VM.
2685     Label L1;
2686     assert_different_registers(cache, obj, rax);
2687     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2688     __ testl(rax, rax);
2689     __ jcc(Assembler::zero, L1);
2690 
2691     __ mov(entry, cache);
2692 
2693     if (is_static) {

2715     // cache: field entry pointer
2716     // value: jvalue object on the stack
2717     __ call_VM(noreg,
2718               CAST_FROM_FN_PTR(address,
2719                               InterpreterRuntime::post_field_modification),
2720               obj, entry, value);
2721     // Reload field entry
2722     __ load_field_entry(cache, index);
2723     __ bind(L1);
2724   }
2725 }
2726 
2727 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2728   transition(vtos, vtos);
2729 
2730   const Register obj = rcx;
2731   const Register cache = rcx;
2732   const Register index = rdx;
2733   const Register tos_state   = rdx;
2734   const Register off   = rbx;
2735   const Register flags = rax;
2736 
2737   resolve_cache_and_index_for_field(byte_no, cache, index);
2738   jvmti_post_field_mod(cache, index, is_static);
2739   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2740 
2741   // [jk] not needed currently
2742   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2743   //                                              Assembler::StoreStore));
2744 
2745   Label notVolatile, Done;
2746 
2747   // Check for volatile store
2748   __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
2749   __ testl(flags, flags);
2750   __ jcc(Assembler::zero, notVolatile);
2751 
2752   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2753   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2754                                                Assembler::StoreStore));
2755   __ jmp(Done);
2756   __ bind(notVolatile);
2757 
2758   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2759 
2760   __ bind(Done);
2761 }
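
Only volatile stores take the barrier path above. On x86, TSO already provides the StoreStore ordering; what volatile_barrier has to materialize is StoreLoad, which the macro assembler typically emits as a locked read-modify-write of the stack top rather than an mfence (a sketch of the usual emission, not this exact hunk):

    __ lock();
    __ addl(Address(rsp, 0), 0);  // drains the store buffer before any later load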
2762 
2763 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2764                                               Register obj, Register off, Register tos_state) {
2765 
2766   // field addresses
2767   const Address field(obj, off, Address::times_1, 0*wordSize);
2768 
2769   Label notByte, notBool, notInt, notShort, notChar,
2770         notLong, notFloat, notObj;
2771   Label Done;
2772 
2773   const Register bc    = c_rarg3;
2774 
2775   // Test TOS state
2776   __ testl(tos_state, tos_state);
2777   __ jcc(Assembler::notZero, notByte);
2778 
2779   // btos
2780   {
2781     __ pop(btos);
2782     if (!is_static) pop_and_check_object(obj);
2783     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2784     if (!is_static && rc == may_rewrite) {
2785       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2786     }
2787     __ jmp(Done);
2788   }
2789 
2790   __ bind(notByte);
2791   __ cmpl(tos_state, ztos);
2792   __ jcc(Assembler::notEqual, notBool);
2793 
2794   // ztos
2795   {
2796     __ pop(ztos);
2797     if (!is_static) pop_and_check_object(obj);
2798     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
2799     if (!is_static && rc == may_rewrite) {
2800       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2801     }
2802     __ jmp(Done);
2803   }
2804 
2805   __ bind(notBool);
2806   __ cmpl(tos_state, atos);
2807   __ jcc(Assembler::notEqual, notObj);
2808 
2809   // atos
2810   {
2811     __ pop(atos);
2812     if (!is_static) pop_and_check_object(obj);
2813     // Store into the field
2814     do_oop_store(_masm, field, rax);
2815     if (!is_static && rc == may_rewrite) {
2816       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2817     }
2818     __ jmp(Done);
2819   }
2820 
2821   __ bind(notObj);
2822   __ cmpl(tos_state, itos);
2823   __ jcc(Assembler::notEqual, notInt);
2824 
2825   // itos
2826   {
2827     __ pop(itos);
2828     if (!is_static) pop_and_check_object(obj);
2829     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
2830     if (!is_static && rc == may_rewrite) {
2831       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2832     }
2833     __ jmp(Done);
2834   }
2835 
2836   __ bind(notInt);
2837   __ cmpl(tos_state, ctos);
2838   __ jcc(Assembler::notEqual, notChar);

2935 }
2936 
2937 void TemplateTable::jvmti_post_fast_field_mod() {
2938 
2939   const Register scratch = c_rarg3;
2940 
2941   if (JvmtiExport::can_post_field_modification()) {
2942     // Check to see if a field modification watch has been set before
2943     // we take the time to call into the VM.
2944     Label L2;
2945     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2946     __ testl(scratch, scratch);
2947     __ jcc(Assembler::zero, L2);
2948     __ pop_ptr(rbx);                  // copy the object pointer from tos
2949     __ verify_oop(rbx);
2950     __ push_ptr(rbx);                 // put the object pointer back on tos
2951     // Save tos values before call_VM() clobbers them. Since we have
2952     // to do it for every data type, we use the saved values as the
2953     // jvalue object.
2954     switch (bytecode()) {          // load values into the jvalue object
2955     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
2956     case Bytecodes::_fast_bputfield: // fall through
2957     case Bytecodes::_fast_zputfield: // fall through
2958     case Bytecodes::_fast_sputfield: // fall through
2959     case Bytecodes::_fast_cputfield: // fall through
2960     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
2961     case Bytecodes::_fast_dputfield: __ push(dtos); break;
2962     case Bytecodes::_fast_fputfield: __ push(ftos); break;
2963     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
2964 
2965     default:
2966       ShouldNotReachHere();
2967     }
2968     __ mov(scratch, rsp);             // points to jvalue on the stack
2969     // access constant pool cache entry
2970     __ load_field_entry(c_rarg2, rax);
2971     __ verify_oop(rbx);
2972     // rbx: object pointer copied above
2973     // c_rarg2: cache entry pointer
2974     // c_rarg3: jvalue object on the stack
2975     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
2976 
2977     switch (bytecode()) {             // restore tos values
2978     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
2979     case Bytecodes::_fast_bputfield: // fall through
2980     case Bytecodes::_fast_zputfield: // fall through
2981     case Bytecodes::_fast_sputfield: // fall through
2982     case Bytecodes::_fast_cputfield: // fall through
2983     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
2984     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
2985     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
2986     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
2987     default: break;
2988     }
2989     __ bind(L2);
2990   }
2991 }
2992 
2993 void TemplateTable::fast_storefield(TosState state) {
2994   transition(state, vtos);
2995 
2996   Register cache = rcx;
2997 
2998   Label notVolatile, Done;
2999 
3000   jvmti_post_fast_field_mod();
3001 
3002   __ push(rax);
3003   __ load_field_entry(rcx, rax);
3004   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3005   // RBX: field offset, RAX: TOS, RDX: flags
3006   __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3007   __ pop(rax);
3008 
3009   // Get object from stack
3010   pop_and_check_object(rcx);
3011 
3012   // field address
3013   const Address field(rcx, rbx, Address::times_1);
3014 
3015   // Check for volatile store
3016   __ testl(rdx, rdx);
3017   __ jcc(Assembler::zero, notVolatile);
3018 
3019   fast_storefield_helper(field, rax);
3020   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3021                                                Assembler::StoreStore));
3022   __ jmp(Done);
3023   __ bind(notVolatile);
3024 
3025   fast_storefield_helper(field, rax);
3026 
3027   __ bind(Done);
3028 }
3029 
3030 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3031 
3032   // access field
3033   switch (bytecode()) {
3034   case Bytecodes::_fast_aputfield:
3035     do_oop_store(_masm, field, rax);
3036     break;
3037   case Bytecodes::_fast_lputfield:
3038     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3039     break;
3040   case Bytecodes::_fast_iputfield:
3041     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3042     break;
3043   case Bytecodes::_fast_zputfield:
3044     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3045     break;
3046   case Bytecodes::_fast_bputfield:
3047     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3048     break;
3049   case Bytecodes::_fast_sputfield:
3050     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3051     break;
3052   case Bytecodes::_fast_cputfield:
3053     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3054     break;
3055   case Bytecodes::_fast_fputfield:

3071     // Check to see if a field access watch has been set before we
3072     // take the time to call into the VM.
3073     Label L1;
3074     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3075     __ testl(rcx, rcx);
3076     __ jcc(Assembler::zero, L1);
3077     // access constant pool cache entry
3078     __ load_field_entry(c_rarg2, rcx);
3079     __ verify_oop(rax);
3080     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3081     __ mov(c_rarg1, rax);
3082     // c_rarg1: object pointer copied above
3083     // c_rarg2: cache entry pointer
3084     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3085     __ pop_ptr(rax); // restore object pointer
3086     __ bind(L1);
3087   }
3088 
3089   // access constant pool cache
3090   __ load_field_entry(rcx, rbx);
3091   __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3092 
3093   // rax: object
3094   __ verify_oop(rax);
3095   __ null_check(rax);
3096   Address field(rax, rbx, Address::times_1);
3097 
3098   // access field
3099   switch (bytecode()) {
3100   case Bytecodes::_fast_agetfield:
3101     do_oop_load(_masm, field, rax);
3102     __ verify_oop(rax);
3103     break;
3104   case Bytecodes::_fast_lgetfield:
3105     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
3106     break;
3107   case Bytecodes::_fast_igetfield:
3108     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
3109     break;
3110   case Bytecodes::_fast_bgetfield:
3111     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
3112     break;
3113   case Bytecodes::_fast_sgetfield:
3114     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
3115     break;
3116   case Bytecodes::_fast_cgetfield:
3117     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
3118     break;
3119   case Bytecodes::_fast_fgetfield:

3504 
3505   // Note:  rax_callsite is already pushed
3506 
3507   // %%% should make a type profile for any invokedynamic that takes a ref argument
3508   // profile this call
3509   __ profile_call(rbcp);
3510   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3511 
3512   __ verify_oop(rax_callsite);
3513 
3514   __ jump_from_interpreted(rbx_method, rdx);
3515 }
3516 
3517 //-----------------------------------------------------------------------------
3518 // Allocation
3519 
3520 void TemplateTable::_new() {
3521   transition(vtos, atos);
3522   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3523   Label slow_case;
3524   Label slow_case_no_pop;
3525   Label done;
3526   Label initialize_header;
3527 
3528   __ get_cpool_and_tags(rcx, rax);
3529 
3530   // Make sure the class we're about to instantiate has been resolved.
3531   // This is done before loading InstanceKlass to be consistent with the order
3532   // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
3533   const int tags_offset = Array<u1>::base_offset_in_bytes();
3534   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3535   __ jcc(Assembler::notEqual, slow_case_no_pop);
3536 
3537   // get InstanceKlass
3538   __ load_resolved_klass_at_index(rcx, rcx, rdx);
3539   __ push(rcx);  // save the contents of klass for initializing the header
3540 
3541   // make sure klass is initialized
3542   // init_state needs acquire, but x86 is TSO, and so we are already good.
3543   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3544   __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);
3545 
3546   // get instance_size in InstanceKlass (scaled to a count of bytes)
3547   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3548   // test to see if it is malformed in some way
3549   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3550   __ jcc(Assembler::notZero, slow_case);
3551 
3552   // Allocate the instance:
3553   //  If TLAB is enabled:
3554   //    Try to allocate in the TLAB.
3555   //    If fails, go to the slow path.
3556   //    Initialize the allocation.
3557   //    Exit.
3558   //
3559   //  Go to slow path.
3560 
3561   if (UseTLAB) {
3562     __ tlab_allocate(rax, rdx, 0, rcx, rbx, slow_case);
3563     if (ZeroTLAB) {
3564       // the fields have been already cleared
3565       __ jmp(initialize_header);
3566     }
3567 
3568     // The object is initialized before the header.  If the object size is
3569     // zero, go directly to the header initialization.
3570     if (UseCompactObjectHeaders) {
3571       assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
3572       __ decrement(rdx, oopDesc::base_offset_in_bytes());
3573     } else {
3574       __ decrement(rdx, sizeof(oopDesc));
3575     }
3576     __ jcc(Assembler::zero, initialize_header);
3577 
3578     // Initialize topmost object field, divide rdx by 8, check if odd and
3579     // test if zero.
3580     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
3581     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
3582 
3583     // rdx must have been a multiple of 8
3584 #ifdef ASSERT
3585     // make sure rdx was a multiple of 8
3586     Label L;
3587     // Ignore partial flag stall after shrl() since it is debug VM
3588     __ jcc(Assembler::carryClear, L);
3589     __ stop("object size is not multiple of 2 - adjust this code");
3590     __ bind(L);
3591     // rdx must be > 0, no extra check needed here
3592 #endif
3593 
3594     // initialize remaining object fields: rdx was a multiple of 8
3595     { Label loop;
3596     __ bind(loop);
3597     int header_size_bytes = oopDesc::header_size() * HeapWordSize;
3598     assert(is_aligned(header_size_bytes, BytesPerLong), "oop header size must be 8-byte-aligned");
3599     __ movptr(Address(rax, rdx, Address::times_8, header_size_bytes - 1*oopSize), rcx);
3600     __ decrement(rdx);
3601     __ jcc(Assembler::notZero, loop);
3602     }
3603 
3604     // initialize object header only.
3605     __ bind(initialize_header);
3606     if (UseCompactObjectHeaders) {
3607       __ pop(rcx);   // get saved klass back in the register.
3608       __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
3609       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
3610     } else {
3611       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
3612                 (intptr_t)markWord::prototype().value()); // header
3613       __ pop(rcx);   // get saved klass back in the register.
3614       __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
3615       __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
3616       __ store_klass(rax, rcx, rscratch1);  // klass
3617     }
3618 
3619     if (DTraceAllocProbes) {
3620       // Trigger dtrace event for fastpath
3621       __ push(atos);
3622       __ call_VM_leaf(
3623            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3624       __ pop(atos);
3625     }
3626 
3627     __ jmp(done);
3628   }
3629 
3630   // slow case
3631   __ bind(slow_case);
3632   __ pop(rcx);   // restore stack pointer to what it was when we came in.
3633   __ bind(slow_case_no_pop);
3634 
3635   __ get_constant_pool(c_rarg1);
3636   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3637   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3638   __ verify_oop(rax);
3639 
3640   // continue
3641   __ bind(done);
3642 }
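
tlab_allocate is the classic bump-the-pointer fast path; conceptually (per-thread state, so no synchronization is needed):

    // HeapWord* obj = thread->tlab().top();
    // HeapWord* end = obj + size_in_words;
    // if (end > thread->tlab().end()) goto slow_case;  // TLAB exhausted
    // thread->tlab().set_top(end);                     // allocation done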
3643 
3644 void TemplateTable::newarray() {
3645   transition(itos, atos);
3646   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3647   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3648           c_rarg1, rax);
3649 }
3650 
3651 void TemplateTable::anewarray() {
3652   transition(itos, atos);
3653 
3654   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3655   __ get_constant_pool(c_rarg1);
3656   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3657           c_rarg1, c_rarg2, rax);
3658 }
3659 
3660 void TemplateTable::arraylength() {
3661   transition(atos, itos);
3662   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3663 }
3664 
3665 void TemplateTable::checkcast() {
3666   transition(atos, atos);
3667   Label done, is_null, ok_is_subtype, quicked, resolved;
3668   __ testptr(rax, rax); // object is in rax
3669   __ jcc(Assembler::zero, is_null);
3670 
3671   // Get cpool & tags index
3672   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3673   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3674   // See if bytecode has already been quicked
3675   __ cmpb(Address(rdx, rbx,
3676                   Address::times_1,
3677                   Array<u1>::base_offset_in_bytes()),
3678           JVM_CONSTANT_Class);
3679   __ jcc(Assembler::equal, quicked);
3680   __ push(atos); // save receiver for result, and for GC
3681   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3682 
3683   __ get_vm_result_metadata(rax);
3684 
3685   __ pop_ptr(rdx); // restore receiver
3686   __ jmpb(resolved);
3687 
3688   // Get superklass in rax and subklass in rbx
3689   __ bind(quicked);
3690   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3691   __ load_resolved_klass_at_index(rax, rcx, rbx);
3692 
3693   __ bind(resolved);
3694   __ load_klass(rbx, rdx, rscratch1);
3695 
3696   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
3697   // Superklass in rax.  Subklass in rbx.
3698   __ gen_subtype_check(rbx, ok_is_subtype);
3699 
3700   // Come here on failure
3701   __ push_ptr(rdx);
3702   // object is at TOS
3703   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3704 
3705   // Come here on success
3706   __ bind(ok_is_subtype);
3707   __ mov(rax, rdx); // Restore object from rdx into rax
3708 
3709   // Collect counts on whether this check-cast sees nulls a lot or not.
3710   if (ProfileInterpreter) {
3711     __ jmp(done);
3712     __ bind(is_null);
3713     __ profile_null_seen(rcx);
3714   } else {
3715     __ bind(is_null);   // same as 'done'
3716   }
3717   __ bind(done);
3718 }
3719 
3720 void TemplateTable::instanceof() {
3721   transition(atos, itos);
3722   Label done, is_null, ok_is_subtype, quicked, resolved;
3723   __ testptr(rax, rax);
3724   __ jcc(Assembler::zero, is_null);
3725 
3726   // Get cpool & tags index
3727   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3728   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3729   // See if bytecode has already been quicked
3730   __ cmpb(Address(rdx, rbx,
3731                   Address::times_1,
3732                   Array<u1>::base_offset_in_bytes()),
3733           JVM_CONSTANT_Class);
3734   __ jcc(Assembler::equal, quicked);
3735 
3736   __ push(atos); // save receiver for result, and for GC
3737   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3738 
3739   __ get_vm_result_metadata(rax);
3740 
3741   __ pop_ptr(rdx); // restore receiver
3742   __ verify_oop(rdx);
3743   __ load_klass(rdx, rdx, rscratch1);
3744   __ jmpb(resolved);
3745 
3746   // Get superklass in rax and subklass in rdx
3747   __ bind(quicked);
3748   __ load_klass(rdx, rax, rscratch1);
3749   __ load_resolved_klass_at_index(rax, rcx, rbx);
3750 
3751   __ bind(resolved);
3752 
3753   // Generate subtype check.  Blows rcx, rdi
3754   // Superklass in rax.  Subklass in rdx.
3755   __ gen_subtype_check(rdx, ok_is_subtype);
3756
3757   // Come here on failure
3758   __ xorl(rax, rax);
3759   __ jmpb(done);
3760   // Come here on success
3761   __ bind(ok_is_subtype);
3762   __ movl(rax, 1);
3763 
3764   // Collect counts on whether this test sees nulls a lot or not.
3765   if (ProfileInterpreter) {
3766     __ jmp(done);
3767     __ bind(is_null);
3768     __ profile_null_seen(rcx);
3769   } else {
3770     __ bind(is_null);   // same as 'done'
3771   }
3772   __ bind(done);
3773   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
3774   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
3775 }
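
Both checkcast and instanceof share the quickening scheme seen above: once the constant-pool tag is a resolved JVM_CONSTANT_Class, the klass is loaded straight from the resolved entries; otherwise the receiver is saved on the stack (for GC) and InterpreterRuntime::quicken_io_cc resolves it. As a conceptual summary:

    // if (tags[index] == JVM_CONSTANT_Class)   // already 'quicked'
    //   k = resolved_klass_at(index);          // fast path
    // else
    //   k = quicken_io_cc();                   // resolve; may safepoint/GC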
3776 
3777 
3778 //----------------------------------------------------------------------------------------------------
3779 // Breakpoints
3780 void TemplateTable::_breakpoint() {
3781   // Note: We get here even if we are single stepping..
3782   // jbug insists on setting breakpoints at every bytecode
3783   // even if we are in single step mode.
3784 
3785   transition(vtos, vtos);
3786 
3787   // get the unpatched byte code
3788   __ get_method(c_rarg1);
3789   __ call_VM(noreg,
3790              CAST_FROM_FN_PTR(address,
3791                               InterpreterRuntime::get_original_bytecode_at),
3792              c_rarg1, rbcp);
3793   __ mov(rbx, rax);  // why?
3794 
3795   // post the breakpoint event
3796   __ get_method(c_rarg1);
3797   __ call_VM(noreg,

3817 // Note: monitorenter & exit are symmetric routines; which is reflected
3818 //       in the assembly code structure as well
3819 //
3820 // Stack layout:
3821 //
3822 // [expressions  ] <--- rsp               = expression stack top
3823 // ..
3824 // [expressions  ]
3825 // [monitor entry] <--- monitor block top = expression stack bot
3826 // ..
3827 // [monitor entry]
3828 // [frame data   ] <--- monitor block bot
3829 // ...
3830 // [saved rbp    ] <--- rbp
3831 void TemplateTable::monitorenter() {
3832   transition(atos, vtos);
3833 
3834   // check for null object
3835   __ null_check(rax);
3836 
3837   const Address monitor_block_top(
3838         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3839   const Address monitor_block_bot(
3840         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3841   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3842 
3843   Label allocated;
3844 
3845   Register rtop = c_rarg3;
3846   Register rbot = c_rarg2;
3847   Register rmon = c_rarg1;
3848 
3849   // initialize entry pointer
3850   __ xorl(rmon, rmon); // points to free slot or null
3851 
3852   // find a free slot in the monitor block (result in rmon)
3853   {
3854     Label entry, loop, exit;
3855     __ movptr(rtop, monitor_block_top); // derelativize pointer
3856     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

3909   // rmon: points to monitor entry
3910   __ bind(allocated);
3911 
3912   // Increment bcp to point to the next bytecode, so exception
3913   // handling for async. exceptions works correctly.
3914   // The object has already been popped from the stack, so the
3915   // expression stack looks correct.
3916   __ increment(rbcp);
3917 
3918   // store object
3919   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
3920   __ lock_object(rmon);
3921 
3922   // check to make sure this monitor doesn't cause stack overflow after locking
3923   __ save_bcp();  // in case of exception
3924   __ generate_stack_overflow_check(0);
3925 
3926   // The bcp has already been incremented. Just need to dispatch to
3927   // next instruction.
3928   __ dispatch_next(vtos);

3929 }
3930 
3931 void TemplateTable::monitorexit() {
3932   transition(atos, vtos);
3933 
3934   // check for null object
3935   __ null_check(rax);
3936 
3937   const Address monitor_block_top(
3938         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3939   const Address monitor_block_bot(
3940         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3941   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3942 
3943   Register rtop = c_rarg1;
3944   Register rbot = c_rarg2;
3945 
3946   Label found;
3947 
3948   // find matching slot
3949   {
3950     Label entry, loop;
3951     __ movptr(rtop, monitor_block_top); // derelativize pointer
3952     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3953     // rtop points to current entry, starting with top-most entry
3954 
3955     __ lea(rbot, monitor_block_bot);    // points to word before bottom
3956                                         // of monitor block

--- patched version of src/hotspot/cpu/x86/templateTable_x86.cpp (Valhalla changes) ---

  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/macroAssembler.hpp"
  26 #include "compiler/disassembler.hpp"
  27 #include "gc/shared/collectedHeap.hpp"
  28 #include "gc/shared/gc_globals.hpp"
  29 #include "gc/shared/tlab_globals.hpp"
  30 #include "interpreter/interpreter.hpp"
  31 #include "interpreter/interpreterRuntime.hpp"
  32 #include "interpreter/interp_masm.hpp"
  33 #include "interpreter/templateTable.hpp"
  34 #include "memory/universe.hpp"
  35 #include "oops/methodCounters.hpp"
  36 #include "oops/methodData.hpp"
  37 #include "oops/objArrayKlass.hpp"
  38 #include "oops/oop.inline.hpp"
  39 #include "oops/inlineKlass.hpp"
  40 #include "oops/resolvedFieldEntry.hpp"
  41 #include "oops/resolvedIndyEntry.hpp"
  42 #include "oops/resolvedMethodEntry.hpp"
  43 #include "prims/jvmtiExport.hpp"
  44 #include "prims/methodHandles.hpp"
  45 #include "runtime/frame.inline.hpp"
  46 #include "runtime/safepointMechanism.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubRoutines.hpp"
  49 #include "runtime/synchronizer.hpp"
  50 #include "utilities/macros.hpp"
  51 
  52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  53 
  54 // Global Register Names
  55 static const Register rbcp     = r13;
  56 static const Register rlocals  = r14;
  57 
  58 // Address Computation: local variables
  59 static inline Address iaddress(int n) {

 151 static void do_oop_load(InterpreterMacroAssembler* _masm,
 152                         Address src,
 153                         Register dst,
 154                         DecoratorSet decorators = 0) {
 155   __ load_heap_oop(dst, src, rdx, decorators);
 156 }
 157 
 158 Address TemplateTable::at_bcp(int offset) {
 159   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 160   return Address(rbcp, offset);
 161 }
 162 
 163 
 164 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 165                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 166                                    int byte_no) {
 167   if (!RewriteBytecodes)  return;
 168   Label L_patch_done;
 169 
 170   switch (bc) {
 171   case Bytecodes::_fast_vputfield:
 172   case Bytecodes::_fast_aputfield:
 173   case Bytecodes::_fast_bputfield:
 174   case Bytecodes::_fast_zputfield:
 175   case Bytecodes::_fast_cputfield:
 176   case Bytecodes::_fast_dputfield:
 177   case Bytecodes::_fast_fputfield:
 178   case Bytecodes::_fast_iputfield:
 179   case Bytecodes::_fast_lputfield:
 180   case Bytecodes::_fast_sputfield:
 181     {
 182       // We skip bytecode quickening for putfield instructions when
 183       // the put_code written to the constant pool cache is zero.
 184       // This is required so that every execution of this instruction
 185       // calls out to InterpreterRuntime::resolve_get_put to do
 186       // additional, required work.
 187       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 188       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 189       __ load_field_entry(temp_reg, bc_reg);
 190       if (byte_no == f1_byte) {
 191         __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));

 760                     Address(rdx, rax,
 761                             Address::times_4,
 762                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
 763                     noreg);
 764 }
 765 
 766 void TemplateTable::daload() {
 767   transition(itos, dtos);
 768   // rax: index
 769   // rdx: array
 770   index_check(rdx, rax); // kills rbx
 771   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
 772                     Address(rdx, rax,
 773                             Address::times_8,
 774                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
 775                     noreg);
 776 }
 777 
 778 void TemplateTable::aaload() {
 779   transition(itos, atos);
 780   Register array = rdx;
 781   Register index = rax;
 782 
 783   index_check(array, index); // kills rbx
 784   __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
 785   if (UseArrayFlattening) {
 786     Label is_flat_array, done;
 787     __ test_flat_array_oop(array, rbx, is_flat_array);
 788     do_oop_load(_masm,
 789                 Address(array, index,
 790                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 791                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 792                 rax,
 793                 IS_ARRAY);
 794     __ jmp(done);
 795     __ bind(is_flat_array);
 796     __ movptr(rcx, array);
 797     call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), rcx, index);
 798     __ bind(done);
 799   } else {
 800     do_oop_load(_masm,
 801                 Address(array, index,
 802                         UseCompressedOops ? Address::times_4 : Address::times_ptr,
 803                         arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
 804                 rax,
 805                 IS_ARRAY);
 806   }
 807   __ profile_element_type(rbx, rax, rcx);
 808 }
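
For a flat (inline-type element) array there is no per-element oop to load, so the element has to be buffered on the heap by the runtime; a conceptual view of the two paths above:

    // if (!array->is_flat())   // regular reference element
    //   push(load_heap_oop(&array[index]));
    // else                     // flat: runtime allocates a buffer and copies the fields
    //   push(InterpreterRuntime::flat_array_load(array, index));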
 809 
 810 void TemplateTable::baload() {
 811   transition(itos, itos);
 812   // rax: index
 813   // rdx: array
 814   index_check(rdx, rax); // kills rbx
 815   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
 816                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
 817                     noreg);
 818 }
 819 
 820 void TemplateTable::caload() {
 821   transition(itos, itos);
 822   // rax: index
 823   // rdx: array
 824   index_check(rdx, rax); // kills rbx
 825   __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
 826                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
 827                     noreg);

1061   __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1062                      Address(rdx, rbx, Address::times_4,
1063                              arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1064                      noreg /* ftos */, noreg, noreg, noreg);
1065 }
1066 
1067 void TemplateTable::dastore() {
1068   transition(dtos, vtos);
1069   __ pop_i(rbx);
1070   // value is in xmm0
1071   // rbx:  index
1072   // rdx:  array
1073   index_check(rdx, rbx); // prefer index in rbx
1074   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1075                      Address(rdx, rbx, Address::times_8,
1076                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1077                      noreg /* dtos */, noreg, noreg, noreg);
1078 }
1079 
1080 void TemplateTable::aastore() {
1081   Label is_null, is_flat_array, ok_is_subtype, done;
1082   transition(vtos, vtos);
1083   // stack: ..., array, index, value
1084   __ movptr(rax, at_tos());    // value
1085   __ movl(rcx, at_tos_p1()); // index
1086   __ movptr(rdx, at_tos_p2()); // array
1087 
1088   Address element_address(rdx, rcx,
1089                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1090                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1091 
1092   index_check_without_pop(rdx, rcx);     // kills rbx
1093 
1094   __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1095   __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1096 
1097   __ testptr(rax, rax);
1098   __ jcc(Assembler::zero, is_null);
1099 
1100   // Move array class to rdi
1101   __ load_klass(rdi, rdx, rscratch1);
1102   if (UseArrayFlattening) {
1103     __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1104     __ test_flat_array_layout(rbx, is_flat_array);
1105   }
1106 
1107   // Move subklass into rbx
1108   __ load_klass(rbx, rax, rscratch1);
1109   // Move array element superklass into rax
1110   __ movptr(rax, Address(rdi,
1111                          ObjArrayKlass::element_klass_offset()));
1112 
1113   // Generate subtype check.  Blows rcx, rdi
1114   // Superklass in rax.  Subklass in rbx.
1115   // is "rbx <: rax" ? (value subclass <: array element superclass)
1116   __ gen_subtype_check(rbx, ok_is_subtype, false);
1117 
1118   // Come here on failure
1119   // object is at TOS
1120   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1121 
1122   // Come here on success
1123   __ bind(ok_is_subtype);
1124 
1125   // Get the value we will store
1126   __ movptr(rax, at_tos());
1127   __ movl(rcx, at_tos_p1()); // index
1128   // Now store using the appropriate barrier
1129   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1130   __ jmp(done);
1131 
1132   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1133   __ bind(is_null);
1134   if (EnableValhalla) {
1135     Label write_null_to_null_free_array, store_null;
1136 
1137     // Move array class to rdi
1138     __ load_klass(rdi, rdx, rscratch1);
1139     if (UseArrayFlattening) {
1140       __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1141       __ test_flat_array_layout(rbx, is_flat_array);
1142     }
1143 
1144     // No way to store null in null-free array
1145     __ test_null_free_array_oop(rdx, rbx, write_null_to_null_free_array);
1146     __ jmp(store_null);
1147 
1148     __ bind(write_null_to_null_free_array);
1149     __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));
1150 
1151     __ bind(store_null);
1152   }
1153   // Store a null
1154   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1155   __ jmp(done);
1156 
1157   if (UseArrayFlattening) {
1158     Label is_type_ok;
1159     __ bind(is_flat_array); // Store non-null value to flat
1160 
1161     __ movptr(rax, at_tos());
1162     __ movl(rcx, at_tos_p1()); // index
1163     __ movptr(rdx, at_tos_p2()); // array
1164 
1165     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), rax, rdx, rcx);
1166   }
1167   // Pop stack arguments
1168   __ bind(done);
1169   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1170 }
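
The Valhalla aastore adds two cases on top of the classic store check: a null store into a null-free array throws NullPointerException, and a non-null store into a flat array is delegated to InterpreterRuntime::flat_array_store, which copies the value's fields into the element. Conceptually:

    // if (value == null) {
    //   if (array->is_null_free()) throw NullPointerException;
    //   array[index] = null;                    // oop store with barriers
    // } else if (array->is_flat()) {
    //   flat_array_store(value, array, index);  // field-wise copy, no oop written
    // } else {
    //   /* classic subtype check + oop store */
    // }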
1171 
1172 void TemplateTable::bastore() {
1173   transition(itos, vtos);
1174   __ pop_i(rbx);
1175   // rax: value
1176   // rbx: index
1177   // rdx: array
1178   index_check(rdx, rbx); // prefer index in rbx
1179   // Need to check whether array is boolean or byte
1180   // since both types share the bastore bytecode.
1181   __ load_klass(rcx, rdx, rscratch1);
1182   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1183   int diffbit = Klass::layout_helper_boolean_diffbit();
1184   __ testl(rcx, diffbit);
1185   Label L_skip;
1186   __ jccb(Assembler::zero, L_skip);

1936   __ jcc(j_not(cc), not_taken);
1937   branch(false, false);
1938   __ bind(not_taken);
1939   __ profile_not_taken_branch(rax);
1940 }
1941 
1942 void TemplateTable::if_nullcmp(Condition cc) {
1943   transition(atos, vtos);
1944   // assume branch is more often taken than not (loops use backward branches)
1945   Label not_taken;
1946   __ testptr(rax, rax);
1947   __ jcc(j_not(cc), not_taken);
1948   branch(false, false);
1949   __ bind(not_taken);
1950   __ profile_not_taken_branch(rax);
1951 }
1952 
1953 void TemplateTable::if_acmp(Condition cc) {
1954   transition(atos, vtos);
1955   // assume branch is more often taken than not (loops use backward branches)
1956   Label taken, not_taken;
1957   __ pop_ptr(rdx);
1958 
1959   __ profile_acmp(rbx, rdx, rax, rcx);
1960 
1961   const int is_inline_type_mask = markWord::inline_type_pattern;
1962   if (EnableValhalla) {
1963     __ cmpoop(rdx, rax);
1964     __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
1965 
1966     // might be substitutable, test if either rax or rdx is null
1967     __ testptr(rax, rax);
1968     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
1969     __ testptr(rdx, rdx);
1970     __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
1971 
1972     // and both are value objects?
1973     __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
1974     __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
1975     __ andptr(rbx, is_inline_type_mask);
1976     __ cmpptr(rbx, is_inline_type_mask);
1977     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
1978 
1979     // same value klass ?
1980     __ load_metadata(rbx, rdx);
1981     __ load_metadata(rcx, rax);
1982     __ cmpptr(rbx, rcx);
1983     __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
1984 
1985     // Both are the same type, so test for substitutability...
1986     if (cc == equal) {
1987       invoke_is_substitutable(rax, rdx, taken, not_taken);
1988     } else {
1989       invoke_is_substitutable(rax, rdx, not_taken, taken);
1990     }
1991     __ stop("Not reachable");
1992   }
1993 
1994   __ cmpoop(rdx, rax);
1995   __ jcc(j_not(cc), not_taken);
1996   __ bind(taken);
1997   branch(false, false);
1998   __ bind(not_taken);
1999   __ profile_not_taken_branch(rax, true);
2000 }
2001 
2002 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2003                                             Label& is_subst, Label& not_subst) {
2004   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2005   // Result is in rax; jump to the corresponding outcome label
2006   __ testl(rax, rax);
2007   __ jcc(Assembler::zero, not_subst);
2008   __ jmp(is_subst);
2009 }
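
Under EnableValhalla, acmp can no longer be a plain pointer compare: two distinct heap buffers of the same value class may still be substitutable. The inline fast paths above settle the cheap cases, and only the final same-klass value-object case pays for the runtime call; in order:

    // a == b                  -> equal (same buffer, or same identity object)
    // a == null || b == null  -> not equal
    // not both inline types   -> not equal (identity objects handled above)
    // klass(a) != klass(b)    -> not equal
    // otherwise               -> InterpreterRuntime::is_substitutable(a, b)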
2010 
2011 void TemplateTable::ret() {
2012   transition(vtos, vtos);
2013   locals_index(rbx);
2014   __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
2015   __ profile_ret(rbx, rcx);
2016   __ get_method(rax);
2017   __ movptr(rbcp, Address(rax, Method::const_offset()));
2018   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2019                       ConstMethod::codes_offset()));
2020   __ dispatch_next(vtos, 0, true);
2021 }
2022 
2023 void TemplateTable::wide_ret() {
2024   transition(vtos, vtos);
2025   locals_index_wide(rbx);
2026   __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2027   __ profile_ret(rbx, rcx);
2028   __ get_method(rax);

2242   if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2243     Label no_safepoint;
2244     NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2245     __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2246     __ jcc(Assembler::zero, no_safepoint);
2247     __ push(state);
2248     __ push_cont_fastpath();
2249     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2250                                        InterpreterRuntime::at_safepoint));
2251     __ pop_cont_fastpath();
2252     __ pop(state);
2253     __ bind(no_safepoint);
2254   }
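  // In effect (illustrative sketch): if (thread->polling_word() & poll_bit())
  // call InterpreterRuntime::at_safepoint(), with the TOS value pushed and
  // popped around the call because the runtime call may block at the safepoint.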
2255 
2256   // Narrow result if state is itos but result type is smaller.
2257   // Need to narrow in the return bytecode rather than in generate_return_entry
2258   // since compiled code callers expect the result to already be narrowed.
2259   if (state == itos) {
2260     __ narrow(rax);
2261   }
2262 
2263   __ remove_activation(state, rbcp, true, true, true);
2264 
2265   __ jmp(rbcp);
2266 }
2267 
2268 // ----------------------------------------------------------------------------
2269 // Volatile variables demand their effects be made known to all CPUs
2270 // in order.  Store buffers on most chips allow reads & writes to
2271 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2272 // without some kind of memory barrier (i.e., it's not sufficient that
2273 // the interpreter does not reorder volatile references, the hardware
2274 // also must not reorder them).
2275 //
2276 // According to the new Java Memory Model (JMM):
2277 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2278 //     writes act as acquire & release, so:
2279 // (2) A read cannot let unrelated NON-volatile memory refs that
2280 //     happen after the read float up to before the read.  It's OK for
2281 //     non-volatile memory refs that happen before the volatile read to
2282 //     float down below it.
2283 // (3) Similarly, a volatile write cannot let unrelated NON-volatile

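// Illustrative mapping of the (partially elided) rules above onto
// acquire/release, using C++ std::atomic as a stand-in for a Java volatile:
//
//   std::atomic<int> vol;                         // the volatile field
//   int plain;                                    // a non-volatile field
//
//   plain = 1;                                    // may not sink below ...
//   vol.store(2, std::memory_order_release);      // ... the volatile write (rule 3)
//
//   int r = vol.load(std::memory_order_acquire);  // later plain accesses ...
//   int p = plain;                                // ... may not float above it (rule 2)
//
// Rule (1) additionally requires a StoreLoad barrier between a volatile store
// and a subsequent volatile load; that is what volatile_barrier() emits on the
// volatile store paths below.
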
2603     }
2604     // rax,:   object pointer or null
2605     // cache: cache entry pointer
2606     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2607               rax, cache);
2608 
2609     __ load_field_entry(cache, index);
2610     __ bind(L1);
2611   }
2612 }
2613 
2614 void TemplateTable::pop_and_check_object(Register r) {
2615   __ pop_ptr(r);
2616   __ null_check(r);  // for field access must check obj.
2617   __ verify_oop(r);
2618 }
2619 
2620 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2621   transition(vtos, vtos);
2622 
2623   const Register obj   = r9;
2624   const Register cache = rcx;
2625   const Register index = rdx;
2626   const Register off   = rbx;
2627   const Register tos_state   = rax;
2628   const Register flags = rdx;
2629   const Register bc    = c_rarg3; // uses same reg as obj, so don't mix them
2630 
2631   resolve_cache_and_index_for_field(byte_no, cache, index);
2632   jvmti_post_field_access(cache, index, is_static, false);
2633   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2634 
2635   const Address field(obj, off, Address::times_1, 0*wordSize);
2636 
2637   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
2638 
2639   // btos == 0, so a plain zero-test of tos_state suffices; no masking is needed
2640   assert(btos == 0, "change code, btos != 0");
2641   __ testl(tos_state, tos_state);
2642   __ jcc(Assembler::notZero, notByte);
2643 
2644   // btos
2645   if (!is_static) pop_and_check_object(obj);
2646   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
2647   __ push(btos);
2648   // Rewrite bytecode to be faster
2649   if (!is_static && rc == may_rewrite) {
2650     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2651   }
2652   __ jmp(Done);
2653 
2654   __ bind(notByte);
2655   __ cmpl(tos_state, ztos);
2656   __ jcc(Assembler::notEqual, notBool);
2657   // ztos (same code as btos)
2658   if (!is_static) pop_and_check_object(obj);
2659   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
2660   __ push(ztos);
2661   // Rewrite bytecode to be faster
2662   if (!is_static && rc == may_rewrite) {
2663     // use btos rewriting, no truncating to t/f bit is needed for getfield.
2664     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2665   }
2666   __ jmp(Done);
2667 
2668   __ bind(notBool);
2669   __ cmpl(tos_state, atos);
2670   __ jcc(Assembler::notEqual, notObj);
2671   // atos
2672   if (!EnableValhalla) {
2673     if (!is_static) pop_and_check_object(obj);
2674     do_oop_load(_masm, field, rax);
2675     __ push(atos);
2676     if (!is_static && rc == may_rewrite) {
2677       patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2678     }
2679     __ jmp(Done);
2680   } else {
2681     if (is_static) {
2682       __ load_heap_oop(rax, field);
2683       Label is_null_free_inline_type, uninitialized;
2684       // The code below must handle the case of a static field that has not been initialized yet
2685       __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
2686         // field is not a null free inline type
2687         __ push(atos);
2688         __ jmp(Done);
2689       // field is a null free inline type, must not return null even if uninitialized
2690       __ bind(is_null_free_inline_type);
2691         __ testptr(rax, rax);
2692         __ jcc(Assembler::zero, uninitialized);
2693         __ push(atos);
2694         __ jmp(Done);
2695       __ bind(uninitialized);
2696         __ jump(RuntimeAddress(Interpreter::_throw_NPE_UninitializedField_entry));
2697     } else {
2698       Label is_flat, nonnull, is_null_free_inline_type, rewrite_inline, has_null_marker;
2699       __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
2700       __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
2701       // field is not a null free inline type
2702       pop_and_check_object(obj);
2703       __ load_heap_oop(rax, field);
2704       __ push(atos);
2705       if (rc == may_rewrite) {
2706         patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2707       }
2708       __ jmp(Done);
2709       __ bind(is_null_free_inline_type);
2710       __ test_field_is_flat(flags, rscratch1, is_flat);
2711           // field is not flat
2712           pop_and_check_object(obj);
2713           __ load_heap_oop(rax, field);
2714           __ testptr(rax, rax);
2715           __ jcc(Assembler::notZero, nonnull);
2716           __ jump(RuntimeAddress(Interpreter::_throw_NPE_UninitializedField_entry));
2717           __ bind(nonnull);
2718           __ verify_oop(rax);
2719           __ push(atos);
2720           __ jmp(rewrite_inline);
2721         __ bind(is_flat);
2722           pop_and_check_object(rax);
2723           __ read_flat_field(rcx, rdx, rbx, rax);
2724           __ verify_oop(rax);
2725           __ push(atos);
2726           __ jmp(rewrite_inline);
2727       __ bind(has_null_marker);
2728         pop_and_check_object(rax);
2729         __ load_field_entry(rcx, rbx);
2730         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
2731         __ get_vm_result_oop(rax);
2732         __ push(atos);
2733       __ bind(rewrite_inline);
2734       if (rc == may_rewrite) {
2735         patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
2736       }
2737       __ jmp(Done);
2738     }
2739   }
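  // Decision tree implemented above for the Valhalla atos load, instance case
  // (illustrative summary only):
  //
  //   has null marker  -> call InterpreterRuntime::read_nullable_flat_field
  //   null-free, flat  -> read_flat_field (copy the payload out of the holder)
  //   null-free, heap  -> load the oop; null means uninitialized, so throw NPE
  //   otherwise        -> plain oop load (+ rewrite to _fast_agetfield)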
2740 
2741   __ bind(notObj);
2742 
2743   if (!is_static) pop_and_check_object(obj);
2744 
2745   __ cmpl(tos_state, itos);
2746   __ jcc(Assembler::notEqual, notInt);
2747   // itos
2748   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
2749   __ push(itos);
2750   // Rewrite bytecode to be faster
2751   if (!is_static && rc == may_rewrite) {
2752     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2753   }
2754   __ jmp(Done);
2755 
2756   __ bind(notInt);
2757   __ cmpl(tos_state, ctos);
2758   __ jcc(Assembler::notEqual, notChar);
2759   // ctos
2760   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
2761   __ push(ctos);
2762   // Rewrite bytecode to be faster
2763   if (!is_static && rc == may_rewrite) {
2764     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);

2824 #endif
2825 
2826   __ bind(Done);
2827   // [jk] not needed currently
2828   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2829   //                                              Assembler::LoadStore));
2830 }
2831 
2832 void TemplateTable::getfield(int byte_no) {
2833   getfield_or_static(byte_no, false);
2834 }
2835 
2836 void TemplateTable::nofast_getfield(int byte_no) {
2837   getfield_or_static(byte_no, false, may_not_rewrite);
2838 }
2839 
2840 void TemplateTable::getstatic(int byte_no) {
2841   getfield_or_static(byte_no, true);
2842 }
2843 
2844 // The registers cache and index are expected to be set before the call.
2845 // The function may destroy various registers, just not the cache and index registers.
2846 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2847   // Cache is rcx and index is rdx
2848   const Register entry = c_rarg2; // ResolvedFieldEntry
2849   const Register obj = c_rarg1;   // Object pointer
2850   const Register value = c_rarg3; // JValue object
2851 
2852   if (JvmtiExport::can_post_field_modification()) {
2853     // Check to see if a field modification watch has been set before
2854     // we take the time to call into the VM.
2855     Label L1;
2856     assert_different_registers(cache, obj, rax);
2857     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2858     __ testl(rax, rax);
2859     __ jcc(Assembler::zero, L1);
2860 
2861     __ mov(entry, cache);
2862 
2863     if (is_static) {

2885     // cache: field entry pointer
2886     // value: jvalue object on the stack
2887     __ call_VM(noreg,
2888               CAST_FROM_FN_PTR(address,
2889                               InterpreterRuntime::post_field_modification),
2890               obj, entry, value);
2891     // Reload field entry
2892     __ load_field_entry(cache, index);
2893     __ bind(L1);
2894   }
2895 }
2896 
2897 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2898   transition(vtos, vtos);
2899 
2900   const Register obj = rcx;
2901   const Register cache = rcx;
2902   const Register index = rdx;
2903   const Register tos_state   = rdx;
2904   const Register off   = rbx;
2905   const Register flags = r9;
2906 
2907   resolve_cache_and_index_for_field(byte_no, cache, index);
2908   jvmti_post_field_mod(cache, index, is_static);
2909   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2910 
2911   // [jk] not needed currently
2912   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2913   //                                              Assembler::StoreStore));
2914 
2915   Label notVolatile, Done;
2916 
2917   // Check for volatile store
2918   __ movl(rscratch1, flags);
2919   __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
2920   __ testl(rscratch1, rscratch1);
2921   __ jcc(Assembler::zero, notVolatile);
2922 
2923   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
2924   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2925                                                Assembler::StoreStore));
2926   __ jmp(Done);
2927   __ bind(notVolatile);
2928 
2929   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
2930 
2931   __ bind(Done);
2932 }
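// The volatility check above, in effect (illustrative sketch):
//
//   if (flags & (1 << ResolvedFieldEntry::is_volatile_shift)) {
//     store; barrier(StoreLoad | StoreStore);  // publish before any later load
//   } else {
//     store;                                   // common case: no barrier
//   }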
2933 
2934 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2935                                               Register obj, Register off, Register tos_state, Register flags) {
2936 
2937   // field addresses
2938   const Address field(obj, off, Address::times_1, 0*wordSize);
2939 
2940   Label notByte, notBool, notInt, notShort, notChar,
2941         notLong, notFloat, notObj, notInlineType;
2942   Label Done;
2943 
2944   const Register bc    = c_rarg3;
2945 
2946   // Test TOS state
2947   __ testl(tos_state, tos_state);
2948   __ jcc(Assembler::notZero, notByte);
2949 
2950   // btos
2951   {
2952     __ pop(btos);
2953     if (!is_static) pop_and_check_object(obj);
2954     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2955     if (!is_static && rc == may_rewrite) {
2956       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2957     }
2958     __ jmp(Done);
2959   }
2960 
2961   __ bind(notByte);
2962   __ cmpl(tos_state, ztos);
2963   __ jcc(Assembler::notEqual, notBool);
2964 
2965   // ztos
2966   {
2967     __ pop(ztos);
2968     if (!is_static) pop_and_check_object(obj);
2969     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
2970     if (!is_static && rc == may_rewrite) {
2971       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2972     }
2973     __ jmp(Done);
2974   }
2975 
2976   __ bind(notBool);
2977   __ cmpl(tos_state, atos);
2978   __ jcc(Assembler::notEqual, notObj);
2979 
2980   // atos
2981   {
2982     if (!EnableValhalla) {
2983       __ pop(atos);
2984       if (!is_static) pop_and_check_object(obj);
2985       // Store into the field
2986       do_oop_store(_masm, field, rax);
2987       if (!is_static && rc == may_rewrite) {
2988         patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2989       }
2990       __ jmp(Done);
2991     } else {
2992       __ pop(atos);
2993       if (is_static) {
2994         Label is_inline_type;
2995         __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
2996         __ null_check(rax);
2997         __ bind(is_inline_type);
2998         do_oop_store(_masm, field, rax);
2999         __ jmp(Done);
3000       } else {
3001         Label is_null_free_inline_type, is_flat, has_null_marker,
3002               write_null, rewrite_not_inline, rewrite_inline;
3003         __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3004         __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3005           // Not an inline type
3006           pop_and_check_object(obj);
3007           // Store into the field
3008           do_oop_store(_masm, field, rax);
3009           __ bind(rewrite_not_inline);
3010           if (rc == may_rewrite) {
3011             patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3012           }
3013           __ jmp(Done);
3014         // Implementation of the inline type semantic
3015         __ bind(is_null_free_inline_type);
3016           __ null_check(rax);
3017           __ test_field_is_flat(flags, rscratch1, is_flat);
3018             // field is not flat
3019             pop_and_check_object(obj);
3020             // Store into the field
3021             do_oop_store(_masm, field, rax);
3022           __ jmp(rewrite_inline);
3023           __ bind(is_flat);
3024             // field is flat
3025             __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3026             __ movptr(r9, Address(rcx, in_bytes(ResolvedFieldEntry::field_holder_offset())));
3027             pop_and_check_object(obj);  // obj = rcx
3028             __ load_klass(r8, rax, rscratch1);
3029             __ payload_addr(rax, rax, r8);
3030             __ addptr(obj, off);
3031             __ inline_layout_info(r9, rdx, rbx);
3032             // Because we use InlineLayoutInfo, we need value-access code specialized for fields (arrays will need a different API)
3033             __ flat_field_copy(IN_HEAP, rax, obj, rbx);
3034             __ jmp(rewrite_inline);
3035         __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3036           pop_and_check_object(rbx);
3037           __ load_field_entry(rcx, rdx);
3038           call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3039         __ bind(rewrite_inline);
3040         if (rc == may_rewrite) {
3041           patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
3042         }
3043         __ jmp(Done);
3044       }
3045     }
3046   }
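  // Store-side decision tree (illustrative summary, mirroring the load path):
  //
  //   has null marker -> call InterpreterRuntime::write_nullable_flat_field
  //   null-free       -> null_check(value), then flat_field_copy if flat,
  //                      otherwise a plain oop store
  //   otherwise       -> plain oop store (+ rewrite to _fast_aputfield)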
3047 
3048   __ bind(notObj);
3049   __ cmpl(tos_state, itos);
3050   __ jcc(Assembler::notEqual, notInt);
3051 
3052   // itos
3053   {
3054     __ pop(itos);
3055     if (!is_static) pop_and_check_object(obj);
3056     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3057     if (!is_static && rc == may_rewrite) {
3058       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3059     }
3060     __ jmp(Done);
3061   }
3062 
3063   __ bind(notInt);
3064   __ cmpl(tos_state, ctos);
3065   __ jcc(Assembler::notEqual, notChar);

3162 }
3163 
3164 void TemplateTable::jvmti_post_fast_field_mod() {
3165 
3166   const Register scratch = c_rarg3;
3167 
3168   if (JvmtiExport::can_post_field_modification()) {
3169     // Check to see if a field modification watch has been set before
3170     // we take the time to call into the VM.
3171     Label L2;
3172     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3173     __ testl(scratch, scratch);
3174     __ jcc(Assembler::zero, L2);
3175     __ pop_ptr(rbx);                  // copy the object pointer from tos
3176     __ verify_oop(rbx);
3177     __ push_ptr(rbx);                 // put the object pointer back on tos
3178     // Save tos values before call_VM() clobbers them. Since we have
3179     // to do it for every data type, we use the saved values as the
3180     // jvalue object.
3181     switch (bytecode()) {          // load values into the jvalue object
3182     case Bytecodes::_fast_vputfield: // fall through
3183     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3184     case Bytecodes::_fast_bputfield: // fall through
3185     case Bytecodes::_fast_zputfield: // fall through
3186     case Bytecodes::_fast_sputfield: // fall through
3187     case Bytecodes::_fast_cputfield: // fall through
3188     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3189     case Bytecodes::_fast_dputfield: __ push(dtos); break;
3190     case Bytecodes::_fast_fputfield: __ push(ftos); break;
3191     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3192 
3193     default:
3194       ShouldNotReachHere();
3195     }
3196     __ mov(scratch, rsp);             // points to jvalue on the stack
3197     // access constant pool cache entry
3198     __ load_field_entry(c_rarg2, rax);
3199     __ verify_oop(rbx);
3200     // rbx: object pointer copied above
3201     // c_rarg2: cache entry pointer
3202     // c_rarg3: jvalue object on the stack
3203     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
3204 
3205     switch (bytecode()) {             // restore tos values
3206     case Bytecodes::_fast_vputfield: // fall through
3207     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3208     case Bytecodes::_fast_bputfield: // fall through
3209     case Bytecodes::_fast_zputfield: // fall through
3210     case Bytecodes::_fast_sputfield: // fall through
3211     case Bytecodes::_fast_cputfield: // fall through
3212     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3213     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3214     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3215     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3216     default: break;
3217     }
3218     __ bind(L2);
3219   }
3220 }
3221 
3222 void TemplateTable::fast_storefield(TosState state) {
3223   transition(state, vtos);
3224 
3225   Label notVolatile, Done;
3226 
3227   jvmti_post_fast_field_mod();
3228 
3229   __ push(rax);
3230   __ load_field_entry(rcx, rax);
3231   load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
3232   __ pop(rax);
3233   // RBX: field offset, RAX: TOS value, RDX: flags
3234 
3235   // Get object from stack
3236   pop_and_check_object(rcx);
3237 
3238   // field address
3239   const Address field(rcx, rbx, Address::times_1);
3240 
3241   // Check for volatile store
3242   __ movl(rscratch2, rdx);  // saving flags for is_flat test
3243   __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3244   __ testl(rscratch2, rscratch2);
3245   __ jcc(Assembler::zero, notVolatile);
3246 
3247   fast_storefield_helper(field, rax, rdx);
3248   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3249                                                Assembler::StoreStore));
3250   __ jmp(Done);
3251   __ bind(notVolatile);
3252 
3253   fast_storefield_helper(field, rax, rdx);
3254 
3255   __ bind(Done);
3256 }
3257 
3258 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3259 
3260   // DANGER: 'field' argument depends on rcx and rbx
3261 
3262   // access field
3263   switch (bytecode()) {
3264   case Bytecodes::_fast_vputfield:
3265     {
3266       Label is_flat, has_null_marker, write_null, done;
3267       __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3268       // Null free field cases: flat or not flat
3269       __ null_check(rax);
3270       __ test_field_is_flat(flags, rscratch1, is_flat);
3271         // field is not flat
3272         do_oop_store(_masm, field, rax);
3273         __ jmp(done);
3274       __ bind(is_flat);
3275         __ load_field_entry(r8, r9);
3276         __ load_unsigned_short(r9, Address(r8, in_bytes(ResolvedFieldEntry::field_index_offset())));
3277         __ movptr(r8, Address(r8, in_bytes(ResolvedFieldEntry::field_holder_offset())));
3278         __ inline_layout_info(r8, r9, r8);
3279         __ load_klass(rdx, rax, rscratch1);
3280         __ payload_addr(rax, rax, rdx);
3281         __ lea(rcx, field);
3282         __ flat_field_copy(IN_HEAP, rax, rcx, r8);
3283         __ jmp(done);
3284       __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3285         __ movptr(rbx, rcx);
3286         __ load_field_entry(rcx, rdx);
3287         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3288       __ bind(done);
3289     }
3290     break;
3291   case Bytecodes::_fast_aputfield:
3292     {
3293       do_oop_store(_masm, field, rax);
3294     }
3295     break;
3296   case Bytecodes::_fast_lputfield:
3297     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3298     break;
3299   case Bytecodes::_fast_iputfield:
3300     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3301     break;
3302   case Bytecodes::_fast_zputfield:
3303     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3304     break;
3305   case Bytecodes::_fast_bputfield:
3306     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3307     break;
3308   case Bytecodes::_fast_sputfield:
3309     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3310     break;
3311   case Bytecodes::_fast_cputfield:
3312     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3313     break;
3314   case Bytecodes::_fast_fputfield:

3330     // Check to see if a field access watch has been set before we
3331     // take the time to call into the VM.
3332     Label L1;
3333     __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3334     __ testl(rcx, rcx);
3335     __ jcc(Assembler::zero, L1);
3336     // access constant pool cache entry
3337     __ load_field_entry(c_rarg2, rcx);
3338     __ verify_oop(rax);
3339     __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
3340     __ mov(c_rarg1, rax);
3341     // c_rarg1: object pointer copied above
3342     // c_rarg2: cache entry pointer
3343     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3344     __ pop_ptr(rax); // restore object pointer
3345     __ bind(L1);
3346   }
3347 
3348   // access constant pool cache
3349   __ load_field_entry(rcx, rbx);
3350   __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3351 
3352   // rax: object
3353   __ verify_oop(rax);
3354   __ null_check(rax);
3355   Address field(rax, rdx, Address::times_1);
3356 
3357   // access field
3358   switch (bytecode()) {
3359   case Bytecodes::_fast_vgetfield:
3360     {
3361       Label is_flat, nonnull, Done, has_null_marker;
3362       __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
3363       __ test_field_has_null_marker(rscratch1, rscratch2, has_null_marker);
3364       __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3365         // field is not flat
3366         __ load_heap_oop(rax, field);
3367         __ testptr(rax, rax);
3368         __ jcc(Assembler::notZero, nonnull);
3369           __ jump(RuntimeAddress(Interpreter::_throw_NPE_UninitializedField_entry));
3370         __ bind(nonnull);
3371         __ verify_oop(rax);
3372         __ jmp(Done);
3373       __ bind(is_flat);
3374       // field is flat
3375         __ read_flat_field(rcx, rdx, rbx, rax);
3376         __ jmp(Done);
3377       __ bind(has_null_marker);
3378         // rax = instance, rcx = resolved entry
3379         call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3380         __ get_vm_result_oop(rax);
3381       __ bind(Done);
3382       __ verify_oop(rax);
3383     }
3384     break;
3385   case Bytecodes::_fast_agetfield:
3386     do_oop_load(_masm, field, rax);
3387     __ verify_oop(rax);
3388     break;
3389   case Bytecodes::_fast_lgetfield:
3390     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
3391     break;
3392   case Bytecodes::_fast_igetfield:
3393     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
3394     break;
3395   case Bytecodes::_fast_bgetfield:
3396     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
3397     break;
3398   case Bytecodes::_fast_sgetfield:
3399     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
3400     break;
3401   case Bytecodes::_fast_cgetfield:
3402     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
3403     break;
3404   case Bytecodes::_fast_fgetfield:

3789 
3790   // Note:  rax_callsite is already pushed
3791 
3792   // %%% should make a type profile for any invokedynamic that takes a ref argument
3793   // profile this call
3794   __ profile_call(rbcp);
3795   __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3796 
3797   __ verify_oop(rax_callsite);
3798 
3799   __ jump_from_interpreted(rbx_method, rdx);
3800 }
3801 
3802 //-----------------------------------------------------------------------------
3803 // Allocation
3804 
3805 void TemplateTable::_new() {
3806   transition(vtos, atos);
3807   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3808   Label slow_case;
3809   Label done;
3810 
3811   __ get_cpool_and_tags(rcx, rax);
3812 
3813   // Make sure the class we're about to instantiate has been resolved.
3814   // This is done before loading the InstanceKlass to be consistent with the
3815   // order in which the constant pool is updated (see ConstantPool::klass_at_put)
3816   const int tags_offset = Array<u1>::base_offset_in_bytes();
3817   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3818   __ jcc(Assembler::notEqual, slow_case);
3819 
3820   // get InstanceKlass
3821   __ load_resolved_klass_at_index(rcx, rcx, rdx);
3822 
3823   // make sure klass is initialized
3824   // init_state needs acquire, but x86 is TSO, and so we are already good.
3825   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3826   __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);
3827 
3828   __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
3829   if (DTraceAllocProbes) {
3830     // Trigger dtrace event for fastpath
3831     __ push(atos);
3832     __ call_VM_leaf(
3833          CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3834     __ pop(atos);
3835   }
3836   __ jmp(done);
3837 
3838   // slow case
3839   __ bind(slow_case);
3840 
3841   __ get_constant_pool(c_rarg1);
3842   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3843   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3844   __ verify_oop(rax);
3845 
3846   // continue
3847   __ bind(done);
3848 }
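// The fast/slow split above, as pseudocode (illustrative; names are approximate
// and allocate_instance hides the TLAB allocation and object initialization):
//
//   if (cp->tag_at(index) != JVM_CONSTANT_Class) goto slow;  // not resolved yet
//   InstanceKlass* ik = cp->resolved_klass_at(index);
//   if (!initialized(ik)) goto slow;                         // clinit barrier
//   oop obj = allocate_instance(ik);                         // branches to slow on failure
//   ...
// slow:
//   obj = InterpreterRuntime::_new(cp, index);               // resolve, initialize, allocate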
3849 
3850 void TemplateTable::newarray() {
3851   transition(itos, atos);
3852   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3853   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3854           c_rarg1, rax);
3855 }
3856 
3857 void TemplateTable::anewarray() {
3858   transition(itos, atos);
3859 

3861   __ get_constant_pool(c_rarg1);
3862   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3863           c_rarg1, c_rarg2, rax);
3864 }
3865 
3866 void TemplateTable::arraylength() {
3867   transition(atos, itos);
3868   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3869 }
3870 
3871 void TemplateTable::checkcast() {
3872   transition(atos, atos);
3873   Label done, is_null, ok_is_subtype, quicked, resolved;
3874   __ testptr(rax, rax); // object is in rax
3875   __ jcc(Assembler::zero, is_null);
3876 
3877   // Get cpool & tags index
3878   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3879   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3880   // See if bytecode has already been quicked
3881   __ movzbl(rdx, Address(rdx, rbx,
3882       Address::times_1,
3883       Array<u1>::base_offset_in_bytes()));
3884   __ cmpl(rdx, JVM_CONSTANT_Class);
3885   __ jcc(Assembler::equal, quicked);
3886   __ push(atos); // save receiver for result, and for GC
3887   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3888 
3889   __ get_vm_result_metadata(rax);
3890 
3891   __ pop_ptr(rdx); // restore receiver
3892   __ jmpb(resolved);
3893 
3894   // Get superklass in rax and subklass in rbx
3895   __ bind(quicked);
3896   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3897   __ load_resolved_klass_at_index(rax, rcx, rbx);
3898 
3899   __ bind(resolved);
3900   __ load_klass(rbx, rdx, rscratch1);
3901 
3902   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
3903   // Superklass in rax.  Subklass in rbx.
3904   __ gen_subtype_check(rbx, ok_is_subtype);
3905 
3906   // Come here on failure
3907   __ push_ptr(rdx);
3908   // object is at TOS
3909   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3910 
3911   // Come here on success
3912   __ bind(ok_is_subtype);
3913   __ mov(rax, rdx); // Restore object from rdx
3914   __ jmp(done);
3915 
3916   __ bind(is_null);
3917 
3918   // Collect counts on whether this check-cast sees nulls a lot or not.
3919   if (ProfileInterpreter) {
3920     __ profile_null_seen(rcx);
3921   }
3922 
3923   __ bind(done);
3924 }
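// Net effect of checkcast (illustrative): null always passes, otherwise the
// subtype check decides, and the object is left on TOS either way:
//
//   if (obj != nullptr && !obj->klass()->is_subtype_of(K))
//     throw ClassCastException;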
3925 
3926 void TemplateTable::instanceof() {
3927   transition(atos, itos);
3928   Label done, is_null, ok_is_subtype, quicked, resolved;
3929   __ testptr(rax, rax);
3930   __ jcc(Assembler::zero, is_null);
3931 
3932   // Get cpool & tags index
3933   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3934   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3935   // See if bytecode has already been quicked
3936   __ movzbl(rdx, Address(rdx, rbx,
3937         Address::times_1,
3938         Array<u1>::base_offset_in_bytes()));
3939   __ cmpl(rdx, JVM_CONSTANT_Class);
3940   __ jcc(Assembler::equal, quicked);
3941 
3942   __ push(atos); // save receiver for result, and for GC
3943   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3944 
3945   __ get_vm_result_metadata(rax);
3946 
3947   __ pop_ptr(rdx); // restore receiver
3948   __ verify_oop(rdx);
3949   __ load_klass(rdx, rdx, rscratch1);
3950   __ jmpb(resolved);
3951 
3952   // Get superklass in rax and subklass in rdx
3953   __ bind(quicked);
3954   __ load_klass(rdx, rax, rscratch1);
3955   __ load_resolved_klass_at_index(rax, rcx, rbx);
3956 
3957   __ bind(resolved);
3958 
3959   // Generate subtype check.  Blows rcx, rdi

3963   // Come here on failure
3964   __ xorl(rax, rax);
3965   __ jmpb(done);
3966   // Come here on success
3967   __ bind(ok_is_subtype);
3968   __ movl(rax, 1);
3969 
3970   // Collect counts on whether this test sees nulls a lot or not.
3971   if (ProfileInterpreter) {
3972     __ jmp(done);
3973     __ bind(is_null);
3974     __ profile_null_seen(rcx);
3975   } else {
3976     __ bind(is_null);   // same as 'done'
3977   }
3978   __ bind(done);
3979   // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
3980   // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
3981 }
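// Net effect of instanceof (illustrative), matching the result comments above:
//
//   rax = (obj != nullptr && obj->klass()->is_subtype_of(K)) ? 1 : 0;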
3982 
3983 //----------------------------------------------------------------------------------------------------
3984 // Breakpoints
3985 void TemplateTable::_breakpoint() {
3986   // Note: We get here even if we are single stepping.
3987   // jbug insists on setting breakpoints at every bytecode
3988   // even if we are in single-step mode.
3989 
3990   transition(vtos, vtos);
3991 
3992   // get the unpatched byte code
3993   __ get_method(c_rarg1);
3994   __ call_VM(noreg,
3995              CAST_FROM_FN_PTR(address,
3996                               InterpreterRuntime::get_original_bytecode_at),
3997              c_rarg1, rbcp);
3998   __ mov(rbx, rax);  // save the original bytecode for the dispatch below; call_VM clobbers rax
3999 
4000   // post the breakpoint event
4001   __ get_method(c_rarg1);
4002   __ call_VM(noreg,

4022 // Note: monitorenter & exit are symmetric routines; which is reflected
4023 //       in the assembly code structure as well
4024 //
4025 // Stack layout:
4026 //
4027 // [expressions  ] <--- rsp               = expression stack top
4028 // ..
4029 // [expressions  ]
4030 // [monitor entry] <--- monitor block top = expression stack bot
4031 // ..
4032 // [monitor entry]
4033 // [frame data   ] <--- monitor block bot
4034 // ...
4035 // [saved rbp    ] <--- rbp
4036 void TemplateTable::monitorenter() {
4037   transition(atos, vtos);
4038 
4039   // check for null object
4040   __ null_check(rax);
4041 
4042   Label is_inline_type;
4043   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4044   __ test_markword_is_inline_type(rbx, is_inline_type);
4045 
4046   const Address monitor_block_top(
4047         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4048   const Address monitor_block_bot(
4049         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4050   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4051 
4052   Label allocated;
4053 
4054   Register rtop = c_rarg3;
4055   Register rbot = c_rarg2;
4056   Register rmon = c_rarg1;
4057 
4058   // initialize entry pointer
4059   __ xorl(rmon, rmon); // points to free slot or null
4060 
4061   // find a free slot in the monitor block (result in rmon)
4062   {
4063     Label entry, loop, exit;
4064     __ movptr(rtop, monitor_block_top); // derelativize pointer
4065     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));

4118   // rmon: points to monitor entry
4119   __ bind(allocated);
4120 
4121   // Increment bcp to point to the next bytecode, so exception
4122   // handling for async. exceptions work correctly.
4123   // The object has already been popped from the stack, so the
4124   // expression stack looks correct.
4125   __ increment(rbcp);
4126 
4127   // store object
4128   __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4129   __ lock_object(rmon);
4130 
4131   // check to make sure this monitor doesn't cause stack overflow after locking
4132   __ save_bcp();  // in case of exception
4133   __ generate_stack_overflow_check(0);
4134 
4135   // The bcp has already been incremented. Just need to dispatch to
4136   // next instruction.
4137   __ dispatch_next(vtos);
4138 
4139   __ bind(is_inline_type);
4140   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4141                     InterpreterRuntime::throw_identity_exception), rax);
4142   __ should_not_reach_here();
4143 }
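// The free-slot search above (partially elided) scans the monitor block from
// top to bottom; illustrative sketch:
//
//   BasicObjectLock* free = nullptr;
//   for (BasicObjectLock* e = top; e != bot; e++) {
//     if (e->obj() == nullptr) free = e;   // remember a free slot
//     if (e->obj() == lockee)  break;      // this frame already locks the object
//   }
//   // if no free slot was found, the monitor block is grown (elided slow path)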
4144 
4145 void TemplateTable::monitorexit() {
4146   transition(atos, vtos);
4147 
4148   // check for null object
4149   __ null_check(rax);
4150 
4151   const int is_inline_type_mask = markWord::inline_type_pattern;
4152   Label has_identity;
4153   __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4154   __ andptr(rbx, is_inline_type_mask);
4155   __ cmpl(rbx, is_inline_type_mask);
4156   __ jcc(Assembler::notEqual, has_identity);
4157   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4158                      InterpreterRuntime::throw_illegal_monitor_state_exception));
4159   __ should_not_reach_here();
4160   __ bind(has_identity);
4161 
4162   const Address monitor_block_top(
4163         rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4164   const Address monitor_block_bot(
4165         rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4166   const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4167 
4168   Register rtop = c_rarg1;
4169   Register rbot = c_rarg2;
4170 
4171   Label found;
4172 
4173   // find matching slot
4174   {
4175     Label entry, loop;
4176     __ movptr(rtop, monitor_block_top); // derelativize pointer
4177     __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4178     // rtop points to current entry, starting with top-most entry
4179 
4180     __ lea(rbot, monitor_block_bot);    // points to word before bottom
4181                                         // of monitor block