19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "compiler/disassembler.hpp"
27 #include "gc/shared/collectedHeap.hpp"
28 #include "gc/shared/gc_globals.hpp"
29 #include "gc/shared/tlab_globals.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "interpreter/interpreterRuntime.hpp"
32 #include "interpreter/interp_masm.hpp"
33 #include "interpreter/templateTable.hpp"
34 #include "memory/universe.hpp"
35 #include "oops/methodCounters.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/objArrayKlass.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "oops/resolvedFieldEntry.hpp"
40 #include "oops/resolvedIndyEntry.hpp"
41 #include "oops/resolvedMethodEntry.hpp"
42 #include "prims/jvmtiExport.hpp"
43 #include "prims/methodHandles.hpp"
44 #include "runtime/frame.inline.hpp"
45 #include "runtime/safepointMechanism.hpp"
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubRoutines.hpp"
48 #include "runtime/synchronizer.hpp"
49 #include "utilities/macros.hpp"
50
51 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
52
53 // Global Register Names
// Fixed register assignments used by every template in this file:
// rbcp holds the current bytecode pointer (see at_bcp(), which asserts
// the template declared bcp usage); rlocals is the base of the current
// frame's local-variable area (used by iaddress() and friends).
54 static const Register rbcp = r13;
55 static const Register rlocals = r14;
56
57 // Address Computation: local variables
58 static inline Address iaddress(int n) {
// Barrier-aware oop load: reads the (possibly compressed) oop at 'src'
// into 'dst' via load_heap_oop, applying the given access 'decorators'
// (e.g. IS_ARRAY for array element loads).
// NOTE(review): rdx is handed to load_heap_oop as an extra register —
// presumably a temp for the GC barrier code, so callers should treat rdx
// as clobbered; confirm against load_heap_oop's signature.
150 static void do_oop_load(InterpreterMacroAssembler* _masm,
151                         Address src,
152                         Register dst,
153                         DecoratorSet decorators = 0) {
154   __ load_heap_oop(dst, src, rdx, decorators);
155 }
156
// Address of the byte at 'offset' from the current bytecode pointer.
// Only legal for templates that declared bcp usage — asserted below so a
// mismatch between the template descriptor and the generator is caught
// in debug builds.
157 Address TemplateTable::at_bcp(int offset) {
158   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
159   return Address(rbcp, offset);
160 }
161
162
163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
164 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
165 int byte_no) {
166 if (!RewriteBytecodes) return;
167 Label L_patch_done;
168
169 switch (bc) {
170 case Bytecodes::_fast_aputfield:
171 case Bytecodes::_fast_bputfield:
172 case Bytecodes::_fast_zputfield:
173 case Bytecodes::_fast_cputfield:
174 case Bytecodes::_fast_dputfield:
175 case Bytecodes::_fast_fputfield:
176 case Bytecodes::_fast_iputfield:
177 case Bytecodes::_fast_lputfield:
178 case Bytecodes::_fast_sputfield:
179 {
180 // We skip bytecode quickening for putfield instructions when
181 // the put_code written to the constant pool cache is zero.
182 // This is required so that every execution of this instruction
183 // calls out to InterpreterRuntime::resolve_get_put to do
184 // additional, required work.
185 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
186 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
187 __ load_field_entry(temp_reg, bc_reg);
188 if (byte_no == f1_byte) {
189 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
758 Address(rdx, rax,
759 Address::times_4,
760 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
761 noreg);
762 }
763
// daload: load a double from a double[] element.
// On entry (itos): rax = index, rdx = array oop. index_check performs the
// bounds check (throws ArrayIndexOutOfBoundsException path) and kills rbx.
// The result is delivered in the dtos top-of-stack location (noreg /* dtos */
// tells access_load_at to target the FP tos register).
764 void TemplateTable::daload() {
765   transition(itos, dtos);
766   // rax: index
767   // rdx: array
768   index_check(rdx, rax); // kills rbx
769   __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
770                     Address(rdx, rax,
771                             Address::times_8,
772                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
773                     noreg);
774 }
775
// aaload: load an object reference from an Object[] element.
// On entry (itos): rax = index, rdx = array oop. The element scale depends
// on UseCompressedOops (4-byte narrow oop vs. pointer-sized). do_oop_load
// applies the GC barrier with the IS_ARRAY decorator; result lands in rax
// (atos). Note rax is both the index input and the result register.
776 void TemplateTable::aaload() {
777   transition(itos, atos);
778   // rax: index
779   // rdx: array
780   index_check(rdx, rax); // kills rbx
781   do_oop_load(_masm,
782               Address(rdx, rax,
783                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
784                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
785               rax,
786               IS_ARRAY);
787 }
788
// baload: load a byte/boolean array element into rax (itos).
// On entry (itos): rax = index, rdx = array oop; bounds check kills rbx.
789 void TemplateTable::baload() {
790   transition(itos, itos);
791   // rax: index
792   // rdx: array
793   index_check(rdx, rax); // kills rbx
794   __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
795                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
796                     noreg);
797 }
798
799 void TemplateTable::caload() {
800 transition(itos, itos);
801 // rax: index
802 // rdx: array
803 index_check(rdx, rax); // kills rbx
804 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
805 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
806 noreg);
1040 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1041 Address(rdx, rbx, Address::times_4,
1042 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1043 noreg /* ftos */, noreg, noreg, noreg);
1044 }
1045
// dastore: store a double into a double[] element.
// On entry (dtos): value in xmm0 (see comment below), array oop in rdx;
// the index is popped into rbx. index_check bounds-checks before the store.
// noreg /* dtos */ directs access_store_at to take the value from the FP tos.
1046 void TemplateTable::dastore() {
1047   transition(dtos, vtos);
1048   __ pop_i(rbx);
1049   // value is in xmm0
1050   // rbx: index
1051   // rdx: array
1052   index_check(rdx, rbx); // prefer index in rbx
1053   __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1054                      Address(rdx, rbx, Address::times_8,
1055                              arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1056                      noreg /* dtos */, noreg, noreg, noreg);
1057 }
1058
// aastore: store an object reference into an Object[] element.
// Operands stay on the expression stack (vtos) until the end so they remain
// visible to GC across the runtime calls: ..., array, index, value.
// A null value bypasses the element-type check entirely; a non-null value
// is subtype-checked against the array's element klass, and failure jumps
// to the shared ArrayStoreException entry. Both store paths go through
// do_oop_store, which applies the GC barriers (IS_ARRAY).
1059 void TemplateTable::aastore() {
1060   Label is_null, ok_is_subtype, done;
1061   transition(vtos, vtos);
1062   // stack: ..., array, index, value
1063   __ movptr(rax, at_tos());    // value
1064   __ movl(rcx, at_tos_p1()); // index
1065   __ movptr(rdx, at_tos_p2()); // array
1066
  // Element address computed once; scale depends on compressed oops.
1067   Address element_address(rdx, rcx,
1068                           UseCompressedOops? Address::times_4 : Address::times_ptr,
1069                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1070
1071   index_check_without_pop(rdx, rcx);     // kills rbx
1072   __ testptr(rax, rax);
1073   __ jcc(Assembler::zero, is_null);
1074
1075   // Move subklass into rbx
1076   __ load_klass(rbx, rax, rscratch1);
1077   // Move superklass into rax
1078   __ load_klass(rax, rdx, rscratch1);
1079   __ movptr(rax, Address(rax,
1080                          ObjArrayKlass::element_klass_offset()));
1081
1082   // Generate subtype check.  Blows rcx, rdi
1083   // Superklass in rax.  Subklass in rbx.
1084   __ gen_subtype_check(rbx, ok_is_subtype);
1085
1086   // Come here on failure
1087   // object is at TOS
1088   __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1089
1090   // Come here on success
1091   __ bind(ok_is_subtype);
1092
  // rax/rcx were clobbered by the subtype check — reload value and index.
1093   // Get the value we will store
1094   __ movptr(rax, at_tos());
1095   __ movl(rcx, at_tos_p1()); // index
1096   // Now store using the appropriate barrier
1097   do_oop_store(_masm, element_address, rax, IS_ARRAY);
1098   __ jmp(done);
1099
1100   // Have a null in rax, rdx=array, ecx=index.  Store null at ary[idx]
1101   __ bind(is_null);
1102   __ profile_null_seen(rbx);
1103
1104   // Store a null
1105   do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1106
  // Pop array, index and value only after the store completed.
1107   // Pop stack arguments
1108   __ bind(done);
1109   __ addptr(rsp, 3 * Interpreter::stackElementSize);
1110 }
1111
1112 void TemplateTable::bastore() {
1113 transition(itos, vtos);
1114 __ pop_i(rbx);
1115 // rax: value
1116 // rbx: index
1117 // rdx: array
1118 index_check(rdx, rbx); // prefer index in rbx
1119 // Need to check whether array is boolean or byte
1120 // since both types share the bastore bytecode.
1121 __ load_klass(rcx, rdx, rscratch1);
1122 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1123 int diffbit = Klass::layout_helper_boolean_diffbit();
1124 __ testl(rcx, diffbit);
1125 Label L_skip;
1126 __ jccb(Assembler::zero, L_skip);
1874 __ jcc(j_not(cc), not_taken);
1875 branch(false, false);
1876 __ bind(not_taken);
1877 __ profile_not_taken_branch(rax);
1878 }
1879
// ifnull / ifnonnull: branch on whether the atos value (rax) is null.
// The branch is emitted as fall-through-on-not-taken, i.e. the inverted
// condition jumps over the taken-branch code, since taken branches are
// assumed more frequent (loop back-edges).
1880 void TemplateTable::if_nullcmp(Condition cc) {
1881   transition(atos, vtos);
1882   // assume branch is more often taken than not (loops use backward branches)
1883   Label not_taken;
1884   __ testptr(rax, rax);
1885   __ jcc(j_not(cc), not_taken);
1886   branch(false, false);
1887   __ bind(not_taken);
1888   __ profile_not_taken_branch(rax);
1889 }
1890
// if_acmpeq / if_acmpne: compare two references for (in)equality.
// Second operand is on tos (rax, atos); the first is popped into rdx.
// cmpoop handles the compressed-oop-aware comparison; not-taken path
// falls through and is profiled.
1891 void TemplateTable::if_acmp(Condition cc) {
1892   transition(atos, vtos);
1893   // assume branch is more often taken than not (loops use backward branches)
1894   Label not_taken;
1895   __ pop_ptr(rdx);
1896   __ cmpoop(rdx, rax);
1897   __ jcc(j_not(cc), not_taken);
1898   branch(false, false);
1899   __ bind(not_taken);
1900   __ profile_not_taken_branch(rax);
1901 }
1902
// ret: return from a jsr subroutine. Loads the saved return bci from the
// local named by the bytecode operand, then rebuilds rbcp as
// ConstMethod::codes_offset() + bci relative to the method's ConstMethod,
// and dispatches the next bytecode.
1903 void TemplateTable::ret() {
1904   transition(vtos, vtos);
1905   locals_index(rbx);
1906   __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1907   __ profile_ret(rbx, rcx);
1908   __ get_method(rax);
1909   __ movptr(rbcp, Address(rax, Method::const_offset()));
1910   __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
1911                        ConstMethod::codes_offset()));
1912   __ dispatch_next(vtos, 0, true);
1913 }
1914
1915 void TemplateTable::wide_ret() {
1916 transition(vtos, vtos);
1917 locals_index_wide(rbx);
1918 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
1919 __ profile_ret(rbx, rcx);
1920 __ get_method(rax);
2134 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2135 Label no_safepoint;
2136 NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2137 __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2138 __ jcc(Assembler::zero, no_safepoint);
2139 __ push(state);
2140 __ push_cont_fastpath();
2141 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2142 InterpreterRuntime::at_safepoint));
2143 __ pop_cont_fastpath();
2144 __ pop(state);
2145 __ bind(no_safepoint);
2146 }
2147
2148 // Narrow result if state is itos but result type is smaller.
2149 // Need to narrow in the return bytecode rather than in generate_return_entry
2150 // since compiled code callers expect the result to already be narrowed.
2151 if (state == itos) {
2152 __ narrow(rax);
2153 }
2154 __ remove_activation(state, rbcp);
2155
2156 __ jmp(rbcp);
2157 }
2158
2159 // ----------------------------------------------------------------------------
2160 // Volatile variables demand their effects be made known to all CPU's
2161 // in order. Store buffers on most chips allow reads & writes to
2162 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2163 // without some kind of memory barrier (i.e., it's not sufficient that
2164 // the interpreter does not reorder volatile references, the hardware
2165 // also must not reorder them).
2166 //
2167 // According to the new Java Memory Model (JMM):
2168 // (1) All volatiles are serialized with respect to each other.  ALSO reads &
2169 // writes act as acquire & release, so:
2170 // (2) A read cannot let unrelated NON-volatile memory refs that
2171 // happen after the read float up to before the read. It's OK for
2172 // non-volatile memory refs that happen before the volatile read to
2173 // float down below it.
2174 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2506 }
2507 // rax,: object pointer or null
2508 // cache: cache entry pointer
2509 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2510 rax, cache);
2511
2512 __ load_field_entry(cache, index);
2513 __ bind(L1);
2514 }
2515 }
2516
// Pop the receiver oop into 'r', emit the required null check (field access
// on a null receiver must throw NullPointerException), and verify the oop
// in debug builds.
2517 void TemplateTable::pop_and_check_object(Register r) {
2518   __ pop_ptr(r);
2519   __ null_check(r);  // for field access must check obj.
2520   __ verify_oop(r);
2521 }
2522
2523 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2524 transition(vtos, vtos);
2525
2526 const Register obj = c_rarg3;
2527 const Register cache = rcx;
2528 const Register index = rdx;
2529 const Register off = rbx;
2530 const Register tos_state = rax;
2531 const Register flags = rdx;
2532 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2533
2534 resolve_cache_and_index_for_field(byte_no, cache, index);
2535 jvmti_post_field_access(cache, index, is_static, false);
2536 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2537
2538 if (!is_static) pop_and_check_object(obj);
2539
2540 const Address field(obj, off, Address::times_1, 0*wordSize);
2541
2542 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2543
2544 // Make sure we don't need to mask edx after the above shift
2545 assert(btos == 0, "change code, btos != 0");
2546 __ testl(tos_state, tos_state);
2547 __ jcc(Assembler::notZero, notByte);
2548
2549 // btos
2550 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
2551 __ push(btos);
2552 // Rewrite bytecode to be faster
2553 if (!is_static && rc == may_rewrite) {
2554 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2555 }
2556 __ jmp(Done);
2557
2558 __ bind(notByte);
2559 __ cmpl(tos_state, ztos);
2560 __ jcc(Assembler::notEqual, notBool);
2561
2562 // ztos (same code as btos)
2563 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
2564 __ push(ztos);
2565 // Rewrite bytecode to be faster
2566 if (!is_static && rc == may_rewrite) {
2567 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2568 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2569 }
2570 __ jmp(Done);
2571
2572 __ bind(notBool);
2573 __ cmpl(tos_state, atos);
2574 __ jcc(Assembler::notEqual, notObj);
2575 // atos
2576 do_oop_load(_masm, field, rax);
2577 __ push(atos);
2578 if (!is_static && rc == may_rewrite) {
2579 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2580 }
2581 __ jmp(Done);
2582
2583 __ bind(notObj);
2584 __ cmpl(tos_state, itos);
2585 __ jcc(Assembler::notEqual, notInt);
2586 // itos
2587 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
2588 __ push(itos);
2589 // Rewrite bytecode to be faster
2590 if (!is_static && rc == may_rewrite) {
2591 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2592 }
2593 __ jmp(Done);
2594
2595 __ bind(notInt);
2596 __ cmpl(tos_state, ctos);
2597 __ jcc(Assembler::notEqual, notChar);
2598 // ctos
2599 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
2600 __ push(ctos);
2601 // Rewrite bytecode to be faster
2602 if (!is_static && rc == may_rewrite) {
2603 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2663 #endif
2664
2665 __ bind(Done);
2666 // [jk] not needed currently
2667 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2668 // Assembler::LoadStore));
2669 }
2670
// getfield: instance field load; delegates to the shared generator with
// is_static = false (default rewrite control allows bytecode quickening).
2671 void TemplateTable::getfield(int byte_no) {
2672   getfield_or_static(byte_no, false);
2673 }
2674
// nofast_getfield: same as getfield but with may_not_rewrite, so the
// bytecode is never quickened to a _fast_Xgetfield form.
2675 void TemplateTable::nofast_getfield(int byte_no) {
2676   getfield_or_static(byte_no, false, may_not_rewrite);
2677 }
2678
// getstatic: static field load; delegates to the shared generator with
// is_static = true (no receiver is popped or null-checked).
2679 void TemplateTable::getstatic(int byte_no) {
2680   getfield_or_static(byte_no, true);
2681 }
2682
2683
2684 // The registers cache and index expected to be set before call.
2685 // The function may destroy various registers, just not the cache and index registers.
2686 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2687 // Cache is rcx and index is rdx
2688 const Register entry = c_rarg2; // ResolvedFieldEntry
2689 const Register obj = c_rarg1; // Object pointer
2690 const Register value = c_rarg3; // JValue object
2691
2692 if (JvmtiExport::can_post_field_modification()) {
2693 // Check to see if a field modification watch has been set before
2694 // we take the time to call into the VM.
2695 Label L1;
2696 assert_different_registers(cache, obj, rax);
2697 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2698 __ testl(rax, rax);
2699 __ jcc(Assembler::zero, L1);
2700
2701 __ mov(entry, cache);
2702
2703 if (is_static) {
2725 // cache: field entry pointer
2726 // value: jvalue object on the stack
2727 __ call_VM(noreg,
2728 CAST_FROM_FN_PTR(address,
2729 InterpreterRuntime::post_field_modification),
2730 obj, entry, value);
2731 // Reload field entry
2732 __ load_field_entry(cache, index);
2733 __ bind(L1);
2734 }
2735 }
2736
// Shared generator for putfield / putstatic / nofast_putfield.
// Resolves the field entry, posts the JVMTI modification event if enabled,
// then emits the store body TWICE — once followed by the StoreLoad|StoreStore
// barrier required for volatile fields, once without — and selects between
// them at runtime on the is_volatile bit of the resolved entry's flags.
// Register aliasing is deliberate: obj/cache share rcx and index/tos_state
// share rdx; load_resolved_field_entry overwrites the resolution inputs.
2737 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2738   transition(vtos, vtos);
2739
2740   const Register obj = rcx;
2741   const Register cache = rcx;
2742   const Register index = rdx;
2743   const Register tos_state = rdx;
2744   const Register off = rbx;
2745   const Register flags = rax;
2746
2747   resolve_cache_and_index_for_field(byte_no, cache, index);
2748   jvmti_post_field_mod(cache, index, is_static);
2749   load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2750
2751   // [jk] not needed currently
2752   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2753   //                                              Assembler::StoreStore));
2754
2755   Label notVolatile, Done;
2756
2757   // Check for volatile store
  // NOTE(review): andl already sets ZF from its result, so the testl below
  // is redundant in the emitted stream; kept as-is for clarity/consistency.
2758   __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
2759   __ testl(flags, flags);
2760   __ jcc(Assembler::zero, notVolatile);
2761
2762   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2763   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2764                                                Assembler::StoreStore));
2765   __ jmp(Done);
2766   __ bind(notVolatile);
2767
2768   putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2769
2770   __ bind(Done);
2771 }
2772
2773 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2774 Register obj, Register off, Register tos_state) {
2775
2776 // field addresses
2777 const Address field(obj, off, Address::times_1, 0*wordSize);
2778
2779 Label notByte, notBool, notInt, notShort, notChar,
2780 notLong, notFloat, notObj;
2781 Label Done;
2782
2783 const Register bc = c_rarg3;
2784
2785 // Test TOS state
2786 __ testl(tos_state, tos_state);
2787 __ jcc(Assembler::notZero, notByte);
2788
2789 // btos
2790 {
2791 __ pop(btos);
2792 if (!is_static) pop_and_check_object(obj);
2793 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2794 if (!is_static && rc == may_rewrite) {
2795 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2796 }
2797 __ jmp(Done);
2798 }
2799
2800 __ bind(notByte);
2801 __ cmpl(tos_state, ztos);
2802 __ jcc(Assembler::notEqual, notBool);
2803
2804 // ztos
2805 {
2806 __ pop(ztos);
2807 if (!is_static) pop_and_check_object(obj);
2808 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
2809 if (!is_static && rc == may_rewrite) {
2810 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2811 }
2812 __ jmp(Done);
2813 }
2814
2815 __ bind(notBool);
2816 __ cmpl(tos_state, atos);
2817 __ jcc(Assembler::notEqual, notObj);
2818
2819 // atos
2820 {
2821 __ pop(atos);
2822 if (!is_static) pop_and_check_object(obj);
2823 // Store into the field
2824 do_oop_store(_masm, field, rax);
2825 if (!is_static && rc == may_rewrite) {
2826 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2827 }
2828 __ jmp(Done);
2829 }
2830
2831 __ bind(notObj);
2832 __ cmpl(tos_state, itos);
2833 __ jcc(Assembler::notEqual, notInt);
2834
2835 // itos
2836 {
2837 __ pop(itos);
2838 if (!is_static) pop_and_check_object(obj);
2839 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
2840 if (!is_static && rc == may_rewrite) {
2841 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2842 }
2843 __ jmp(Done);
2844 }
2845
2846 __ bind(notInt);
2847 __ cmpl(tos_state, ctos);
2848 __ jcc(Assembler::notEqual, notChar);
2945 }
2946
// Post a JVMTI field-modification event for the _fast_Xputfield bytecodes.
// If a watch is active, the tos value is pushed onto the expression stack
// (in the layout matching the field type) so the stack slot doubles as the
// jvalue passed to InterpreterRuntime::post_field_modification; afterwards
// the tos value is restored. No-op when the watch count at
// get_field_modification_count_addr() is zero.
2947 void TemplateTable::jvmti_post_fast_field_mod() {
2948
2949   const Register scratch = c_rarg3;
2950
2951   if (JvmtiExport::can_post_field_modification()) {
2952     // Check to see if a field modification watch has been set before
2953     // we take the time to call into the VM.
2954     Label L2;
2955     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2956     __ testl(scratch, scratch);
2957     __ jcc(Assembler::zero, L2);
2958     __ pop_ptr(rbx);                  // copy the object pointer from tos
2959     __ verify_oop(rbx);
2960     __ push_ptr(rbx);                 // put the object pointer back on tos
2961     // Save tos values before call_VM() clobbers them. Since we have
2962     // to do it for every data type, we use the saved values as the
2963     // jvalue object.
2964     switch (bytecode()) {          // load values into the jvalue object
2965     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
2966     case Bytecodes::_fast_bputfield: // fall through
2967     case Bytecodes::_fast_zputfield: // fall through
2968     case Bytecodes::_fast_sputfield: // fall through
2969     case Bytecodes::_fast_cputfield: // fall through
2970     case Bytecodes::_fast_iputfield: __ push_i(rax); break;
2971     case Bytecodes::_fast_dputfield: __ push(dtos); break;
2972     case Bytecodes::_fast_fputfield: __ push(ftos); break;
2973     case Bytecodes::_fast_lputfield: __ push_l(rax); break;
2974
2975     default:
2976       ShouldNotReachHere();
2977     }
2978     __ mov(scratch, rsp);             // points to jvalue on the stack
2979     // access constant pool cache entry
2980     __ load_field_entry(c_rarg2, rax);
2981     __ verify_oop(rbx);
2982     // rbx: object pointer copied above
2983     // c_rarg2: cache entry pointer
2984     // c_rarg3: jvalue object on the stack
2985     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
2986
    // Pop the saved jvalue back into the tos location for the field store.
2987     switch (bytecode()) {             // restore tos values
2988     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
2989     case Bytecodes::_fast_bputfield: // fall through
2990     case Bytecodes::_fast_zputfield: // fall through
2991     case Bytecodes::_fast_sputfield: // fall through
2992     case Bytecodes::_fast_cputfield: // fall through
2993     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
2994     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
2995     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
2996     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
2997     default: break;
2998     }
2999     __ bind(L2);
3000   }
3001 }
3002
// fast_Xputfield: quickened instance field store.
// rax is saved around load_resolved_field_entry because it holds the value
// being stored (for the integer tos states; FP values travel in the FP tos).
// After resolution: rbx = field offset, rdx = flags; the is_volatile bit is
// isolated and, as in putfield_or_static, the store is emitted twice — with
// and without the trailing StoreLoad|StoreStore barrier.
3003 void TemplateTable::fast_storefield(TosState state) {
3004   transition(state, vtos);
3005
3006   Register cache = rcx;
3007
3008   Label notVolatile, Done;
3009
3010   jvmti_post_fast_field_mod();
3011
3012   __ push(rax);
3013   __ load_field_entry(rcx, rax);
3014   load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3015   // RBX: field offset, RAX: TOS, RDX: flags
3016   __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3017   __ pop(rax);
3018
3019   // Get object from stack
3020   pop_and_check_object(rcx);
3021
3022   // field address
3023   const Address field(rcx, rbx, Address::times_1);
3024
3025   // Check for volatile store
3026   __ testl(rdx, rdx);
3027   __ jcc(Assembler::zero, notVolatile);
3028
3029   fast_storefield_helper(field, rax);
3030   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3031                                                Assembler::StoreStore));
3032   __ jmp(Done);
3033   __ bind(notVolatile);
3034
3035   fast_storefield_helper(field, rax);
3036
3037   __ bind(Done);
3038 }
3039
3040 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3041
3042 // access field
3043 switch (bytecode()) {
3044 case Bytecodes::_fast_aputfield:
3045 do_oop_store(_masm, field, rax);
3046 break;
3047 case Bytecodes::_fast_lputfield:
3048 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3049 break;
3050 case Bytecodes::_fast_iputfield:
3051 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3052 break;
3053 case Bytecodes::_fast_zputfield:
3054 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3055 break;
3056 case Bytecodes::_fast_bputfield:
3057 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3058 break;
3059 case Bytecodes::_fast_sputfield:
3060 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3061 break;
3062 case Bytecodes::_fast_cputfield:
3063 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3064 break;
3065 case Bytecodes::_fast_fputfield:
3081 // Check to see if a field access watch has been set before we
3082 // take the time to call into the VM.
3083 Label L1;
3084 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3085 __ testl(rcx, rcx);
3086 __ jcc(Assembler::zero, L1);
3087 // access constant pool cache entry
3088 __ load_field_entry(c_rarg2, rcx);
3089 __ verify_oop(rax);
3090 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3091 __ mov(c_rarg1, rax);
3092 // c_rarg1: object pointer copied above
3093 // c_rarg2: cache entry pointer
3094 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3095 __ pop_ptr(rax); // restore object pointer
3096 __ bind(L1);
3097 }
3098
3099 // access constant pool cache
3100 __ load_field_entry(rcx, rbx);
3101 __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3102
3103 // rax: object
3104 __ verify_oop(rax);
3105 __ null_check(rax);
3106 Address field(rax, rbx, Address::times_1);
3107
3108 // access field
3109 switch (bytecode()) {
3110 case Bytecodes::_fast_agetfield:
3111 do_oop_load(_masm, field, rax);
3112 __ verify_oop(rax);
3113 break;
3114 case Bytecodes::_fast_lgetfield:
3115 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
3116 break;
3117 case Bytecodes::_fast_igetfield:
3118 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
3119 break;
3120 case Bytecodes::_fast_bgetfield:
3121 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
3122 break;
3123 case Bytecodes::_fast_sgetfield:
3124 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
3125 break;
3126 case Bytecodes::_fast_cgetfield:
3127 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
3128 break;
3129 case Bytecodes::_fast_fgetfield:
3514
3515 // Note: rax_callsite is already pushed
3516
3517 // %%% should make a type profile for any invokedynamic that takes a ref argument
3518 // profile this call
3519 __ profile_call(rbcp);
3520 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3521
3522 __ verify_oop(rax_callsite);
3523
3524 __ jump_from_interpreted(rbx_method, rdx);
3525 }
3526
3527 //-----------------------------------------------------------------------------
3528 // Allocation
3529
// new: allocate a new instance of the class at the CP index in the bytecode.
// Fast path (UseTLAB): bump-pointer allocation in the TLAB, inline zeroing
// of the instance fields (unless ZeroTLAB already cleared them), then header
// initialization. Any of: unresolved CP entry, uninitialized class, slow-path
// layout bit, or TLAB overflow falls back to InterpreterRuntime::_new.
// The InstanceKlass is pushed on the native stack across the allocation so
// it survives for header initialization (popped on both exit paths).
3530 void TemplateTable::_new() {
3531   transition(vtos, atos);
3532   __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3533   Label slow_case;
3534   Label slow_case_no_pop;
3535   Label done;
3536   Label initialize_header;
3537
3538   __ get_cpool_and_tags(rcx, rax);
3539
3540   // Make sure the class we're about to instantiate has been resolved.
3541   // This is done before loading InstanceKlass to be consistent with the order
3542   // how Constant Pool is updated (see ConstantPool::klass_at_put)
3543   const int tags_offset = Array<u1>::base_offset_in_bytes();
3544   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3545   __ jcc(Assembler::notEqual, slow_case_no_pop);
3546
3547   // get InstanceKlass
3548   __ load_resolved_klass_at_index(rcx, rcx, rdx);
3549   __ push(rcx);  // save the contexts of klass for initializing the header
3550
3551   // make sure klass is initialized
3552   // init_state needs acquire, but x86 is TSO, and so we are already good.
3553   assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3554   __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);
3555
3556   // get instance_size in InstanceKlass (scaled to a count of bytes)
3557   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3558   // test to see if it is malformed in some way
3559   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3560   __ jcc(Assembler::notZero, slow_case);
3561
3562   // Allocate the instance:
3563   //  If TLAB is enabled:
3564   //    Try to allocate in the TLAB.
3565   //    If fails, go to the slow path.
3566   //    Initialize the allocation.
3567   //    Exit.
3568   //
3569   //  Go to slow path.
3570
3571   if (UseTLAB) {
3572     __ tlab_allocate(rax, rdx, 0, rcx, rbx, slow_case);
3573     if (ZeroTLAB) {
3574       // the fields have been already cleared
3575       __ jmp(initialize_header);
3576     }
3577
3578     // The object is initialized before the header.  If the object size is
3579     // zero, go directly to the header initialization.
3580     if (UseCompactObjectHeaders) {
3581       assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
3582       __ decrement(rdx, oopDesc::base_offset_in_bytes());
3583     } else {
3584       __ decrement(rdx, sizeof(oopDesc));
3585     }
3586     __ jcc(Assembler::zero, initialize_header);
3587
3588     // Initialize topmost object field, divide rdx by 8, check if odd and
3589     // test if zero.
3590     __ xorl(rcx, rcx);    // use zero reg to clear memory (shorter code)
3591     __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
3592
3593     // rdx must have been multiple of 8
3594 #ifdef ASSERT
3595     // make sure rdx was multiple of 8
3596     Label L;
3597     // Ignore partial flag stall after shrl() since it is debug VM
3598     __ jcc(Assembler::carryClear, L);
3599     __ stop("object size is not multiple of 2 - adjust this code");
3600     __ bind(L);
3601     // rdx must be > 0, no extra check needed here
3602 #endif
3603
3604     // initialize remaining object fields: rdx was a multiple of 8
3605     { Label loop;
3606     __ bind(loop);
    // Zero 8 bytes per iteration, walking down from the top of the object.
3607     int header_size_bytes = oopDesc::header_size() * HeapWordSize;
3608     assert(is_aligned(header_size_bytes, BytesPerLong), "oop header size must be 8-byte-aligned");
3609     __ movptr(Address(rax, rdx, Address::times_8, header_size_bytes - 1*oopSize), rcx);
3610     __ decrement(rdx);
3611     __ jcc(Assembler::notZero, loop);
3612     }
3613
3614     // initialize object header only.
3615     __ bind(initialize_header);
3616     if (UseCompactObjectHeaders) {
      // Compact headers: the klass lives inside the prototype mark word.
3617       __ pop(rcx);   // get saved klass back in the register.
3618       __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
3619       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
3620     } else {
3621       __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
3622                 (intptr_t)markWord::prototype().value()); // header
3623       __ pop(rcx);   // get saved klass back in the register.
3624       __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
3625       __ store_klass_gap(rax, rsi);  // zero klass gap for compressed oops
3626       __ store_klass(rax, rcx, rscratch1);  // klass
3627     }
3628
3629     if (DTraceAllocProbes) {
3630       // Trigger dtrace event for fastpath
3631       __ push(atos);
3632       __ call_VM_leaf(
3633            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3634       __ pop(atos);
3635     }
3636
3637     __ jmp(done);
3638   }
3639
3640   // slow case
3641   __ bind(slow_case);
3642   __ pop(rcx);   // restore stack pointer to what it was when we came in.
3643   __ bind(slow_case_no_pop);
3644
3645   __ get_constant_pool(c_rarg1);
3646   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3647   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3648   __ verify_oop(rax);
3649
3650   // continue
3651   __ bind(done);
3652 }
3653
// newarray: allocate a primitive array. The element type code (atype) is the
// unsigned byte at bcp+1; the length comes in on tos (rax, itos). The whole
// allocation is delegated to InterpreterRuntime::newarray; the result oop is
// returned in rax (atos).
3654 void TemplateTable::newarray() {
3655   transition(itos, atos);
3656   __ load_unsigned_byte(c_rarg1, at_bcp(1));
3657   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3658           c_rarg1, rax);
3659 }
3660
// anewarray: allocate an object array. The element class is named by the
// 2-byte constant-pool index embedded in the bytecode (JVMS: indexbyte1/2);
// the length comes in on tos (rax, itos). Resolution and allocation are
// delegated to InterpreterRuntime::anewarray(cpool, index, length); the
// result oop is returned in rax (atos).
3661 void TemplateTable::anewarray() {
3662   transition(itos, atos);
3663
  // BUG FIX: this load was missing, so c_rarg2 reached the runtime call
  // below uninitialized. Mirrors the slow path of _new(), which loads the
  // same 2-byte CP index from bcp+1 into c_rarg2.
3664   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3665   __ get_constant_pool(c_rarg1);
3666   call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3667           c_rarg1, c_rarg2, rax);
3668 }
3669
// arraylength: load the length field of the array whose oop is on tos (rax);
// result replaces it in rax (itos).
// NOTE(review): no explicit null_check is emitted here — presumably the load
// relies on an implicit null check (trap on the faulting access); confirm
// against the implicit-exception machinery before changing this.
3670 void TemplateTable::arraylength() {
3671   transition(atos, itos);
3672   __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3673 }
3674
// checkcast: null passes trivially (and is profiled when ProfileInterpreter).
// If the CP tag already says resolved Class, the quicked path loads the
// target klass directly; otherwise InterpreterRuntime::quicken_io_cc resolves
// it (receiver pushed across the call for GC). A subtype check follows;
// failure jumps to the shared ClassCastException entry with the object on TOS,
// success leaves the original object in rax (atos is preserved by checkcast).
3675 void TemplateTable::checkcast() {
3676   transition(atos, atos);
3677   Label done, is_null, ok_is_subtype, quicked, resolved;
3678   __ testptr(rax, rax); // object is in rax
3679   __ jcc(Assembler::zero, is_null);
3680
3681   // Get cpool & tags index
3682   __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3683   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3684   // See if bytecode has already been quicked
3685   __ cmpb(Address(rdx, rbx,
3686                   Address::times_1,
3687                   Array<u1>::base_offset_in_bytes()),
3688           JVM_CONSTANT_Class);
3689   __ jcc(Assembler::equal, quicked);
3690   __ push(atos); // save receiver for result, and for GC
3691   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3692
3693   __ get_vm_result_metadata(rax);
3694
3695   __ pop_ptr(rdx); // restore receiver
3696   __ jmpb(resolved);
3697
3698   // Get superklass in rax and subklass in rbx
3699   __ bind(quicked);
3700   __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3701   __ load_resolved_klass_at_index(rax, rcx, rbx);
3702
3703   __ bind(resolved);
3704   __ load_klass(rbx, rdx, rscratch1);
3705
3706   // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
3707   // Superklass in rax.  Subklass in rbx.
3708   __ gen_subtype_check(rbx, ok_is_subtype);
3709
3710   // Come here on failure
3711   __ push_ptr(rdx);
3712   // object is at TOS
3713   __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3714
3715   // Come here on success
3716   __ bind(ok_is_subtype);
3717   __ mov(rax, rdx); // Restore object in rdx
3718
3719   // Collect counts on whether this check-cast sees nulls a lot or not.
3720   if (ProfileInterpreter) {
3721     __ jmp(done);
3722     __ bind(is_null);
3723     __ profile_null_seen(rcx);
3724   } else {
3725     __ bind(is_null);   // same as 'done'
3726   }
3727   __ bind(done);
3728 }
3729
3730 void TemplateTable::instanceof() {
3731 transition(atos, itos);
3732 Label done, is_null, ok_is_subtype, quicked, resolved;
3733 __ testptr(rax, rax);
3734 __ jcc(Assembler::zero, is_null);
3735
3736 // Get cpool & tags index
3737 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3738 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3739 // See if bytecode has already been quicked
3740 __ cmpb(Address(rdx, rbx,
3741 Address::times_1,
3742 Array<u1>::base_offset_in_bytes()),
3743 JVM_CONSTANT_Class);
3744 __ jcc(Assembler::equal, quicked);
3745
3746 __ push(atos); // save receiver for result, and for GC
3747 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3748
3749 __ get_vm_result_metadata(rax);
3750
3751 __ pop_ptr(rdx); // restore receiver
3752 __ verify_oop(rdx);
3753 __ load_klass(rdx, rdx, rscratch1);
3754 __ jmpb(resolved);
3755
3756 // Get superklass in rax and subklass in rdx
3757 __ bind(quicked);
3758 __ load_klass(rdx, rax, rscratch1);
3759 __ load_resolved_klass_at_index(rax, rcx, rbx);
3760
3761 __ bind(resolved);
3762
3763 // Generate subtype check. Blows rcx, rdi
3767 // Come here on failure
3768 __ xorl(rax, rax);
3769 __ jmpb(done);
3770 // Come here on success
3771 __ bind(ok_is_subtype);
3772 __ movl(rax, 1);
3773
3774 // Collect counts on whether this test sees nulls a lot or not.
3775 if (ProfileInterpreter) {
3776 __ jmp(done);
3777 __ bind(is_null);
3778 __ profile_null_seen(rcx);
3779 } else {
3780 __ bind(is_null); // same as 'done'
3781 }
3782 __ bind(done);
3783 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
3784 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
3785 }
3786
3787
3788 //----------------------------------------------------------------------------------------------------
3789 // Breakpoints
3790 void TemplateTable::_breakpoint() {
3791 // Note: We get here even if we are single stepping..
3792 // jbug insists on setting breakpoints at every bytecode
3793 // even if we are in single step mode.
3794
3795 transition(vtos, vtos);
3796
3797 // get the unpatched byte code
3798 __ get_method(c_rarg1);
3799 __ call_VM(noreg,
3800 CAST_FROM_FN_PTR(address,
3801 InterpreterRuntime::get_original_bytecode_at),
3802 c_rarg1, rbcp);
3803 __ mov(rbx, rax); // why?
3804
3805 // post the breakpoint event
3806 __ get_method(c_rarg1);
3807 __ call_VM(noreg,
3827 // Note: monitorenter & exit are symmetric routines; which is reflected
3828 // in the assembly code structure as well
3829 //
3830 // Stack layout:
3831 //
3832 // [expressions ] <--- rsp = expression stack top
3833 // ..
3834 // [expressions ]
3835 // [monitor entry] <--- monitor block top = expression stack bot
3836 // ..
3837 // [monitor entry]
3838 // [frame data ] <--- monitor block bot
3839 // ...
3840 // [saved rbp ] <--- rbp
3841 void TemplateTable::monitorenter() {
3842 transition(atos, vtos);
3843
3844 // check for null object
3845 __ null_check(rax);
3846
3847 const Address monitor_block_top(
3848 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3849 const Address monitor_block_bot(
3850 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3851 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3852
3853 Label allocated;
3854
3855 Register rtop = c_rarg3;
3856 Register rbot = c_rarg2;
3857 Register rmon = c_rarg1;
3858
3859 // initialize entry pointer
3860 __ xorl(rmon, rmon); // points to free slot or null
3861
3862 // find a free slot in the monitor block (result in rmon)
3863 {
3864 Label entry, loop, exit;
3865 __ movptr(rtop, monitor_block_top); // derelativize pointer
3866 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3919 // rmon: points to monitor entry
3920 __ bind(allocated);
3921
3922 // Increment bcp to point to the next bytecode, so exception
3923 // handling for async. exceptions work correctly.
3924 // The object has already been popped from the stack, so the
3925 // expression stack looks correct.
3926 __ increment(rbcp);
3927
3928 // store object
3929 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
3930 __ lock_object(rmon);
3931
3932 // check to make sure this monitor doesn't cause stack overflow after locking
3933 __ save_bcp(); // in case of exception
3934 __ generate_stack_overflow_check(0);
3935
3936 // The bcp has already been incremented. Just need to dispatch to
3937 // next instruction.
3938 __ dispatch_next(vtos);
3939 }
3940
3941 void TemplateTable::monitorexit() {
3942 transition(atos, vtos);
3943
3944 // check for null object
3945 __ null_check(rax);
3946
3947 const Address monitor_block_top(
3948 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3949 const Address monitor_block_bot(
3950 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3951 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3952
3953 Register rtop = c_rarg1;
3954 Register rbot = c_rarg2;
3955
3956 Label found;
3957
3958 // find matching slot
3959 {
3960 Label entry, loop;
3961 __ movptr(rtop, monitor_block_top); // derelativize pointer
3962 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3963 // rtop points to current entry, starting with top-most entry
3964
3965 __ lea(rbot, monitor_block_bot); // points to word before bottom
3966 // of monitor block
|
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "compiler/disassembler.hpp"
27 #include "gc/shared/collectedHeap.hpp"
28 #include "gc/shared/gc_globals.hpp"
29 #include "gc/shared/tlab_globals.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "interpreter/interpreterRuntime.hpp"
32 #include "interpreter/interp_masm.hpp"
33 #include "interpreter/templateTable.hpp"
34 #include "memory/universe.hpp"
35 #include "oops/methodCounters.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/objArrayKlass.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "oops/inlineKlass.hpp"
40 #include "oops/resolvedFieldEntry.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/methodHandles.hpp"
45 #include "runtime/frame.inline.hpp"
46 #include "runtime/safepointMechanism.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/macros.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
53
54 // Global Register Names
55 static const Register rbcp = r13;
56 static const Register rlocals = r14;
57
58 // Address Computation: local variables
59 static inline Address iaddress(int n) {
// Helper: load an object reference from 'src' into 'dst' via the
// macro-assembler's load_heap_oop, passing 'decorators' through so the
// appropriate access semantics are applied. rdx is handed in as a
// temporary — assumes callers tolerate rdx being clobbered; TODO confirm.
static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Address src,
                        Register dst,
                        DecoratorSet decorators = 0) {
  __ load_heap_oop(dst, src, rdx, decorators);
}
157
158 Address TemplateTable::at_bcp(int offset) {
159 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
160 return Address(rbcp, offset);
161 }
162
163
164 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
165 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
166 int byte_no) {
167 if (!RewriteBytecodes) return;
168 Label L_patch_done;
169
170 switch (bc) {
171 case Bytecodes::_fast_vputfield:
172 case Bytecodes::_fast_aputfield:
173 case Bytecodes::_fast_bputfield:
174 case Bytecodes::_fast_zputfield:
175 case Bytecodes::_fast_cputfield:
176 case Bytecodes::_fast_dputfield:
177 case Bytecodes::_fast_fputfield:
178 case Bytecodes::_fast_iputfield:
179 case Bytecodes::_fast_lputfield:
180 case Bytecodes::_fast_sputfield:
181 {
182 // We skip bytecode quickening for putfield instructions when
183 // the put_code written to the constant pool cache is zero.
184 // This is required so that every execution of this instruction
185 // calls out to InterpreterRuntime::resolve_get_put to do
186 // additional, required work.
187 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
188 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
189 __ load_field_entry(temp_reg, bc_reg);
190 if (byte_no == f1_byte) {
191 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
760 Address(rdx, rax,
761 Address::times_4,
762 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
763 noreg);
764 }
765
// daload bytecode: load a double from array[index]. Index (itos) is in rax,
// array oop in rdx; the bounds check throws on bad indices. The result is
// produced in the dtos register (xmm0 by convention here — noreg selects
// the tos register inside access_load_at).
void TemplateTable::daload() {
  transition(itos, dtos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
                    Address(rdx, rax,
                            Address::times_8,
                            arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
                    noreg);
}
777
// aaload bytecode: load an object reference from array[index] into rax.
// With Valhalla array flattening enabled, a flat array is detected at
// runtime and the element is materialized via a VM call instead of a
// plain oop load.
void TemplateTable::aaload() {
  transition(itos, atos);
  Register array = rdx;
  Register index = rax;

  index_check(array, index); // kills rbx
  __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
  if (UseArrayFlattening) {
    Label is_flat_array, done;
    __ test_flat_array_oop(array, rbx, is_flat_array);
    // Non-flat path: ordinary (possibly compressed) oop load with barriers.
    do_oop_load(_masm,
                Address(array, index,
                        UseCompressedOops ? Address::times_4 : Address::times_ptr,
                        arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
                rax,
                IS_ARRAY);
    __ jmp(done);
    __ bind(is_flat_array);
    // Flat array: the element is not a heap oop; ask the runtime to
    // buffer/load it. Result oop comes back in rax.
    __ movptr(rcx, array);
    call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), rcx, index);
    __ bind(done);
  } else {
    do_oop_load(_masm,
                Address(array, index,
                        UseCompressedOops ? Address::times_4 : Address::times_ptr,
                        arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
                rax,
                IS_ARRAY);
  }
  __ profile_element_type(rbx, rax, rcx);
}
809
// baload bytecode: load a byte/boolean element from array[index] into rax
// (sign-extension semantics are handled by access_load_at for T_BYTE).
void TemplateTable::baload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
                    Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
                    noreg);
}
819
820 void TemplateTable::caload() {
821 transition(itos, itos);
822 // rax: index
823 // rdx: array
824 index_check(rdx, rax); // kills rbx
825 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
826 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
827 noreg);
1061 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1062 Address(rdx, rbx, Address::times_4,
1063 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1064 noreg /* ftos */, noreg, noreg, noreg);
1065 }
1066
// dastore bytecode: store the dtos double (xmm0 by convention) into
// array[index]. Index is popped into rbx; array oop is in rdx; bounds
// check throws on bad indices.
void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in xmm0
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
                     Address(rdx, rbx, Address::times_8,
                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
                     noreg /* dtos */, noreg, noreg, noreg);
}
1079
// aastore bytecode: store the reference at TOS into array[index]
// (stack: ..., array, index, value). Performs the element-type subtype
// check, with Valhalla-specific handling for flat arrays (store via VM
// call) and null-free arrays (null store throws NPE). All three operands
// stay on the stack until the end so they remain visible to GC across
// VM calls; they are popped in one go at 'done'.
void TemplateTable::aastore() {
  Label is_null, is_flat_array, ok_is_subtype, done;
  transition(vtos, vtos);
  // stack: ..., array, index, value
  __ movptr(rax, at_tos());    // value
  __ movl(rcx, at_tos_p1());   // index
  __ movptr(rdx, at_tos_p2()); // array

  Address element_address(rdx, rcx,
                          UseCompressedOops? Address::times_4 : Address::times_ptr,
                          arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  index_check_without_pop(rdx, rcx); // kills rbx

  __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
  __ profile_multiple_element_types(rdi, rax, rbx, rcx);

  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Move array class to rdi
  __ load_klass(rdi, rdx, rscratch1);
  if (UseArrayFlattening) {
    // Flat arrays are recognized by their layout helper; those stores go
    // through the runtime (see is_flat_array below).
    __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
    __ test_flat_array_layout(rbx, is_flat_array);
  }

  // Move subklass into rbx
  __ load_klass(rbx, rax, rscratch1);
  // Move array element superklass into rax
  __ movptr(rax, Address(rdi,
                         ObjArrayKlass::element_klass_offset()));

  // Generate subtype check. Blows rcx, rdi
  // Superklass in rax. Subklass in rbx.
  // is "rbx <: rax" ? (value subclass <: array element superclass)
  __ gen_subtype_check(rbx, ok_is_subtype, false);

  // Come here on failure
  // object is at TOS
  __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));

  // Come here on success
  __ bind(ok_is_subtype);

  // Get the value we will store (rax/rcx were clobbered by the check)
  __ movptr(rax, at_tos());
  __ movl(rcx, at_tos_p1()); // index
  // Now store using the appropriate barrier
  do_oop_store(_masm, element_address, rax, IS_ARRAY);
  __ jmp(done);

  // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
  __ bind(is_null);
  if (EnableValhalla) {
    Label write_null_to_null_free_array, store_null;

    // Move array class to rdi
    __ load_klass(rdi, rdx, rscratch1);
    if (UseArrayFlattening) {
      __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
      __ test_flat_array_layout(rbx, is_flat_array);
    }

    // No way to store null in a null-free array
    __ test_null_free_array_oop(rdx, rbx, write_null_to_null_free_array);
    __ jmp(store_null);

    __ bind(write_null_to_null_free_array);
    __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));

    __ bind(store_null);
  }
  // Store a null
  do_oop_store(_masm, element_address, noreg, IS_ARRAY);
  __ jmp(done);

  if (UseArrayFlattening) {
    Label is_type_ok; // NOTE(review): declared but unused here — presumably leftover; confirm
    __ bind(is_flat_array); // Store non-null value to flat

    // Reload operands (clobbered above) and delegate the flat store,
    // including its type check, to the runtime.
    __ movptr(rax, at_tos());
    __ movl(rcx, at_tos_p1());   // index
    __ movptr(rdx, at_tos_p2()); // array

    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), rax, rdx, rcx);
  }
  // Pop stack arguments
  __ bind(done);
  __ addptr(rsp, 3 * Interpreter::stackElementSize);
}
1171
1172 void TemplateTable::bastore() {
1173 transition(itos, vtos);
1174 __ pop_i(rbx);
1175 // rax: value
1176 // rbx: index
1177 // rdx: array
1178 index_check(rdx, rbx); // prefer index in rbx
1179 // Need to check whether array is boolean or byte
1180 // since both types share the bastore bytecode.
1181 __ load_klass(rcx, rdx, rscratch1);
1182 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1183 int diffbit = Klass::layout_helper_boolean_diffbit();
1184 __ testl(rcx, diffbit);
1185 Label L_skip;
1186 __ jccb(Assembler::zero, L_skip);
1934 __ jcc(j_not(cc), not_taken);
1935 branch(false, false);
1936 __ bind(not_taken);
1937 __ profile_not_taken_branch(rax);
1938 }
1939
// ifnull / ifnonnull bytecodes: branch on whether the atos reference in rax
// is null. 'cc' selects which sense is the taken branch.
void TemplateTable::if_nullcmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ testptr(rax, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}
1950
// if_acmpeq / if_acmpne bytecodes: compare two references (rdx popped, rax
// at tos) and branch. With Valhalla enabled, two distinct non-null values
// of the same value class may still compare equal ("substitutable"), which
// is decided by a runtime call; the Valhalla path never falls through (note
// the stop() guard).
void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label taken, not_taken;
  __ pop_ptr(rdx);

  __ profile_acmp(rbx, rdx, rax, rcx);

  const int is_inline_type_mask = markWord::inline_type_pattern;
  if (EnableValhalla) {
    // Identical references are trivially equal.
    __ cmpoop(rdx, rax);
    __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);

    // might be substitutable, test if either rax or rdx is null
    __ testptr(rax, rax);
    __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
    __ testptr(rdx, rdx);
    __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);

    // and both are values ? (AND of both mark words must still carry the
    // full inline-type pattern)
    __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
    __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
    __ andptr(rbx, is_inline_type_mask);
    __ cmpptr(rbx, is_inline_type_mask);
    __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);

    // same value klass ?
    __ load_metadata(rbx, rdx);
    __ load_metadata(rcx, rax);
    __ cmpptr(rbx, rcx);
    __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);

    // Know both are the same type, let's test for substitutability...
    if (cc == equal) {
      invoke_is_substitutable(rax, rdx, taken, not_taken);
    } else {
      invoke_is_substitutable(rax, rdx, not_taken, taken);
    }
    __ stop("Not reachable"); // invoke_is_substitutable always jumps away
  }

  // Non-Valhalla (or unreachable when EnableValhalla): plain reference compare.
  __ cmpoop(rdx, rax);
  __ jcc(j_not(cc), not_taken);
  __ bind(taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax, true);
}
1999
// Calls InterpreterRuntime::is_substitutable(aobj, bobj) and dispatches on
// its boolean answer in rax: non-zero -> is_subst, zero -> not_subst.
// Control never falls through.
void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
                                            Label& is_subst, Label& not_subst) {
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
  // Restored...rax answer, jmp to outcome...
  __ testl(rax, rax);
  __ jcc(Assembler::zero, not_subst);
  __ jmp(is_subst);
}
2008
// ret bytecode: return from a jsr. Loads the saved return bci from the
// local named by the 1-byte index operand, then rebuilds rbcp as
// method->const()->codes() + bci and dispatches.
void TemplateTable::ret() {
  transition(vtos, vtos);
  locals_index(rbx);
  __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
  __ profile_ret(rbx, rcx);
  __ get_method(rax);
  __ movptr(rbcp, Address(rax, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
                       ConstMethod::codes_offset()));
  __ dispatch_next(vtos, 0, true);
}
2020
2021 void TemplateTable::wide_ret() {
2022 transition(vtos, vtos);
2023 locals_index_wide(rbx);
2024 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2025 __ profile_ret(rbx, rcx);
2026 __ get_method(rax);
2240 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2241 Label no_safepoint;
2242 NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2243 __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2244 __ jcc(Assembler::zero, no_safepoint);
2245 __ push(state);
2246 __ push_cont_fastpath();
2247 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2248 InterpreterRuntime::at_safepoint));
2249 __ pop_cont_fastpath();
2250 __ pop(state);
2251 __ bind(no_safepoint);
2252 }
2253
2254 // Narrow result if state is itos but result type is smaller.
2255 // Need to narrow in the return bytecode rather than in generate_return_entry
2256 // since compiled code callers expect the result to already be narrowed.
2257 if (state == itos) {
2258 __ narrow(rax);
2259 }
2260
2261 __ remove_activation(state, rbcp, true, true, true);
2262
2263 __ jmp(rbcp);
2264 }
2265
2266 // ----------------------------------------------------------------------------
2267 // Volatile variables demand their effects be made known to all CPU's
2268 // in order. Store buffers on most chips allow reads & writes to
2269 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2270 // without some kind of memory barrier (i.e., it's not sufficient that
2271 // the interpreter does not reorder volatile references, the hardware
2272 // also must not reorder them).
2273 //
2274 // According to the new Java Memory Model (JMM):
2275 // (1) All volatiles are serialized wrt to each other. ALSO reads &
2276 // writes act as acquire & release, so:
2277 // (2) A read cannot let unrelated NON-volatile memory refs that
2278 // happen after the read float up to before the read. It's OK for
2279 // non-volatile memory refs that happen before the volatile read to
2280 // float down below it.
// (3) Similarly, a volatile write cannot let unrelated NON-volatile
2613 }
2614 // rax,: object pointer or null
2615 // cache: cache entry pointer
2616 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2617 rax, cache);
2618
2619 __ load_field_entry(cache, index);
2620 __ bind(L1);
2621 }
2622 }
2623
// Pops the receiver oop into 'r', null-checks it (field access on null must
// throw NPE), and verifies it in debug builds.
void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r); // for field access must check obj.
  __ verify_oop(r);
}
2629
// Shared template for getfield / getstatic / nofast_getfield: resolves the
// field entry, optionally posts a JVMTI access event, then dispatches on the
// field's tos state to emit the typed load, pushing the result and (when
// permitted) rewriting the bytecode to its fast form. The Valhalla atos path
// additionally distinguishes flat fields, which are read via read_flat_field.
// NOTE(review): tos_state (rax) must be consumed before rax is reused as the
// load target, and obj must be popped before the field address is used.
void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj = r9;
  const Register cache = rcx;
  const Register index = rdx;
  const Register off = rbx;
  const Register tos_state = rax;
  const Register flags = rdx;
  const Register bc = c_rarg3; // uses same reg as obj, so don't mix them

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_access(cache, index, is_static, false);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  const Address field(obj, off, Address::times_1, 0*wordSize);

  Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;

  // Make sure we don't need to mask edx after the above shift
  assert(btos == 0, "change code, btos != 0");
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  if (!is_static) pop_and_check_object(obj);
  __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
  __ push(btos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notByte);
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);
  if (!is_static) pop_and_check_object(obj);
  // ztos (same code as btos)
  __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
  __ push(ztos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    // use btos rewriting, no truncating to t/f bit is needed for getfield.
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);
  // atos
  if (!EnableValhalla) {
    if (!is_static) pop_and_check_object(obj);
    do_oop_load(_masm, field, rax);
    __ push(atos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
    }
    __ jmp(Done);
  } else {
    if (is_static) {
      __ load_heap_oop(rax, field);
      __ push(atos);
      __ jmp(Done);
    } else {
      Label is_flat, rewrite_inline;
      __ test_field_is_flat(flags, rscratch1, is_flat);
      pop_and_check_object(obj);
      __ load_heap_oop(rax, field);
      __ push(atos);
      if (rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
      }
      __ jmp(Done);
      __ bind(is_flat);
      // field is flat (null-free or nullable with a null-marker)
      pop_and_check_object(rax);
      __ read_flat_field(rcx, rdx, rbx, rax);
      __ verify_oop(rax);
      __ push(atos);
      __ bind(rewrite_inline);
      if (rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
      }
      __ jmp(Done);
    }
  }

  __ bind(notObj);

  if (!is_static) pop_and_check_object(obj);

  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);
  // itos
  __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
  __ push(itos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
  // ctos
  __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
  __ push(ctos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
#endif

  __ bind(Done);
  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
  //                                              Assembler::LoadStore));
}
2809
// getfield bytecode: instance field load, bytecode rewriting permitted.
void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}
2813
// nofast_getfield bytecode: instance field load with rewriting suppressed.
void TemplateTable::nofast_getfield(int byte_no) {
  getfield_or_static(byte_no, false, may_not_rewrite);
}
2817
// getstatic bytecode: static field load.
void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}
2821
2822 // The registers cache and index expected to be set before call.
2823 // The function may destroy various registers, just not the cache and index registers.
2824 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2825 // Cache is rcx and index is rdx
2826 const Register entry = c_rarg2; // ResolvedFieldEntry
2827 const Register obj = c_rarg1; // Object pointer
2828 const Register value = c_rarg3; // JValue object
2829
2830 if (JvmtiExport::can_post_field_modification()) {
2831 // Check to see if a field modification watch has been set before
2832 // we take the time to call into the VM.
2833 Label L1;
2834 assert_different_registers(cache, obj, rax);
2835 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2836 __ testl(rax, rax);
2837 __ jcc(Assembler::zero, L1);
2838
2839 __ mov(entry, cache);
2840
2841 if (is_static) {
2863 // cache: field entry pointer
2864 // value: jvalue object on the stack
2865 __ call_VM(noreg,
2866 CAST_FROM_FN_PTR(address,
2867 InterpreterRuntime::post_field_modification),
2868 obj, entry, value);
2869 // Reload field entry
2870 __ load_field_entry(cache, index);
2871 __ bind(L1);
2872 }
2873 }
2874
// Shared template for putfield / putstatic / nofast_putfield: resolves the
// field entry, optionally posts a JVMTI modification event, then emits the
// store via putfield_or_static_helper. Volatile stores get a trailing
// StoreLoad|StoreStore barrier; the non-volatile path skips it.
void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj = rcx;
  const Register cache = rcx;
  const Register index = rdx;
  const Register tos_state = rdx;
  const Register off = rbx;
  const Register flags = r9;

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_mod(cache, index, is_static);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
  //                                              Assembler::StoreStore));

  Label notVolatile, Done;

  // Check for volatile store (isolate the is_volatile bit from the flags)
  // NOTE(review): testl after andl looks redundant since andl already sets
  // ZF — harmless, but could be dropped; confirm against project style.
  __ movl(rscratch1, flags);
  __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ testl(rscratch1, rscratch1);
  __ jcc(Assembler::zero, notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ jmp(Done);
  __ bind(notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);

  __ bind(Done);
}
2911
// Emits the typed store for putfield/putstatic: dispatches on tos_state,
// pops the value (and, for instance fields, the receiver), performs the
// store through the access API or oop-store barrier, and rewrites the
// bytecode to its fast form when permitted. The Valhalla atos path handles
// null-free fields (null store throws NPE) and flat fields (stored via
// write_flat_field).
void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
                                              Register obj, Register off, Register tos_state, Register flags) {

  // field addresses
  const Address field(obj, off, Address::times_1, 0*wordSize);

  Label notByte, notBool, notInt, notShort, notChar,
        notLong, notFloat, notObj, notInlineType;
  Label Done;

  const Register bc = c_rarg3;

  // Test TOS state (btos == 0, so a plain zero test selects it)
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  {
    __ pop(btos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notByte);
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);

  // ztos
  {
    __ pop(ztos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);

  // atos
  {
    if (!EnableValhalla) {
      __ pop(atos);
      if (!is_static) pop_and_check_object(obj);
      // Store into the field
      do_oop_store(_masm, field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    } else {
      __ pop(atos);
      if (is_static) {
        Label is_nullable;
        // Null-free static field: storing null must throw NPE.
        __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_nullable);
        __ null_check(rax);  // FIXME JDK-8341120
        __ bind(is_nullable);
        do_oop_store(_masm, field, rax);
        __ jmp(Done);
      } else {
        Label is_flat, null_free_reference, rewrite_inline;
        __ test_field_is_flat(flags, rscratch1, is_flat);
        __ test_field_is_null_free_inline_type(flags, rscratch1, null_free_reference);
        // Plain nullable reference field.
        pop_and_check_object(obj);
        // Store into the field
        do_oop_store(_masm, field, rax);
        if (rc == may_rewrite) {
          patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
        }
        __ jmp(Done);
        __ bind(null_free_reference);
        __ null_check(rax);  // FIXME JDK-8341120
        pop_and_check_object(obj);
        // Store into the field
        do_oop_store(_masm, field, rax);
        __ jmp(rewrite_inline);
        __ bind(is_flat);
        // Flat field: value is written component-wise by the macro assembler.
        pop_and_check_object(rscratch2);
        __ write_flat_field(rcx, r8, rscratch1, rscratch2, rbx, rax);
        __ bind(rewrite_inline);
        if (rc == may_rewrite) {
          patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
        }
        __ jmp(Done);
      }
    }
  }

  __ bind(notObj);
  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);

  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
}
3122
// Posts a JVMTI field-modification event for the fast_*putfield bytecodes.
// Emitted code is a no-op unless a modification watch is actually set (the
// global count is tested first). The tos value is pushed to the expression
// stack to double as the jvalue argument, then restored after the VM call,
// which may clobber the tos registers.
void TemplateTable::jvmti_post_fast_field_mod() {

  const Register scratch = c_rarg3;

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(scratch, scratch);
    __ jcc(Assembler::zero, L2);
    __ pop_ptr(rbx);  // copy the object pointer from tos
    __ verify_oop(rbx);
    __ push_ptr(rbx); // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) { // load values into the jvalue object
    case Bytecodes::_fast_vputfield: //fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(rax); break;
    case Bytecodes::_fast_dputfield: __ push(dtos); break;
    case Bytecodes::_fast_fputfield: __ push(ftos); break;
    case Bytecodes::_fast_lputfield: __ push_l(rax); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(scratch, rsp); // points to jvalue on the stack
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, rax);
    __ verify_oop(rbx);
    // rbx: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);

    switch (bytecode()) { // restore tos values
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
    case Bytecodes::_fast_dputfield: __ pop(dtos); break;
    case Bytecodes::_fast_fputfield: __ pop(ftos); break;
    case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
    default: break;
    }
    __ bind(L2);
  }
}
3180
// Template for the rewritten fast_*putfield bytecodes: the field entry is
// already resolved, so just load it, pop the receiver, and store via
// fast_storefield_helper. Volatile fields additionally get a
// StoreLoad|StoreStore barrier after the store.
3181 void TemplateTable::fast_storefield(TosState state) {
3182 transition(state, vtos);
3183
3184 Label notVolatile, Done;
3185
// Possible JVMTI field-modification notification (preserves TOS and receiver).
3186 jvmti_post_fast_field_mod();
3187
// Preserve the TOS value in rax across the field-entry load, which uses rax
// as a temp.
3188 __ push(rax);
3189 __ load_field_entry(rcx, rax);
3190 load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
3191 __ pop(rax);
3192 // RBX: field offset, RAX: TOS value, RDX: flags
3193
3194 // Get object from stack
3195 pop_and_check_object(rcx);
3196
3197 // field address
3198 const Address field(rcx, rbx, Address::times_1);
3199
3200 // Check for volatile store
3201 __ movl(rscratch2, rdx); // saving flags for is_flat test
3202 __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3203 __ testl(rscratch2, rscratch2);
3204 __ jcc(Assembler::zero, notVolatile);
3205
// Volatile path: store, then fence so the store is visible before any
// subsequent load (x86 only needs StoreLoad, StoreStore is free under TSO).
3206 fast_storefield_helper(field, rax, rdx);
3207 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3208 Assembler::StoreStore));
3209 __ jmp(Done);
3210 __ bind(notVolatile);
3211
// Non-volatile path: plain store, no barrier.
3212 fast_storefield_helper(field, rax, rdx);
3213
3214 __ bind(Done);
3215 }
3216
3217 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3218
3219 // DANGER: 'field' argument depends on rcx and rbx
3220
3221 // access field
3222 switch (bytecode()) {
3223 case Bytecodes::_fast_vputfield:
3224 {
3225 // Field is either flat (nullable or not) or non-flat and null-free
3226 Label is_flat, done;
3227 __ test_field_is_flat(flags, rscratch1, is_flat);
3228 __ null_check(rax); // FIXME JDK-8341120
3229 do_oop_store(_masm, field, rax);
3230 __ jmp(done);
3231 __ bind(is_flat);
3232 __ load_field_entry(r8, r9);
3233 __ movptr(rscratch2, rcx); // re-shuffle registers because of VM call calling convention
3234 __ write_flat_field(r8, rscratch1, r9, rscratch2, rbx, rax);
3235 __ bind(done);
3236 }
3237 break;
3238 case Bytecodes::_fast_aputfield:
3239 {
3240 do_oop_store(_masm, field, rax);
3241 }
3242 break;
3243 case Bytecodes::_fast_lputfield:
3244 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3245 break;
3246 case Bytecodes::_fast_iputfield:
3247 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3248 break;
3249 case Bytecodes::_fast_zputfield:
3250 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3251 break;
3252 case Bytecodes::_fast_bputfield:
3253 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3254 break;
3255 case Bytecodes::_fast_sputfield:
3256 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3257 break;
3258 case Bytecodes::_fast_cputfield:
3259 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3260 break;
3261 case Bytecodes::_fast_fputfield:
3277 // Check to see if a field access watch has been set before we
3278 // take the time to call into the VM.
3279 Label L1;
3280 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3281 __ testl(rcx, rcx);
3282 __ jcc(Assembler::zero, L1);
3283 // access constant pool cache entry
3284 __ load_field_entry(c_rarg2, rcx);
3285 __ verify_oop(rax);
3286 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3287 __ mov(c_rarg1, rax);
3288 // c_rarg1: object pointer copied above
3289 // c_rarg2: cache entry pointer
3290 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3291 __ pop_ptr(rax); // restore object pointer
3292 __ bind(L1);
3293 }
3294
3295 // access constant pool cache
3296 __ load_field_entry(rcx, rbx);
3297 __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3298
3299 // rax: object
3300 __ verify_oop(rax);
3301 __ null_check(rax);
3302 Address field(rax, rdx, Address::times_1);
3303
3304 // access field
3305 switch (bytecode()) {
3306 case Bytecodes::_fast_vgetfield:
3307 __ read_flat_field(rcx, rdx, rbx, rax);
3308 __ verify_oop(rax);
3309 break;
3310 case Bytecodes::_fast_agetfield:
3311 do_oop_load(_masm, field, rax);
3312 __ verify_oop(rax);
3313 break;
3314 case Bytecodes::_fast_lgetfield:
3315 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
3316 break;
3317 case Bytecodes::_fast_igetfield:
3318 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
3319 break;
3320 case Bytecodes::_fast_bgetfield:
3321 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
3322 break;
3323 case Bytecodes::_fast_sgetfield:
3324 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
3325 break;
3326 case Bytecodes::_fast_cgetfield:
3327 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
3328 break;
3329 case Bytecodes::_fast_fgetfield:
3714
3715 // Note: rax_callsite is already pushed
3716
3717 // %%% should make a type profile for any invokedynamic that takes a ref argument
3718 // profile this call
3719 __ profile_call(rbcp);
3720 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3721
3722 __ verify_oop(rax_callsite);
3723
3724 __ jump_from_interpreted(rbx_method, rdx);
3725 }
3726
3727 //-----------------------------------------------------------------------------
3728 // Allocation
3729
// Template for the 'new' bytecode: fast-path inline allocation of an
// instance when the class is resolved and initialized, otherwise a VM call
// to InterpreterRuntime::_new. Result oop is left in rax (atos).
3730 void TemplateTable::_new() {
3731 transition(vtos, atos);
// rdx = constant-pool index of the class operand (two bytes after the opcode).
3732 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3733 Label slow_case;
3734 Label done;
3735
3736 __ get_cpool_and_tags(rcx, rax);
3737
3738 // Make sure the class we're about to instantiate has been resolved.
3739 // This is done before loading InstanceKlass to be consistent with the order
3740 // how Constant Pool is updated (see ConstantPool::klass_at_put)
3741 const int tags_offset = Array<u1>::base_offset_in_bytes();
3742 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3743 __ jcc(Assembler::notEqual, slow_case);
3744
3745 // get InstanceKlass
3746 __ load_resolved_klass_at_index(rcx, rcx, rdx);
3747
3748 // make sure klass is initialized
3749 // init_state needs acquire, but x86 is TSO, and so we are already good.
3750 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3751 __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);
3752
// Inline allocation; branches to slow_case if it cannot allocate here.
3753 __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
3754 __ jmp(done);
3755
3756 // slow case
3757 __ bind(slow_case);
3758
// Re-fetch cpool and index into argument registers for the VM call.
3759 __ get_constant_pool(c_rarg1);
3760 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3761 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3762 __ verify_oop(rax);
3763
3764 // continue
3765 __ bind(done);
3766 }
3767
// Template for 'newarray' (primitive arrays): the element type code is the
// unsigned byte following the opcode; length comes in on TOS (itos, rax).
// Allocation is delegated to the VM; result oop is returned in rax (atos).
3768 void TemplateTable::newarray() {
3769 transition(itos, atos);
3770 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3771 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3772 c_rarg1, rax);
3773 }
3774
// Template for 'anewarray' (reference arrays): the element class is named by
// a two-byte constant-pool index following the opcode; the array length comes
// in on TOS (itos, rax). Allocation is delegated to the VM; the result oop is
// returned in rax (atos).
3775 void TemplateTable::anewarray() {
3776 transition(itos, atos);
3777
// Load the 2-byte constant-pool index of the element class into c_rarg2.
// Without this load, c_rarg2 is passed to the VM uninitialized (compare the
// slow path of _new(), which performs the same pair of loads).
3778 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3779 __ get_constant_pool(c_rarg1);
3780 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3781 c_rarg1, c_rarg2, rax);
3782 }
3783
// Template for 'arraylength': loads the length field of the array oop in rax.
// NOTE(review): no explicit null_check is emitted here — presumably the load
// from the length offset serves as an implicit null check; confirm against
// the platform's implicit-null-check support.
3784 void TemplateTable::arraylength() {
3785 transition(atos, itos);
3786 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3787 }
3788
// Template for 'checkcast': null passes through unchanged (optionally
// profiled); otherwise resolve the target class (quickened fast path or a
// VM call to quicken_io_cc), run the subtype check, and either fall through
// with the object still in rax or throw ClassCastException.
3789 void TemplateTable::checkcast() {
3790 transition(atos, atos);
3791 Label done, is_null, ok_is_subtype, quicked, resolved;
3792 __ testptr(rax, rax); // object is in rax
3793 __ jcc(Assembler::zero, is_null);
3794
3795 // Get cpool & tags index
3796 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3797 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3798 // See if bytecode has already been quicked
3799 __ movzbl(rdx, Address(rdx, rbx,
3800 Address::times_1,
3801 Array<u1>::base_offset_in_bytes()));
3802 __ cmpl(rdx, JVM_CONSTANT_Class);
3803 __ jcc(Assembler::equal, quicked);
// Not yet quickened: resolve via the VM. The receiver is saved on the
// expression stack across the call (also keeps it visible to GC).
3804 __ push(atos); // save receiver for result, and for GC
3805 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3806
// VM returns the resolved Klass*; fetch it into rax.
3807 __ get_vm_result_metadata(rax);
3808
3809 __ pop_ptr(rdx); // restore receiver
3810 __ jmpb(resolved);
3811
3812 // Get superklass in rax and subklass in rbx
3813 __ bind(quicked);
3814 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3815 __ load_resolved_klass_at_index(rax, rcx, rbx);
3816
3817 __ bind(resolved);
3818 __ load_klass(rbx, rdx, rscratch1);
3819
3820 // Generate subtype check. Blows rcx, rdi. Object in rdx.
3821 // Superklass in rax. Subklass in rbx.
3822 __ gen_subtype_check(rbx, ok_is_subtype);
3823
3824 // Come here on failure
3825 __ push_ptr(rdx);
3826 // object is at TOS
3827 __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3828
3829 // Come here on success
3830 __ bind(ok_is_subtype);
3831 __ mov(rax, rdx); // Restore object in rdx
3832 __ jmp(done);
3833
3834 __ bind(is_null);
3835
3836 // Collect counts on whether this check-cast sees nulls a lot or not.
3837 if (ProfileInterpreter) {
3838 __ profile_null_seen(rcx);
3839 }
3840
3841 __ bind(done);
3842 }
3843
// Template for 'instanceof': result in rax — 0 for null or a failed check,
// 1 for a successful check. The target class is resolved either through the
// quickened fast path or a VM call to quicken_io_cc; nulls are (optionally)
// profiled.
3844 void TemplateTable::instanceof() {
3845 transition(atos, itos);
3846 Label done, is_null, ok_is_subtype, quicked, resolved;
3847 __ testptr(rax, rax);
3848 __ jcc(Assembler::zero, is_null);
3849
3850 // Get cpool & tags index
3851 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3852 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3853 // See if bytecode has already been quicked
3854 __ movzbl(rdx, Address(rdx, rbx,
3855 Address::times_1,
3856 Array<u1>::base_offset_in_bytes()));
3857 __ cmpl(rdx, JVM_CONSTANT_Class);
3858 __ jcc(Assembler::equal, quicked);
3859
// Not yet quickened: resolve via the VM, keeping the receiver on the
// expression stack across the call (result slot + GC visibility).
3860 __ push(atos); // save receiver for result, and for GC
3861 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3862
3863 __ get_vm_result_metadata(rax);
3864
3865 __ pop_ptr(rdx); // restore receiver
3866 __ verify_oop(rdx);
3867 __ load_klass(rdx, rdx, rscratch1);
3868 __ jmpb(resolved);
3869
3870 // Get superklass in rax and subklass in rdx
3871 __ bind(quicked);
3872 __ load_klass(rdx, rax, rscratch1);
3873 __ load_resolved_klass_at_index(rax, rcx, rbx);
3874
3875 __ bind(resolved);
3876
3877 // Generate subtype check. Blows rcx, rdi
3878 // Superklass in rax. Subklass in rdx.
// This emission was missing: without it, control always fell through into
// the failure path (rax = 0) and the bound ok_is_subtype label was never
// reached, so instanceof would answer 0 for every non-null object.
// Mirrors checkcast() above (which checks the subklass in rbx).
3880 __ gen_subtype_check(rdx, ok_is_subtype);
3881 // Come here on failure
3882 __ xorl(rax, rax);
3883 __ jmpb(done);
3884 // Come here on success
3885 __ bind(ok_is_subtype);
3886 __ movl(rax, 1);
3887
3888 // Collect counts on whether this test sees nulls a lot or not.
3889 if (ProfileInterpreter) {
3890 __ jmp(done);
3891 __ bind(is_null);
3892 __ profile_null_seen(rcx);
3893 } else {
3894 __ bind(is_null); // same as 'done'
3895 }
3896 __ bind(done);
3897 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
3898 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
3899 }
3900
3901 //----------------------------------------------------------------------------------------------------
3902 // Breakpoints
3903 void TemplateTable::_breakpoint() {
3904 // Note: We get here even if we are single stepping..
3905 // jbug insists on setting breakpoints at every bytecode
3906 // even if we are in single step mode.
3907
3908 transition(vtos, vtos);
3909
3910 // get the unpatched byte code
3911 __ get_method(c_rarg1);
3912 __ call_VM(noreg,
3913 CAST_FROM_FN_PTR(address,
3914 InterpreterRuntime::get_original_bytecode_at),
3915 c_rarg1, rbcp);
3916 __ mov(rbx, rax); // why?
3917
3918 // post the breakpoint event
3919 __ get_method(c_rarg1);
3920 __ call_VM(noreg,
3940 // Note: monitorenter & exit are symmetric routines; which is reflected
3941 // in the assembly code structure as well
3942 //
3943 // Stack layout:
3944 //
3945 // [expressions ] <--- rsp = expression stack top
3946 // ..
3947 // [expressions ]
3948 // [monitor entry] <--- monitor block top = expression stack bot
3949 // ..
3950 // [monitor entry]
3951 // [frame data ] <--- monitor block bot
3952 // ...
3953 // [saved rbp ] <--- rbp
3954 void TemplateTable::monitorenter() {
3955 transition(atos, vtos);
3956
3957 // check for null object
3958 __ null_check(rax);
3959
3960 Label is_inline_type;
3961 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
3962 __ test_markword_is_inline_type(rbx, is_inline_type);
3963
3964 const Address monitor_block_top(
3965 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3966 const Address monitor_block_bot(
3967 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3968 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3969
3970 Label allocated;
3971
3972 Register rtop = c_rarg3;
3973 Register rbot = c_rarg2;
3974 Register rmon = c_rarg1;
3975
3976 // initialize entry pointer
3977 __ xorl(rmon, rmon); // points to free slot or null
3978
3979 // find a free slot in the monitor block (result in rmon)
3980 {
3981 Label entry, loop, exit;
3982 __ movptr(rtop, monitor_block_top); // derelativize pointer
3983 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4036 // rmon: points to monitor entry
4037 __ bind(allocated);
4038
4039 // Increment bcp to point to the next bytecode, so exception
4040 // handling for async. exceptions work correctly.
4041 // The object has already been popped from the stack, so the
4042 // expression stack looks correct.
4043 __ increment(rbcp);
4044
4045 // store object
4046 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4047 __ lock_object(rmon);
4048
4049 // check to make sure this monitor doesn't cause stack overflow after locking
4050 __ save_bcp(); // in case of exception
4051 __ generate_stack_overflow_check(0);
4052
4053 // The bcp has already been incremented. Just need to dispatch to
4054 // next instruction.
4055 __ dispatch_next(vtos);
4056
4057 __ bind(is_inline_type);
4058 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4059 InterpreterRuntime::throw_identity_exception), rax);
4060 __ should_not_reach_here();
4061 }
4062
4063 void TemplateTable::monitorexit() {
4064 transition(atos, vtos);
4065
4066 // check for null object
4067 __ null_check(rax);
4068
4069 const int is_inline_type_mask = markWord::inline_type_pattern;
4070 Label has_identity;
4071 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4072 __ andptr(rbx, is_inline_type_mask);
4073 __ cmpl(rbx, is_inline_type_mask);
4074 __ jcc(Assembler::notEqual, has_identity);
4075 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4076 InterpreterRuntime::throw_illegal_monitor_state_exception));
4077 __ should_not_reach_here();
4078 __ bind(has_identity);
4079
4080 const Address monitor_block_top(
4081 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4082 const Address monitor_block_bot(
4083 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4084 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4085
4086 Register rtop = c_rarg1;
4087 Register rbot = c_rarg2;
4088
4089 Label found;
4090
4091 // find matching slot
4092 {
4093 Label entry, loop;
4094 __ movptr(rtop, monitor_block_top); // derelativize pointer
4095 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4096 // rtop points to current entry, starting with top-most entry
4097
4098 __ lea(rbot, monitor_block_bot); // points to word before bottom
4099 // of monitor block
|