 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "asm/macroAssembler.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/gc_globals.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/interp_masm.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/universe.hpp"
#include "oops/methodCounters.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/safepointMechanism.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/macros.hpp"

#define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->

// Global Register Names
static const Register rbcp    = r13;
static const Register rlocals = r14;

// Address Computation: local variables
static inline Address iaddress(int n) {
  return Address(rlocals, Interpreter::local_offset_in_bytes(n));
}

static inline Address laddress(int n) {
  return iaddress(n + 1);
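// Loads an oop from src into dst through the GC barrier set; rdx is passed
// to load_heap_oop as a temporary, so callers should assume rdx is clobbered.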
static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Address src,
                        Register dst,
                        DecoratorSet decorators = 0) {
  __ load_heap_oop(dst, src, rdx, decorators);
}

Address TemplateTable::at_bcp(int offset) {
  assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
  return Address(rbcp, offset);
}

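// Bytecode quickening: once the constant-pool entry a bytecode refers to has
// been resolved, patch_bytecode rewrites the instruction at the current bcp
// to a "fast" variant so later executions skip resolution. For example (one
// case of several below), a resolved int getfield is rewritten to
// _fast_igetfield via patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx).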
void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
                                   Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
                                   int byte_no) {
  if (!RewriteBytecodes)  return;
  Label L_patch_done;

  switch (bc) {
  case Bytecodes::_fast_aputfield:
  case Bytecodes::_fast_bputfield:
  case Bytecodes::_fast_zputfield:
  case Bytecodes::_fast_cputfield:
  case Bytecodes::_fast_dputfield:
  case Bytecodes::_fast_fputfield:
  case Bytecodes::_fast_iputfield:
  case Bytecodes::_fast_lputfield:
  case Bytecodes::_fast_sputfield:
    {
      // We skip bytecode quickening for putfield instructions when
      // the put_code written to the constant pool cache is zero.
      // This is required so that every execution of this instruction
      // calls out to InterpreterRuntime::resolve_get_put to do
      // additional, required work.
      assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
      assert(load_bc_into_bc_reg, "we use bc_reg as temp");
      __ load_field_entry(temp_reg, bc_reg);
      if (byte_no == f1_byte) {
        __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
                    Address(rdx, rax,
                            Address::times_4,
                            arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
                    noreg);
}

void TemplateTable::daload() {
  transition(itos, dtos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
                    Address(rdx, rax,
                            Address::times_8,
                            arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
                    noreg);
}

void TemplateTable::aaload() {
  transition(itos, atos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  do_oop_load(_masm,
              Address(rdx, rax,
                      UseCompressedOops ? Address::times_4 : Address::times_ptr,
                      arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
              rax,
              IS_ARRAY);
}
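// For illustration: with UseCompressedOops each element is a 4-byte narrow
// oop, so element i lives at base_offset_in_bytes(T_OBJECT) + 4*i; without
// compressed oops the scale is the full pointer size (times_ptr).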

void TemplateTable::baload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
                    Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
                    noreg);
}

void TemplateTable::caload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
                    noreg);
  __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
                     Address(rdx, rbx, Address::times_4,
                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
                     noreg /* ftos */, noreg, noreg, noreg);
}

void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in xmm0
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
                     Address(rdx, rbx, Address::times_8,
                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
                     noreg /* dtos */, noreg, noreg, noreg);
}

void TemplateTable::aastore() {
  Label is_null, ok_is_subtype, done;
  transition(vtos, vtos);
  // stack: ..., array, index, value
  __ movptr(rax, at_tos());    // value
  __ movl(rcx, at_tos_p1());   // index
  __ movptr(rdx, at_tos_p2()); // array

  Address element_address(rdx, rcx,
                          UseCompressedOops ? Address::times_4 : Address::times_ptr,
                          arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  index_check_without_pop(rdx, rcx); // kills rbx
  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Move subklass into rbx
  __ load_klass(rbx, rax, rscratch1);
  // Move superklass into rax
  __ load_klass(rax, rdx, rscratch1);
  __ movptr(rax, Address(rax,
                         ObjArrayKlass::element_klass_offset()));

  // Generate subtype check.  Blows rcx, rdi
  // Superklass in rax.  Subklass in rbx.
  __ gen_subtype_check(rbx, ok_is_subtype);

  // Come here on failure
  // object is at TOS
  __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));

  // Come here on success
  __ bind(ok_is_subtype);

  // Get the value we will store
  __ movptr(rax, at_tos());
  __ movl(rcx, at_tos_p1()); // index
  // Now store using the appropriate barrier
  do_oop_store(_masm, element_address, rax, IS_ARRAY);
  __ jmp(done);

  // Have a null in rax, rdx=array, rcx=index. Store null at ary[idx]
  __ bind(is_null);
  __ profile_null_seen(rbx);

  // Store a null
  do_oop_store(_masm, element_address, noreg, IS_ARRAY);

  // Pop stack arguments
  __ bind(done);
  __ addptr(rsp, 3 * Interpreter::stackElementSize);
}
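// In Java terms, a sketch of what the template above implements:
//
//   Object[] a = ...;
//   a[i] = v;   // throws ArrayStoreException unless v is null or
//               // v's class is a subtype of a's element class
//
// The null case skips the subtype check entirely: it only records the
// null-seen profile bit and stores null through the normal oop barrier.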

void TemplateTable::bastore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Need to check whether array is boolean or byte
  // since both types share the bastore bytecode.
  __ load_klass(rcx, rdx, rscratch1);
  __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
  int diffbit = Klass::layout_helper_boolean_diffbit();
  __ testl(rcx, diffbit);
  Label L_skip;
  __ jccb(Assembler::zero, L_skip);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}

void TemplateTable::if_nullcmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ testptr(rax, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}

void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_ptr(rdx);
  __ cmpoop(rdx, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}
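// All three conditional-branch templates above share one shape: the taken
// path falls into branch(false, false), which performs the taken-branch
// bookkeeping and dispatches to the target, while the not-taken path only
// updates the profile and falls through to the next bytecode.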

void TemplateTable::ret() {
  transition(vtos, vtos);
  locals_index(rbx);
  __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
  __ profile_ret(rbx, rcx);
  __ get_method(rax);
  __ movptr(rbcp, Address(rax, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
                       ConstMethod::codes_offset()));
  __ dispatch_next(vtos, 0, true);
}
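// Sketch of the bcp recomputation above: the local slot holds the return
// bci saved by jsr, so the new bcp is
//   constMethod + ConstMethod::codes_offset() + bci
// i.e. the method's bytecode base plus the saved bytecode index.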

void TemplateTable::wide_ret() {
  transition(vtos, vtos);
  locals_index_wide(rbx);
  __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
  __ profile_ret(rbx, rcx);
  __ get_method(rax);
  if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
    Label no_safepoint;
    NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
    __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
    __ jcc(Assembler::zero, no_safepoint);
    __ push(state);
    __ push_cont_fastpath();
    __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                                       InterpreterRuntime::at_safepoint));
    __ pop_cont_fastpath();
    __ pop(state);
    __ bind(no_safepoint);
  }

  // Narrow result if state is itos but result type is smaller.
  // Need to narrow in the return bytecode rather than in generate_return_entry
  // since compiled code callers expect the result to already be narrowed.
  if (state == itos) {
    __ narrow(rax);
  }
  __ remove_activation(state, rbcp);

  __ jmp(rbcp);
}
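// Example of why the narrowing above matters: a method declared to return
// boolean may leave an arbitrary int in rax; narrow(rax) masks it down to
// the declared subword type (for boolean, its low bit) so compiled callers,
// which assume an already-narrowed result, see a valid value.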

// ----------------------------------------------------------------------------
// Volatile variables demand their effects be made known to all CPUs
// in order. Store buffers on most chips allow reads & writes to
// reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
// without some kind of memory barrier (i.e., it's not sufficient that
// the interpreter does not reorder volatile references, the hardware
// also must not reorder them).
//
// According to the new Java Memory Model (JMM):
// (1) All volatiles are serialized with respect to each other. ALSO reads &
//     writes act as acquire & release, so:
// (2) A read cannot let unrelated NON-volatile memory refs that
//     happen after the read float up to before the read. It's OK for
//     non-volatile memory refs that happen before the volatile read to
//     float down below it.
// (3) Similarly, a volatile write cannot let unrelated NON-volatile
    }
    // rax: object pointer or null
    // cache: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
               rax, cache);

    __ load_field_entry(cache, index);
    __ bind(L1);
  }
}

void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r);  // for field access must check obj.
  __ verify_oop(r);
}

void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj   = c_rarg3;
  const Register cache = rcx;
  const Register index = rdx;
  const Register off   = rbx;
  const Register tos_state = rax;
  const Register flags = rdx;
  const Register bc    = c_rarg3; // uses same reg as obj, so don't mix them

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_access(cache, index, is_static, false);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  if (!is_static) pop_and_check_object(obj);

  const Address field(obj, off, Address::times_1, 0*wordSize);

  Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;

  // Make sure we don't need to mask edx after the above shift
  assert(btos == 0, "change code, btos != 0");
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
  __ push(btos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notByte);
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);

  // ztos (same code as btos)
  __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
  __ push(ztos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    // use btos rewriting, no truncating to t/f bit is needed for getfield.
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);
  // atos
  do_oop_load(_masm, field, rax);
  __ push(atos);
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notObj);
  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);
  // itos
  __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
  __ push(itos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
  // ctos
  __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
  __ push(ctos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
#endif

  __ bind(Done);
  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
  //                                              Assembler::LoadStore));
}
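// The ladder above is effectively a switch on the cached TosState: btos is
// tested first with a plain testl because its encoding is zero (see the
// assert), and every other state is handled by a cmpl/jcc pair that falls
// through to the next not-* label.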

void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}

void TemplateTable::nofast_getfield(int byte_no) {
  getfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}

// The registers cache and index are expected to be set before the call.
// The function may destroy various registers, just not the cache and index registers.
void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
  // Cache is rcx and index is rdx
  const Register entry = c_rarg2; // ResolvedFieldEntry
  const Register obj   = c_rarg1; // Object pointer
  const Register value = c_rarg3; // JValue object

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L1;
    assert_different_registers(cache, obj, rax);
    __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(rax, rax);
    __ jcc(Assembler::zero, L1);

    __ mov(entry, cache);

    if (is_static) {
    // cache: field entry pointer
    // value: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               obj, entry, value);
    // Reload field entry
    __ load_field_entry(cache, index);
    __ bind(L1);
  }
}
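// Note on the JVMTI protocol above: no separate jvalue is built; the callee
// receives a pointer to the to-be-stored value where it already sits on the
// expression stack, so the operand is read in place.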

void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj       = rcx;
  const Register cache     = rcx;
  const Register index     = rdx;
  const Register tos_state = rdx;
  const Register off       = rbx;
  const Register flags     = rax;

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_mod(cache, index, is_static);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
  //                                              Assembler::StoreStore));

  Label notVolatile, Done;

  // Check for volatile store
  __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ testl(flags, flags);
  __ jcc(Assembler::zero, notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ jmp(Done);
  __ bind(notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);

  __ bind(Done);
}
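// Barrier choice, assuming x86-TSO: the hardware already keeps stores
// ordered after earlier stores and loads after earlier loads, so a volatile
// store only needs the StoreLoad (plus nominal StoreStore) fence emitted
// above, and volatile loads need no trailing fence at all, which is why the
// load-side barrier calls elsewhere in this file remain commented out.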

void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
                                              Register obj, Register off, Register tos_state) {

  // field addresses
  const Address field(obj, off, Address::times_1, 0*wordSize);

  Label notByte, notBool, notInt, notShort, notChar,
        notLong, notFloat, notObj;
  Label Done;

  const Register bc = c_rarg3;

  // Test TOS state
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  {
    __ pop(btos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);

  // ztos
  {
    __ pop(ztos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);

  // atos
  {
    __ pop(atos);
    if (!is_static) pop_and_check_object(obj);
    // Store into the field
    do_oop_store(_masm, field, rax);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notObj);
  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);

  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
}

void TemplateTable::jvmti_post_fast_field_mod() {

  const Register scratch = c_rarg3;

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(scratch, scratch);
    __ jcc(Assembler::zero, L2);
    __ pop_ptr(rbx);                  // copy the object pointer from tos
    __ verify_oop(rbx);
    __ push_ptr(rbx);                 // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {             // load values into the jvalue object
    case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(rax); break;
    case Bytecodes::_fast_dputfield: __ push(dtos); break;
    case Bytecodes::_fast_fputfield: __ push(ftos); break;
    case Bytecodes::_fast_lputfield: __ push_l(rax); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(scratch, rsp);             // points to jvalue on the stack
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, rax);
    __ verify_oop(rbx);
    // rbx: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);

    switch (bytecode()) {             // restore tos values
    case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
    case Bytecodes::_fast_dputfield: __ pop(dtos); break;
    case Bytecodes::_fast_fputfield: __ pop(ftos); break;
    case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
    default: break;
    }
    __ bind(L2);
  }
}

void TemplateTable::fast_storefield(TosState state) {
  transition(state, vtos);

  Register cache = rcx;

  Label notVolatile, Done;

  jvmti_post_fast_field_mod();

  __ push(rax);
  __ load_field_entry(rcx, rax);
  load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
  // RBX: field offset, RAX: TOS, RDX: flags
  __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ pop(rax);

  // Get object from stack
  pop_and_check_object(rcx);

  // field address
  const Address field(rcx, rbx, Address::times_1);

  // Check for volatile store
  __ testl(rdx, rdx);
  __ jcc(Assembler::zero, notVolatile);

  fast_storefield_helper(field, rax);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ jmp(Done);
  __ bind(notVolatile);

  fast_storefield_helper(field, rax);

  __ bind(Done);
}

void TemplateTable::fast_storefield_helper(Address field, Register rax) {

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_aputfield:
    do_oop_store(_masm, field, rax);
    break;
  case Bytecodes::_fast_lputfield:
    __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_iputfield:
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_zputfield:
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_bputfield:
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_sputfield:
    __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_cputfield:
    __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_fputfield:
    // Check to see if a field access watch has been set before we
    // take the time to call into the VM.
    Label L1;
    __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
    __ testl(rcx, rcx);
    __ jcc(Assembler::zero, L1);
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, rcx);
    __ verify_oop(rax);
    __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
    __ mov(c_rarg1, rax);
    // c_rarg1: object pointer copied above
    // c_rarg2: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
    __ pop_ptr(rax); // restore object pointer
    __ bind(L1);
  }

  // access constant pool cache
  __ load_field_entry(rcx, rbx);
  __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // rax: object
  __ verify_oop(rax);
  __ null_check(rax);
  Address field(rax, rbx, Address::times_1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_agetfield:
    do_oop_load(_masm, field, rax);
    __ verify_oop(rax);
    break;
  case Bytecodes::_fast_lgetfield:
    __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
    break;
  case Bytecodes::_fast_igetfield:
    __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_bgetfield:
    __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_sgetfield:
    __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_cgetfield:
    __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_fgetfield:

  // Note: rax_callsite is already pushed

  // %%% should make a type profile for any invokedynamic that takes a ref argument
  // profile this call
  __ profile_call(rbcp);
  __ profile_arguments_type(rdx, rbx_method, rbcp, false);

  __ verify_oop(rax_callsite);

  __ jump_from_interpreted(rbx_method, rdx);
}

//-----------------------------------------------------------------------------
// Allocation

void TemplateTable::_new() {
  transition(vtos, atos);
  __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
  Label slow_case;
  Label slow_case_no_pop;
  Label done;
  Label initialize_header;

  __ get_cpool_and_tags(rcx, rax);

  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading the InstanceKlass to be consistent with the
  // order in which the constant pool is updated (see ConstantPool::klass_at_put).
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
  __ jcc(Assembler::notEqual, slow_case_no_pop);

  // get InstanceKlass
  __ load_resolved_klass_at_index(rcx, rcx, rdx);
  __ push(rcx); // save the klass for initializing the header

  // make sure klass is initialized
  // init_state needs acquire, but x86 is TSO, and so we are already good.
  assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
  __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);

  // get instance_size in InstanceKlass (scaled to a count of bytes)
  __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
  // test to see if it is malformed in some way
  __ testl(rdx, Klass::_lh_instance_slow_path_bit);
  __ jcc(Assembler::notZero, slow_case);

  // Allocate the instance:
  //  If TLAB is enabled:
  //    Try to allocate in the TLAB.
  //    If fails, go to the slow path.
  //    Initialize the allocation.
  //    Exit.
  //
  //  Go to slow path.

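  // Worked example of the clearing loop below (illustrative numbers only):
  // for a 32-byte instance with a 16-byte header, rdx becomes 16 once the
  // header size is subtracted, shrl by LogBytesPerLong leaves 2, and the
  // loop zeroes the two 8-byte words at offsets 16 and 24.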
  if (UseTLAB) {
    __ tlab_allocate(rax, rdx, 0, rcx, rbx, slow_case);
    if (ZeroTLAB) {
      // the fields have been already cleared
      __ jmp(initialize_header);
    }

    // The object is initialized before the header.  If the object size is
    // zero, go directly to the header initialization.
    if (UseCompactObjectHeaders) {
      assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
      __ decrement(rdx, oopDesc::base_offset_in_bytes());
    } else {
      __ decrement(rdx, sizeof(oopDesc));
    }
    __ jcc(Assembler::zero, initialize_header);

    // Initialize topmost object field, divide rdx by 8, check if odd and
    // test if zero.
    __ xorl(rcx, rcx);             // use zero reg to clear memory (shorter code)
    __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd

    // rdx must have been a multiple of 8
#ifdef ASSERT
    // make sure rdx was a multiple of 8
    Label L;
    // Ignore partial flag stall after shrl() since it is debug VM
    __ jcc(Assembler::carryClear, L);
    __ stop("object size is not multiple of 2 - adjust this code");
    __ bind(L);
    // rdx must be > 0, no extra check needed here
#endif

    // initialize remaining object fields: rdx was a multiple of 8
    { Label loop;
      __ bind(loop);
      int header_size_bytes = oopDesc::header_size() * HeapWordSize;
      assert(is_aligned(header_size_bytes, BytesPerLong), "oop header size must be 8-byte-aligned");
      __ movptr(Address(rax, rdx, Address::times_8, header_size_bytes - 1*oopSize), rcx);
      __ decrement(rdx);
      __ jcc(Assembler::notZero, loop);
    }

    // initialize object header only.
    __ bind(initialize_header);
    if (UseCompactObjectHeaders) {
      __ pop(rcx);   // get saved klass back in the register.
      __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
    } else {
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
                (intptr_t)markWord::prototype().value()); // header
      __ pop(rcx);   // get saved klass back in the register.
      __ xorl(rsi, rsi);                    // use zero reg to clear memory (shorter code)
      __ store_klass_gap(rax, rsi);         // zero klass gap for compressed oops
      __ store_klass(rax, rcx, rscratch1);  // klass
    }

    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      __ push(atos);
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
      __ pop(atos);
    }

    __ jmp(done);
  }

  // slow case
  __ bind(slow_case);
  __ pop(rcx);   // restore stack pointer to what it was when we came in.
  __ bind(slow_case_no_pop);

  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(rax);

  // continue
  __ bind(done);
}

void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, rax);
}

void TemplateTable::anewarray() {
  transition(itos, atos);

  __ get_constant_pool(c_rarg1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          c_rarg1, c_rarg2, rax);
}

void TemplateTable::arraylength() {
  transition(atos, itos);
  __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
}

void TemplateTable::checkcast() {
  transition(atos, atos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ testptr(rax, rax); // object is in rax
  __ jcc(Assembler::zero, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
  __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
  // See if bytecode has already been quicked
  __ cmpb(Address(rdx, rbx,
                  Address::times_1,
                  Array<u1>::base_offset_in_bytes()),
          JVM_CONSTANT_Class);
  __ jcc(Assembler::equal, quicked);
  __ push(atos); // save receiver for result, and for GC
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

  __ get_vm_result_metadata(rax);

  __ pop_ptr(rdx); // restore receiver
  __ jmpb(resolved);

  // Get superklass in rax and subklass in rbx
  __ bind(quicked);
  __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
  __ load_resolved_klass_at_index(rax, rcx, rbx);

  __ bind(resolved);
  __ load_klass(rbx, rdx, rscratch1);

  // Generate subtype check.  Blows rcx, rdi.  Object in rdx.
  // Superklass in rax.  Subklass in rbx.
  __ gen_subtype_check(rbx, ok_is_subtype);

  // Come here on failure
  __ push_ptr(rdx);
  // object is at TOS
  __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));

  // Come here on success
  __ bind(ok_is_subtype);
  __ mov(rax, rdx); // Restore object from rdx

  // Collect counts on whether this check-cast sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ jmp(done);
    __ bind(is_null);
    __ profile_null_seen(rcx);
  } else {
    __ bind(is_null);   // same as 'done'
  }
  __ bind(done);
}

void TemplateTable::instanceof() {
  transition(atos, itos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
  __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
  // See if bytecode has already been quicked
  __ cmpb(Address(rdx, rbx,
                  Address::times_1,
                  Array<u1>::base_offset_in_bytes()),
          JVM_CONSTANT_Class);
  __ jcc(Assembler::equal, quicked);

  __ push(atos); // save receiver for result, and for GC
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

  __ get_vm_result_metadata(rax);

  __ pop_ptr(rdx); // restore receiver
  __ verify_oop(rdx);
  __ load_klass(rdx, rdx, rscratch1);
  __ jmpb(resolved);

  // Get superklass in rax and subklass in rdx
  __ bind(quicked);
  __ load_klass(rdx, rax, rscratch1);
  __ load_resolved_klass_at_index(rax, rcx, rbx);

  __ bind(resolved);

  // Generate subtype check.  Blows rcx, rdi
  // Come here on failure
  __ xorl(rax, rax);
  __ jmpb(done);
  // Come here on success
  __ bind(ok_is_subtype);
  __ movl(rax, 1);

  // Collect counts on whether this test sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ jmp(done);
    __ bind(is_null);
    __ profile_null_seen(rcx);
  } else {
    __ bind(is_null);   // same as 'done'
  }
  __ bind(done);
  // rax = 0: obj == nullptr or  obj is not an instanceof the specified klass
  // rax = 1: obj != nullptr and obj is     an instanceof the specified klass
}
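// checkcast and instanceof deliberately mirror each other: both quicken the
// constant-pool class the same way and share gen_subtype_check, but a null
// receiver passes checkcast unchanged while instanceof must produce 0 for
// it, per the result convention documented above.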


//----------------------------------------------------------------------------------------------------
// Breakpoints
void TemplateTable::_breakpoint() {
  // Note: We get here even if we are single stepping.
  // jbug insists on setting breakpoints at every bytecode
  // even if we are in single step mode.

  transition(vtos, vtos);

  // get the unpatched byte code
  __ get_method(c_rarg1);
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address,
                              InterpreterRuntime::get_original_bytecode_at),
             c_rarg1, rbcp);
  __ mov(rbx, rax);  // why?

  // post the breakpoint event
  __ get_method(c_rarg1);
  __ call_VM(noreg,
// Note: monitorenter & exit are symmetric routines, which is reflected
// in the assembly code structure as well
//
// Stack layout:
//
// [expressions  ] <--- rsp               = expression stack top
// ..
// [expressions  ]
// [monitor entry] <--- monitor block top = expression stack bot
// ..
// [monitor entry]
// [frame data   ] <--- monitor block bot
// ...
// [saved rbp    ] <--- rbp
void TemplateTable::monitorenter() {
  transition(atos, vtos);

  // check for null object
  __ null_check(rax);

  const Address monitor_block_top(
        rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Label allocated;

  Register rtop = c_rarg3;
  Register rbot = c_rarg2;
  Register rmon = c_rarg1;

  // initialize entry pointer
  __ xorl(rmon, rmon); // points to free slot or null

  // find a free slot in the monitor block (result in rmon)
  {
    Label entry, loop, exit;
    __ movptr(rtop, monitor_block_top); // derelativize pointer
    __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
  // rmon: points to monitor entry
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception
  // handling for asynchronous exceptions works correctly.
  // The object has already been popped from the stack, so the
  // expression stack looks correct.
  __ increment(rbcp);

  // store object
  __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
  __ lock_object(rmon);

  // check to make sure this monitor doesn't cause stack overflow after locking
  __ save_bcp();  // in case of exception
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to
  // next instruction.
  __ dispatch_next(vtos);
}

void TemplateTable::monitorexit() {
  transition(atos, vtos);

  // check for null object
  __ null_check(rax);

  const Address monitor_block_top(
        rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Register rtop = c_rarg1;
  Register rbot = c_rarg2;

  Label found;

  // find matching slot
  {
    Label entry, loop;
    __ movptr(rtop, monitor_block_top); // derelativize pointer
    __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
    // rtop points to current entry, starting with top-most entry

    __ lea(rbot, monitor_block_bot); // points to word before bottom
                                     // of monitor block
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "asm/macroAssembler.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/gc_globals.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/interp_masm.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/universe.hpp"
#include "oops/methodCounters.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/inlineKlass.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/safepointMechanism.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/macros.hpp"

#define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->

// Global Register Names
static const Register rbcp    = r13;
static const Register rlocals = r14;

// Address Computation: local variables
static inline Address iaddress(int n) {
  return Address(rlocals, Interpreter::local_offset_in_bytes(n));
}

static inline Address laddress(int n) {
  return iaddress(n + 1);
static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Address src,
                        Register dst,
                        DecoratorSet decorators = 0) {
  __ load_heap_oop(dst, src, rdx, decorators);
}

Address TemplateTable::at_bcp(int offset) {
  assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
  return Address(rbcp, offset);
}


void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
                                   Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
                                   int byte_no) {
  if (!RewriteBytecodes)  return;
  Label L_patch_done;

  switch (bc) {
  case Bytecodes::_fast_vputfield:
  case Bytecodes::_fast_aputfield:
  case Bytecodes::_fast_bputfield:
  case Bytecodes::_fast_zputfield:
  case Bytecodes::_fast_cputfield:
  case Bytecodes::_fast_dputfield:
  case Bytecodes::_fast_fputfield:
  case Bytecodes::_fast_iputfield:
  case Bytecodes::_fast_lputfield:
  case Bytecodes::_fast_sputfield:
    {
      // We skip bytecode quickening for putfield instructions when
      // the put_code written to the constant pool cache is zero.
      // This is required so that every execution of this instruction
      // calls out to InterpreterRuntime::resolve_get_put to do
      // additional, required work.
      assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
      assert(load_bc_into_bc_reg, "we use bc_reg as temp");
      __ load_field_entry(temp_reg, bc_reg);
      if (byte_no == f1_byte) {
        __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
                    Address(rdx, rax,
                            Address::times_4,
                            arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
                    noreg);
}

void TemplateTable::daload() {
  transition(itos, dtos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
                    Address(rdx, rax,
                            Address::times_8,
                            arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
                    noreg);
}

void TemplateTable::aaload() {
  transition(itos, atos);
  Register array = rdx;
  Register index = rax;

  index_check(array, index); // kills rbx
  __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
  if (UseArrayFlattening) {
    Label is_flat_array, done;
    __ test_flat_array_oop(array, rbx, is_flat_array);
    do_oop_load(_masm,
                Address(array, index,
                        UseCompressedOops ? Address::times_4 : Address::times_ptr,
                        arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
                rax,
                IS_ARRAY);
    __ jmp(done);
    __ bind(is_flat_array);
    __ movptr(rcx, array);
    call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), rcx, index);
    __ bind(done);
  } else {
    do_oop_load(_masm,
                Address(array, index,
                        UseCompressedOops ? Address::times_4 : Address::times_ptr,
                        arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
                rax,
                IS_ARRAY);
  }
  __ profile_element_type(rbx, rax, rcx);
}
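// With UseArrayFlattening the load has two shapes: a non-flat array is read
// like a regular object array, while a flat array stores its elements
// inline with no per-element oops, so the template calls
// InterpreterRuntime::flat_array_load, which returns the element as a heap
// object in rax.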

void TemplateTable::baload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
                    Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
                    noreg);
}

void TemplateTable::caload() {
  transition(itos, itos);
  // rax: index
  // rdx: array
  index_check(rdx, rax); // kills rbx
  __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
                    noreg);
  __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
                     Address(rdx, rbx, Address::times_4,
                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
                     noreg /* ftos */, noreg, noreg, noreg);
}

void TemplateTable::dastore() {
  transition(dtos, vtos);
  __ pop_i(rbx);
  // value is in xmm0
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
                     Address(rdx, rbx, Address::times_8,
                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
                     noreg /* dtos */, noreg, noreg, noreg);
}

void TemplateTable::aastore() {
  Label is_null, is_flat_array, ok_is_subtype, done;
  transition(vtos, vtos);
  // stack: ..., array, index, value
  __ movptr(rax, at_tos());    // value
  __ movl(rcx, at_tos_p1());   // index
  __ movptr(rdx, at_tos_p2()); // array

  Address element_address(rdx, rcx,
                          UseCompressedOops ? Address::times_4 : Address::times_ptr,
                          arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  index_check_without_pop(rdx, rcx); // kills rbx

  __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
  __ profile_multiple_element_types(rdi, rax, rbx, rcx);

  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Move array class to rdi
  __ load_klass(rdi, rdx, rscratch1);
  if (UseArrayFlattening) {
    __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
    __ test_flat_array_layout(rbx, is_flat_array);
  }

  // Move subklass into rbx
  __ load_klass(rbx, rax, rscratch1);
  // Move array element superklass into rax
  __ movptr(rax, Address(rdi,
                         ObjArrayKlass::element_klass_offset()));

  // Generate subtype check.  Blows rcx, rdi
  // Superklass in rax.  Subklass in rbx.
  // is "rbx <: rax" ? (value subclass <: array element superclass)
  __ gen_subtype_check(rbx, ok_is_subtype, false);

  // Come here on failure
  // object is at TOS
  __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));

  // Come here on success
  __ bind(ok_is_subtype);

  // Get the value we will store
  __ movptr(rax, at_tos());
  __ movl(rcx, at_tos_p1()); // index
  // Now store using the appropriate barrier
  do_oop_store(_masm, element_address, rax, IS_ARRAY);
  __ jmp(done);

  // Have a null in rax, rdx=array, rcx=index. Store null at ary[idx]
  __ bind(is_null);
  if (Arguments::is_valhalla_enabled()) {
    Label write_null_to_null_free_array, store_null;

    // Move array class to rdi
    __ load_klass(rdi, rdx, rscratch1);
    if (UseArrayFlattening) {
      __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
      __ test_flat_array_layout(rbx, is_flat_array);
    }

    // There is no way to store null in a null-free array
    __ test_null_free_array_oop(rdx, rbx, write_null_to_null_free_array);
    __ jmp(store_null);

    __ bind(write_null_to_null_free_array);
    __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));

    __ bind(store_null);
  }
  // Store a null
  do_oop_store(_masm, element_address, noreg, IS_ARRAY);
  __ jmp(done);

  if (UseArrayFlattening) {
    Label is_type_ok;
    __ bind(is_flat_array); // Store non-null value to flat

    __ movptr(rax, at_tos());
    __ movl(rcx, at_tos_p1());   // index
    __ movptr(rdx, at_tos_p2()); // array

    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), rax, rdx, rcx);
  }
  // Pop stack arguments
  __ bind(done);
  __ addptr(rsp, 3 * Interpreter::stackElementSize);
}
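// Null handling differs from the non-Valhalla template: storing null into a
// null-free array throws NullPointerException instead of writing, and
// non-null stores into flat arrays are routed through
// InterpreterRuntime::flat_array_store rather than an oop barrier.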

void TemplateTable::bastore() {
  transition(itos, vtos);
  __ pop_i(rbx);
  // rax: value
  // rbx: index
  // rdx: array
  index_check(rdx, rbx); // prefer index in rbx
  // Need to check whether array is boolean or byte
  // since both types share the bastore bytecode.
  __ load_klass(rcx, rdx, rscratch1);
  __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
  int diffbit = Klass::layout_helper_boolean_diffbit();
  __ testl(rcx, diffbit);
  Label L_skip;
  __ jccb(Assembler::zero, L_skip);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}

void TemplateTable::if_nullcmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ testptr(rax, rax);
  __ jcc(j_not(cc), not_taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax);
}

void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label taken, not_taken;
  __ pop_ptr(rdx);

  __ profile_acmp(rbx, rdx, rax, rcx);

  const int is_inline_type_mask = markWord::inline_type_pattern;
  if (Arguments::is_valhalla_enabled()) {
    __ cmpoop(rdx, rax);
    __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);

    // might be substitutable, test if either rax or rdx is null
    __ testptr(rax, rax);
    __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
    __ testptr(rdx, rdx);
    __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);

    // and both are value objects?
    __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
    __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
    __ andptr(rbx, is_inline_type_mask);
    __ cmpptr(rbx, is_inline_type_mask);
    __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);

    // same value klass?
    __ load_metadata(rbx, rdx);
    __ load_metadata(rcx, rax);
    __ cmpptr(rbx, rcx);
    __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);

    // Know both are the same type, let's test for substitutability...
    if (cc == equal) {
      invoke_is_substitutable(rax, rdx, taken, not_taken);
    } else {
      invoke_is_substitutable(rax, rdx, not_taken, taken);
    }
    __ stop("Not reachable");
  }

  __ cmpoop(rdx, rax);
  __ jcc(j_not(cc), not_taken);
  __ bind(taken);
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(rax, true);
}

void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
                                            Label& is_subst, Label& not_subst) {
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
  // rax holds the answer; jump to the chosen outcome
  __ testl(rax, rax);
  __ jcc(Assembler::zero, not_subst);
  __ jmp(is_subst);
}
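// Sketch of acmp semantics under Valhalla, as implemented above: identical
// references compare equal immediately; if either operand is null or not a
// value object, plain identity decides; two value objects of the same class
// fall back to the runtime substitutability check, whose int result in rax
// selects the taken or not-taken outcome.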

void TemplateTable::ret() {
  transition(vtos, vtos);
  locals_index(rbx);
  __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
  __ profile_ret(rbx, rcx);
  __ get_method(rax);
  __ movptr(rbcp, Address(rax, Method::const_offset()));
  __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
                       ConstMethod::codes_offset()));
  __ dispatch_next(vtos, 0, true);
}

void TemplateTable::wide_ret() {
  transition(vtos, vtos);
  locals_index_wide(rbx);
  __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
  __ profile_ret(rbx, rcx);
  __ get_method(rax);
  if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
    Label no_safepoint;
    NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
    __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
    __ jcc(Assembler::zero, no_safepoint);
    __ push(state);
    __ push_cont_fastpath();
    __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                                       InterpreterRuntime::at_safepoint));
    __ pop_cont_fastpath();
    __ pop(state);
    __ bind(no_safepoint);
  }

  // Narrow result if state is itos but result type is smaller.
  // Need to narrow in the return bytecode rather than in generate_return_entry
  // since compiled code callers expect the result to already be narrowed.
  if (state == itos) {
    __ narrow(rax);
  }

  __ remove_activation(state, rbcp, true, true, true);

  __ jmp(rbcp);
}

// ----------------------------------------------------------------------------
// Volatile variables demand their effects be made known to all CPUs
// in order. Store buffers on most chips allow reads & writes to
// reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
// without some kind of memory barrier (i.e., it's not sufficient that
// the interpreter does not reorder volatile references, the hardware
// also must not reorder them).
//
// According to the new Java Memory Model (JMM):
// (1) All volatiles are serialized with respect to each other. ALSO reads &
//     writes act as acquire & release, so:
// (2) A read cannot let unrelated NON-volatile memory refs that
//     happen after the read float up to before the read. It's OK for
//     non-volatile memory refs that happen before the volatile read to
//     float down below it.
// (3) Similarly, a volatile write cannot let unrelated NON-volatile
    }
    // rax: object pointer or null
    // cache: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
               rax, cache);

    __ load_field_entry(cache, index);
    __ bind(L1);
  }
}

void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r);  // for field access must check obj.
  __ verify_oop(r);
}

void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj   = r9;
  const Register cache = rcx;
  const Register index = rdx;
  const Register off   = rbx;
  const Register tos_state = rax;
  const Register flags = rdx;
  const Register bc    = c_rarg3;

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_access(cache, index, is_static, false);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  const Address field(obj, off, Address::times_1, 0*wordSize);

  Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;

  // Make sure we don't need to mask edx after the above shift
  assert(btos == 0, "change code, btos != 0");
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  if (!is_static) pop_and_check_object(obj);
  __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
  __ push(btos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notByte);
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);

  // ztos (same code as btos)
  if (!is_static) pop_and_check_object(obj);
  __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
  __ push(ztos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    // use btos rewriting, no truncating to t/f bit is needed for getfield.
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);
  // atos
  if (!Arguments::is_valhalla_enabled()) {
    if (!is_static) pop_and_check_object(obj);
    do_oop_load(_masm, field, rax);
    __ push(atos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
    }
    __ jmp(Done);
  } else {
    if (is_static) {
      __ load_heap_oop(rax, field);
      __ push(atos);
      __ jmp(Done);
    } else {
      Label is_flat;
      __ test_field_is_flat(flags, rscratch1, is_flat);
      pop_and_check_object(obj);
      __ load_heap_oop(rax, field);
      __ push(atos);
      if (rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
      }
      __ jmp(Done);
      __ bind(is_flat);
      // field is flat (null-free or nullable with a null-marker)
      pop_and_check_object(rax);
      __ read_flat_field(rcx, rax);
      __ verify_oop(rax);
      __ push(atos);
      if (rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
      }
      __ jmp(Done);
    }
  }
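  // A flat field has no oop to load: read_flat_field is expected to copy the
  // flattened payload out of the holder and leave a heap-allocated instance
  // in rax, which is why this path rewrites to _fast_vgetfield rather than
  // _fast_agetfield.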
2720
2721 __ bind(notObj);
2722
2723 if (!is_static) pop_and_check_object(obj);
2724
2725 __ cmpl(tos_state, itos);
2726 __ jcc(Assembler::notEqual, notInt);
2727 // itos
2728 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
2729 __ push(itos);
2730 // Rewrite bytecode to be faster
2731 if (!is_static && rc == may_rewrite) {
2732 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2733 }
2734 __ jmp(Done);
2735
2736 __ bind(notInt);
2737 __ cmpl(tos_state, ctos);
2738 __ jcc(Assembler::notEqual, notChar);
2739 // ctos
2740 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
2741 __ push(ctos);
2742 // Rewrite bytecode to be faster
2743 if (!is_static && rc == may_rewrite) {
2744 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2804 #endif
2805
2806 __ bind(Done);
2807 // [jk] not needed currently
2808 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2809 // Assembler::LoadStore));
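// (x86 is TSO: LoadLoad and LoadStore never reorder, so the acquiring
// barrier above can stay disabled.)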
2810 }
2811
2812 void TemplateTable::getfield(int byte_no) {
2813 getfield_or_static(byte_no, false);
2814 }
2815
2816 void TemplateTable::nofast_getfield(int byte_no) {
2817 getfield_or_static(byte_no, false, may_not_rewrite);
2818 }
2819
2820 void TemplateTable::getstatic(int byte_no) {
2821 getfield_or_static(byte_no, true);
2822 }
2823
2824 // The registers cache and index are expected to be set before the call.
2825 // The function may destroy various registers, but not the cache and index registers.
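// A watch is armed through JVMTI (SetFieldModificationWatch); the counter
// read below is nonzero only while at least one such watch is active.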
2826 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2827 // Cache is rcx and index is rdx
2828 const Register entry = c_rarg2; // ResolvedFieldEntry
2829 const Register obj = c_rarg1; // Object pointer
2830 const Register value = c_rarg3; // JValue object
2831
2832 if (JvmtiExport::can_post_field_modification()) {
2833 // Check to see if a field modification watch has been set before
2834 // we take the time to call into the VM.
2835 Label L1;
2836 assert_different_registers(cache, obj, rax);
2837 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2838 __ testl(rax, rax);
2839 __ jcc(Assembler::zero, L1);
2840
2841 __ mov(entry, cache);
2842
2843 if (is_static) {
2865 // entry: field entry pointer (copied from cache above)
2866 // value: jvalue object on the stack
2867 __ call_VM(noreg,
2868 CAST_FROM_FN_PTR(address,
2869 InterpreterRuntime::post_field_modification),
2870 obj, entry, value);
2871 // Reload field entry
2872 __ load_field_entry(cache, index);
2873 __ bind(L1);
2874 }
2875 }
2876
2877 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2878 transition(vtos, vtos);
2879
2880 const Register obj = rcx;
2881 const Register cache = rcx;
2882 const Register index = rdx;
2883 const Register tos_state = rdx;
2884 const Register off = rbx;
2885 const Register flags = r9;
2886
2887 resolve_cache_and_index_for_field(byte_no, cache, index);
2888 jvmti_post_field_mod(cache, index, is_static);
2889 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2890
2891 // [jk] not needed currently
2892 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2893 // Assembler::StoreStore));
2894
2895 Label notVolatile, Done;
2896
2897 // Check for volatile store
2898 __ movl(rscratch1, flags);
2899 __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
2900 __ testl(rscratch1, rscratch1);
2901 __ jcc(Assembler::zero, notVolatile);
2902
2903 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
2904 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2905 Assembler::StoreStore));
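// On x86 only StoreLoad reordering is possible, so this is the one
// barrier a volatile store actually has to pay for.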
2906 __ jmp(Done);
2907 __ bind(notVolatile);
2908
2909 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
2910
2911 __ bind(Done);
2912 }
2913
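// Note: putfield_or_static emits the helper below twice (volatile and
// non-volatile paths), trading code size for a statically placed barrier.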
2914 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2915 Register obj, Register off, Register tos_state, Register flags) {
2916
2917 // field addresses
2918 const Address field(obj, off, Address::times_1, 0*wordSize);
2919
2920 Label notByte, notBool, notInt, notShort, notChar,
2921 notLong, notFloat, notObj;
2922 Label Done;
2923
2924 const Register bc = c_rarg3;
2925
2926 // Test TOS state
2927 __ testl(tos_state, tos_state);
2928 __ jcc(Assembler::notZero, notByte);
2929
2930 // btos
2931 {
2932 __ pop(btos);
2933 if (!is_static) pop_and_check_object(obj);
2934 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2935 if (!is_static && rc == may_rewrite) {
2936 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2937 }
2938 __ jmp(Done);
2939 }
2940
2941 __ bind(notByte);
2942 __ cmpl(tos_state, ztos);
2943 __ jcc(Assembler::notEqual, notBool);
2944
2945 // ztos
2946 {
2947 __ pop(ztos);
2948 if (!is_static) pop_and_check_object(obj);
2949 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
2950 if (!is_static && rc == may_rewrite) {
2951 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2952 }
2953 __ jmp(Done);
2954 }
2955
2956 __ bind(notBool);
2957 __ cmpl(tos_state, atos);
2958 __ jcc(Assembler::notEqual, notObj);
2959
2960 // atos
2961 {
2962 if (!Arguments::is_valhalla_enabled()) {
2963 __ pop(atos);
2964 if (!is_static) pop_and_check_object(obj);
2965 // Store into the field
2966 do_oop_store(_masm, field, rax);
2967 if (!is_static && rc == may_rewrite) {
2968 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2969 }
2970 __ jmp(Done);
2971 } else {
2972 __ pop(atos);
2973 if (is_static) {
2974 Label is_nullable;
2975 __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_nullable);
2976 __ null_check(rax); // FIXME JDK-8341120
2977 __ bind(is_nullable);
2978 do_oop_store(_masm, field, rax);
2979 __ jmp(Done);
2980 } else {
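// Three field shapes to handle under Valhalla:
// 1. nullable, non-flat: plain oop store, rewritable to _fast_aputfield
// 2. null-free, non-flat: null check the value, then a plain oop store
// 3. flat: write_flat_field() copies the payload into the holder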
2981 Label is_flat, null_free_reference, rewrite_inline;
2982 __ test_field_is_flat(flags, rscratch1, is_flat);
2983 __ test_field_is_null_free_inline_type(flags, rscratch1, null_free_reference);
2984 pop_and_check_object(obj);
2985 // Store into the field
2986 do_oop_store(_masm, field, rax);
2987 if (rc == may_rewrite) {
2988 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2989 }
2990 __ jmp(Done);
2991 __ bind(null_free_reference);
2992 __ null_check(rax); // FIXME JDK-8341120
2993 pop_and_check_object(obj);
2994 // Store into the field
2995 do_oop_store(_masm, field, rax);
2996 __ jmp(rewrite_inline);
2997 __ bind(is_flat);
2998 pop_and_check_object(rscratch2);
2999 __ write_flat_field(rcx, r8, rscratch1, rscratch2, rbx, rax);
3000 __ bind(rewrite_inline);
3001 if (rc == may_rewrite) {
3002 patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
3003 }
3004 __ jmp(Done);
3005 }
3006 }
3007 }
3008
3009 __ bind(notObj);
3010 __ cmpl(tos_state, itos);
3011 __ jcc(Assembler::notEqual, notInt);
3012
3013 // itos
3014 {
3015 __ pop(itos);
3016 if (!is_static) pop_and_check_object(obj);
3017 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3018 if (!is_static && rc == may_rewrite) {
3019 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3020 }
3021 __ jmp(Done);
3022 }
3023
3024 __ bind(notInt);
3025 __ cmpl(tos_state, ctos);
3026 __ jcc(Assembler::notEqual, notChar);
3123 }
3124
3125 void TemplateTable::jvmti_post_fast_field_mod() {
3126
3127 const Register scratch = c_rarg3;
3128
3129 if (JvmtiExport::can_post_field_modification()) {
3130 // Check to see if a field modification watch has been set before
3131 // we take the time to call into the VM.
3132 Label L2;
3133 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3134 __ testl(scratch, scratch);
3135 __ jcc(Assembler::zero, L2);
3136 __ pop_ptr(rbx); // copy the object pointer from tos
3137 __ verify_oop(rbx);
3138 __ push_ptr(rbx); // put the object pointer back on tos
3139 // Save tos values before call_VM() clobbers them. Since we have
3140 // to do it for every data type, we use the saved values as the
3141 // jvalue object.
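// For example, _fast_iputfield pushes the int in rax; scratch (set to rsp
// below) then points at it as the jvalue payload passed to the VM.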
3142 switch (bytecode()) { // load values into the jvalue object
3143 case Bytecodes::_fast_vputfield: // fall through
3144 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3145 case Bytecodes::_fast_bputfield: // fall through
3146 case Bytecodes::_fast_zputfield: // fall through
3147 case Bytecodes::_fast_sputfield: // fall through
3148 case Bytecodes::_fast_cputfield: // fall through
3149 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3150 case Bytecodes::_fast_dputfield: __ push(dtos); break;
3151 case Bytecodes::_fast_fputfield: __ push(ftos); break;
3152 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3153
3154 default:
3155 ShouldNotReachHere();
3156 }
3157 __ mov(scratch, rsp); // points to jvalue on the stack
3158 // access constant pool cache entry
3159 __ load_field_entry(c_rarg2, rax);
3160 __ verify_oop(rbx);
3161 // rbx: object pointer copied above
3162 // c_rarg2: cache entry pointer
3163 // c_rarg3: jvalue object on the stack
3164 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
3165
3166 switch (bytecode()) { // restore tos values
3167 case Bytecodes::_fast_vputfield: // fall through
3168 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3169 case Bytecodes::_fast_bputfield: // fall through
3170 case Bytecodes::_fast_zputfield: // fall through
3171 case Bytecodes::_fast_sputfield: // fall through
3172 case Bytecodes::_fast_cputfield: // fall through
3173 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3174 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3175 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3176 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3177 default: break;
3178 }
3179 __ bind(L2);
3180 }
3181 }
3182
3183 void TemplateTable::fast_storefield(TosState state) {
3184 transition(state, vtos);
3185
3186 Label notVolatile, Done;
3187
3188 jvmti_post_fast_field_mod();
3189
3190 __ push(rax);
3191 __ load_field_entry(rcx, rax);
3192 load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
3193 __ pop(rax);
3194 // RBX: field offset, RCX: field entry, RAX: TOS value, RDX: flags
3195
3196 // Get object from stack
3197 pop_and_check_object(rcx);
3198
3199 // field address
3200 const Address field(rcx, rbx, Address::times_1);
3201
3202 // Check for volatile store
3203 __ movl(rscratch2, rdx); // copy flags so rdx stays live for the helper's is_flat test
3204 __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3205 __ testl(rscratch2, rscratch2);
3206 __ jcc(Assembler::zero, notVolatile);
3207
3208 fast_storefield_helper(field, rax, rdx);
3209 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3210 Assembler::StoreStore));
3211 __ jmp(Done);
3212 __ bind(notVolatile);
3213
3214 fast_storefield_helper(field, rax, rdx);
3215
3216 __ bind(Done);
3217 }
3218
3219 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3220
3221 // DANGER: 'field' argument depends on rcx and rbx
3222
3223 // access field
3224 switch (bytecode()) {
3225 case Bytecodes::_fast_vputfield:
3226 {
3227 // Field is either flat (nullable or not) or non-flat and null-free
3228 Label is_flat, done;
3229 __ test_field_is_flat(flags, rscratch1, is_flat);
3230 __ null_check(rax); // FIXME JDK-8341120
3231 do_oop_store(_masm, field, rax);
3232 __ jmp(done);
3233 __ bind(is_flat);
3234 __ load_field_entry(r8, r9);
3235 __ movptr(rscratch2, rcx); // re-shuffle registers to match the VM call's calling convention
3236 __ write_flat_field(r8, rscratch1, r9, rscratch2, rbx, rax);
3237 __ bind(done);
3238 }
3239 break;
3240 case Bytecodes::_fast_aputfield:
3241 {
3242 do_oop_store(_masm, field, rax);
3243 }
3244 break;
3245 case Bytecodes::_fast_lputfield:
3246 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3247 break;
3248 case Bytecodes::_fast_iputfield:
3249 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3250 break;
3251 case Bytecodes::_fast_zputfield:
3252 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3253 break;
3254 case Bytecodes::_fast_bputfield:
3255 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3256 break;
3257 case Bytecodes::_fast_sputfield:
3258 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3259 break;
3260 case Bytecodes::_fast_cputfield:
3261 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3262 break;
3263 case Bytecodes::_fast_fputfield:
3279 // Check to see if a field access watch has been set before we
3280 // take the time to call into the VM.
3281 Label L1;
3282 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3283 __ testl(rcx, rcx);
3284 __ jcc(Assembler::zero, L1);
3285 // access constant pool cache entry
3286 __ load_field_entry(c_rarg2, rcx);
3287 __ verify_oop(rax);
3288 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3289 __ mov(c_rarg1, rax);
3290 // c_rarg1: object pointer copied above
3291 // c_rarg2: cache entry pointer
3292 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3293 __ pop_ptr(rax); // restore object pointer
3294 __ bind(L1);
3295 }
3296
3297 // access constant pool cache
3298 __ load_field_entry(rcx, rbx);
3299 __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3300
3301 // rax: object
3302 __ verify_oop(rax);
3303 __ null_check(rax);
3304 Address field(rax, rdx, Address::times_1);
3305
3306 // access field
3307 switch (bytecode()) {
3308 case Bytecodes::_fast_vgetfield:
3309 __ read_flat_field(rcx, rax);
3310 __ verify_oop(rax);
3311 break;
3312 case Bytecodes::_fast_agetfield:
3313 do_oop_load(_masm, field, rax);
3314 __ verify_oop(rax);
3315 break;
3316 case Bytecodes::_fast_lgetfield:
3317 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
3318 break;
3319 case Bytecodes::_fast_igetfield:
3320 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
3321 break;
3322 case Bytecodes::_fast_bgetfield:
3323 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
3324 break;
3325 case Bytecodes::_fast_sgetfield:
3326 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
3327 break;
3328 case Bytecodes::_fast_cgetfield:
3329 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
3330 break;
3331 case Bytecodes::_fast_fgetfield:
3716
3717 // Note: rax_callsite is already pushed
3718
3719 // %%% should make a type profile for any invokedynamic that takes a ref argument
3720 // profile this call
3721 __ profile_call(rbcp);
3722 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3723
3724 __ verify_oop(rax_callsite);
3725
3726 __ jump_from_interpreted(rbx_method, rdx);
3727 }
3728
3729 //-----------------------------------------------------------------------------
3730 // Allocation
3731
3732 void TemplateTable::_new() {
3733 transition(vtos, atos);
3734 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3735 Label slow_case;
3736 Label done;
3737
3738 __ get_cpool_and_tags(rcx, rax);
3739
3740 // Make sure the class we're about to instantiate has been resolved.
3741 // This is done before loading InstanceKlass to be consistent with the order
3742 // in which the constant pool is updated (see ConstantPool::klass_at_put)
3743 const int tags_offset = Array<u1>::base_offset_in_bytes();
3744 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3745 __ jcc(Assembler::notEqual, slow_case);
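// Seeing the Class tag here also guarantees the resolved Klass* is
// visible, because klass_at_put publishes the klass before the tag.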
3746
3747 // get InstanceKlass
3748 __ load_resolved_klass_at_index(rcx, rcx, rdx);
3749
3750 // make sure klass is initialized
3751 // init_state needs acquire, but x86 is TSO, and so we are already good.
3752 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3753 __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);
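// clinit_barrier falls through only for a fully initialized class; any
// other init state takes slow_case, where InterpreterRuntime::_new
// re-checks and initializes the class as needed.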
3754
3755 __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
3756 __ jmp(done);
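// Fast path done: rax holds the new instance with header set and fields zeroed.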
3757
3758 // slow case
3759 __ bind(slow_case);
3760
3761 __ get_constant_pool(c_rarg1);
3762 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3763 __ call_VM_preemptable(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3764 __ verify_oop(rax);
3765
3766 // continue
3767 __ bind(done);
3768 }
3769
3770 void TemplateTable::newarray() {
3771 transition(itos, atos);
3772 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3773 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3774 c_rarg1, rax);
3775 }
3776
3777 void TemplateTable::anewarray() {
3778 transition(itos, atos);
3779
3780 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3781 __ get_constant_pool(c_rarg1);
3782 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3783 c_rarg1, c_rarg2, rax);
3784 }
3785
3786 void TemplateTable::arraylength() {
3787 transition(atos, itos);
3788 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3789 }
3790
3791 void TemplateTable::checkcast() {
3792 transition(atos, atos);
3793 Label done, is_null, ok_is_subtype, quicked, resolved;
3794 __ testptr(rax, rax); // object is in rax
3795 __ jcc(Assembler::zero, is_null);
3796
3797 // Get cpool & tags index
3798 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3799 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3800 // See if bytecode has already been quicked
3801 __ movzbl(rdx, Address(rdx, rbx,
3802 Address::times_1,
3803 Array<u1>::base_offset_in_bytes()));
3804 __ cmpl(rdx, JVM_CONSTANT_Class);
3805 __ jcc(Assembler::equal, quicked);
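// Not quicked yet: resolve the class in the VM (quickening the bytecode
// as a side effect) and pick up the Klass* it returns.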
3806 __ push(atos); // save receiver for result, and for GC
3807 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3808
3809 __ get_vm_result_metadata(rax);
3810
3811 __ pop_ptr(rdx); // restore receiver
3812 __ jmpb(resolved);
3813
3814 // Get superklass in rax and subklass in rbx
3815 __ bind(quicked);
3816 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3817 __ load_resolved_klass_at_index(rax, rcx, rbx);
3818
3819 __ bind(resolved);
3820 __ load_klass(rbx, rdx, rscratch1);
3821
3822 // Generate subtype check. Blows rcx, rdi. Object in rdx.
3823 // Superklass in rax. Subklass in rbx.
3824 __ gen_subtype_check(rbx, ok_is_subtype);
3825
3826 // Come here on failure
3827 __ push_ptr(rdx);
3828 // object is at TOS
3829 __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3830
3831 // Come here on success
3832 __ bind(ok_is_subtype);
3833 __ mov(rax, rdx); // restore the object from rdx into rax (the result)
3834 __ jmp(done);
3835
3836 __ bind(is_null);
3837
3838 // Collect counts on whether this check-cast sees nulls a lot or not.
3839 if (ProfileInterpreter) {
3840 __ profile_null_seen(rcx);
3841 }
3842
3843 __ bind(done);
3844 }
3845
3846 void TemplateTable::instanceof() {
3847 transition(atos, itos);
3848 Label done, is_null, ok_is_subtype, quicked, resolved;
3849 __ testptr(rax, rax);
3850 __ jcc(Assembler::zero, is_null);
3851
3852 // Get cpool & tags index
3853 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3854 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3855 // See if bytecode has already been quicked
3856 __ movzbl(rdx, Address(rdx, rbx,
3857 Address::times_1,
3858 Array<u1>::base_offset_in_bytes()));
3859 __ cmpl(rdx, JVM_CONSTANT_Class);
3860 __ jcc(Assembler::equal, quicked);
3861
3862 __ push(atos); // save receiver for result, and for GC
3863 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3864
3865 __ get_vm_result_metadata(rax);
3866
3867 __ pop_ptr(rdx); // restore receiver
3868 __ verify_oop(rdx);
3869 __ load_klass(rdx, rdx, rscratch1);
3870 __ jmpb(resolved);
3871
3872 // Get superklass in rax and subklass in rdx
3873 __ bind(quicked);
3874 __ load_klass(rdx, rax, rscratch1);
3875 __ load_resolved_klass_at_index(rax, rcx, rbx);
3876
3877 __ bind(resolved);
3878
3879 // Generate subtype check. Blows rcx, rdi
3880 // Superklass in rax. Subklass in rdx.
3881 __ gen_subtype_check(rdx, ok_is_subtype);
3882
3883 // Come here on failure
3884 __ xorl(rax, rax);
3885 __ jmpb(done);
3886 // Come here on success
3887 __ bind(ok_is_subtype);
3888 __ movl(rax, 1);
3889
3890 // Collect counts on whether this test sees nulls a lot or not.
3891 if (ProfileInterpreter) {
3892 __ jmp(done);
3893 __ bind(is_null);
3894 __ profile_null_seen(rcx);
3895 } else {
3896 __ bind(is_null); // same as 'done'
3897 }
3898 __ bind(done);
3899 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
3900 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
3901 }
3902
3903 //----------------------------------------------------------------------------------------------------
3904 // Breakpoints
3905 void TemplateTable::_breakpoint() {
3906 // Note: We get here even if we are single stepping.
3907 // jbug insists on setting breakpoints at every bytecode
3908 // even if we are in single step mode.
3909
3910 transition(vtos, vtos);
3911
3912 // get the unpatched byte code
3913 __ get_method(c_rarg1);
3914 __ call_VM(noreg,
3915 CAST_FROM_FN_PTR(address,
3916 InterpreterRuntime::get_original_bytecode_at),
3917 c_rarg1, rbcp);
3918 __ mov(rbx, rax); // save the original bytecode; dispatch below uses rbx and the VM call clobbers rax
3919
3920 // post the breakpoint event
3921 __ get_method(c_rarg1);
3922 __ call_VM(noreg,
3942 // Note: monitorenter & exit are symmetric routines, which is reflected
3943 // in the structure of the assembly code as well
3944 //
3945 // Stack layout:
3946 //
3947 // [expressions ] <--- rsp = expression stack top
3948 // ..
3949 // [expressions ]
3950 // [monitor entry] <--- monitor block top = expression stack bot
3951 // ..
3952 // [monitor entry]
3953 // [frame data ] <--- monitor block bot
3954 // ...
3955 // [saved rbp ] <--- rbp
3956 void TemplateTable::monitorenter() {
3957 transition(atos, vtos);
3958
3959 // check for null object
3960 __ null_check(rax);
3961
3962 Label is_inline_type;
3963 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
3964 __ test_markword_is_inline_type(rbx, is_inline_type);
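// Value objects have no identity and cannot be synchronized on; the
// is_inline_type handler at the end of this method throws.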
3965
3966 const Address monitor_block_top(
3967 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3968 const Address monitor_block_bot(
3969 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3970 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
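// Each monitor entry is a BasicObjectLock: a BasicLock (room for the
// displaced mark word) followed by the owning oop at obj_offset().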
3971
3972 Label allocated;
3973
3974 Register rtop = c_rarg3;
3975 Register rbot = c_rarg2;
3976 Register rmon = c_rarg1;
3977
3978 // initialize entry pointer
3979 __ xorl(rmon, rmon); // points to free slot or null
3980
3981 // find a free slot in the monitor block (result in rmon)
3982 {
3983 Label entry, loop, exit;
3984 __ movptr(rtop, monitor_block_top); // derelativize pointer
3985 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4038 // rmon: points to monitor entry
4039 __ bind(allocated);
4040
4041 // Increment bcp to point to the next bytecode, so exception
4042 // handling for asynchronous exceptions works correctly.
4043 // The object has already been popped from the stack, so the
4044 // expression stack looks correct.
4045 __ increment(rbcp);
4046
4047 // store object
4048 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4049 __ lock_object(rmon);
4050
4051 // check to make sure this monitor doesn't cause stack overflow after locking
4052 __ save_bcp(); // in case of exception
4053 __ generate_stack_overflow_check(0);
4054
4055 // The bcp has already been incremented. Just need to dispatch to
4056 // next instruction.
4057 __ dispatch_next(vtos);
4058
4059 __ bind(is_inline_type);
4060 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4061 InterpreterRuntime::throw_identity_exception), rax);
4062 __ should_not_reach_here();
4063 }
4064
4065 void TemplateTable::monitorexit() {
4066 transition(atos, vtos);
4067
4068 // check for null object
4069 __ null_check(rax);
4070
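// A value object can never have been locked, so monitorexit on one is
// always an IllegalMonitorStateException.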
4071 const int is_inline_type_mask = markWord::inline_type_pattern;
4072 Label has_identity;
4073 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4074 __ andptr(rbx, is_inline_type_mask);
4075 __ cmpl(rbx, is_inline_type_mask);
4076 __ jcc(Assembler::notEqual, has_identity);
4077 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4078 InterpreterRuntime::throw_illegal_monitor_state_exception));
4079 __ should_not_reach_here();
4080 __ bind(has_identity);
4081
4082 const Address monitor_block_top(
4083 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4084 const Address monitor_block_bot(
4085 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4086 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4087
4088 Register rtop = c_rarg1;
4089 Register rbot = c_rarg2;
4090
4091 Label found;
4092
4093 // find matching slot
4094 {
4095 Label entry, loop;
4096 __ movptr(rtop, monitor_block_top); // derelativize pointer
4097 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4098 // rtop points to current entry, starting with top-most entry
4099
4100 __ lea(rbot, monitor_block_bot); // points to word before bottom
4101 // of monitor block