19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "compiler/disassembler.hpp"
27 #include "gc/shared/collectedHeap.hpp"
28 #include "gc/shared/gc_globals.hpp"
29 #include "gc/shared/tlab_globals.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "interpreter/interpreterRuntime.hpp"
32 #include "interpreter/interp_masm.hpp"
33 #include "interpreter/templateTable.hpp"
34 #include "memory/universe.hpp"
35 #include "oops/methodCounters.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/objArrayKlass.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "oops/resolvedFieldEntry.hpp"
40 #include "oops/resolvedIndyEntry.hpp"
41 #include "oops/resolvedMethodEntry.hpp"
42 #include "prims/jvmtiExport.hpp"
43 #include "prims/methodHandles.hpp"
44 #include "runtime/frame.inline.hpp"
45 #include "runtime/safepointMechanism.hpp"
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubRoutines.hpp"
48 #include "runtime/synchronizer.hpp"
49 #include "utilities/macros.hpp"
50
51 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
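// Every "__"-prefixed emission below is routed through Disassembler::hook,
// which records this file and line before forwarding to the
// InterpreterMacroAssembler, so disassembly output can annotate generated
// code with the C++ location that emitted it.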
52
53 // Global Register Names
54 static const Register rbcp = r13;
55 static const Register rlocals = r14;
56
57 // Address Computation: local variables
58 static inline Address iaddress(int n) {
59 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
60 }
61
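// laddress below encodes the interpreter's convention for longs: a long
// occupies local slots n and n+1, and because locals grow toward lower
// addresses the full 64-bit value sits at the address of the higher slot
// index. In outline (a sketch of the mapping, not the exact frame code):
//
//   iaddress(n) -> rlocals + local_offset_in_bytes(n)   // one-slot values
//   laddress(n) -> iaddress(n + 1)                      // two-slot longs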
62 static inline Address laddress(int n) {
63 return iaddress(n + 1);
150 static void do_oop_load(InterpreterMacroAssembler* _masm,
151 Address src,
152 Register dst,
153 DecoratorSet decorators = 0) {
154 __ load_heap_oop(dst, src, rdx, decorators);
155 }
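// do_oop_load defers to load_heap_oop, which dispatches through the active
// GC's BarrierSetAssembler: depending on the collector and the decorator
// bits (e.g. IS_ARRAY), it may decompress a narrow oop and emit a load
// barrier in addition to the plain move; rdx serves as a temporary for that
// expansion.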
156
157 Address TemplateTable::at_bcp(int offset) {
158 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
159 return Address(rbcp, offset);
160 }
161
162
163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
164 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
165 int byte_no) {
166 if (!RewriteBytecodes) return;
167 Label L_patch_done;
168
169 switch (bc) {
170 case Bytecodes::_fast_aputfield:
171 case Bytecodes::_fast_bputfield:
172 case Bytecodes::_fast_zputfield:
173 case Bytecodes::_fast_cputfield:
174 case Bytecodes::_fast_dputfield:
175 case Bytecodes::_fast_fputfield:
176 case Bytecodes::_fast_iputfield:
177 case Bytecodes::_fast_lputfield:
178 case Bytecodes::_fast_sputfield:
179 {
180 // We skip bytecode quickening for putfield instructions when
181 // the put_code written to the constant pool cache is zero.
182 // This is required so that every execution of this instruction
183 // calls out to InterpreterRuntime::resolve_get_put to do
184 // additional, required work.
185 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
186 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
187 __ load_field_entry(temp_reg, bc_reg);
188 if (byte_no == f1_byte) {
189 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
758 Address(rdx, rax,
759 Address::times_4,
760 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
761 noreg);
762 }
763
764 void TemplateTable::daload() {
765 transition(itos, dtos);
766 // rax: index
767 // rdx: array
768 index_check(rdx, rax); // kills rbx
769 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
770 Address(rdx, rax,
771 Address::times_8,
772 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
773 noreg);
774 }
775
776 void TemplateTable::aaload() {
777 transition(itos, atos);
778 // rax: index
779 // rdx: array
780 index_check(rdx, rax); // kills rbx
781 do_oop_load(_masm,
782 Address(rdx, rax,
783 UseCompressedOops ? Address::times_4 : Address::times_ptr,
784 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
785 rax,
786 IS_ARRAY);
787 }
788
789 void TemplateTable::baload() {
790 transition(itos, itos);
791 // rax: index
792 // rdx: array
793 index_check(rdx, rax); // kills rbx
794 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
795 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
796 noreg);
797 }
798
799 void TemplateTable::caload() {
800 transition(itos, itos);
801 // rax: index
802 // rdx: array
803 index_check(rdx, rax); // kills rbx
804 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
805 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
806 noreg);
1040 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1041 Address(rdx, rbx, Address::times_4,
1042 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1043 noreg /* ftos */, noreg, noreg, noreg);
1044 }
1045
1046 void TemplateTable::dastore() {
1047 transition(dtos, vtos);
1048 __ pop_i(rbx);
1049 // value is in xmm0
1050 // rbx: index
1051 // rdx: array
1052 index_check(rdx, rbx); // prefer index in rbx
1053 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1054 Address(rdx, rbx, Address::times_8,
1055 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1056 noreg /* dtos */, noreg, noreg, noreg);
1057 }
1058
1059 void TemplateTable::aastore() {
1060 Label is_null, ok_is_subtype, done;
1061 transition(vtos, vtos);
1062 // stack: ..., array, index, value
1063 __ movptr(rax, at_tos()); // value
1064 __ movl(rcx, at_tos_p1()); // index
1065 __ movptr(rdx, at_tos_p2()); // array
1066
1067 Address element_address(rdx, rcx,
1068 UseCompressedOops? Address::times_4 : Address::times_ptr,
1069 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1070
1071 index_check_without_pop(rdx, rcx); // kills rbx
1072 __ testptr(rax, rax);
1073 __ jcc(Assembler::zero, is_null);
1074
1075 // Move subklass into rbx
1076 __ load_klass(rbx, rax, rscratch1);
1077 // Move superklass into rax
1078 __ load_klass(rax, rdx, rscratch1);
1079 __ movptr(rax, Address(rax,
1080 ObjArrayKlass::element_klass_offset()));
1081
1082 // Generate subtype check. Blows rcx, rdi
1083 // Superklass in rax. Subklass in rbx.
1084 __ gen_subtype_check(rbx, ok_is_subtype);
1085
1086 // Come here on failure
1087 // object is at TOS
1088 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1089
1090 // Come here on success
1091 __ bind(ok_is_subtype);
1092
1093 // Get the value we will store
1094 __ movptr(rax, at_tos());
1095 __ movl(rcx, at_tos_p1()); // index
1096 // Now store using the appropriate barrier
1097 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1098 __ jmp(done);
1099
1100 // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
1101 __ bind(is_null);
1102 __ profile_null_seen(rbx);
1103
1104 // Store a null
1105 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1106
1107 // Pop stack arguments
1108 __ bind(done);
1109 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1110 }
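// The subtype check above is what makes covariant array stores safe at run
// time. An illustrative Java-level case (not from this file):
//
//   Object[] a = new String[1];
//   a[0] = Integer.valueOf(1);   // compiles, but throws ArrayStoreException
//
// That failing path is the jump to _throw_ArrayStoreException_entry; null
// stores skip the check entirely, as handled at is_null above.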
1111
1112 void TemplateTable::bastore() {
1113 transition(itos, vtos);
1114 __ pop_i(rbx);
1115 // rax: value
1116 // rbx: index
1117 // rdx: array
1118 index_check(rdx, rbx); // prefer index in rbx
1119 // Need to check whether array is boolean or byte
1120 // since both types share the bastore bytecode.
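// Klass::layout_helper packs the array element type into a bit field, and
// layout_helper_boolean_diffbit() is the one bit that differs between the
// T_BOOLEAN and T_BYTE encodings. Roughly (a sketch of the idea, not the
// exact bit layout):
//
//   if (layout_helper & diffbit) value &= 1;  // boolean array: mask to 0/1
//   // otherwise: plain byte array, store the low byte unchanged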
1121 __ load_klass(rcx, rdx, rscratch1);
1122 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1123 int diffbit = Klass::layout_helper_boolean_diffbit();
1124 __ testl(rcx, diffbit);
1125 Label L_skip;
1126 __ jccb(Assembler::zero, L_skip);
1874 __ jcc(j_not(cc), not_taken);
1875 branch(false, false);
1876 __ bind(not_taken);
1877 __ profile_not_taken_branch(rax);
1878 }
1879
1880 void TemplateTable::if_nullcmp(Condition cc) {
1881 transition(atos, vtos);
1882 // assume branch is more often taken than not (loops use backward branches)
1883 Label not_taken;
1884 __ testptr(rax, rax);
1885 __ jcc(j_not(cc), not_taken);
1886 branch(false, false);
1887 __ bind(not_taken);
1888 __ profile_not_taken_branch(rax);
1889 }
1890
1891 void TemplateTable::if_acmp(Condition cc) {
1892 transition(atos, vtos);
1893 // assume branch is more often taken than not (loops use backward branches)
1894 Label not_taken;
1895 __ pop_ptr(rdx);
1896 __ cmpoop(rdx, rax);
1897 __ jcc(j_not(cc), not_taken);
1898 branch(false, false);
1899 __ bind(not_taken);
1900 __ profile_not_taken_branch(rax);
1901 }
1902
1903 void TemplateTable::ret() {
1904 transition(vtos, vtos);
1905 locals_index(rbx);
1906 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1907 __ profile_ret(rbx, rcx);
1908 __ get_method(rax);
1909 __ movptr(rbcp, Address(rax, Method::const_offset()));
1910 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
1911 ConstMethod::codes_offset()));
1912 __ dispatch_next(vtos, 0, true);
1913 }
1914
1915 void TemplateTable::wide_ret() {
1916 transition(vtos, vtos);
1917 locals_index_wide(rbx);
1918 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
1919 __ profile_ret(rbx, rcx);
1920 __ get_method(rax);
2134 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2135 Label no_safepoint;
2136 NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2137 __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2138 __ jcc(Assembler::zero, no_safepoint);
2139 __ push(state);
2140 __ push_cont_fastpath();
2141 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2142 InterpreterRuntime::at_safepoint));
2143 __ pop_cont_fastpath();
2144 __ pop(state);
2145 __ bind(no_safepoint);
2146 }
2147
2148 // Narrow result if state is itos but result type is smaller.
2149 // Need to narrow in the return bytecode rather than in generate_return_entry
2150 // since compiled code callers expect the result to already be narrowed.
2151 if (state == itos) {
2152 __ narrow(rax);
2153 }
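// narrow() matters because a method declared to return boolean, byte, char,
// or short still carries its result in rax as a 32-bit int inside the
// interpreter; masking or sign-extending here lets compiled callers use the
// value without re-normalizing it themselves.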
2154 __ remove_activation(state, rbcp);
2155
2156 __ jmp(rbcp);
2157 }
2158
2159 // ----------------------------------------------------------------------------
2160 // Volatile variables demand their effects be made known to all CPUs
2161 // in order. Store buffers on most chips allow reads & writes to
2162 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2163 // without some kind of memory barrier (i.e., it's not sufficient that
2164 // the interpreter does not reorder volatile references, the hardware
2165 // also must not reorder them).
2166 //
2167 // According to the new Java Memory Model (JMM):
2168 // (1) All volatiles are serialized wrt each other. ALSO reads &
2169 // writes act as acquire & release, so:
2170 // (2) A read cannot let unrelated NON-volatile memory refs that
2171 // happen after the read float up to before the read. It's OK for
2172 // non-volatile memory refs that happen before the volatile read to
2173 // float down below it.
2174 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2506 }
2507 // rax: object pointer or null
2508 // cache: cache entry pointer
2509 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2510 rax, cache);
2511
2512 __ load_field_entry(cache, index);
2513 __ bind(L1);
2514 }
2515 }
2516
2517 void TemplateTable::pop_and_check_object(Register r) {
2518 __ pop_ptr(r);
2519 __ null_check(r); // for field access must check obj.
2520 __ verify_oop(r);
2521 }
2522
2523 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2524 transition(vtos, vtos);
2525
2526 const Register obj = c_rarg3;
2527 const Register cache = rcx;
2528 const Register index = rdx;
2529 const Register off = rbx;
2530 const Register tos_state = rax;
2531 const Register flags = rdx;
2532 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2533
2534 resolve_cache_and_index_for_field(byte_no, cache, index);
2535 jvmti_post_field_access(cache, index, is_static, false);
2536 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2537
2538 if (!is_static) pop_and_check_object(obj);
2539
2540 const Address field(obj, off, Address::times_1, 0*wordSize);
2541
2542 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2543
2544 // btos is 0, so a plain zero test on tos_state identifies it
2545 assert(btos == 0, "change code, btos != 0");
2546 __ testl(tos_state, tos_state);
2547 __ jcc(Assembler::notZero, notByte);
2548
2549 // btos
2550 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
2551 __ push(btos);
2552 // Rewrite bytecode to be faster
2553 if (!is_static && rc == may_rewrite) {
2554 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2555 }
2556 __ jmp(Done);
2557
2558 __ bind(notByte);
2559 __ cmpl(tos_state, ztos);
2560 __ jcc(Assembler::notEqual, notBool);
2561
2562 // ztos (same code as btos)
2563 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
2564 __ push(ztos);
2565 // Rewrite bytecode to be faster
2566 if (!is_static && rc == may_rewrite) {
2567 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2568 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2569 }
2570 __ jmp(Done);
2571
2572 __ bind(notBool);
2573 __ cmpl(tos_state, atos);
2574 __ jcc(Assembler::notEqual, notObj);
2575 // atos
2576 do_oop_load(_masm, field, rax);
2577 __ push(atos);
2578 if (!is_static && rc == may_rewrite) {
2579 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2580 }
2581 __ jmp(Done);
2582
2583 __ bind(notObj);
2584 __ cmpl(tos_state, itos);
2585 __ jcc(Assembler::notEqual, notInt);
2586 // itos
2587 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
2588 __ push(itos);
2589 // Rewrite bytecode to be faster
2590 if (!is_static && rc == may_rewrite) {
2591 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2592 }
2593 __ jmp(Done);
2594
2595 __ bind(notInt);
2596 __ cmpl(tos_state, ctos);
2597 __ jcc(Assembler::notEqual, notChar);
2598 // ctos
2599 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
2600 __ push(ctos);
2601 // Rewrite bytecode to be faster
2602 if (!is_static && rc == may_rewrite) {
2603 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2663 #endif
2664
2665 __ bind(Done);
2666 // [jk] not needed currently
2667 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2668 // Assembler::LoadStore));
2669 }
2670
2671 void TemplateTable::getfield(int byte_no) {
2672 getfield_or_static(byte_no, false);
2673 }
2674
2675 void TemplateTable::nofast_getfield(int byte_no) {
2676 getfield_or_static(byte_no, false, may_not_rewrite);
2677 }
2678
2679 void TemplateTable::getstatic(int byte_no) {
2680 getfield_or_static(byte_no, true);
2681 }
2682
2683
2684 // The registers cache and index are expected to be set before the call.
2685 // The function may destroy various registers, just not the cache and index registers.
2686 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2687 // Cache is rcx and index is rdx
2688 const Register entry = c_rarg2; // ResolvedFieldEntry
2689 const Register obj = c_rarg1; // Object pointer
2690 const Register value = c_rarg3; // JValue object
2691
2692 if (JvmtiExport::can_post_field_modification()) {
2693 // Check to see if a field modification watch has been set before
2694 // we take the time to call into the VM.
2695 Label L1;
2696 assert_different_registers(cache, obj, rax);
2697 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2698 __ testl(rax, rax);
2699 __ jcc(Assembler::zero, L1);
2700
2701 __ mov(entry, cache);
2702
2703 if (is_static) {
2725 // cache: field entry pointer
2726 // value: jvalue object on the stack
2727 __ call_VM(noreg,
2728 CAST_FROM_FN_PTR(address,
2729 InterpreterRuntime::post_field_modification),
2730 obj, entry, value);
2731 // Reload field entry
2732 __ load_field_entry(cache, index);
2733 __ bind(L1);
2734 }
2735 }
2736
2737 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2738 transition(vtos, vtos);
2739
2740 const Register obj = rcx;
2741 const Register cache = rcx;
2742 const Register index = rdx;
2743 const Register tos_state = rdx;
2744 const Register off = rbx;
2745 const Register flags = rax;
2746
2747 resolve_cache_and_index_for_field(byte_no, cache, index);
2748 jvmti_post_field_mod(cache, index, is_static);
2749 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2750
2751 // [jk] not needed currently
2752 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2753 // Assembler::StoreStore));
2754
2755 Label notVolatile, Done;
2756
2757 // Check for volatile store
2758 __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
2759 __ testl(flags, flags);
2760 __ jcc(Assembler::zero, notVolatile);
2761
2762 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2763 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2764 Assembler::StoreStore));
2765 __ jmp(Done);
2766 __ bind(notVolatile);
2767
2768 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2769
2770 __ bind(Done);
2771 }
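// A volatile store is followed by StoreLoad | StoreStore so it becomes
// visible before any subsequent load and cannot be reordered with later
// stores. On x86's TSO model only the StoreLoad edge needs a real fence,
// which volatile_barrier emits (typically a locked add or mfence).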
2772
2773 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2774 Register obj, Register off, Register tos_state) {
2775
2776 // field addresses
2777 const Address field(obj, off, Address::times_1, 0*wordSize);
2778
2779 Label notByte, notBool, notInt, notShort, notChar,
2780 notLong, notFloat, notObj;
2781 Label Done;
2782
2783 const Register bc = c_rarg3;
2784
2785 // Test TOS state
2786 __ testl(tos_state, tos_state);
2787 __ jcc(Assembler::notZero, notByte);
2788
2789 // btos
2790 {
2791 __ pop(btos);
2792 if (!is_static) pop_and_check_object(obj);
2793 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2794 if (!is_static && rc == may_rewrite) {
2795 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2796 }
2797 __ jmp(Done);
2798 }
2799
2800 __ bind(notByte);
2801 __ cmpl(tos_state, ztos);
2802 __ jcc(Assembler::notEqual, notBool);
2803
2804 // ztos
2805 {
2806 __ pop(ztos);
2807 if (!is_static) pop_and_check_object(obj);
2808 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
2809 if (!is_static && rc == may_rewrite) {
2810 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2811 }
2812 __ jmp(Done);
2813 }
2814
2815 __ bind(notBool);
2816 __ cmpl(tos_state, atos);
2817 __ jcc(Assembler::notEqual, notObj);
2818
2819 // atos
2820 {
2821 __ pop(atos);
2822 if (!is_static) pop_and_check_object(obj);
2823 // Store into the field
2824 do_oop_store(_masm, field, rax);
2825 if (!is_static && rc == may_rewrite) {
2826 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2827 }
2828 __ jmp(Done);
2829 }
2830
2831 __ bind(notObj);
2832 __ cmpl(tos_state, itos);
2833 __ jcc(Assembler::notEqual, notInt);
2834
2835 // itos
2836 {
2837 __ pop(itos);
2838 if (!is_static) pop_and_check_object(obj);
2839 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
2840 if (!is_static && rc == may_rewrite) {
2841 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2842 }
2843 __ jmp(Done);
2844 }
2845
2846 __ bind(notInt);
2847 __ cmpl(tos_state, ctos);
2848 __ jcc(Assembler::notEqual, notChar);
2945 }
2946
2947 void TemplateTable::jvmti_post_fast_field_mod() {
2948
2949 const Register scratch = c_rarg3;
2950
2951 if (JvmtiExport::can_post_field_modification()) {
2952 // Check to see if a field modification watch has been set before
2953 // we take the time to call into the VM.
2954 Label L2;
2955 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2956 __ testl(scratch, scratch);
2957 __ jcc(Assembler::zero, L2);
2958 __ pop_ptr(rbx); // copy the object pointer from tos
2959 __ verify_oop(rbx);
2960 __ push_ptr(rbx); // put the object pointer back on tos
2961 // Save tos values before call_VM() clobbers them. Since we have
2962 // to do it for every data type, we use the saved values as the
2963 // jvalue object.
2964 switch (bytecode()) { // load values into the jvalue object
2965 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
2966 case Bytecodes::_fast_bputfield: // fall through
2967 case Bytecodes::_fast_zputfield: // fall through
2968 case Bytecodes::_fast_sputfield: // fall through
2969 case Bytecodes::_fast_cputfield: // fall through
2970 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
2971 case Bytecodes::_fast_dputfield: __ push(dtos); break;
2972 case Bytecodes::_fast_fputfield: __ push(ftos); break;
2973 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
2974
2975 default:
2976 ShouldNotReachHere();
2977 }
2978 __ mov(scratch, rsp); // points to jvalue on the stack
2979 // access constant pool cache entry
2980 __ load_field_entry(c_rarg2, rax);
2981 __ verify_oop(rbx);
2982 // rbx: object pointer copied above
2983 // c_rarg2: cache entry pointer
2984 // c_rarg3: jvalue object on the stack
2985 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
2986
2987 switch (bytecode()) { // restore tos values
2988 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
2989 case Bytecodes::_fast_bputfield: // fall through
2990 case Bytecodes::_fast_zputfield: // fall through
2991 case Bytecodes::_fast_sputfield: // fall through
2992 case Bytecodes::_fast_cputfield: // fall through
2993 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
2994 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
2995 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
2996 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
2997 default: break;
2998 }
2999 __ bind(L2);
3000 }
3001 }
3002
3003 void TemplateTable::fast_storefield(TosState state) {
3004 transition(state, vtos);
3005
3006 Register cache = rcx;
3007
3008 Label notVolatile, Done;
3009
3010 jvmti_post_fast_field_mod();
3011
3012 __ push(rax);
3013 __ load_field_entry(rcx, rax);
3014 load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3015 // RBX: field offset, RAX: TOS, RDX: flags
3016 __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3017 __ pop(rax);
3018
3019 // Get object from stack
3020 pop_and_check_object(rcx);
3021
3022 // field address
3023 const Address field(rcx, rbx, Address::times_1);
3024
3025 // Check for volatile store
3026 __ testl(rdx, rdx);
3027 __ jcc(Assembler::zero, notVolatile);
3028
3029 fast_storefield_helper(field, rax);
3030 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3031 Assembler::StoreStore));
3032 __ jmp(Done);
3033 __ bind(notVolatile);
3034
3035 fast_storefield_helper(field, rax);
3036
3037 __ bind(Done);
3038 }
3039
3040 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3041
3042 // access field
3043 switch (bytecode()) {
3044 case Bytecodes::_fast_aputfield:
3045 do_oop_store(_masm, field, rax);
3046 break;
3047 case Bytecodes::_fast_lputfield:
3048 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3049 break;
3050 case Bytecodes::_fast_iputfield:
3051 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3052 break;
3053 case Bytecodes::_fast_zputfield:
3054 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3055 break;
3056 case Bytecodes::_fast_bputfield:
3057 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3058 break;
3059 case Bytecodes::_fast_sputfield:
3060 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3061 break;
3062 case Bytecodes::_fast_cputfield:
3063 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3064 break;
3065 case Bytecodes::_fast_fputfield:
3081 // Check to see if a field access watch has been set before we
3082 // take the time to call into the VM.
3083 Label L1;
3084 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3085 __ testl(rcx, rcx);
3086 __ jcc(Assembler::zero, L1);
3087 // access constant pool cache entry
3088 __ load_field_entry(c_rarg2, rcx);
3089 __ verify_oop(rax);
3090 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3091 __ mov(c_rarg1, rax);
3092 // c_rarg1: object pointer copied above
3093 // c_rarg2: cache entry pointer
3094 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3095 __ pop_ptr(rax); // restore object pointer
3096 __ bind(L1);
3097 }
3098
3099 // access constant pool cache
3100 __ load_field_entry(rcx, rbx);
3101 __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3102
3103 // rax: object
3104 __ verify_oop(rax);
3105 __ null_check(rax);
3106 Address field(rax, rbx, Address::times_1);
3107
3108 // access field
3109 switch (bytecode()) {
3110 case Bytecodes::_fast_agetfield:
3111 do_oop_load(_masm, field, rax);
3112 __ verify_oop(rax);
3113 break;
3114 case Bytecodes::_fast_lgetfield:
3115 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
3116 break;
3117 case Bytecodes::_fast_igetfield:
3118 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
3119 break;
3120 case Bytecodes::_fast_bgetfield:
3121 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
3122 break;
3123 case Bytecodes::_fast_sgetfield:
3124 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
3125 break;
3126 case Bytecodes::_fast_cgetfield:
3127 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
3128 break;
3129 case Bytecodes::_fast_fgetfield:
3514
3515 // Note: rax_callsite is already pushed
3516
3517 // %%% should make a type profile for any invokedynamic that takes a ref argument
3518 // profile this call
3519 __ profile_call(rbcp);
3520 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3521
3522 __ verify_oop(rax_callsite);
3523
3524 __ jump_from_interpreted(rbx_method, rdx);
3525 }
3526
3527 //-----------------------------------------------------------------------------
3528 // Allocation
3529
3530 void TemplateTable::_new() {
3531 transition(vtos, atos);
3532 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3533 Label slow_case;
3534 Label slow_case_no_pop;
3535 Label done;
3536 Label initialize_header;
3537
3538 __ get_cpool_and_tags(rcx, rax);
3539
3540 // Make sure the class we're about to instantiate has been resolved.
3541 // This is done before loading the InstanceKlass to be consistent with the
3542 // order in which the constant pool is updated (see ConstantPool::klass_at_put).
3543 const int tags_offset = Array<u1>::base_offset_in_bytes();
3544 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3545 __ jcc(Assembler::notEqual, slow_case_no_pop);
3546
3547 // get InstanceKlass
3548 __ load_resolved_klass_at_index(rcx, rcx, rdx);
3549 __ push(rcx); // save the klass; it is needed later to initialize the header
3550
3551 // make sure klass is initialized
3552 // init_state needs acquire, but x86 is TSO, and so we are already good.
3553 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3554 __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);
3555
3556 // get instance_size in InstanceKlass (scaled to a count of bytes)
3557 __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3558 // test to see if it is malformed in some way
3559 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3560 __ jcc(Assembler::notZero, slow_case);
3561
3562 // Allocate the instance:
3563 // If TLAB is enabled:
3564 // Try to allocate in the TLAB.
3565 // If fails, go to the slow path.
3566 // Initialize the allocation.
3567 // Exit.
3568 //
3569 // Go to slow path.
3570
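// The TLAB fast path below is, in outline, a thread-local bump-pointer
// allocation (a sketch of the idea; tlab_allocate emits the real code):
//
//   if (tlab_top + size <= tlab_end) {
//     obj = tlab_top; tlab_top += size;   // no synchronization needed
//   } else {
//     goto slow_case;                     // refill TLAB or call the runtime
//   }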
3571 if (UseTLAB) {
3572 __ tlab_allocate(rax, rdx, 0, rcx, rbx, slow_case);
3573 if (ZeroTLAB) {
3574 // the fields have been already cleared
3575 __ jmp(initialize_header);
3576 }
3577
3578 // The object is initialized before the header. If the object size is
3579 // zero, go directly to the header initialization.
3580 if (UseCompactObjectHeaders) {
3581 assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
3582 __ decrement(rdx, oopDesc::base_offset_in_bytes());
3583 } else {
3584 __ decrement(rdx, sizeof(oopDesc));
3585 }
3586 __ jcc(Assembler::zero, initialize_header);
3587
3588 // Initialize topmost object field, divide rdx by 8, check if odd and
3589 // test if zero.
3590 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
3591 __ shrl(rdx, LogBytesPerLong); // divide by BytesPerLong (8) and set carry flag if odd
3592
3593 // rdx must have been a multiple of 8
3594 #ifdef ASSERT
3596 Label L;
3597 // Ignore partial flag stall after shrl() since it is debug VM
3598 __ jcc(Assembler::carryClear, L);
3599 __ stop("object size is not multiple of 2 - adjust this code");
3600 __ bind(L);
3601 // rdx must be > 0, no extra check needed here
3602 #endif
3603
3604 // initialize remaining object fields: rdx was a multiple of 8
3605 { Label loop;
3606 __ bind(loop);
3607 int header_size_bytes = oopDesc::header_size() * HeapWordSize;
3608 assert(is_aligned(header_size_bytes, BytesPerLong), "oop header size must be 8-byte-aligned");
3609 __ movptr(Address(rax, rdx, Address::times_8, header_size_bytes - 1*oopSize), rcx);
3610 __ decrement(rdx);
3611 __ jcc(Assembler::notZero, loop);
3612 }
3613
3614 // initialize object header only.
3615 __ bind(initialize_header);
3616 if (UseCompactObjectHeaders) {
3617 __ pop(rcx); // get saved klass back in the register.
3618 __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
3619 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
3620 } else {
3621 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
3622 (intptr_t)markWord::prototype().value()); // header
3623 __ pop(rcx); // get saved klass back in the register.
3624 __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
3625 __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
3626 __ store_klass(rax, rcx, rscratch1); // klass
3627 }
3628
3629 if (DTraceAllocProbes) {
3630 // Trigger dtrace event for fastpath
3631 __ push(atos);
3632 __ call_VM_leaf(
3633 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3634 __ pop(atos);
3635 }
3636
3637 __ jmp(done);
3638 }
3639
3640 // slow case
3641 __ bind(slow_case);
3642 __ pop(rcx); // restore stack pointer to what it was when we came in.
3643 __ bind(slow_case_no_pop);
3644
3645 __ get_constant_pool(c_rarg1);
3646 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3647 __ call_VM_preemptable(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3648 __ verify_oop(rax);
3649
3650 // continue
3651 __ bind(done);
3652 }
3653
3654 void TemplateTable::newarray() {
3655 transition(itos, atos);
3656 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3657 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3658 c_rarg1, rax);
3659 }
3660
3661 void TemplateTable::anewarray() {
3662 transition(itos, atos);
3663
3664 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3665 __ get_constant_pool(c_rarg1);
3666 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3667 c_rarg1, c_rarg2, rax);
3668 }
3669
3670 void TemplateTable::arraylength() {
3671 transition(atos, itos);
3672 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3673 }
3674
3675 void TemplateTable::checkcast() {
3676 transition(atos, atos);
3677 Label done, is_null, ok_is_subtype, quicked, resolved;
3678 __ testptr(rax, rax); // object is in rax
3679 __ jcc(Assembler::zero, is_null);
3680
3681 // Get cpool & tags index
3682 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3683 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3684 // See if bytecode has already been quickened
3685 __ cmpb(Address(rdx, rbx,
3686 Address::times_1,
3687 Array<u1>::base_offset_in_bytes()),
3688 JVM_CONSTANT_Class);
3689 __ jcc(Assembler::equal, quicked);
3690 __ push(atos); // save receiver for result, and for GC
3691 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3692
3693 __ get_vm_result_metadata(rax);
3694
3695 __ pop_ptr(rdx); // restore receiver
3696 __ jmpb(resolved);
3697
3698 // Get superklass in rax and subklass in rbx
3699 __ bind(quicked);
3700 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3701 __ load_resolved_klass_at_index(rax, rcx, rbx);
3702
3703 __ bind(resolved);
3704 __ load_klass(rbx, rdx, rscratch1);
3705
3706 // Generate subtype check. Blows rcx, rdi. Object in rdx.
3707 // Superklass in rax. Subklass in rbx.
3708 __ gen_subtype_check(rbx, ok_is_subtype);
3709
3710 // Come here on failure
3711 __ push_ptr(rdx);
3712 // object is at TOS
3713 __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3714
3715 // Come here on success
3716 __ bind(ok_is_subtype);
3717 __ mov(rax, rdx); // Restore object in rdx
3718
3719 // Collect counts on whether this check-cast sees nulls a lot or not.
3720 if (ProfileInterpreter) {
3721 __ jmp(done);
3722 __ bind(is_null);
3723 __ profile_null_seen(rcx);
3724 } else {
3725 __ bind(is_null); // same as 'done'
3726 }
3727 __ bind(done);
3728 }
3729
3730 void TemplateTable::instanceof() {
3731 transition(atos, itos);
3732 Label done, is_null, ok_is_subtype, quicked, resolved;
3733 __ testptr(rax, rax);
3734 __ jcc(Assembler::zero, is_null);
3735
3736 // Get cpool & tags index
3737 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3738 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3739 // See if bytecode has already been quickened
3740 __ cmpb(Address(rdx, rbx,
3741 Address::times_1,
3742 Array<u1>::base_offset_in_bytes()),
3743 JVM_CONSTANT_Class);
3744 __ jcc(Assembler::equal, quicked);
3745
3746 __ push(atos); // save receiver for result, and for GC
3747 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3748
3749 __ get_vm_result_metadata(rax);
3750
3751 __ pop_ptr(rdx); // restore receiver
3752 __ verify_oop(rdx);
3753 __ load_klass(rdx, rdx, rscratch1);
3754 __ jmpb(resolved);
3755
3756 // Get superklass in rax and subklass in rdx
3757 __ bind(quicked);
3758 __ load_klass(rdx, rax, rscratch1);
3759 __ load_resolved_klass_at_index(rax, rcx, rbx);
3760
3761 __ bind(resolved);
3762
3763 // Generate subtype check. Blows rcx, rdi
3767 // Come here on failure
3768 __ xorl(rax, rax);
3769 __ jmpb(done);
3770 // Come here on success
3771 __ bind(ok_is_subtype);
3772 __ movl(rax, 1);
3773
3774 // Collect counts on whether this test sees nulls a lot or not.
3775 if (ProfileInterpreter) {
3776 __ jmp(done);
3777 __ bind(is_null);
3778 __ profile_null_seen(rcx);
3779 } else {
3780 __ bind(is_null); // same as 'done'
3781 }
3782 __ bind(done);
3783 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
3784 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
3785 }
3786
3787
3788 //----------------------------------------------------------------------------------------------------
3789 // Breakpoints
3790 void TemplateTable::_breakpoint() {
3791 // Note: We get here even if we are single stepping...
3792 // jbug insists on setting breakpoints at every bytecode
3793 // even if we are in single step mode.
3794
3795 transition(vtos, vtos);
3796
3797 // get the unpatched byte code
3798 __ get_method(c_rarg1);
3799 __ call_VM(noreg,
3800 CAST_FROM_FN_PTR(address,
3801 InterpreterRuntime::get_original_bytecode_at),
3802 c_rarg1, rbcp);
3803 __ mov(rbx, rax); // why?
3804
3805 // post the breakpoint event
3806 __ get_method(c_rarg1);
3807 __ call_VM(noreg,
3827 // Note: monitorenter & exit are symmetric routines; which is reflected
3828 // in the assembly code structure as well
3829 //
3830 // Stack layout:
3831 //
3832 // [expressions ] <--- rsp = expression stack top
3833 // ..
3834 // [expressions ]
3835 // [monitor entry] <--- monitor block top = expression stack bot
3836 // ..
3837 // [monitor entry]
3838 // [frame data ] <--- monitor block bot
3839 // ...
3840 // [saved rbp ] <--- rbp
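// monitorenter below scans the block from top to bottom for a reusable
// slot, roughly (a sketch of the loop that follows):
//
//   for (entry = top; entry != bot; entry++)
//     if (entry->obj == nullptr) rmon = entry;   // remember a free slot
//
// If no slot is free, the expression stack is shifted down by one entry
// size to grow the monitor block before the object is stored and locked.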
3841 void TemplateTable::monitorenter() {
3842 transition(atos, vtos);
3843
3844 // check for null object
3845 __ null_check(rax);
3846
3847 const Address monitor_block_top(
3848 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3849 const Address monitor_block_bot(
3850 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3851 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3852
3853 Label allocated;
3854
3855 Register rtop = c_rarg3;
3856 Register rbot = c_rarg2;
3857 Register rmon = c_rarg1;
3858
3859 // initialize entry pointer
3860 __ xorl(rmon, rmon); // points to free slot or null
3861
3862 // find a free slot in the monitor block (result in rmon)
3863 {
3864 Label entry, loop, exit;
3865 __ movptr(rtop, monitor_block_top); // derelativize pointer
3866 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3919 // rmon: points to monitor entry
3920 __ bind(allocated);
3921
3922 // Increment bcp to point to the next bytecode, so exception
3923 // handling for async. exceptions works correctly.
3924 // The object has already been popped from the stack, so the
3925 // expression stack looks correct.
3926 __ increment(rbcp);
3927
3928 // store object
3929 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
3930 __ lock_object(rmon);
3931
3932 // check to make sure this monitor doesn't cause stack overflow after locking
3933 __ save_bcp(); // in case of exception
3934 __ generate_stack_overflow_check(0);
3935
3936 // The bcp has already been incremented. Just need to dispatch to
3937 // next instruction.
3938 __ dispatch_next(vtos);
3939 }
3940
3941 void TemplateTable::monitorexit() {
3942 transition(atos, vtos);
3943
3944 // check for null object
3945 __ null_check(rax);
3946
3947 const Address monitor_block_top(
3948 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3949 const Address monitor_block_bot(
3950 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3951 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3952
3953 Register rtop = c_rarg1;
3954 Register rbot = c_rarg2;
3955
3956 Label found;
3957
3958 // find matching slot
3959 {
3960 Label entry, loop;
3961 __ movptr(rtop, monitor_block_top); // derelativize pointer
3962 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3963 // rtop points to current entry, starting with top-most entry
3964
3965 __ lea(rbot, monitor_block_bot); // points to word before bottom
3966 // of monitor block
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "compiler/disassembler.hpp"
27 #include "gc/shared/collectedHeap.hpp"
28 #include "gc/shared/gc_globals.hpp"
29 #include "gc/shared/tlab_globals.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "interpreter/interpreterRuntime.hpp"
32 #include "interpreter/interp_masm.hpp"
33 #include "interpreter/templateTable.hpp"
34 #include "memory/universe.hpp"
35 #include "oops/inlineKlass.hpp"
36 #include "oops/methodCounters.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/objArrayKlass.hpp"
39 #include "oops/oop.inline.hpp"
40 #include "oops/resolvedFieldEntry.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/methodHandles.hpp"
45 #include "runtime/arguments.hpp"
46 #include "runtime/frame.inline.hpp"
47 #include "runtime/safepointMechanism.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/stubRoutines.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "utilities/macros.hpp"
52
53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
54
55 // Global Register Names
56 static const Register rbcp = r13;
57 static const Register rlocals = r14;
58
59 // Address Computation: local variables
60 static inline Address iaddress(int n) {
61 return Address(rlocals, Interpreter::local_offset_in_bytes(n));
62 }
63
64 static inline Address laddress(int n) {
65 return iaddress(n + 1);
152 static void do_oop_load(InterpreterMacroAssembler* _masm,
153 Address src,
154 Register dst,
155 DecoratorSet decorators = 0) {
156 __ load_heap_oop(dst, src, rdx, decorators);
157 }
158
159 Address TemplateTable::at_bcp(int offset) {
160 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
161 return Address(rbcp, offset);
162 }
163
164
165 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
166 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
167 int byte_no) {
168 if (!RewriteBytecodes) return;
169 Label L_patch_done;
170
171 switch (bc) {
172 case Bytecodes::_fast_vputfield:
173 case Bytecodes::_fast_aputfield:
174 case Bytecodes::_fast_bputfield:
175 case Bytecodes::_fast_zputfield:
176 case Bytecodes::_fast_cputfield:
177 case Bytecodes::_fast_dputfield:
178 case Bytecodes::_fast_fputfield:
179 case Bytecodes::_fast_iputfield:
180 case Bytecodes::_fast_lputfield:
181 case Bytecodes::_fast_sputfield:
182 {
183 // We skip bytecode quickening for putfield instructions when
184 // the put_code written to the constant pool cache is zero.
185 // This is required so that every execution of this instruction
186 // calls out to InterpreterRuntime::resolve_get_put to do
187 // additional, required work.
188 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
189 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
190 __ load_field_entry(temp_reg, bc_reg);
191 if (byte_no == f1_byte) {
192 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
761 Address(rdx, rax,
762 Address::times_4,
763 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
764 noreg);
765 }
766
767 void TemplateTable::daload() {
768 transition(itos, dtos);
769 // rax: index
770 // rdx: array
771 index_check(rdx, rax); // kills rbx
772 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
773 Address(rdx, rax,
774 Address::times_8,
775 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
776 noreg);
777 }
778
779 void TemplateTable::aaload() {
780 transition(itos, atos);
781 Register array = rdx;
782 Register index = rax;
783
784 index_check(array, index); // kills rbx
785 __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
786 if (UseArrayFlattening) {
787 Label is_flat_array, done;
788 __ test_flat_array_oop(array, rbx, is_flat_array);
789 do_oop_load(_masm,
790 Address(array, index,
791 UseCompressedOops ? Address::times_4 : Address::times_ptr,
792 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
793 rax,
794 IS_ARRAY);
795 __ jmp(done);
796 __ bind(is_flat_array);
797 __ movptr(rcx, array);
798 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), rcx, index);
799 __ bind(done);
800 } else {
801 do_oop_load(_masm,
802 Address(array, index,
803 UseCompressedOops ? Address::times_4 : Address::times_ptr,
804 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
805 rax,
806 IS_ARRAY);
807 }
808 __ profile_element_type(rbx, rax, rcx);
809 }
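// A flat array stores element payloads inline rather than as references, so
// there is no oop at the element address to load; the runtime call above
// (InterpreterRuntime::flat_array_load) reads the flat payload and returns
// a heap-buffered value object in rax, which is then profiled like any
// other element.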
810
811 void TemplateTable::baload() {
812 transition(itos, itos);
813 // rax: index
814 // rdx: array
815 index_check(rdx, rax); // kills rbx
816 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
817 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
818 noreg);
819 }
820
821 void TemplateTable::caload() {
822 transition(itos, itos);
823 // rax: index
824 // rdx: array
825 index_check(rdx, rax); // kills rbx
826 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
827 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
828 noreg);
1062 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1063 Address(rdx, rbx, Address::times_4,
1064 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1065 noreg /* ftos */, noreg, noreg, noreg);
1066 }
1067
1068 void TemplateTable::dastore() {
1069 transition(dtos, vtos);
1070 __ pop_i(rbx);
1071 // value is in xmm0
1072 // rbx: index
1073 // rdx: array
1074 index_check(rdx, rbx); // prefer index in rbx
1075 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1076 Address(rdx, rbx, Address::times_8,
1077 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1078 noreg /* dtos */, noreg, noreg, noreg);
1079 }
1080
1081 void TemplateTable::aastore() {
1082 Label is_null, is_flat_array, ok_is_subtype, done;
1083 transition(vtos, vtos);
1084 // stack: ..., array, index, value
1085 __ movptr(rax, at_tos()); // value
1086 __ movl(rcx, at_tos_p1()); // index
1087 __ movptr(rdx, at_tos_p2()); // array
1088
1089 Address element_address(rdx, rcx,
1090 UseCompressedOops? Address::times_4 : Address::times_ptr,
1091 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1092
1093 index_check_without_pop(rdx, rcx); // kills rbx
1094
1095 __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1096 __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1097
1098 __ testptr(rax, rax);
1099 __ jcc(Assembler::zero, is_null);
1100
1101 // Move array class to rdi
1102 __ load_klass(rdi, rdx, rscratch1);
1103 if (UseArrayFlattening) {
1104 __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1105 __ test_flat_array_layout(rbx, is_flat_array);
1106 }
1107
1108 // Move subklass into rbx
1109 __ load_klass(rbx, rax, rscratch1);
1110 // Move array element superklass into rax
1111 __ movptr(rax, Address(rdi,
1112 ObjArrayKlass::element_klass_offset()));
1113
1114 // Generate subtype check. Blows rcx, rdi
1115 // Superklass in rax. Subklass in rbx.
1116 // is "rbx <: rax" ? (value subclass <: array element superclass)
1117 __ gen_subtype_check(rbx, ok_is_subtype, false);
1118
1119 // Come here on failure
1120 // object is at TOS
1121 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1122
1123 // Come here on success
1124 __ bind(ok_is_subtype);
1125
1126 // Get the value we will store
1127 __ movptr(rax, at_tos());
1128 __ movl(rcx, at_tos_p1()); // index
1129 // Now store using the appropriate barrier
1130 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1131 __ jmp(done);
1132
1133 // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
1134 __ bind(is_null);
1135 if (Arguments::is_valhalla_enabled()) {
1136 Label write_null_to_null_free_array, store_null;
1137
1138 // Move array class to rdi
1139 __ load_klass(rdi, rdx, rscratch1);
1140 if (UseArrayFlattening) {
1141 __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1142 __ test_flat_array_layout(rbx, is_flat_array);
1143 }
1144
1145 // No way to store null in null-free array
1146 __ test_null_free_array_oop(rdx, rbx, write_null_to_null_free_array);
1147 __ jmp(store_null);
1148
1149 __ bind(write_null_to_null_free_array);
1150 __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));
1151
1152 __ bind(store_null);
1153 }
1154 // Store a null
1155 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1156 __ jmp(done);
1157
1158 if (UseArrayFlattening) {
1159 Label is_type_ok;
1160 __ bind(is_flat_array); // Store non-null value to flat
1161
1162 __ movptr(rax, at_tos());
1163 __ movl(rcx, at_tos_p1()); // index
1164 __ movptr(rdx, at_tos_p2()); // array
1165
1166 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), rax, rdx, rcx);
1167 }
1168 // Pop stack arguments
1169 __ bind(done);
1170 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1171 }
1172
1173 void TemplateTable::bastore() {
1174 transition(itos, vtos);
1175 __ pop_i(rbx);
1176 // rax: value
1177 // rbx: index
1178 // rdx: array
1179 index_check(rdx, rbx); // prefer index in rbx
1180 // Need to check whether array is boolean or byte
1181 // since both types share the bastore bytecode.
1182 __ load_klass(rcx, rdx, rscratch1);
1183 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1184 int diffbit = Klass::layout_helper_boolean_diffbit();
1185 __ testl(rcx, diffbit);
1186 Label L_skip;
1187 __ jccb(Assembler::zero, L_skip);
1935 __ jcc(j_not(cc), not_taken);
1936 branch(false, false);
1937 __ bind(not_taken);
1938 __ profile_not_taken_branch(rax);
1939 }
1940
1941 void TemplateTable::if_nullcmp(Condition cc) {
1942 transition(atos, vtos);
1943 // assume branch is more often taken than not (loops use backward branches)
1944 Label not_taken;
1945 __ testptr(rax, rax);
1946 __ jcc(j_not(cc), not_taken);
1947 branch(false, false);
1948 __ bind(not_taken);
1949 __ profile_not_taken_branch(rax);
1950 }
1951
1952 void TemplateTable::if_acmp(Condition cc) {
1953 transition(atos, vtos);
1954 // assume branch is more often taken than not (loops use backward branches)
1955 Label taken, not_taken;
1956 __ pop_ptr(rdx);
1957
1958 __ profile_acmp(rbx, rdx, rax, rcx);
1959
1960 const int is_inline_type_mask = markWord::inline_type_pattern;
1961 if (Arguments::is_valhalla_enabled()) {
1962 __ cmpoop(rdx, rax);
1963 __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
1964
1965 // might be substitutable, test if either rax or rdx is null
1966 __ testptr(rax, rax);
1967 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
1968 __ testptr(rdx, rdx);
1969 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
1970
1971 // and are both inline-type values?
1972 __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
1973 __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
1974 __ andptr(rbx, is_inline_type_mask);
1975 __ cmpptr(rbx, is_inline_type_mask);
1976 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
1977
1978 // same value klass ?
1979 __ load_metadata(rbx, rdx);
1980 __ load_metadata(rcx, rax);
1981 __ cmpptr(rbx, rcx);
1982 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
1983
1984 // Know both are the same type, let's test for substitutability...
1985 if (cc == equal) {
1986 invoke_is_substitutable(rax, rdx, taken, not_taken);
1987 } else {
1988 invoke_is_substitutable(rax, rdx, not_taken, taken);
1989 }
1990 __ stop("Not reachable");
1991 }
1992
1993 __ cmpoop(rdx, rax);
1994 __ jcc(j_not(cc), not_taken);
1995 __ bind(taken);
1996 branch(false, false);
1997 __ bind(not_taken);
1998 __ profile_not_taken_branch(rax, true);
1999 }
2000
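// Under Valhalla, acmp cannot always be a pointer comparison: two
// separately buffered instances of the same value class with equal fields
// must compare equal. The checks above settle the cheap cases first
// (pointer-equal, null operand, not both inline types, different classes);
// only then is the substitutability runtime call below taken.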
2001 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2002 Label& is_subst, Label& not_subst) {
2003 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2004 // The answer is back in rax; jump to the matching outcome.
2005 __ testl(rax, rax);
2006 __ jcc(Assembler::zero, not_subst);
2007 __ jmp(is_subst);
2008 }
2009
2010 void TemplateTable::ret() {
2011 transition(vtos, vtos);
2012 locals_index(rbx);
2013 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
2014 __ profile_ret(rbx, rcx);
2015 __ get_method(rax);
2016 __ movptr(rbcp, Address(rax, Method::const_offset()));
2017 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2018 ConstMethod::codes_offset()));
2019 __ dispatch_next(vtos, 0, true);
2020 }
2021
2022 void TemplateTable::wide_ret() {
2023 transition(vtos, vtos);
2024 locals_index_wide(rbx);
2025 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2026 __ profile_ret(rbx, rcx);
2027 __ get_method(rax);
2241 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2242 Label no_safepoint;
2243 NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2244 __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2245 __ jcc(Assembler::zero, no_safepoint);
2246 __ push(state);
2247 __ push_cont_fastpath();
2248 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2249 InterpreterRuntime::at_safepoint));
2250 __ pop_cont_fastpath();
2251 __ pop(state);
2252 __ bind(no_safepoint);
2253 }
2254
2255 // Narrow result if state is itos but result type is smaller.
2256 // Need to narrow in the return bytecode rather than in generate_return_entry
2257 // since compiled code callers expect the result to already be narrowed.
2258 if (state == itos) {
2259 __ narrow(rax);
2260 }
2261
2262 __ remove_activation(state, rbcp, true, true, true);
2263
2264 __ jmp(rbcp);
2265 }
2266
2267 // ----------------------------------------------------------------------------
2268 // Volatile variables demand their effects be made known to all CPUs
2269 // in order. Store buffers on most chips allow reads & writes to
2270 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2271 // without some kind of memory barrier (i.e., it's not sufficient that
2272 // the interpreter does not reorder volatile references, the hardware
2273 // also must not reorder them).
2274 //
2275 // According to the new Java Memory Model (JMM):
2276 // (1) All volatiles are serialized wrt each other. ALSO reads &
2277 // writes act as acquire & release, so:
2278 // (2) A read cannot let unrelated NON-volatile memory refs that
2279 // happen after the read float up to before the read. It's OK for
2280 // non-volatile memory refs that happen before the volatile read to
2281 // float down below it.
2282 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2614 }
2615 // rax: object pointer or null
2616 // cache: cache entry pointer
2617 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2618 rax, cache);
2619
2620 __ load_field_entry(cache, index);
2621 __ bind(L1);
2622 }
2623 }
2624
2625 void TemplateTable::pop_and_check_object(Register r) {
2626 __ pop_ptr(r);
2627 __ null_check(r); // for field access must check obj.
2628 __ verify_oop(r);
2629 }
2630
2631 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2632 transition(vtos, vtos);
2633
2634 const Register obj = r9;
2635 const Register cache = rcx;
2636 const Register index = rdx;
2637 const Register off = rbx;
2638 const Register tos_state = rax;
2639 const Register flags = rdx;
2640 const Register bc = c_rarg3; // no longer aliases obj (which is r9 here)
2641
2642 resolve_cache_and_index_for_field(byte_no, cache, index);
2643 jvmti_post_field_access(cache, index, is_static, false);
2644 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2645
2646 const Address field(obj, off, Address::times_1, 0*wordSize);
2647
2648 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
2649
2650 // btos is 0, so a plain zero test on tos_state identifies it
2651 assert(btos == 0, "change code, btos != 0");
2652 __ testl(tos_state, tos_state);
2653 __ jcc(Assembler::notZero, notByte);
2654
2655 // btos
2656 if (!is_static) pop_and_check_object(obj);
2657 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
2658 __ push(btos);
2659 // Rewrite bytecode to be faster
2660 if (!is_static && rc == may_rewrite) {
2661 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2662 }
2663 __ jmp(Done);
2664
2665 __ bind(notByte);
2666 __ cmpl(tos_state, ztos);
2667 __ jcc(Assembler::notEqual, notBool);
2668 if (!is_static) pop_and_check_object(obj);
2669 // ztos (same code as btos)
2670 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg);
2671 __ push(ztos);
2672 // Rewrite bytecode to be faster
2673 if (!is_static && rc == may_rewrite) {
2674 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2675 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2676 }
2677 __ jmp(Done);
2678
2679 __ bind(notBool);
2680 __ cmpl(tos_state, atos);
2681 __ jcc(Assembler::notEqual, notObj);
2682 // atos
2683 if (!Arguments::is_valhalla_enabled()) {
2684 if (!is_static) pop_and_check_object(obj);
2685 do_oop_load(_masm, field, rax);
2686 __ push(atos);
2687 if (!is_static && rc == may_rewrite) {
2688 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2689 }
2690 __ jmp(Done);
2691 } else {
2692 if (is_static) {
2693 __ load_heap_oop(rax, field);
2694 __ push(atos);
2695 __ jmp(Done);
2696 } else {
2697 Label is_flat, rewrite_inline;
2698 __ test_field_is_flat(flags, rscratch1, is_flat);
2699 pop_and_check_object(obj);
2700 __ load_heap_oop(rax, field);
2701 __ push(atos);
2702 if (rc == may_rewrite) {
2703 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2704 }
2705 __ jmp(Done);
2706 __ bind(is_flat);
2707 // field is flat (null-free or nullable with a null-marker)
2708 pop_and_check_object(rax);
2709 __ read_flat_field(rcx, rdx, rbx, rax);
2710 __ verify_oop(rax);
2711 __ push(atos);
2712 __ bind(rewrite_inline);
2713 if (rc == may_rewrite) {
2714 patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
2715 }
2716 __ jmp(Done);
2717 }
2718 }
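// For a flat field the value object's payload is embedded in the holder,
// so there is no reference to load; read_flat_field buffers a fresh heap
// instance from the flat payload (including any null-marker handling for
// nullable flat fields) and leaves the resulting oop in rax.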
2719
2720 __ bind(notObj);
2721
2722 if (!is_static) pop_and_check_object(obj);
2723
2724 __ cmpl(tos_state, itos);
2725 __ jcc(Assembler::notEqual, notInt);
2726 // itos
2727 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
2728 __ push(itos);
2729 // Rewrite bytecode to be faster
2730 if (!is_static && rc == may_rewrite) {
2731 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2732 }
2733 __ jmp(Done);
2734
2735 __ bind(notInt);
2736 __ cmpl(tos_state, ctos);
2737 __ jcc(Assembler::notEqual, notChar);
2738 // ctos
2739 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
2740 __ push(ctos);
2741 // Rewrite bytecode to be faster
2742 if (!is_static && rc == may_rewrite) {
2743 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
#endif

  __ bind(Done);
  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
  //                                              Assembler::LoadStore));
}
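
// Illustrative example: a Java field read such as
//     int v = o.x;     // javac emits: aload o; getfield x
// takes the itos branch above; its first execution resolves the field entry
// and, when rc == may_rewrite, patches the bytecode to _fast_igetfield so
// later executions skip this tos_state dispatch chain entirely.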

void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}

void TemplateTable::nofast_getfield(int byte_no) {
  getfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}

// The registers cache and index are expected to be set before this call.
// The function may destroy various registers, but it must preserve cache and index.
void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
  // Cache is rcx and index is rdx
  const Register entry = c_rarg2; // ResolvedFieldEntry
  const Register obj = c_rarg1;   // Object pointer
  const Register value = c_rarg3; // JValue object

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L1;
    assert_different_registers(cache, obj, rax);
    __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(rax, rax);
    __ jcc(Assembler::zero, L1);

    __ mov(entry, cache);

    if (is_static) {
    // cache: field entry pointer
    // value: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               obj, entry, value);
    // Reload field entry
    __ load_field_entry(cache, index);
    __ bind(L1);
  }
}
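
// Note: the zero test on the modification count keeps the common case cheap;
// only when a JVMTI agent has requested field-modification events (e.g. via
// JVMTI SetFieldModificationWatch) does the interpreter build a jvalue and
// call out to InterpreterRuntime::post_field_modification.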

void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj = rcx;
  const Register cache = rcx;
  const Register index = rdx;
  const Register tos_state = rdx;
  const Register off = rbx;
  const Register flags = r9;

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_mod(cache, index, is_static);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
  //                                              Assembler::StoreStore));

  Label notVolatile, Done;

  // Check for volatile store
  __ movl(rscratch1, flags);
  __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ testl(rscratch1, rscratch1);
  __ jcc(Assembler::zero, notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ jmp(Done);
  __ bind(notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);

  __ bind(Done);
}
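
// Example: a store to a volatile field, e.g.
//     volatile int x; ... o.x = v;
// has the is_volatile bit set in its resolved entry, so the store above is
// followed by a StoreLoad|StoreStore barrier; non-volatile stores take the
// notVolatile path, which needs no fence on x86 (TSO).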

void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
                                              Register obj, Register off, Register tos_state, Register flags) {

  // field addresses
  const Address field(obj, off, Address::times_1, 0*wordSize);

  Label notByte, notBool, notInt, notShort, notChar,
        notLong, notFloat, notObj, notInlineType;
  Label Done;

  const Register bc = c_rarg3;

  // Test TOS state
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  {
    __ pop(btos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notByte);
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);

  // ztos
  {
    __ pop(ztos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);

  // atos
  {
    if (!Arguments::is_valhalla_enabled()) {
      __ pop(atos);
      if (!is_static) pop_and_check_object(obj);
      // Store into the field
      do_oop_store(_masm, field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    } else {
      __ pop(atos);
      if (is_static) {
        Label is_nullable;
        __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_nullable);
        __ null_check(rax); // FIXME JDK-8341120
        __ bind(is_nullable);
        do_oop_store(_masm, field, rax);
        __ jmp(Done);
      } else {
        Label is_flat, null_free_reference, rewrite_inline;
        __ test_field_is_flat(flags, rscratch1, is_flat);
        __ test_field_is_null_free_inline_type(flags, rscratch1, null_free_reference);
        pop_and_check_object(obj);
        // Store into the field
        do_oop_store(_masm, field, rax);
        if (rc == may_rewrite) {
          patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
        }
        __ jmp(Done);
        __ bind(null_free_reference);
        __ null_check(rax); // FIXME JDK-8341120
        pop_and_check_object(obj);
        // Store into the field
        do_oop_store(_masm, field, rax);
        __ jmp(rewrite_inline);
        __ bind(is_flat);
        pop_and_check_object(rscratch2);
        __ write_flat_field(rcx, r8, rscratch1, rscratch2, rbx, rax);
        __ bind(rewrite_inline);
        if (rc == may_rewrite) {
          patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
        }
        __ jmp(Done);
      }
    }
  }
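
  // Note: the flat and null-free cases cannot be rewritten to _fast_aputfield;
  // they fall through to rewrite_inline and patch to _fast_vputfield instead,
  // which re-reads the field entry at runtime (see fast_storefield_helper below).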

  __ bind(notObj);
  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);

  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
}

void TemplateTable::jvmti_post_fast_field_mod() {

  const Register scratch = c_rarg3;

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(scratch, scratch);
    __ jcc(Assembler::zero, L2);
    __ pop_ptr(rbx);  // copy the object pointer from tos
    __ verify_oop(rbx);
    __ push_ptr(rbx); // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {          // load values into the jvalue object
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(rax); break;
    case Bytecodes::_fast_dputfield: __ push(dtos); break;
    case Bytecodes::_fast_fputfield: __ push(ftos); break;
    case Bytecodes::_fast_lputfield: __ push_l(rax); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(scratch, rsp);          // points to jvalue on the stack
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, rax);
    __ verify_oop(rbx);
    // rbx: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);

    switch (bytecode()) {          // restore tos values
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
    case Bytecodes::_fast_dputfield: __ pop(dtos); break;
    case Bytecodes::_fast_fputfield: __ pop(ftos); break;
    case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
    default: break;
    }
    __ bind(L2);
  }
}
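
// Note: the tos value is spilled to the stack so its address can be passed
// as the jvalue pointer; the two switches above must stay in sync with the
// set of _fast_*putfield bytecodes.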

void TemplateTable::fast_storefield(TosState state) {
  transition(state, vtos);

  Label notVolatile, Done;

  jvmti_post_fast_field_mod();

  __ push(rax);
  __ load_field_entry(rcx, rax);
  load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
  __ pop(rax);
  // RBX: field offset, RCX: field entry, RAX: TOS value, RDX: flags

  // Get object from stack
  pop_and_check_object(rcx);

  // field address
  const Address field(rcx, rbx, Address::times_1);

  // Check for volatile store
  __ movl(rscratch2, rdx); // saving flags for is_flat test
  __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ testl(rscratch2, rscratch2);
  __ jcc(Assembler::zero, notVolatile);

  fast_storefield_helper(field, rax, rdx);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ jmp(Done);
  __ bind(notVolatile);

  fast_storefield_helper(field, rax, rdx);

  __ bind(Done);
}
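
// Note: fast_storefield only runs after putfield has been rewritten, so the
// field entry is already resolved; the volatile handling mirrors
// putfield_or_static() above.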

void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {

  // DANGER: 'field' argument depends on rcx and rbx

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vputfield:
    {
      // Field is either flat (nullable or not) or non-flat and null-free
      Label is_flat, done;
      __ test_field_is_flat(flags, rscratch1, is_flat);
      __ null_check(rax); // FIXME JDK-8341120
      do_oop_store(_masm, field, rax);
      __ jmp(done);
      __ bind(is_flat);
      __ load_field_entry(r8, r9);
      __ movptr(rscratch2, rcx); // re-shuffle registers because of VM call calling convention
      __ write_flat_field(r8, rscratch1, r9, rscratch2, rbx, rax);
      __ bind(done);
    }
    break;
  case Bytecodes::_fast_aputfield:
    {
      do_oop_store(_masm, field, rax);
    }
    break;
  case Bytecodes::_fast_lputfield:
    __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_iputfield:
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_zputfield:
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_bputfield:
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_sputfield:
    __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_cputfield:
    __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_fputfield:
    // Check to see if a field access watch has been set before we
    // take the time to call into the VM.
    Label L1;
    __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
    __ testl(rcx, rcx);
    __ jcc(Assembler::zero, L1);
    // access constant pool cache entry
    __ load_field_entry(c_rarg2, rcx);
    __ verify_oop(rax);
    __ push_ptr(rax); // save object pointer before call_VM() clobbers it
    __ mov(c_rarg1, rax);
    // c_rarg1: object pointer copied above
    // c_rarg2: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
    __ pop_ptr(rax); // restore object pointer
    __ bind(L1);
  }

  // access constant pool cache
  __ load_field_entry(rcx, rbx);
  __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // rax: object
  __ verify_oop(rax);
  __ null_check(rax);
  Address field(rax, rdx, Address::times_1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vgetfield:
    __ read_flat_field(rcx, rdx, rbx, rax);
    __ verify_oop(rax);
    break;
  case Bytecodes::_fast_agetfield:
    do_oop_load(_masm, field, rax);
    __ verify_oop(rax);
    break;
  case Bytecodes::_fast_lgetfield:
    __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg);
    break;
  case Bytecodes::_fast_igetfield:
    __ access_load_at(T_INT, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_bgetfield:
    __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_sgetfield:
    __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_cgetfield:
    __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg);
    break;
  case Bytecodes::_fast_fgetfield:

  // Note: rax_callsite is already pushed

  // %%% should make a type profile for any invokedynamic that takes a ref argument
  // profile this call
  __ profile_call(rbcp);
  __ profile_arguments_type(rdx, rbx_method, rbcp, false);

  __ verify_oop(rax_callsite);

  __ jump_from_interpreted(rbx_method, rdx);
}
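
// Note: this is the tail of invokedynamic; rbx_method comes from the resolved
// indy entry and rax_callsite holds the appendix, already pushed on the
// expression stack as a trailing argument.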

//-----------------------------------------------------------------------------
// Allocation

void TemplateTable::_new() {
  transition(vtos, atos);
  __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
  Label slow_case;
  Label done;

  __ get_cpool_and_tags(rcx, rax);

  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading InstanceKlass to be consistent with the order
  // in which the constant pool is updated (see ConstantPool::klass_at_put)
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
  __ jcc(Assembler::notEqual, slow_case);

  // get InstanceKlass
  __ load_resolved_klass_at_index(rcx, rcx, rdx);

  // make sure klass is initialized
  // init_state needs acquire, but x86 is TSO, and so we are already good.
  assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
  __ clinit_barrier(rcx, nullptr /*L_fast_path*/, &slow_case);

  __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
  __ jmp(done);

  // slow case
  __ bind(slow_case);

  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(rax);

  // continue
  __ bind(done);
}
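
// Illustrative example: for
//     Foo f = new Foo();
// the fast path above allocates the instance inline (allocate_instance) once
// the class is resolved and initialized; an unresolved tag, an uninitialized
// class, or an allocation failure falls back to InterpreterRuntime::_new.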

void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, rax);
}

void TemplateTable::anewarray() {
  transition(itos, atos);

  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ get_constant_pool(c_rarg1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          c_rarg1, c_rarg2, rax);
}
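
// Example: "new int[n]" is a newarray with a primitive type code at bcp+1,
// while "new String[n]" is an anewarray carrying a 2-byte constant-pool
// index; both leave the new array oop in rax (atos).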

void TemplateTable::arraylength() {
  transition(atos, itos);
  __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
}

void TemplateTable::checkcast() {
  transition(atos, atos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ testptr(rax, rax); // object is in rax
  __ jcc(Assembler::zero, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
  __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
  // See if bytecode has already been quicked
  __ movzbl(rdx, Address(rdx, rbx,
                         Address::times_1,
                         Array<u1>::base_offset_in_bytes()));
  __ cmpl(rdx, JVM_CONSTANT_Class);
  __ jcc(Assembler::equal, quicked);
  __ push(atos); // save receiver for result, and for GC
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

  __ get_vm_result_metadata(rax);

  __ pop_ptr(rdx); // restore receiver
  __ jmpb(resolved);

  // Get superklass in rax and subklass in rbx
  __ bind(quicked);
  __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
  __ load_resolved_klass_at_index(rax, rcx, rbx);

  __ bind(resolved);
  __ load_klass(rbx, rdx, rscratch1);

  // Generate subtype check. Blows rcx, rdi. Object in rdx.
  // Superklass in rax. Subklass in rbx.
  __ gen_subtype_check(rbx, ok_is_subtype);

  // Come here on failure
  __ push_ptr(rdx);
  // object is at TOS
  __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));

  // Come here on success
  __ bind(ok_is_subtype);
  __ mov(rax, rdx); // Restore object from rdx
  __ jmp(done);

  __ bind(is_null);

  // Collect counts on whether this check-cast sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ profile_null_seen(rcx);
  }

  __ bind(done);
}
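
// Illustrative example: "(String) obj" reaches the quicked path once the
// constant-pool entry holds a resolved class; a failed subtype check jumps
// to the shared ClassCastException entry with the object at TOS, while a
// null reference passes through unchecked (only its frequency is profiled).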

void TemplateTable::instanceof() {
  transition(atos, itos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
  __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
  // See if bytecode has already been quicked
  __ movzbl(rdx, Address(rdx, rbx,
                         Address::times_1,
                         Array<u1>::base_offset_in_bytes()));
  __ cmpl(rdx, JVM_CONSTANT_Class);
  __ jcc(Assembler::equal, quicked);

  __ push(atos); // save receiver for result, and for GC
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

  __ get_vm_result_metadata(rax);

  __ pop_ptr(rdx); // restore receiver
  __ verify_oop(rdx);
  __ load_klass(rdx, rdx, rscratch1);
  __ jmpb(resolved);

  // Get superklass in rax and subklass in rdx
  __ bind(quicked);
  __ load_klass(rdx, rax, rscratch1);
  __ load_resolved_klass_at_index(rax, rcx, rbx);

  __ bind(resolved);

  // Generate subtype check. Blows rcx, rdi
  // Superklass in rax. Subklass in rdx.
  __ gen_subtype_check(rdx, ok_is_subtype);

  // Come here on failure
  __ xorl(rax, rax);
  __ jmpb(done);
  // Come here on success
  __ bind(ok_is_subtype);
  __ movl(rax, 1);

  // Collect counts on whether this test sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ jmp(done);
    __ bind(is_null);
    __ profile_null_seen(rcx);
  } else {
    __ bind(is_null); // same as 'done'
  }
  __ bind(done);
  // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
  // rax = 1: obj != nullptr and obj is an instanceof the specified klass
}
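
// Example: "obj instanceof String" leaves 1 in rax for a successful check and
// 0 for a failed check or a null obj, matching the convention noted above;
// unlike checkcast, the receiver's klass is loaded before the subtype check.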

//----------------------------------------------------------------------------------------------------
// Breakpoints
void TemplateTable::_breakpoint() {
  // Note: We get here even if we are single stepping...
  // jbug insists on setting breakpoints at every bytecode
  // even if we are in single step mode.

  transition(vtos, vtos);

  // get the unpatched byte code
  __ get_method(c_rarg1);
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address,
                              InterpreterRuntime::get_original_bytecode_at),
             c_rarg1, rbcp);
  __ mov(rbx, rax); // keep the original bytecode in callee-saved rbx for the dispatch below

  // post the breakpoint event
  __ get_method(c_rarg1);
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint),
             c_rarg1, rbcp);

  // complete the execution of original bytecode
  __ dispatch_only_normal(vtos);
}

//-----------------------------------------------------------------------------
// Exceptions

void TemplateTable::athrow() {
  transition(atos, vtos);
  __ null_check(rax);
  __ jump(RuntimeAddress(Interpreter::throw_exception_entry()));
}

//-----------------------------------------------------------------------------
// Synchronization
//
// Note: monitorenter & exit are symmetric routines, which is reflected
//       in the assembly code structure as well
//
// Stack layout:
//
// [expressions  ] <--- rsp               = expression stack top
// ..
// [expressions  ]
// [monitor entry] <--- monitor block top = expression stack bot
// ..
// [monitor entry]
// [frame data   ] <--- monitor block bot
// ...
// [saved rbp    ] <--- rbp
void TemplateTable::monitorenter() {
  transition(atos, vtos);

  // check for null object
  __ null_check(rax);

  Label is_inline_type;
  __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
  __ test_markword_is_inline_type(rbx, is_inline_type);

  const Address monitor_block_top(
        rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Label allocated;

  Register rtop = c_rarg3;
  Register rbot = c_rarg2;
  Register rmon = c_rarg1;

  // initialize entry pointer
  __ xorl(rmon, rmon); // points to free slot or null

  // find a free slot in the monitor block (result in rmon)
  {
    Label entry, loop, exit;
    __ movptr(rtop, monitor_block_top); // derelativize pointer
    __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
  // rmon: points to monitor entry
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception
  // handling for async. exceptions works correctly.
  // The object has already been popped from the stack, so the
  // expression stack looks correct.
  __ increment(rbcp);

  // store object
  __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
  __ lock_object(rmon);

  // check to make sure this monitor doesn't cause stack overflow after locking
  __ save_bcp(); // in case of exception
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to
  // next instruction.
  __ dispatch_next(vtos);

  __ bind(is_inline_type);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_identity_exception), rax);
  __ should_not_reach_here();
}
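
// Note: value objects have no identity, so "synchronized (valueObj)" cannot
// succeed; the mark-word test above routes inline types to
// InterpreterRuntime::throw_identity_exception before any monitor slot is
// allocated or locked.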

void TemplateTable::monitorexit() {
  transition(atos, vtos);

  // check for null object
  __ null_check(rax);

  const int is_inline_type_mask = markWord::inline_type_pattern;
  Label has_identity;
  __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
  __ andptr(rbx, is_inline_type_mask);
  __ cmpl(rbx, is_inline_type_mask);
  __ jcc(Assembler::notEqual, has_identity);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();
  __ bind(has_identity);

  const Address monitor_block_top(
        rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
        rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Register rtop = c_rarg1;
  Register rbot = c_rarg2;

  Label found;

  // find matching slot
  {
    Label entry, loop;
    __ movptr(rtop, monitor_block_top); // derelativize pointer
    __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
    // rtop points to current entry, starting with top-most entry

    __ lea(rbot, monitor_block_bot); // points to word before bottom
                                     // of monitor block