20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "gc/shared/collectedHeap.hpp"
29 #include "gc/shared/gc_globals.hpp"
30 #include "gc/shared/tlab_globals.hpp"
31 #include "interpreter/interpreter.hpp"
32 #include "interpreter/interpreterRuntime.hpp"
33 #include "interpreter/interp_masm.hpp"
34 #include "interpreter/templateTable.hpp"
35 #include "memory/universe.hpp"
36 #include "oops/methodCounters.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/objArrayKlass.hpp"
39 #include "oops/oop.inline.hpp"
40 #include "oops/resolvedFieldEntry.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/methodHandles.hpp"
45 #include "runtime/frame.inline.hpp"
46 #include "runtime/safepointMechanism.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/macros.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
53
54 // Global Register Names
55 static const Register rbcp = LP64_ONLY(r13) NOT_LP64(rsi);
56 static const Register rlocals = LP64_ONLY(r14) NOT_LP64(rdi);
57
58 // Address Computation: local variables
59 static inline Address iaddress(int n) {
166 static void do_oop_load(InterpreterMacroAssembler* _masm,
167 Address src,
168 Register dst,
169 DecoratorSet decorators = 0) {
170 __ load_heap_oop(dst, src, rdx, rbx, decorators);
171 }
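// do_oop_load dispatches through the GC barrier set; rdx and rbx are
// scratch registers for barriers (e.g. concurrent collectors' load
// barriers) that need temporaries.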
172
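// at_bcp(offset) forms an address relative to the current bytecode pointer;
// for example, at_bcp(1) addresses the first operand byte of the current
// bytecode (see the load of the newarray type operand further below).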
173 Address TemplateTable::at_bcp(int offset) {
174 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
175 return Address(rbcp, offset);
176 }
177
178
179 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
180 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
181 int byte_no) {
182 if (!RewriteBytecodes) return;
183 Label L_patch_done;
184
185 switch (bc) {
186 case Bytecodes::_fast_aputfield:
187 case Bytecodes::_fast_bputfield:
188 case Bytecodes::_fast_zputfield:
189 case Bytecodes::_fast_cputfield:
190 case Bytecodes::_fast_dputfield:
191 case Bytecodes::_fast_fputfield:
192 case Bytecodes::_fast_iputfield:
193 case Bytecodes::_fast_lputfield:
194 case Bytecodes::_fast_sputfield:
195 {
196 // We skip bytecode quickening for putfield instructions when
197 // the put_code written to the constant pool cache is zero.
198 // This is required so that every execution of this instruction
199 // calls out to InterpreterRuntime::resolve_get_put to do
200 // additional, required work.
201 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
202 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
203 __ load_field_entry(temp_reg, bc_reg);
204 if (byte_no == f1_byte) {
205 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
813 Address(rdx, rax,
814 Address::times_4,
815 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
816 noreg, noreg);
817 }
818
819 void TemplateTable::daload() {
820 transition(itos, dtos);
821 // rax: index
822 // rdx: array
823 index_check(rdx, rax); // kills rbx
824 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
825 Address(rdx, rax,
826 Address::times_8,
827 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
828 noreg, noreg);
829 }
830
831 void TemplateTable::aaload() {
832 transition(itos, atos);
833 // rax: index
834 // rdx: array
835 index_check(rdx, rax); // kills rbx
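// With compressed oops each element is a 4-byte narrow oop (times_4),
// otherwise a full pointer (times_ptr); the IS_ARRAY decorator tells the
// GC barrier set that this is an array element access.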
836 do_oop_load(_masm,
837 Address(rdx, rax,
838 UseCompressedOops ? Address::times_4 : Address::times_ptr,
839 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
840 rax,
841 IS_ARRAY);
842 }
843
844 void TemplateTable::baload() {
845 transition(itos, itos);
846 // rax: index
847 // rdx: array
848 index_check(rdx, rax); // kills rbx
849 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
850 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
851 noreg, noreg);
852 }
853
854 void TemplateTable::caload() {
855 transition(itos, itos);
856 // rax: index
857 // rdx: array
858 index_check(rdx, rax); // kills rbx
859 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
860 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
861 noreg, noreg);
1107 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1108 Address(rdx, rbx, Address::times_4,
1109 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1110 noreg /* ftos */, noreg, noreg, noreg);
1111 }
1112
1113 void TemplateTable::dastore() {
1114 transition(dtos, vtos);
1115 __ pop_i(rbx);
1116 // value is in UseSSE >= 2 ? xmm0 : ST(0)
1117 // rbx: index
1118 // rdx: array
1119 index_check(rdx, rbx); // prefer index in rbx
1120 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1121 Address(rdx, rbx, Address::times_8,
1122 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1123 noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125
1126 void TemplateTable::aastore() {
1127 Label is_null, ok_is_subtype, done;
1128 transition(vtos, vtos);
1129 // stack: ..., array, index, value
1130 __ movptr(rax, at_tos()); // value
1131 __ movl(rcx, at_tos_p1()); // index
1132 __ movptr(rdx, at_tos_p2()); // array
1133
1134 Address element_address(rdx, rcx,
1135 UseCompressedOops? Address::times_4 : Address::times_ptr,
1136 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1137
1138 index_check_without_pop(rdx, rcx); // kills rbx
1139 __ testptr(rax, rax);
1140 __ jcc(Assembler::zero, is_null);
1141
1142 // Move subklass into rbx
1143 __ load_klass(rbx, rax, rscratch1);
1144 // Move superklass into rax
1145 __ load_klass(rax, rdx, rscratch1);
1146 __ movptr(rax, Address(rax,
1147 ObjArrayKlass::element_klass_offset()));
1148
1149 // Generate subtype check. Blows rcx, rdi
1150 // Superklass in rax. Subklass in rbx.
1151 __ gen_subtype_check(rbx, ok_is_subtype);
1152
1153 // Come here on failure
1154 // object is at TOS
1155 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1156
1157 // Come here on success
1158 __ bind(ok_is_subtype);
1159
1160 // Get the value we will store
1161 __ movptr(rax, at_tos());
1162 __ movl(rcx, at_tos_p1()); // index
1163 // Now store using the appropriate barrier
1164 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1165 __ jmp(done);
1166
1167 // Have a null in rax, rdx=array, rcx=index. Store null at ary[idx]
1168 __ bind(is_null);
1169 __ profile_null_seen(rbx);
1170
1171 // Store a null
1172 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1173
1174 // Pop stack arguments
1175 __ bind(done);
1176 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1177 }
1178
1179 void TemplateTable::bastore() {
1180 transition(itos, vtos);
1181 __ pop_i(rbx);
1182 // rax: value
1183 // rbx: index
1184 // rdx: array
1185 index_check(rdx, rbx); // prefer index in rbx
1186 // Need to check whether array is boolean or byte
1187 // since both types share the bastore bytecode.
1188 __ load_klass(rcx, rdx, rscratch1);
1189 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1190 int diffbit = Klass::layout_helper_boolean_diffbit();
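// diffbit is the single layout-helper bit that differs between boolean[]
// and byte[]; if it is set the array is a boolean[] and the value being
// stored must be masked down to its lowest bit (0 or 1).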
1191 __ testl(rcx, diffbit);
1192 Label L_skip;
1193 __ jccb(Assembler::zero, L_skip);
2322 __ jcc(j_not(cc), not_taken);
2323 branch(false, false);
2324 __ bind(not_taken);
2325 __ profile_not_taken_branch(rax);
2326 }
2327
2328 void TemplateTable::if_nullcmp(Condition cc) {
2329 transition(atos, vtos);
2330 // assume branch is more often taken than not (loops use backward branches)
2331 Label not_taken;
2332 __ testptr(rax, rax);
2333 __ jcc(j_not(cc), not_taken);
2334 branch(false, false);
2335 __ bind(not_taken);
2336 __ profile_not_taken_branch(rax);
2337 }
2338
2339 void TemplateTable::if_acmp(Condition cc) {
2340 transition(atos, vtos);
2341 // assume branch is more often taken than not (loops use backward branches)
2342 Label not_taken;
2343 __ pop_ptr(rdx);
2344 __ cmpoop(rdx, rax);
2345 __ jcc(j_not(cc), not_taken);
2346 branch(false, false);
2347 __ bind(not_taken);
2348 __ profile_not_taken_branch(rax);
2349 }
2350
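// ret: a jsr pushed the return bci and an astore saved it into a local;
// ret reloads that bci and rebuilds rbcp from the method's ConstMethod
// code base.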
2351 void TemplateTable::ret() {
2352 transition(vtos, vtos);
2353 locals_index(rbx);
2354 LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2355 NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2356 __ profile_ret(rbx, rcx);
2357 __ get_method(rax);
2358 __ movptr(rbcp, Address(rax, Method::const_offset()));
2359 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2360 ConstMethod::codes_offset()));
2361 __ dispatch_next(vtos, 0, true);
2362 }
2363
2364 void TemplateTable::wide_ret() {
2365 transition(vtos, vtos);
2366 locals_index_wide(rbx);
2367 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2368 __ profile_ret(rbx, rcx);
2597 const Register thread = rdi;
2598 __ get_thread(thread);
2599 __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2600 #endif
2601 __ jcc(Assembler::zero, no_safepoint);
2602 __ push(state);
2603 __ push_cont_fastpath();
2604 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2605 InterpreterRuntime::at_safepoint));
2606 __ pop_cont_fastpath();
2607 __ pop(state);
2608 __ bind(no_safepoint);
2609 }
2610
2611 // Narrow result if state is itos but result type is smaller.
2612 // Need to narrow in the return bytecode rather than in generate_return_entry
2613 // since compiled code callers expect the result to already be narrowed.
2614 if (state == itos) {
2615 __ narrow(rax);
2616 }
2617 __ remove_activation(state, rbcp);
2618
2619 __ jmp(rbcp);
2620 }
2621
2622 // ----------------------------------------------------------------------------
2623 // Volatile variables demand their effects be made known to all CPU's
2624 // in order. Store buffers on most chips allow reads & writes to
2625 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2626 // without some kind of memory barrier (i.e., it's not sufficient that
2627 // the interpreter does not reorder volatile references, the hardware
2628 // also must not reorder them).
2629 //
2630 // According to the new Java Memory Model (JMM):
2631 // (1) All volatiles are serialized wrt each other. ALSO reads &
2632 // writes act as acquire & release, so:
2633 // (2) A read cannot let unrelated NON-volatile memory refs that
2634 // happen after the read float up to before the read. It's OK for
2635 // non-volatile memory refs that happen before the volatile read to
2636 // float down below it.
2637 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2963 }
2964 // rax: object pointer or null
2965 // cache: cache entry pointer
2966 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2967 rax, cache);
2968
2969 __ load_field_entry(cache, index);
2970 __ bind(L1);
2971 }
2972 }
2973
2974 void TemplateTable::pop_and_check_object(Register r) {
2975 __ pop_ptr(r);
2976 __ null_check(r); // for field access must check obj.
2977 __ verify_oop(r);
2978 }
2979
2980 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2981 transition(vtos, vtos);
2982
2983 const Register obj = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2984 const Register cache = rcx;
2985 const Register index = rdx;
2986 const Register off = rbx;
2987 const Register tos_state = rax;
2988 const Register flags = rdx;
2989 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2990
2991 resolve_cache_and_index_for_field(byte_no, cache, index);
2992 jvmti_post_field_access(cache, index, is_static, false);
2993 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2994
2995 if (!is_static) pop_and_check_object(obj);
2996
2997 const Address field(obj, off, Address::times_1, 0*wordSize);
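// For instance fields obj is the receiver; for static fields obj is
// expected to hold the field holder's java mirror (loaded by
// load_resolved_field_entry), so the same byte-scaled address serves both.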
2998
2999 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
3000
3001 // tos_state needs no masking here; btos must be zero so the plain testl below can dispatch it
3002 assert(btos == 0, "change code, btos != 0");
3003 __ testl(tos_state, tos_state);
3004 __ jcc(Assembler::notZero, notByte);
3005
3006 // btos
3007 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3008 __ push(btos);
3009 // Rewrite bytecode to be faster
3010 if (!is_static && rc == may_rewrite) {
3011 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3012 }
3013 __ jmp(Done);
3014
3015 __ bind(notByte);
3016 __ cmpl(tos_state, ztos);
3017 __ jcc(Assembler::notEqual, notBool);
3018
3019 // ztos (same code as btos)
3020 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3021 __ push(ztos);
3022 // Rewrite bytecode to be faster
3023 if (!is_static && rc == may_rewrite) {
3024 // use btos rewriting, no truncating to t/f bit is needed for getfield.
3025 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3026 }
3027 __ jmp(Done);
3028
3029 __ bind(notBool);
3030 __ cmpl(tos_state, atos);
3031 __ jcc(Assembler::notEqual, notObj);
3032 // atos
3033 do_oop_load(_masm, field, rax);
3034 __ push(atos);
3035 if (!is_static && rc == may_rewrite) {
3036 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3037 }
3038 __ jmp(Done);
3039
3040 __ bind(notObj);
3041 __ cmpl(tos_state, itos);
3042 __ jcc(Assembler::notEqual, notInt);
3043 // itos
3044 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3045 __ push(itos);
3046 // Rewrite bytecode to be faster
3047 if (!is_static && rc == may_rewrite) {
3048 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3049 }
3050 __ jmp(Done);
3051
3052 __ bind(notInt);
3053 __ cmpl(tos_state, ctos);
3054 __ jcc(Assembler::notEqual, notChar);
3055 // ctos
3056 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3057 __ push(ctos);
3058 // Rewrite bytecode to be faster
3059 if (!is_static && rc == may_rewrite) {
3060 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
3120 #endif
3121
3122 __ bind(Done);
3123 // [jk] not needed currently
3124 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3125 // Assembler::LoadStore));
3126 }
3127
3128 void TemplateTable::getfield(int byte_no) {
3129 getfield_or_static(byte_no, false);
3130 }
3131
3132 void TemplateTable::nofast_getfield(int byte_no) {
3133 getfield_or_static(byte_no, false, may_not_rewrite);
3134 }
3135
3136 void TemplateTable::getstatic(int byte_no) {
3137 getfield_or_static(byte_no, true);
3138 }
3139
3140
3141 // The registers cache and index are expected to be set before the call.
3142 // The function may destroy various registers, just not the cache and index registers.
3143 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3144 // Cache is rcx and index is rdx
3145 const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3146 const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx); // Object pointer
3147 const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3148
3149 if (JvmtiExport::can_post_field_modification()) {
3150 // Check to see if a field modification watch has been set before
3151 // we take the time to call into the VM.
3152 Label L1;
3153 assert_different_registers(cache, obj, rax);
3154 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3155 __ testl(rax, rax);
3156 __ jcc(Assembler::zero, L1);
3157
3158 __ mov(entry, cache);
3159
3160 if (is_static) {
3202 // cache: field entry pointer
3203 // value: jvalue object on the stack
3204 __ call_VM(noreg,
3205 CAST_FROM_FN_PTR(address,
3206 InterpreterRuntime::post_field_modification),
3207 obj, entry, value);
3208 // Reload field entry
3209 __ load_field_entry(cache, index);
3210 __ bind(L1);
3211 }
3212 }
3213
3214 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3215 transition(vtos, vtos);
3216
3217 const Register obj = rcx;
3218 const Register cache = rcx;
3219 const Register index = rdx;
3220 const Register tos_state = rdx;
3221 const Register off = rbx;
3222 const Register flags = rax;
3223
3224 resolve_cache_and_index_for_field(byte_no, cache, index);
3225 jvmti_post_field_mod(cache, index, is_static);
3226 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3227
3228 // [jk] not needed currently
3229 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3230 // Assembler::StoreStore));
3231
3232 Label notVolatile, Done;
3233
3234 // Check for volatile store
3235 __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
3236 __ testl(flags, flags);
3237 __ jcc(Assembler::zero, notVolatile);
3238
3239 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3240 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3241 Assembler::StoreStore));
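// Of the requested bits only StoreLoad needs an actual instruction on
// x86 (TSO already orders the rest); it keeps the volatile store from
// being reordered with a subsequent volatile load.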
3242 __ jmp(Done);
3243 __ bind(notVolatile);
3244
3245 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3246
3247 __ bind(Done);
3248 }
3249
3250 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3251 Register obj, Register off, Register tos_state) {
3252
3253 // field addresses
3254 const Address field(obj, off, Address::times_1, 0*wordSize);
3255 NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3256
3257 Label notByte, notBool, notInt, notShort, notChar,
3258 notLong, notFloat, notObj;
3259 Label Done;
3260
3261 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3262
3263 // Test TOS state
3264 __ testl(tos_state, tos_state);
3265 __ jcc(Assembler::notZero, notByte);
3266
3267 // btos
3268 {
3269 __ pop(btos);
3270 if (!is_static) pop_and_check_object(obj);
3271 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3272 if (!is_static && rc == may_rewrite) {
3273 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3274 }
3275 __ jmp(Done);
3276 }
3277
3278 __ bind(notByte);
3279 __ cmpl(tos_state, ztos);
3280 __ jcc(Assembler::notEqual, notBool);
3281
3282 // ztos
3283 {
3284 __ pop(ztos);
3285 if (!is_static) pop_and_check_object(obj);
3286 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3287 if (!is_static && rc == may_rewrite) {
3288 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3289 }
3290 __ jmp(Done);
3291 }
3292
3293 __ bind(notBool);
3294 __ cmpl(tos_state, atos);
3295 __ jcc(Assembler::notEqual, notObj);
3296
3297 // atos
3298 {
3299 __ pop(atos);
3300 if (!is_static) pop_and_check_object(obj);
3301 // Store into the field
3302 do_oop_store(_masm, field, rax);
3303 if (!is_static && rc == may_rewrite) {
3304 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3305 }
3306 __ jmp(Done);
3307 }
3308
3309 __ bind(notObj);
3310 __ cmpl(tos_state, itos);
3311 __ jcc(Assembler::notEqual, notInt);
3312
3313 // itos
3314 {
3315 __ pop(itos);
3316 if (!is_static) pop_and_check_object(obj);
3317 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3318 if (!is_static && rc == may_rewrite) {
3319 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3320 }
3321 __ jmp(Done);
3322 }
3323
3324 __ bind(notInt);
3325 __ cmpl(tos_state, ctos);
3326 __ jcc(Assembler::notEqual, notChar);
3425 }
3426
3427 void TemplateTable::jvmti_post_fast_field_mod() {
3428
3429 const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3430
3431 if (JvmtiExport::can_post_field_modification()) {
3432 // Check to see if a field modification watch has been set before
3433 // we take the time to call into the VM.
3434 Label L2;
3435 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3436 __ testl(scratch, scratch);
3437 __ jcc(Assembler::zero, L2);
3438 __ pop_ptr(rbx); // copy the object pointer from tos
3439 __ verify_oop(rbx);
3440 __ push_ptr(rbx); // put the object pointer back on tos
3441 // Save tos values before call_VM() clobbers them. Since we have
3442 // to do it for every data type, we use the saved values as the
3443 // jvalue object.
3444 switch (bytecode()) { // load values into the jvalue object
3445 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3446 case Bytecodes::_fast_bputfield: // fall through
3447 case Bytecodes::_fast_zputfield: // fall through
3448 case Bytecodes::_fast_sputfield: // fall through
3449 case Bytecodes::_fast_cputfield: // fall through
3450 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3451 case Bytecodes::_fast_dputfield: __ push(dtos); break;
3452 case Bytecodes::_fast_fputfield: __ push(ftos); break;
3453 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3454
3455 default:
3456 ShouldNotReachHere();
3457 }
3458 __ mov(scratch, rsp); // points to jvalue on the stack
3459 // access constant pool cache entry
3460 LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3461 NOT_LP64(__ load_field_entry(rax, rdx));
3462 __ verify_oop(rbx);
3463 // rbx: object pointer copied above
3464 // c_rarg2: cache entry pointer
3465 // c_rarg3: jvalue object on the stack
3466 LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3467 NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3468
3469 switch (bytecode()) { // restore tos values
3470 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3471 case Bytecodes::_fast_bputfield: // fall through
3472 case Bytecodes::_fast_zputfield: // fall through
3473 case Bytecodes::_fast_sputfield: // fall through
3474 case Bytecodes::_fast_cputfield: // fall through
3475 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3476 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3477 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3478 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3479 default: break;
3480 }
3481 __ bind(L2);
3482 }
3483 }
3484
3485 void TemplateTable::fast_storefield(TosState state) {
3486 transition(state, vtos);
3487
3488 Register cache = rcx;
3489
3490 Label notVolatile, Done;
3491
3492 jvmti_post_fast_field_mod();
3493
3494 __ push(rax);
3495 __ load_field_entry(rcx, rax);
3496 load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3497 // RBX: field offset, RAX: TOS, RDX: flags
3498 __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3499 __ pop(rax);
3500
3501 // Get object from stack
3502 pop_and_check_object(rcx);
3503
3504 // field address
3505 const Address field(rcx, rbx, Address::times_1);
3506
3507 // Check for volatile store
3508 __ testl(rdx, rdx);
3509 __ jcc(Assembler::zero, notVolatile);
3510
3511 fast_storefield_helper(field, rax);
3512 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3513 Assembler::StoreStore));
3514 __ jmp(Done);
3515 __ bind(notVolatile);
3516
3517 fast_storefield_helper(field, rax);
3518
3519 __ bind(Done);
3520 }
3521
3522 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3523
3524 // access field
3525 switch (bytecode()) {
3526 case Bytecodes::_fast_aputfield:
3527 do_oop_store(_masm, field, rax);
3528 break;
3529 case Bytecodes::_fast_lputfield:
3530 #ifdef _LP64
3531 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3532 #else
3533 __ stop("should not be rewritten");
3534 #endif
3535 break;
3536 case Bytecodes::_fast_iputfield:
3537 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3538 break;
3539 case Bytecodes::_fast_zputfield:
3540 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3541 break;
3542 case Bytecodes::_fast_bputfield:
3543 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3544 break;
3545 case Bytecodes::_fast_sputfield:
3546 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3547 break;
3569 Label L1;
3570 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3571 __ testl(rcx, rcx);
3572 __ jcc(Assembler::zero, L1);
3573 // access constant pool cache entry
3574 LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3575 NOT_LP64(__ load_field_entry(rcx, rdx));
3576 __ verify_oop(rax);
3577 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3578 LP64_ONLY(__ mov(c_rarg1, rax));
3579 // c_rarg1: object pointer copied above
3580 // c_rarg2: cache entry pointer
3581 LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3582 NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3583 __ pop_ptr(rax); // restore object pointer
3584 __ bind(L1);
3585 }
3586
3587 // access constant pool cache
3588 __ load_field_entry(rcx, rbx);
3589 __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3590
3591 // rax: object
3592 __ verify_oop(rax);
3593 __ null_check(rax);
3594 Address field(rax, rbx, Address::times_1);
3595
3596 // access field
3597 switch (bytecode()) {
3598 case Bytecodes::_fast_agetfield:
3599 do_oop_load(_masm, field, rax);
3600 __ verify_oop(rax);
3601 break;
3602 case Bytecodes::_fast_lgetfield:
3603 #ifdef _LP64
3604 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3605 #else
3606 __ stop("should not be rewritten");
3607 #endif
3608 break;
3609 case Bytecodes::_fast_igetfield:
3610 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3611 break;
3612 case Bytecodes::_fast_bgetfield:
3613 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3614 break;
3615 case Bytecodes::_fast_sgetfield:
3616 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3617 break;
4013
4014 // Note: rax_callsite is already pushed
4015
4016 // %%% should make a type profile for any invokedynamic that takes a ref argument
4017 // profile this call
4018 __ profile_call(rbcp);
4019 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4020
4021 __ verify_oop(rax_callsite);
4022
4023 __ jump_from_interpreted(rbx_method, rdx);
4024 }
4025
4026 //-----------------------------------------------------------------------------
4027 // Allocation
4028
4029 void TemplateTable::_new() {
4030 transition(vtos, atos);
4031 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4032 Label slow_case;
4033 Label slow_case_no_pop;
4034 Label done;
4035 Label initialize_header;
4036
4037 __ get_cpool_and_tags(rcx, rax);
4038
4039 // Make sure the class we're about to instantiate has been resolved.
4040 // This is done before loading InstanceKlass to be consistent with the order
4041 // in which the constant pool is updated (see ConstantPool::klass_at_put).
4042 const int tags_offset = Array<u1>::base_offset_in_bytes();
4043 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4044 __ jcc(Assembler::notEqual, slow_case_no_pop);
4045
4046 // get InstanceKlass
4047 __ load_resolved_klass_at_index(rcx, rcx, rdx);
4048 __ push(rcx); // save the klass; it is needed below to initialize the header
4049
4050 // make sure klass is initialized
4051 // init_state needs acquire, but x86 is TSO, and so we are already good.
4052 #ifdef _LP64
4053 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4054 __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4055 #else
4056 __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4057 __ jcc(Assembler::notEqual, slow_case);
4058 #endif
4059
4060 // get instance_size in InstanceKlass (scaled to a count of bytes)
4061 __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4062 // test to see if it is malformed in some way
4063 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4064 __ jcc(Assembler::notZero, slow_case);
4065
4066 // Allocate the instance:
4067 // If TLAB is enabled:
4068 // Try to allocate in the TLAB.
4069 // If fails, go to the slow path.
4070 // Initialize the allocation.
4071 // Exit.
4072 //
4073 // Go to slow path.
4074
4075 const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4076
4077 if (UseTLAB) {
4078 NOT_LP64(__ get_thread(thread);)
4079 __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
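// tlab_allocate bumps the thread's TLAB top by the variable size in rdx,
// leaves the new object's address in rax, and branches to slow_case if
// the TLAB does not have enough space.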
4080 if (ZeroTLAB) {
4081 // the fields have been already cleared
4082 __ jmp(initialize_header);
4083 }
4084
4085 // The object is initialized before the header. If the object size is
4086 // zero, go directly to the header initialization.
4087 __ decrement(rdx, sizeof(oopDesc));
4088 __ jcc(Assembler::zero, initialize_header);
4089
4090 // Initialize topmost object field, divide rdx by 8, check if odd and
4091 // test if zero.
4092 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
4093 __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4094
4095 // rdx must have been a multiple of 8
4096 #ifdef ASSERT
4097 // make sure rdx was a multiple of 8
4098 Label L;
4099 // Ignore the partial flag stall after shrl() since this is a debug VM
4100 __ jcc(Assembler::carryClear, L);
4101 __ stop("object size is not multiple of 2 - adjust this code");
4102 __ bind(L);
4103 // rdx must be > 0, no extra check needed here
4104 #endif
4105
4106 // initialize remaining object fields: rdx was a multiple of 8
4107 { Label loop;
4108 __ bind(loop);
4109 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4110 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4111 __ decrement(rdx);
4112 __ jcc(Assembler::notZero, loop);
4113 }
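// The loop above clears the instance body from the highest field downward:
// rdx holds the remaining size in 8-byte words and rcx supplies the zeros.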
4114
4115 // initialize object header only.
4116 __ bind(initialize_header);
4117 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4118 (intptr_t)markWord::prototype().value()); // header
4119 __ pop(rcx); // get saved klass back in the register.
4120 #ifdef _LP64
4121 __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4122 __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
4123 #endif
4124 __ store_klass(rax, rcx, rscratch1); // klass
4125
4126 if (DTraceAllocProbes) {
4127 // Trigger dtrace event for fastpath
4128 __ push(atos);
4129 __ call_VM_leaf(
4130 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4131 __ pop(atos);
4132 }
4133
4134 __ jmp(done);
4135 }
4136
4137 // slow case
4138 __ bind(slow_case);
4139 __ pop(rcx); // restore stack pointer to what it was when we came in.
4140 __ bind(slow_case_no_pop);
4141
4142 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4143 Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4144
4145 __ get_constant_pool(rarg1);
4146 __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4147 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4148 __ verify_oop(rax);
4149
4150 // continue
4151 __ bind(done);
4152 }
4153
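// newarray: the operand byte at bcp+1 is the BasicType code of the element
// type (e.g. T_INT), which InterpreterRuntime::newarray uses to select the
// TypeArrayKlass; rax carries the itos length.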
4154 void TemplateTable::newarray() {
4155 transition(itos, atos);
4156 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4157 __ load_unsigned_byte(rarg1, at_bcp(1));
4158 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4159 rarg1, rax);
4160 }
4169 __ get_constant_pool(rarg1);
4170 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4171 rarg1, rarg2, rax);
4172 }
4173
4174 void TemplateTable::arraylength() {
4175 transition(atos, itos);
4176 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4177 }
4178
4179 void TemplateTable::checkcast() {
4180 transition(atos, atos);
4181 Label done, is_null, ok_is_subtype, quicked, resolved;
4182 __ testptr(rax, rax); // object is in rax
4183 __ jcc(Assembler::zero, is_null);
4184
4185 // Get cpool & tags index
4186 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4187 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4188 // See if bytecode has already been quicked
4189 __ cmpb(Address(rdx, rbx,
4190 Address::times_1,
4191 Array<u1>::base_offset_in_bytes()),
4192 JVM_CONSTANT_Class);
4193 __ jcc(Assembler::equal, quicked);
4194 __ push(atos); // save receiver for result, and for GC
4195 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4196
4197 // vm_result_2 has metadata result
4198 #ifndef _LP64
4199 // borrow rdi from locals
4200 __ get_thread(rdi);
4201 __ get_vm_result_2(rax, rdi);
4202 __ restore_locals();
4203 #else
4204 __ get_vm_result_2(rax, r15_thread);
4205 #endif
4206
4207 __ pop_ptr(rdx); // restore receiver
4208 __ jmpb(resolved);
4209
4210 // Get superklass in rax and subklass in rbx
4211 __ bind(quicked);
4212 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4213 __ load_resolved_klass_at_index(rax, rcx, rbx);
4214
4215 __ bind(resolved);
4216 __ load_klass(rbx, rdx, rscratch1);
4217
4218 // Generate subtype check. Blows rcx, rdi. Object in rdx.
4219 // Superklass in rax. Subklass in rbx.
4220 __ gen_subtype_check(rbx, ok_is_subtype);
4221
4222 // Come here on failure
4223 __ push_ptr(rdx);
4224 // object is at TOS
4225 __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
4226
4227 // Come here on success
4228 __ bind(ok_is_subtype);
4229 __ mov(rax, rdx); // Restore object in rdx
4230
4231 // Collect counts on whether this check-cast sees nulls a lot or not.
4232 if (ProfileInterpreter) {
4233 __ jmp(done);
4234 __ bind(is_null);
4235 __ profile_null_seen(rcx);
4236 } else {
4237 __ bind(is_null); // same as 'done'
4238 }
4239 __ bind(done);
4240 }
4241
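// instanceof follows the same shape as checkcast above, but instead of
// throwing ClassCastException on failure it materializes 0 or 1 in rax.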
4242 void TemplateTable::instanceof() {
4243 transition(atos, itos);
4244 Label done, is_null, ok_is_subtype, quicked, resolved;
4245 __ testptr(rax, rax);
4246 __ jcc(Assembler::zero, is_null);
4247
4248 // Get cpool & tags index
4249 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4250 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4251 // See if bytecode has already been quicked
4252 __ cmpb(Address(rdx, rbx,
4253 Address::times_1,
4254 Array<u1>::base_offset_in_bytes()),
4255 JVM_CONSTANT_Class);
4256 __ jcc(Assembler::equal, quicked);
4257
4258 __ push(atos); // save receiver for result, and for GC
4259 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4260 // vm_result_2 has metadata result
4261
4262 #ifndef _LP64
4263 // borrow rdi from locals
4264 __ get_thread(rdi);
4265 __ get_vm_result_2(rax, rdi);
4266 __ restore_locals();
4267 #else
4268 __ get_vm_result_2(rax, r15_thread);
4269 #endif
4270
4271 __ pop_ptr(rdx); // restore receiver
4272 __ verify_oop(rdx);
4273 __ load_klass(rdx, rdx, rscratch1);
4274 __ jmpb(resolved);
4275
4287 // Come here on failure
4288 __ xorl(rax, rax);
4289 __ jmpb(done);
4290 // Come here on success
4291 __ bind(ok_is_subtype);
4292 __ movl(rax, 1);
4293
4294 // Collect counts on whether this test sees nulls a lot or not.
4295 if (ProfileInterpreter) {
4296 __ jmp(done);
4297 __ bind(is_null);
4298 __ profile_null_seen(rcx);
4299 } else {
4300 __ bind(is_null); // same as 'done'
4301 }
4302 __ bind(done);
4303 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
4304 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
4305 }
4306
4307
4308 //----------------------------------------------------------------------------------------------------
4309 // Breakpoints
4310 void TemplateTable::_breakpoint() {
4311 // Note: we get here even if we are single stepping.
4312 // jbug insists on setting breakpoints at every bytecode
4313 // even if we are in single step mode.
4314
4315 transition(vtos, vtos);
4316
4317 Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4318
4319 // get the unpatched byte code
4320 __ get_method(rarg);
4321 __ call_VM(noreg,
4322 CAST_FROM_FN_PTR(address,
4323 InterpreterRuntime::get_original_bytecode_at),
4324 rarg, rbcp);
4325 __ mov(rbx, rax); // preserve the original bytecode; the dispatch below expects it in rbx
4326
4327 // post the breakpoint event
4349 // Note: monitorenter & exit are symmetric routines; which is reflected
4350 // in the assembly code structure as well
4351 //
4352 // Stack layout:
4353 //
4354 // [expressions ] <--- rsp = expression stack top
4355 // ..
4356 // [expressions ]
4357 // [monitor entry] <--- monitor block top = expression stack bot
4358 // ..
4359 // [monitor entry]
4360 // [frame data ] <--- monitor block bot
4361 // ...
4362 // [saved rbp ] <--- rbp
4363 void TemplateTable::monitorenter() {
4364 transition(atos, vtos);
4365
4366 // check for null object
4367 __ null_check(rax);
4368
4369 const Address monitor_block_top(
4370 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4371 const Address monitor_block_bot(
4372 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4373 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4374
4375 Label allocated;
4376
4377 Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4378 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4379 Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4380
4381 // initialize entry pointer
4382 __ xorl(rmon, rmon); // points to free slot or null
4383
4384 // find a free slot in the monitor block (result in rmon)
4385 {
4386 Label entry, loop, exit;
4387 __ movptr(rtop, monitor_block_top); // derelativize pointer
4388 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4441 // rmon: points to monitor entry
4442 __ bind(allocated);
4443
4444 // Increment bcp to point to the next bytecode, so exception
4445 // handling for async. exceptions works correctly.
4446 // The object has already been popped from the stack, so the
4447 // expression stack looks correct.
4448 __ increment(rbcp);
4449
4450 // store object
4451 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4452 __ lock_object(rmon);
4453
4454 // check to make sure this monitor doesn't cause stack overflow after locking
4455 __ save_bcp(); // in case of exception
4456 __ generate_stack_overflow_check(0);
4457
4458 // The bcp has already been incremented. Just need to dispatch to
4459 // next instruction.
4460 __ dispatch_next(vtos);
4461 }
4462
4463 void TemplateTable::monitorexit() {
4464 transition(atos, vtos);
4465
4466 // check for null object
4467 __ null_check(rax);
4468
4469 const Address monitor_block_top(
4470 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4471 const Address monitor_block_bot(
4472 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4473 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4474
4475 Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4476 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4477
4478 Label found;
4479
4480 // find matching slot
4481 {
4482 Label entry, loop;
4483 __ movptr(rtop, monitor_block_top); // derelativize pointer
4484 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4485 // rtop points to current entry, starting with top-most entry
4486
4487 __ lea(rbot, monitor_block_bot); // points to word before bottom
4488 // of monitor block
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "gc/shared/collectedHeap.hpp"
29 #include "gc/shared/gc_globals.hpp"
30 #include "gc/shared/tlab_globals.hpp"
31 #include "interpreter/interpreter.hpp"
32 #include "interpreter/interpreterRuntime.hpp"
33 #include "interpreter/interp_masm.hpp"
34 #include "interpreter/templateTable.hpp"
35 #include "memory/universe.hpp"
36 #include "oops/methodCounters.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/objArrayKlass.hpp"
39 #include "oops/oop.inline.hpp"
40 #include "oops/inlineKlass.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/frame.inline.hpp"
47 #include "runtime/safepointMechanism.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/stubRoutines.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "utilities/macros.hpp"
52
53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
54
55 // Global Register Names
56 static const Register rbcp = LP64_ONLY(r13) NOT_LP64(rsi);
57 static const Register rlocals = LP64_ONLY(r14) NOT_LP64(rdi);
58
59 // Address Computation: local variables
60 static inline Address iaddress(int n) {
167 static void do_oop_load(InterpreterMacroAssembler* _masm,
168 Address src,
169 Register dst,
170 DecoratorSet decorators = 0) {
171 __ load_heap_oop(dst, src, rdx, rbx, decorators);
172 }
173
174 Address TemplateTable::at_bcp(int offset) {
175 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
176 return Address(rbcp, offset);
177 }
178
179
180 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
181 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
182 int byte_no) {
183 if (!RewriteBytecodes) return;
184 Label L_patch_done;
185
186 switch (bc) {
187 case Bytecodes::_fast_vputfield:
188 case Bytecodes::_fast_aputfield:
189 case Bytecodes::_fast_bputfield:
190 case Bytecodes::_fast_zputfield:
191 case Bytecodes::_fast_cputfield:
192 case Bytecodes::_fast_dputfield:
193 case Bytecodes::_fast_fputfield:
194 case Bytecodes::_fast_iputfield:
195 case Bytecodes::_fast_lputfield:
196 case Bytecodes::_fast_sputfield:
197 {
198 // We skip bytecode quickening for putfield instructions when
199 // the put_code written to the constant pool cache is zero.
200 // This is required so that every execution of this instruction
201 // calls out to InterpreterRuntime::resolve_get_put to do
202 // additional, required work.
203 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
204 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
205 __ load_field_entry(temp_reg, bc_reg);
206 if (byte_no == f1_byte) {
207 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
815 Address(rdx, rax,
816 Address::times_4,
817 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
818 noreg, noreg);
819 }
820
821 void TemplateTable::daload() {
822 transition(itos, dtos);
823 // rax: index
824 // rdx: array
825 index_check(rdx, rax); // kills rbx
826 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
827 Address(rdx, rax,
828 Address::times_8,
829 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
830 noreg, noreg);
831 }
832
833 void TemplateTable::aaload() {
834 transition(itos, atos);
835 Register array = rdx;
836 Register index = rax;
837
838 index_check(array, index); // kills rbx
839 __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
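// (Valhalla) A flat array stores element payloads inline rather than as
// oop references, so the flat path below buffers the element into a heap
// value object via read_flat_element.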
840 if (UseFlatArray) {
841 Label is_flat_array, done;
842 __ test_flat_array_oop(array, rbx, is_flat_array);
843 do_oop_load(_masm,
844 Address(array, index,
845 UseCompressedOops ? Address::times_4 : Address::times_ptr,
846 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
847 rax,
848 IS_ARRAY);
849 __ jmp(done);
850 __ bind(is_flat_array);
851 __ read_flat_element(array, index, rbx, rcx, rax);
852 __ bind(done);
853 } else {
854 do_oop_load(_masm,
855 Address(array, index,
856 UseCompressedOops ? Address::times_4 : Address::times_ptr,
857 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
858 rax,
859 IS_ARRAY);
860 }
861 __ profile_element_type(rbx, rax, rcx);
862 }
863
864 void TemplateTable::baload() {
865 transition(itos, itos);
866 // rax: index
867 // rdx: array
868 index_check(rdx, rax); // kills rbx
869 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
870 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
871 noreg, noreg);
872 }
873
874 void TemplateTable::caload() {
875 transition(itos, itos);
876 // rax: index
877 // rdx: array
878 index_check(rdx, rax); // kills rbx
879 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
880 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
881 noreg, noreg);
1127 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1128 Address(rdx, rbx, Address::times_4,
1129 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1130 noreg /* ftos */, noreg, noreg, noreg);
1131 }
1132
1133 void TemplateTable::dastore() {
1134 transition(dtos, vtos);
1135 __ pop_i(rbx);
1136 // value is in UseSSE >= 2 ? xmm0 : ST(0)
1137 // rbx: index
1138 // rdx: array
1139 index_check(rdx, rbx); // prefer index in rbx
1140 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1141 Address(rdx, rbx, Address::times_8,
1142 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1143 noreg /* dtos */, noreg, noreg, noreg);
1144 }
1145
1146 void TemplateTable::aastore() {
1147 Label is_null, is_flat_array, ok_is_subtype, done;
1148 transition(vtos, vtos);
1149 // stack: ..., array, index, value
1150 __ movptr(rax, at_tos()); // value
1151 __ movl(rcx, at_tos_p1()); // index
1152 __ movptr(rdx, at_tos_p2()); // array
1153
1154 Address element_address(rdx, rcx,
1155 UseCompressedOops? Address::times_4 : Address::times_ptr,
1156 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1157
1158 index_check_without_pop(rdx, rcx); // kills rbx
1159
1160 __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1161 __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1162
1163 __ testptr(rax, rax);
1164 __ jcc(Assembler::zero, is_null);
1165
1166 // Move array class to rdi
1167 __ load_klass(rdi, rdx, rscratch1);
1168 if (UseFlatArray) {
1169 __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1170 __ test_flat_array_layout(rbx, is_flat_array);
1171 }
1172
1173 // Move subklass into rbx
1174 __ load_klass(rbx, rax, rscratch1);
1175 // Move array element superklass into rax
1176 __ movptr(rax, Address(rdi,
1177 ObjArrayKlass::element_klass_offset()));
1178
1179 // Generate subtype check. Blows rcx, rdi
1180 // Superklass in rax. Subklass in rbx.
1181 // is "rbx <: rax" ? (value subclass <: array element superclass)
1182 __ gen_subtype_check(rbx, ok_is_subtype, false);
1183
1184 // Come here on failure
1185 // object is at TOS
1186 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1187
1188 // Come here on success
1189 __ bind(ok_is_subtype);
1190
1191 // Get the value we will store
1192 __ movptr(rax, at_tos());
1193 __ movl(rcx, at_tos_p1()); // index
1194 // Now store using the appropriate barrier
1195 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1196 __ jmp(done);
1197
1198 // Have a null in rax, rdx=array, rcx=index. Store null at ary[idx]
1199 __ bind(is_null);
1200 if (EnableValhalla) {
1201 Label is_null_into_value_array_npe, store_null;
1202
1203 // No way to store null in null-free array
1204 __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1205 __ jmp(store_null);
1206
1207 __ bind(is_null_into_value_array_npe);
1208 __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));
1209
1210 __ bind(store_null);
1211 }
1212 // Store a null
1213 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1214 __ jmp(done);
1215
1216 if (UseFlatArray) {
1217 Label is_type_ok;
1218 __ bind(is_flat_array); // Store non-null value to flat
1219
1220 // Simplistic type check...
1221
1222 // Profile the not-null value's klass.
1223 __ load_klass(rbx, rax, rscratch1);
1224 // Move element klass into rax
1225 __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1226 // flat value array needs exact type match
1227 // is "rax == rbx" (value subclass == array element superclass)
1228 __ cmpptr(rax, rbx);
1229 __ jccb(Assembler::equal, is_type_ok);
1230
1231 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1232
1233 __ bind(is_type_ok);
1234 // rbx: value's klass
1235 // rdx: array
1236 // rdi: array klass
1237 __ test_klass_is_empty_inline_type(rbx, rax, done);
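// An empty inline type has no payload, so the store is a no-op (the test
// above jumps straight to done); otherwise copy the value's field payload
// into the flat array element below.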
1238
1239 // calc dst for copy
1240 __ movl(rax, at_tos_p1()); // index
1241 __ data_for_value_array_index(rdx, rdi, rax, rax);
1242
1243 // ...and src for copy
1244 __ movptr(rcx, at_tos()); // value
1245 __ data_for_oop(rcx, rcx, rbx);
1246
1247 __ access_value_copy(IN_HEAP, rcx, rax, rbx);
1248 }
1249 // Pop stack arguments
1250 __ bind(done);
1251 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1252 }
1253
1254 void TemplateTable::bastore() {
1255 transition(itos, vtos);
1256 __ pop_i(rbx);
1257 // rax: value
1258 // rbx: index
1259 // rdx: array
1260 index_check(rdx, rbx); // prefer index in rbx
1261 // Need to check whether array is boolean or byte
1262 // since both types share the bastore bytecode.
1263 __ load_klass(rcx, rdx, rscratch1);
1264 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1265 int diffbit = Klass::layout_helper_boolean_diffbit();
1266 __ testl(rcx, diffbit);
1267 Label L_skip;
1268 __ jccb(Assembler::zero, L_skip);
2397 __ jcc(j_not(cc), not_taken);
2398 branch(false, false);
2399 __ bind(not_taken);
2400 __ profile_not_taken_branch(rax);
2401 }
2402
2403 void TemplateTable::if_nullcmp(Condition cc) {
2404 transition(atos, vtos);
2405 // assume branch is more often taken than not (loops use backward branches)
2406 Label not_taken;
2407 __ testptr(rax, rax);
2408 __ jcc(j_not(cc), not_taken);
2409 branch(false, false);
2410 __ bind(not_taken);
2411 __ profile_not_taken_branch(rax);
2412 }
2413
2414 void TemplateTable::if_acmp(Condition cc) {
2415 transition(atos, vtos);
2416 // assume branch is more often taken than not (loops use backward branches)
2417 Label taken, not_taken;
2418 __ pop_ptr(rdx);
2419
2420 __ profile_acmp(rbx, rdx, rax, rcx);
2421
2422 const int is_inline_type_mask = markWord::inline_type_pattern;
2423 if (EnableValhalla) {
2424 __ cmpoop(rdx, rax);
2425 __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2426
2427 // might be substitutable, test if either rax or rdx is null
2428 __ testptr(rax, rax);
2429 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2430 __ testptr(rdx, rdx);
2431 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2432
2433 // and are both values?
2434 __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2435 __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2436 __ andptr(rbx, is_inline_type_mask);
2437 __ cmpptr(rbx, is_inline_type_mask);
2438 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2439
2440 // same value klass?
2441 __ load_metadata(rbx, rdx);
2442 __ load_metadata(rcx, rax);
2443 __ cmpptr(rbx, rcx);
2444 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2445
2446 // Both are known to be the same type; test for substitutability...
2447 if (cc == equal) {
2448 invoke_is_substitutable(rax, rdx, taken, not_taken);
2449 } else {
2450 invoke_is_substitutable(rax, rdx, not_taken, taken);
2451 }
2452 __ stop("Not reachable");
2453 }
2454
2455 __ cmpoop(rdx, rax);
2456 __ jcc(j_not(cc), not_taken);
2457 __ bind(taken);
2458 branch(false, false);
2459 __ bind(not_taken);
2460 __ profile_not_taken_branch(rax, true);
2461 }
2462
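// (Valhalla) invoke_is_substitutable calls into the runtime to decide acmp
// for two value objects of the same class: they compare equal iff all of
// their fields are pairwise substitutable.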
2463 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2464 Label& is_subst, Label& not_subst) {
2465 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2466 // Registers restored; rax holds the answer, jump to the chosen outcome...
2467 __ testl(rax, rax);
2468 __ jcc(Assembler::zero, not_subst);
2469 __ jmp(is_subst);
2470 }
2471
2472 void TemplateTable::ret() {
2473 transition(vtos, vtos);
2474 locals_index(rbx);
2475 LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2476 NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2477 __ profile_ret(rbx, rcx);
2478 __ get_method(rax);
2479 __ movptr(rbcp, Address(rax, Method::const_offset()));
2480 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2481 ConstMethod::codes_offset()));
2482 __ dispatch_next(vtos, 0, true);
2483 }
2484
2485 void TemplateTable::wide_ret() {
2486 transition(vtos, vtos);
2487 locals_index_wide(rbx);
2488 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2489 __ profile_ret(rbx, rcx);
2718 const Register thread = rdi;
2719 __ get_thread(thread);
2720 __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2721 #endif
2722 __ jcc(Assembler::zero, no_safepoint);
2723 __ push(state);
2724 __ push_cont_fastpath();
2725 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2726 InterpreterRuntime::at_safepoint));
2727 __ pop_cont_fastpath();
2728 __ pop(state);
2729 __ bind(no_safepoint);
2730 }
2731
2732 // Narrow result if state is itos but result type is smaller.
2733 // Need to narrow in the return bytecode rather than in generate_return_entry
2734 // since compiled code callers expect the result to already be narrowed.
2735 if (state == itos) {
2736 __ narrow(rax);
2737 }
2738
2739 __ remove_activation(state, rbcp, true, true, true);
2740
2741 __ jmp(rbcp);
2742 }
2743
2744 // ----------------------------------------------------------------------------
2745 // Volatile variables demand their effects be made known to all CPU's
2746 // in order. Store buffers on most chips allow reads & writes to
2747 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2748 // without some kind of memory barrier (i.e., it's not sufficient that
2749 // the interpreter does not reorder volatile references, the hardware
2750 // also must not reorder them).
2751 //
2752 // According to the new Java Memory Model (JMM):
2753 // (1) All volatiles are serialized wrt each other. ALSO reads &
2754 // writes act as acquire & release, so:
2755 // (2) A read cannot let unrelated NON-volatile memory refs that
2756 // happen after the read float up to before the read. It's OK for
2757 // non-volatile memory refs that happen before the volatile read to
2758 // float down below it.
2759 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
3085 }
3086 // rax: object pointer or null
3087 // cache: cache entry pointer
3088 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
3089 rax, cache);
3090
3091 __ load_field_entry(cache, index);
3092 __ bind(L1);
3093 }
3094 }
3095
3096 void TemplateTable::pop_and_check_object(Register r) {
3097 __ pop_ptr(r);
3098 __ null_check(r); // for field access must check obj.
3099 __ verify_oop(r);
3100 }
3101
3102 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3103 transition(vtos, vtos);
3104
3105 const Register obj = LP64_ONLY(r9) NOT_LP64(rcx);
3106 const Register cache = rcx;
3107 const Register index = rdx;
3108 const Register off = rbx;
3109 const Register tos_state = rax;
3110 const Register flags = rdx;
3111 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // on 32-bit this is the same reg as obj, so don't mix them
3112
3113 resolve_cache_and_index_for_field(byte_no, cache, index);
3114 jvmti_post_field_access(cache, index, is_static, false);
3115 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3116
3117 const Address field(obj, off, Address::times_1, 0*wordSize);
3118
3119 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
3120
3121 // tos_state needs no masking here; btos must be zero so the plain testl below can dispatch it
3122 assert(btos == 0, "change code, btos != 0");
3123 __ testl(tos_state, tos_state);
3124 __ jcc(Assembler::notZero, notByte);
3125
3126 // btos
3127 if (!is_static) pop_and_check_object(obj);
3128 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3129 __ push(btos);
3130 // Rewrite bytecode to be faster
3131 if (!is_static && rc == may_rewrite) {
3132 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3133 }
3134 __ jmp(Done);
3135
3136 __ bind(notByte);
3137 __ cmpl(tos_state, ztos);
3138 __ jcc(Assembler::notEqual, notBool);
3139 if (!is_static) pop_and_check_object(obj);
3140 // ztos (same code as btos)
3141 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3142 __ push(ztos);
3143 // Rewrite bytecode to be faster
3144 if (!is_static && rc == may_rewrite) {
3145 // use btos rewriting, no truncating to t/f bit is needed for getfield.
3146 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3147 }
3148 __ jmp(Done);
3149
3150 __ bind(notBool);
3151 __ cmpl(tos_state, atos);
3152 __ jcc(Assembler::notEqual, notObj);
3153 // atos
3154 if (!EnableValhalla) {
3155 if (!is_static) pop_and_check_object(obj);
3156 do_oop_load(_masm, field, rax);
3157 __ push(atos);
3158 if (!is_static && rc == may_rewrite) {
3159 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3160 }
3161 __ jmp(Done);
3162 } else {
3163 if (is_static) {
3164 __ load_heap_oop(rax, field);
3165 Label is_null_free_inline_type, uninitialized;
3166 // The path below must handle the case where the static field has not been initialized yet
3167 __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3168 // field is not a null free inline type
3169 __ push(atos);
3170 __ jmp(Done);
3171 // field is a null free inline type, must not return null even if uninitialized
3172 __ bind(is_null_free_inline_type);
3173 __ testptr(rax, rax);
3174 __ jcc(Assembler::zero, uninitialized);
3175 __ push(atos);
3176 __ jmp(Done);
3177 __ bind(uninitialized);
3178 #ifdef _LP64
3179 Label slow_case, finish;
3180 __ movptr(rbx, Address(obj, java_lang_Class::klass_offset()));
3181 __ cmpb(Address(rbx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3182 __ jcc(Assembler::notEqual, slow_case);
3183 __ get_default_value_oop(rbx, rscratch1, rax);
3184 __ jmp(finish);
3185 __ bind(slow_case);
3186 #endif // LP64
3187 __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field),
3188 obj, cache);
3189 #ifdef _LP64
3190 __ bind(finish);
3191 #endif // _LP64
3192 __ verify_oop(rax);
3193 __ push(atos);
3194 __ jmp(Done);
3195 } else {
3196 Label is_flat, nonnull, is_inline_type, rewrite_inline, has_null_marker;
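      // Instance field of object type under Valhalla: four cases are
      // distinguished below (a summary of the generated branches):
      //   - a regular reference field: plain oop load, may rewrite to _fast_agetfield
      //   - a null-free, non-flat field: oop load, substituting the default
      //     value if the field still reads null
      //   - a null-free, flat field: read_flat_field reconstructs the value
      //   - a field with a null marker: read in the runtime
      //     (InterpreterRuntime::read_nullable_flat_field)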
      __ test_field_is_null_free_inline_type(flags, rscratch1, is_inline_type);
      __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
      // field is not a null free inline type
      pop_and_check_object(obj);
      __ load_heap_oop(rax, field);
      __ push(atos);
      if (rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
      }
      __ jmp(Done);
      __ bind(is_inline_type);
      __ test_field_is_flat(flags, rscratch1, is_flat);
      // field is not flat
      pop_and_check_object(obj);
      __ load_heap_oop(rax, field);
      __ testptr(rax, rax);
      __ jcc(Assembler::notZero, nonnull);
      __ load_unsigned_short(flags, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
      __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
      __ get_inline_type_field_klass(rcx, flags, rbx);
      __ get_default_value_oop(rbx, rcx, rax);
      __ bind(nonnull);
      __ verify_oop(rax);
      __ push(atos);
      __ jmp(rewrite_inline);
      __ bind(is_flat);
      pop_and_check_object(rax);
      __ read_flat_field(rcx, rdx, rbx, rax);
      __ verify_oop(rax);
      __ push(atos);
      __ jmp(rewrite_inline);
      __ bind(has_null_marker);
      pop_and_check_object(rax);
      __ load_field_entry(rcx, rbx);
      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
      __ get_vm_result(rax, r15_thread);
      __ push(atos);
      __ bind(rewrite_inline);
      if (rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
      }
      __ jmp(Done);
    }
  }

  __ bind(notObj);

  if (!is_static) pop_and_check_object(obj);

  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);
  // itos
  __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
  __ push(itos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
  // ctos
  __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
  __ push(ctos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
#endif

  __ bind(Done);
  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
  //                                              Assembler::LoadStore));
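  // On x86 the hardware memory model (TSO) already gives loads acquire
  // semantics, which is presumably why the barrier above is commented out;
  // only the StoreLoad ordering after volatile stores needs an explicit
  // fence (see putfield_or_static below).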
}

void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}

void TemplateTable::nofast_getfield(int byte_no) {
  getfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}

// The registers cache and index are expected to be set before the call.
// The function may destroy various registers, but must preserve the cache and index registers.
void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
  // Cache is rcx and index is rdx
  const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
  const Register obj   = LP64_ONLY(c_rarg1) NOT_LP64(rbx); // Object pointer
  const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L1;
    assert_different_registers(cache, obj, rax);
    __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(rax, rax);
    __ jcc(Assembler::zero, L1);

    __ mov(entry, cache);

    if (is_static) {
    // cache: field entry pointer
    // value: jvalue object on the stack
    __ call_VM(noreg,
               CAST_FROM_FN_PTR(address,
                                InterpreterRuntime::post_field_modification),
               obj, entry, value);
    // Reload field entry
    __ load_field_entry(cache, index);
    __ bind(L1);
  }
}

void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register obj       = rcx;
  const Register cache     = rcx;
  const Register index     = rdx;
  const Register tos_state = rdx;
  const Register off       = rbx;
  const Register flags     = r9;

  resolve_cache_and_index_for_field(byte_no, cache, index);
  jvmti_post_field_mod(cache, index, is_static);
  load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);

  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
  //                                              Assembler::StoreStore));

  Label notVolatile, Done;

  // Check for volatile store
  __ movl(rscratch1, flags);
  __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ testl(rscratch1, rscratch1);
  __ jcc(Assembler::zero, notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
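  // A volatile store must become visible before any subsequent load or
  // store; per the rules sketched at the top of this section that means
  // ordering the store against later loads (StoreLoad) and later stores
  // (StoreStore).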
  __ jmp(Done);
  __ bind(notVolatile);

  putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);

  __ bind(Done);
}

void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
                                              Register obj, Register off, Register tos_state, Register flags) {

  // field addresses
  const Address field(obj, off, Address::times_1, 0*wordSize);
  NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)

  Label notByte, notBool, notInt, notShort, notChar,
        notLong, notFloat, notObj, notInlineType;
  Label Done;

  const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);

  // Test TOS state
  __ testl(tos_state, tos_state);
  __ jcc(Assembler::notZero, notByte);

  // btos
  {
    __ pop(btos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notByte);
  __ cmpl(tos_state, ztos);
  __ jcc(Assembler::notEqual, notBool);

  // ztos
  {
    __ pop(ztos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notBool);
  __ cmpl(tos_state, atos);
  __ jcc(Assembler::notEqual, notObj);

  // atos
  {
    if (!EnableValhalla) {
      __ pop(atos);
      if (!is_static) pop_and_check_object(obj);
      // Store into the field
      do_oop_store(_masm, field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    } else {
      __ pop(atos);
      if (is_static) {
        Label is_inline_type;
        __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
        __ null_check(rax);
        __ bind(is_inline_type);
        do_oop_store(_masm, field, rax);
        __ jmp(Done);
      } else {
        Label is_null_free_inline_type, is_flat, has_null_marker,
              write_null, rewrite_not_inline, rewrite_inline;
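        // Instance field store under Valhalla: as with getfield above, the
        // branches below distinguish a regular reference field, a null-free
        // non-flat field, a null-free flat field (copied payload-to-payload),
        // and a field with a null marker (written via the runtime).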
        __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
        __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
        // Not an inline type
        pop_and_check_object(obj);
        // Store into the field
        do_oop_store(_masm, field, rax);
        __ bind(rewrite_not_inline);
        if (rc == may_rewrite) {
          patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
        }
        __ jmp(Done);
        // Implementation of the inline type semantics
        __ bind(is_null_free_inline_type);
        __ null_check(rax);
        __ test_field_is_flat(flags, rscratch1, is_flat);
        // field is not flat
        pop_and_check_object(obj);
        // Store into the field
        do_oop_store(_masm, field, rax);
        __ jmp(rewrite_inline);
        __ bind(is_flat);
        // field is flat
        __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
        __ movptr(r9, Address(rcx, in_bytes(ResolvedFieldEntry::field_holder_offset())));
        pop_and_check_object(obj); // obj = rcx
        __ load_klass(r8, rax, rscratch1);
        __ data_for_oop(rax, rax, r8);
        __ addptr(obj, off);
        __ inline_layout_info(r9, rdx, rbx);
        // because we use InlineLayoutInfo, we need special value access code specialized for fields (arrays will need a different API)
        __ flat_field_copy(IN_HEAP, rax, obj, rbx);
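        // Summary of the flat store above (as read from the generated code):
        // rax is advanced to the source payload of the value object
        // (data_for_oop), obj is advanced to the destination address
        // (receiver + field offset), and flat_field_copy copies the payload
        // using the layout information loaded into rbx.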
        __ jmp(rewrite_inline);
        __ bind(has_null_marker); // has null marker means the field is flat with a null marker
        pop_and_check_object(rbx);
        __ load_field_entry(rcx, rdx);
        call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
        __ bind(rewrite_inline);
        if (rc == may_rewrite) {
          patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
        }
        __ jmp(Done);
      }
    }
  }

  __ bind(notObj);
  __ cmpl(tos_state, itos);
  __ jcc(Assembler::notEqual, notInt);

  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notInt);
  __ cmpl(tos_state, ctos);
  __ jcc(Assembler::notEqual, notChar);
}

void TemplateTable::jvmti_post_fast_field_mod() {

  const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);

  if (JvmtiExport::can_post_field_modification()) {
    // Check to see if a field modification watch has been set before
    // we take the time to call into the VM.
    Label L2;
    __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
    __ testl(scratch, scratch);
    __ jcc(Assembler::zero, L2);
    __ pop_ptr(rbx);  // copy the object pointer from tos
    __ verify_oop(rbx);
    __ push_ptr(rbx); // put the object pointer back on tos
    // Save tos values before call_VM() clobbers them. Since we have
    // to do it for every data type, we use the saved values as the
    // jvalue object.
    switch (bytecode()) {            // load values into the jvalue object
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ push_i(rax); break;
    case Bytecodes::_fast_dputfield: __ push(dtos); break;
    case Bytecodes::_fast_fputfield: __ push(ftos); break;
    case Bytecodes::_fast_lputfield: __ push_l(rax); break;

    default:
      ShouldNotReachHere();
    }
    __ mov(scratch, rsp);            // points to jvalue on the stack
    // access constant pool cache entry
    LP64_ONLY(__ load_field_entry(c_rarg2, rax));
    NOT_LP64(__ load_field_entry(rax, rdx));
    __ verify_oop(rbx);
    // rbx: object pointer copied above
    // c_rarg2: cache entry pointer
    // c_rarg3: jvalue object on the stack
    LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
    NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));

    switch (bytecode()) {            // restore tos values
    case Bytecodes::_fast_vputfield: // fall through
    case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
    case Bytecodes::_fast_bputfield: // fall through
    case Bytecodes::_fast_zputfield: // fall through
    case Bytecodes::_fast_sputfield: // fall through
    case Bytecodes::_fast_cputfield: // fall through
    case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
    case Bytecodes::_fast_dputfield: __ pop(dtos); break;
    case Bytecodes::_fast_fputfield: __ pop(ftos); break;
    case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
    default: break;
    }
    __ bind(L2);
  }
}

void TemplateTable::fast_storefield(TosState state) {
  transition(state, vtos);

  Label notVolatile, Done;

  jvmti_post_fast_field_mod();

  __ push(rax);
  __ load_field_entry(rcx, rax);
  load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
  __ pop(rax);
  // RBX: field offset, RCX: cache entry, RAX: TOS value, RDX: flags

  // Get object from stack
  pop_and_check_object(rcx);

  // field address
  const Address field(rcx, rbx, Address::times_1);

  // Check for volatile store
  __ movl(rscratch2, rdx);  // saving flags for is_flat test
  __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
  __ testl(rscratch2, rscratch2);
  __ jcc(Assembler::zero, notVolatile);

  fast_storefield_helper(field, rax, rdx);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ jmp(Done);
  __ bind(notVolatile);

  fast_storefield_helper(field, rax, rdx);

  __ bind(Done);
}

void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {

  // DANGER: 'field' argument depends on rcx and rbx

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vputfield:
    {
      Label is_flat, has_null_marker, write_null, done;
      __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
      // Null free field cases: flat or not flat
      __ null_check(rax);
      __ test_field_is_flat(flags, rscratch1, is_flat);
      // field is not flat
      do_oop_store(_masm, field, rax);
      __ jmp(done);
      __ bind(is_flat);
      __ load_field_entry(r8, r9);
      __ load_unsigned_short(r9, Address(r8, in_bytes(ResolvedFieldEntry::field_index_offset())));
      __ movptr(r8, Address(r8, in_bytes(ResolvedFieldEntry::field_holder_offset())));
      __ inline_layout_info(r8, r9, r8);
      __ load_klass(rdx, rax, rscratch1);
      __ data_for_oop(rax, rax, rdx);
      __ lea(rcx, field);
      __ flat_field_copy(IN_HEAP, rax, rcx, r8);
      __ jmp(done);
      __ bind(has_null_marker); // has null marker means the field is flat with a null marker
      __ movptr(rbx, rcx);
      __ load_field_entry(rcx, rdx);
      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
      __ bind(done);
    }
    break;
  case Bytecodes::_fast_aputfield:
    {
      do_oop_store(_masm, field, rax);
    }
    break;
  case Bytecodes::_fast_lputfield:
#ifdef _LP64
    __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
#else
    __ stop("should not be rewritten");
#endif
    break;
  case Bytecodes::_fast_iputfield:
    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_zputfield:
    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_bputfield:
    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
  case Bytecodes::_fast_sputfield:
    __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
    break;
    Label L1;
    __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
    __ testl(rcx, rcx);
    __ jcc(Assembler::zero, L1);
    // access constant pool cache entry
    LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
    NOT_LP64(__ load_field_entry(rcx, rdx));
    __ verify_oop(rax);
    __ push_ptr(rax);  // save object pointer before call_VM() clobbers it
    LP64_ONLY(__ mov(c_rarg1, rax));
    // c_rarg1: object pointer copied above
    // c_rarg2: cache entry pointer
    LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
    NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
    __ pop_ptr(rax);   // restore object pointer
    __ bind(L1);
  }

  // access constant pool cache
  __ load_field_entry(rcx, rbx);
  __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // rax: object
  __ verify_oop(rax);
  __ null_check(rax);
  Address field(rax, rdx, Address::times_1);

  // access field
  switch (bytecode()) {
  case Bytecodes::_fast_vgetfield:
    {
      Label is_flat, nonnull, Done, has_null_marker;
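      // _fast_vgetfield mirrors the Valhalla branches of getfield above:
      // non-flat (oop load, defaulting a null read), flat (read_flat_field),
      // or carrying a null marker (read via the runtime).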
      __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
      __ test_field_has_null_marker(rscratch1, rscratch2, has_null_marker);
      __ test_field_is_flat(rscratch1, rscratch2, is_flat);
      // field is not flat
      __ load_heap_oop(rax, field);
      __ testptr(rax, rax);
      __ jcc(Assembler::notZero, nonnull);
      __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
      __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
      __ get_inline_type_field_klass(rcx, rdx, rbx);
      __ get_default_value_oop(rbx, rcx, rax);
      __ bind(nonnull);
      __ verify_oop(rax);
      __ jmp(Done);
      __ bind(is_flat);
      // field is flat
      __ read_flat_field(rcx, rdx, rbx, rax);
      __ jmp(Done);
      __ bind(has_null_marker);
      // rax = instance, rcx = resolved entry
      call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
      __ get_vm_result(rax, r15_thread);
      __ bind(Done);
      __ verify_oop(rax);
    }
    break;
  case Bytecodes::_fast_agetfield:
    do_oop_load(_masm, field, rax);
    __ verify_oop(rax);
    break;
  case Bytecodes::_fast_lgetfield:
#ifdef _LP64
    __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
#else
    __ stop("should not be rewritten");
#endif
    break;
  case Bytecodes::_fast_igetfield:
    __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
    break;
  case Bytecodes::_fast_bgetfield:
    __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
    break;
  case Bytecodes::_fast_sgetfield:
    __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
    break;

  // Note: rax_callsite is already pushed

  // %%% should make a type profile for any invokedynamic that takes a ref argument
  // profile this call
  __ profile_call(rbcp);
  __ profile_arguments_type(rdx, rbx_method, rbcp, false);

  __ verify_oop(rax_callsite);

  __ jump_from_interpreted(rbx_method, rdx);
}

//-----------------------------------------------------------------------------
// Allocation

void TemplateTable::_new() {
  transition(vtos, atos);
  __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
  Label slow_case;
  Label done;

  __ get_cpool_and_tags(rcx, rax);

  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading the InstanceKlass to be consistent with the
  // order in which the constant pool is updated (see ConstantPool::klass_at_put)
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
  __ jcc(Assembler::notEqual, slow_case);

  // get InstanceKlass
  __ load_resolved_klass_at_index(rcx, rcx, rdx);

  // make sure klass is initialized
  // init_state needs acquire, but x86 is TSO, and so we are already good.
#ifdef _LP64
  assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
  __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
#else
  __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
  __ jcc(Assembler::notEqual, slow_case);
#endif
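  // A note on the two paths above: clinit_barrier is expected to take the
  // fast path when the klass is fully initialized (and, during <clinit>,
  // for the initializing thread itself), falling back to slow_case
  // otherwise; the 32-bit compare only accepts fully initialized klasses
  // and conservatively bails out to the runtime in all other cases.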

  __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
  if (DTraceAllocProbes) {
    // Trigger dtrace event for fastpath
    __ push(atos);
    __ call_VM_leaf(
         CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
    __ pop(atos);
  }
  __ jmp(done);

  // slow case
  __ bind(slow_case);

  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
  Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);

  __ get_constant_pool(rarg1);
  __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
  __ verify_oop(rax);

  // continue
  __ bind(done);
}

void TemplateTable::newarray() {
  transition(itos, atos);
  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
  __ load_unsigned_byte(rarg1, at_bcp(1));
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          rarg1, rax);
}
  __ get_constant_pool(rarg1);
  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
          rarg1, rarg2, rax);
}

void TemplateTable::arraylength() {
  transition(atos, itos);
  __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
}

void TemplateTable::checkcast() {
  transition(atos, atos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
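  // Java-level semantics being implemented (a sketch, not generated code):
  //   if (obj != null && !resolved_klass.isInstance(obj))
  //     throw new ClassCastException();
  //   // otherwise obj is passed through unchanged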
  __ testptr(rax, rax); // object is in rax
  __ jcc(Assembler::zero, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
  __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
  // See if bytecode has already been quicked
  __ movzbl(rdx, Address(rdx, rbx,
                         Address::times_1,
                         Array<u1>::base_offset_in_bytes()));
  __ cmpl(rdx, JVM_CONSTANT_Class);
  __ jcc(Assembler::equal, quicked);
  __ push(atos); // save receiver for result, and for GC
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));

  // vm_result_2 has metadata result
#ifndef _LP64
  // borrow rdi from locals
  __ get_thread(rdi);
  __ get_vm_result_2(rax, rdi);
  __ restore_locals();
#else
  __ get_vm_result_2(rax, r15_thread);
#endif

  __ pop_ptr(rdx); // restore receiver
  __ jmpb(resolved);

  // Get superklass in rax and subklass in rbx
  __ bind(quicked);
  __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
  __ load_resolved_klass_at_index(rax, rcx, rbx);

  __ bind(resolved);
  __ load_klass(rbx, rdx, rscratch1);

  // Generate subtype check. Blows rcx, rdi. Object in rdx.
  // Superklass in rax. Subklass in rbx.
  __ gen_subtype_check(rbx, ok_is_subtype);

  // Come here on failure
  __ push_ptr(rdx);
  // object is at TOS
  __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));

  // Come here on success
  __ bind(ok_is_subtype);
  __ mov(rax, rdx); // Restore object from rdx
  __ jmp(done);

  __ bind(is_null);

  // Collect counts on whether this check-cast sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ profile_null_seen(rcx);
  }

  __ bind(done);
}

void TemplateTable::instanceof() {
  transition(atos, itos);
  Label done, is_null, ok_is_subtype, quicked, resolved;
  __ testptr(rax, rax);
  __ jcc(Assembler::zero, is_null);
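  // Java-level semantics being implemented (a sketch, not generated code):
  //   rax = (obj != null && resolved_klass.isInstance(obj)) ? 1 : 0;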

  // Get cpool & tags index
  __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
  __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
  // See if bytecode has already been quicked
  __ movzbl(rdx, Address(rdx, rbx,
                         Address::times_1,
                         Array<u1>::base_offset_in_bytes()));
  __ cmpl(rdx, JVM_CONSTANT_Class);
  __ jcc(Assembler::equal, quicked);

  __ push(atos); // save receiver for result, and for GC
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
  // vm_result_2 has metadata result

#ifndef _LP64
  // borrow rdi from locals
  __ get_thread(rdi);
  __ get_vm_result_2(rax, rdi);
  __ restore_locals();
#else
  __ get_vm_result_2(rax, r15_thread);
#endif

  __ pop_ptr(rdx); // restore receiver
  __ verify_oop(rdx);
  __ load_klass(rdx, rdx, rscratch1);
  __ jmpb(resolved);

  // Come here on failure
  __ xorl(rax, rax);
  __ jmpb(done);
  // Come here on success
  __ bind(ok_is_subtype);
  __ movl(rax, 1);

  // Collect counts on whether this test sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ jmp(done);
    __ bind(is_null);
    __ profile_null_seen(rcx);
  } else {
    __ bind(is_null);   // same as 'done'
  }
  __ bind(done);
  // rax = 0: obj == nullptr or obj is not an instance of the specified klass
  // rax = 1: obj != nullptr and obj is an instance of the specified klass
}

//----------------------------------------------------------------------------------------------------
// Breakpoints
void TemplateTable::_breakpoint() {
  // Note: We get here even if we are single stepping...
  // jbug insists on setting breakpoints at every bytecode
  // even if we are in single step mode.

  transition(vtos, vtos);

  Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);

  // get the unpatched byte code
  __ get_method(rarg);
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address,
                              InterpreterRuntime::get_original_bytecode_at),
             rarg, rbcp);
  __ mov(rbx, rax); // presumably because the dispatch at the end of this
                    // routine expects the original bytecode in rbx

  // post the breakpoint event
// Note: monitorenter & exit are symmetric routines, which is reflected
// in the assembly code structure as well
//
// Stack layout:
//
// [expressions  ] <--- rsp               = expression stack top
// ..
// [expressions  ]
// [monitor entry] <--- monitor block top = expression stack bot
// ..
// [monitor entry]
// [frame data   ] <--- monitor block bot
// ...
// [saved rbp    ] <--- rbp
void TemplateTable::monitorenter() {
  transition(atos, vtos);

  // check for null object
  __ null_check(rax);

  Label is_inline_type;
  __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
  __ test_markword_is_inline_type(rbx, is_inline_type);

  const Address monitor_block_top(
      rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
      rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
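  // Each monitor entry is a BasicObjectLock, i.e. a (lock word, object
  // reference) pair; only the object slot is touched directly here
  // (BasicObjectLock::obj_offset() below), the rest is handled by
  // lock_object.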

  Label allocated;

  Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
  Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
  Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);

  // initialize entry pointer
  __ xorl(rmon, rmon); // points to free slot or null

  // find a free slot in the monitor block (result in rmon)
  {
    Label entry, loop, exit;
    __ movptr(rtop, monitor_block_top); // derelativize pointer
    __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
  // rmon: points to monitor entry
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception
  // handling for asynchronous exceptions works correctly.
  // The object has already been popped from the stack, so the
  // expression stack looks correct.
  __ increment(rbcp);

  // store object
  __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
  __ lock_object(rmon);

  // check to make sure this monitor doesn't cause stack overflow after locking
  __ save_bcp();  // in case of exception
  __ generate_stack_overflow_check(0);

  // The bcp has already been incremented. Just need to dispatch to
  // next instruction.
  __ dispatch_next(vtos);

  __ bind(is_inline_type);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_identity_exception), rax);
  __ should_not_reach_here();
}

void TemplateTable::monitorexit() {
  transition(atos, vtos);

  // check for null object
  __ null_check(rax);

  const int is_inline_type_mask = markWord::inline_type_pattern;
  Label has_identity;
  __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
  __ andptr(rbx, is_inline_type_mask);
  __ cmpl(rbx, is_inline_type_mask);
  __ jcc(Assembler::notEqual, has_identity);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                    InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();
  __ bind(has_identity);

  const Address monitor_block_top(
      rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
  const Address monitor_block_bot(
      rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
  const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();

  Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
  Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);

  Label found;

  // find matching slot
  {
    Label entry, loop;
    __ movptr(rtop, monitor_block_top); // derelativize pointer
    __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
    // rtop points to current entry, starting with top-most entry

    __ lea(rbot, monitor_block_bot); // points to word before bottom
                                     // of monitor block