20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "gc/shared/collectedHeap.hpp"
29 #include "gc/shared/gc_globals.hpp"
30 #include "gc/shared/tlab_globals.hpp"
31 #include "interpreter/interpreter.hpp"
32 #include "interpreter/interpreterRuntime.hpp"
33 #include "interpreter/interp_masm.hpp"
34 #include "interpreter/templateTable.hpp"
35 #include "memory/universe.hpp"
36 #include "oops/methodCounters.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/objArrayKlass.hpp"
39 #include "oops/oop.inline.hpp"
40 #include "oops/resolvedFieldEntry.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/methodHandles.hpp"
45 #include "runtime/frame.inline.hpp"
46 #include "runtime/safepointMechanism.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/macros.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
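// Every "__" assembler emission is routed through the Disassembler hook so the
// generated code can be cross-referenced with the emitting file and line.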
53
54 // Global Register Names
55 static const Register rbcp = LP64_ONLY(r13) NOT_LP64(rsi);
56 static const Register rlocals = LP64_ONLY(r14) NOT_LP64(rdi);
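// rbcp holds the current bytecode pointer and rlocals the pointer to the
// interpreter frame's local variables throughout interpreted execution.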
57
58 // Address Computation: local variables
59 static inline Address iaddress(int n) {
166 static void do_oop_load(InterpreterMacroAssembler* _masm,
167 Address src,
168 Register dst,
169 DecoratorSet decorators = 0) {
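  // rdx and rbx are handed to load_heap_oop as temporaries for the GC
  // barrier code that may run as part of the load.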
170 __ load_heap_oop(dst, src, rdx, rbx, decorators);
171 }
172
173 Address TemplateTable::at_bcp(int offset) {
174 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
175 return Address(rbcp, offset);
176 }
177
178
179 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
180 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
181 int byte_no) {
182 if (!RewriteBytecodes) return;
183 Label L_patch_done;
184
185 switch (bc) {
186 case Bytecodes::_fast_aputfield:
187 case Bytecodes::_fast_bputfield:
188 case Bytecodes::_fast_zputfield:
189 case Bytecodes::_fast_cputfield:
190 case Bytecodes::_fast_dputfield:
191 case Bytecodes::_fast_fputfield:
192 case Bytecodes::_fast_iputfield:
193 case Bytecodes::_fast_lputfield:
194 case Bytecodes::_fast_sputfield:
195 {
196 // We skip bytecode quickening for putfield instructions when
197 // the put_code written to the constant pool cache is zero.
198 // This is required so that every execution of this instruction
199 // calls out to InterpreterRuntime::resolve_get_put to do
200 // additional, required work.
201 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
202 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
203 __ load_field_entry(temp_reg, bc_reg);
204 if (byte_no == f1_byte) {
205 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
813 Address(rdx, rax,
814 Address::times_4,
815 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
816 noreg, noreg);
817 }
818
819 void TemplateTable::daload() {
820 transition(itos, dtos);
821 // rax: index
822 // rdx: array
823 index_check(rdx, rax); // kills rbx
824 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
825 Address(rdx, rax,
826 Address::times_8,
827 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
828 noreg, noreg);
829 }
830
831 void TemplateTable::aaload() {
832 transition(itos, atos);
833 // rax: index
834 // rdx: array
835 index_check(rdx, rax); // kills rbx
836 do_oop_load(_masm,
837 Address(rdx, rax,
838 UseCompressedOops ? Address::times_4 : Address::times_ptr,
839 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
840 rax,
841 IS_ARRAY);
842 }
843
844 void TemplateTable::baload() {
845 transition(itos, itos);
846 // rax: index
847 // rdx: array
848 index_check(rdx, rax); // kills rbx
849 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
850 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
851 noreg, noreg);
852 }
853
854 void TemplateTable::caload() {
855 transition(itos, itos);
856 // rax: index
857 // rdx: array
858 index_check(rdx, rax); // kills rbx
859 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
860 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
861 noreg, noreg);
1107 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1108 Address(rdx, rbx, Address::times_4,
1109 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1110 noreg /* ftos */, noreg, noreg, noreg);
1111 }
1112
1113 void TemplateTable::dastore() {
1114 transition(dtos, vtos);
1115 __ pop_i(rbx);
1116 // value is in UseSSE >= 2 ? xmm0 : ST(0)
1117 // rbx: index
1118 // rdx: array
1119 index_check(rdx, rbx); // prefer index in rbx
1120 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1121 Address(rdx, rbx, Address::times_8,
1122 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1123 noreg /* dtos */, noreg, noreg, noreg);
1124 }
1125
1126 void TemplateTable::aastore() {
1127 Label is_null, ok_is_subtype, done;
1128 transition(vtos, vtos);
1129 // stack: ..., array, index, value
1130 __ movptr(rax, at_tos()); // value
1131 __ movl(rcx, at_tos_p1()); // index
1132 __ movptr(rdx, at_tos_p2()); // array
1133
1134 Address element_address(rdx, rcx,
1135 UseCompressedOops? Address::times_4 : Address::times_ptr,
1136 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1137
1138 index_check_without_pop(rdx, rcx); // kills rbx
1139 __ testptr(rax, rax);
1140 __ jcc(Assembler::zero, is_null);
1141
1142 // Move subklass into rbx
1143 __ load_klass(rbx, rax, rscratch1);
1144 // Move superklass into rax
1145 __ load_klass(rax, rdx, rscratch1);
1146 __ movptr(rax, Address(rax,
1147 ObjArrayKlass::element_klass_offset()));
1148
1149 // Generate subtype check. Blows rcx, rdi
1150 // Superklass in rax. Subklass in rbx.
1151 __ gen_subtype_check(rbx, ok_is_subtype);
1152
1153 // Come here on failure
1154 // object is at TOS
1155 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1156
1157 // Come here on success
1158 __ bind(ok_is_subtype);
1159
1160 // Get the value we will store
1161 __ movptr(rax, at_tos());
1162 __ movl(rcx, at_tos_p1()); // index
1163 // Now store using the appropriate barrier
1164 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1165 __ jmp(done);
1166
1167 // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
1168 __ bind(is_null);
1169 __ profile_null_seen(rbx);
1170
1171 // Store a null
1172 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1173
1174 // Pop stack arguments
1175 __ bind(done);
1176 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1177 }
1178
1179 void TemplateTable::bastore() {
1180 transition(itos, vtos);
1181 __ pop_i(rbx);
1182 // rax: value
1183 // rbx: index
1184 // rdx: array
1185 index_check(rdx, rbx); // prefer index in rbx
1186 // Need to check whether array is boolean or byte
1187 // since both types share the bastore bytecode.
1188 __ load_klass(rcx, rdx, rscratch1);
1189 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1190 int diffbit = Klass::layout_helper_boolean_diffbit();
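  // The layout helpers of T_BOOLEAN and T_BYTE arrays differ in exactly this
  // bit, so a single test tells the two array types apart.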
1191 __ testl(rcx, diffbit);
1192 Label L_skip;
1193 __ jccb(Assembler::zero, L_skip);
2322 __ jcc(j_not(cc), not_taken);
2323 branch(false, false);
2324 __ bind(not_taken);
2325 __ profile_not_taken_branch(rax);
2326 }
2327
2328 void TemplateTable::if_nullcmp(Condition cc) {
2329 transition(atos, vtos);
2330 // assume branch is more often taken than not (loops use backward branches)
2331 Label not_taken;
2332 __ testptr(rax, rax);
2333 __ jcc(j_not(cc), not_taken);
2334 branch(false, false);
2335 __ bind(not_taken);
2336 __ profile_not_taken_branch(rax);
2337 }
2338
2339 void TemplateTable::if_acmp(Condition cc) {
2340 transition(atos, vtos);
2341 // assume branch is more often taken than not (loops use backward branches)
2342 Label not_taken;
2343 __ pop_ptr(rdx);
2344 __ cmpoop(rdx, rax);
2345 __ jcc(j_not(cc), not_taken);
2346 branch(false, false);
2347 __ bind(not_taken);
2348 __ profile_not_taken_branch(rax);
2349 }
2350
2351 void TemplateTable::ret() {
2352 transition(vtos, vtos);
2353 locals_index(rbx);
2354 LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2355 NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2356 __ profile_ret(rbx, rcx);
2357 __ get_method(rax);
2358 __ movptr(rbcp, Address(rax, Method::const_offset()));
2359 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2360 ConstMethod::codes_offset()));
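  // The final 'true' asks dispatch_next to emit a safepoint poll: a ret may
  // complete a backward branch, so the interpreter must be stoppable here.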
2361 __ dispatch_next(vtos, 0, true);
2362 }
2363
2364 void TemplateTable::wide_ret() {
2365 transition(vtos, vtos);
2366 locals_index_wide(rbx);
2367 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2368 __ profile_ret(rbx, rcx);
2598 const Register thread = rdi;
2599 __ get_thread(thread);
2600 __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2601 #endif
2602 __ jcc(Assembler::zero, no_safepoint);
2603 __ push(state);
2604 __ push_cont_fastpath();
2605 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2606 InterpreterRuntime::at_safepoint));
2607 __ pop_cont_fastpath();
2608 __ pop(state);
2609 __ bind(no_safepoint);
2610 }
2611
2612 // Narrow result if state is itos but result type is smaller.
2613 // Need to narrow in the return bytecode rather than in generate_return_entry
2614 // since compiled code callers expect the result to already be narrowed.
2615 if (state == itos) {
2616 __ narrow(rax);
2617 }
2618 __ remove_activation(state, rbcp);
2619
2620 __ jmp(rbcp);
2621 }
2622
2623 // ----------------------------------------------------------------------------
2624 // Volatile variables demand their effects be made known to all CPUs
2625 // in order. Store buffers on most chips allow reads & writes to
2626 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2627 // without some kind of memory barrier (i.e., it's not sufficient that
2628 // the interpreter does not reorder volatile references, the hardware
2629 // also must not reorder them).
2630 //
2631 // According to the new Java Memory Model (JMM):
2632 // (1) All volatiles are serialized wrt each other. ALSO reads &
2633 // writes act as acquire & release, so:
2634 // (2) A read cannot let unrelated NON-volatile memory refs that
2635 // happen after the read float up to before the read. It's OK for
2636 // non-volatile memory refs that happen before the volatile read to
2637 // float down below it.
2638 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2964 }
2965 // rax: object pointer or null
2966 // cache: cache entry pointer
2967 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2968 rax, cache);
2969
2970 __ load_field_entry(cache, index);
2971 __ bind(L1);
2972 }
2973 }
2974
2975 void TemplateTable::pop_and_check_object(Register r) {
2976 __ pop_ptr(r);
2977 __ null_check(r); // for field access must check obj.
2978 __ verify_oop(r);
2979 }
2980
2981 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2982 transition(vtos, vtos);
2983
2984 const Register obj = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2985 const Register cache = rcx;
2986 const Register index = rdx;
2987 const Register off = rbx;
2988 const Register tos_state = rax;
2989 const Register flags = rdx;
2990 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2991
2992 resolve_cache_and_index_for_field(byte_no, cache, index);
2993 jvmti_post_field_access(cache, index, is_static, false);
2994 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2995
2996 if (!is_static) pop_and_check_object(obj);
2997
2998 const Address field(obj, off, Address::times_1, 0*wordSize);
2999
3000 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
3001
3002 // Dispatch on tos_state; btos is zero, so it can be tested directly
3003 assert(btos == 0, "change code, btos != 0");
3004 __ testl(tos_state, tos_state);
3005 __ jcc(Assembler::notZero, notByte);
3006
3007 // btos
3008 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3009 __ push(btos);
3010 // Rewrite bytecode to be faster
3011 if (!is_static && rc == may_rewrite) {
3012 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3013 }
3014 __ jmp(Done);
3015
3016 __ bind(notByte);
3017 __ cmpl(tos_state, ztos);
3018 __ jcc(Assembler::notEqual, notBool);
3019
3020 // ztos (same code as btos)
3021 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3022 __ push(ztos);
3023 // Rewrite bytecode to be faster
3024 if (!is_static && rc == may_rewrite) {
3025 // use btos rewriting, no truncating to t/f bit is needed for getfield.
3026 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3027 }
3028 __ jmp(Done);
3029
3030 __ bind(notBool);
3031 __ cmpl(tos_state, atos);
3032 __ jcc(Assembler::notEqual, notObj);
3033 // atos
3034 do_oop_load(_masm, field, rax);
3035 __ push(atos);
3036 if (!is_static && rc == may_rewrite) {
3037 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3038 }
3039 __ jmp(Done);
3040
3041 __ bind(notObj);
3042 __ cmpl(tos_state, itos);
3043 __ jcc(Assembler::notEqual, notInt);
3044 // itos
3045 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3046 __ push(itos);
3047 // Rewrite bytecode to be faster
3048 if (!is_static && rc == may_rewrite) {
3049 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3050 }
3051 __ jmp(Done);
3052
3053 __ bind(notInt);
3054 __ cmpl(tos_state, ctos);
3055 __ jcc(Assembler::notEqual, notChar);
3056 // ctos
3057 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3058 __ push(ctos);
3059 // Rewrite bytecode to be faster
3060 if (!is_static && rc == may_rewrite) {
3061 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
3121 #endif
3122
3123 __ bind(Done);
3124 // [jk] not needed currently
3125 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3126 // Assembler::LoadStore));
3127 }
3128
3129 void TemplateTable::getfield(int byte_no) {
3130 getfield_or_static(byte_no, false);
3131 }
3132
3133 void TemplateTable::nofast_getfield(int byte_no) {
3134 getfield_or_static(byte_no, false, may_not_rewrite);
3135 }
3136
3137 void TemplateTable::getstatic(int byte_no) {
3138 getfield_or_static(byte_no, true);
3139 }
3140
3141
3142 // The registers cache and index are expected to be set before the call.
3143 // The function may destroy various registers, just not the cache and index registers.
3144 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3145 // Cache is rcx and index is rdx
3146 const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3147 const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx); // Object pointer
3148 const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3149
3150 if (JvmtiExport::can_post_field_modification()) {
3151 // Check to see if a field modification watch has been set before
3152 // we take the time to call into the VM.
3153 Label L1;
3154 assert_different_registers(cache, obj, rax);
3155 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3156 __ testl(rax, rax);
3157 __ jcc(Assembler::zero, L1);
3158
3159 __ mov(entry, cache);
3160
3161 if (is_static) {
3203 // cache: field entry pointer
3204 // value: jvalue object on the stack
3205 __ call_VM(noreg,
3206 CAST_FROM_FN_PTR(address,
3207 InterpreterRuntime::post_field_modification),
3208 obj, entry, value);
3209 // Reload field entry
3210 __ load_field_entry(cache, index);
3211 __ bind(L1);
3212 }
3213 }
3214
3215 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3216 transition(vtos, vtos);
3217
3218 const Register obj = rcx;
3219 const Register cache = rcx;
3220 const Register index = rdx;
3221 const Register tos_state = rdx;
3222 const Register off = rbx;
3223 const Register flags = rax;
3224
3225 resolve_cache_and_index_for_field(byte_no, cache, index);
3226 jvmti_post_field_mod(cache, index, is_static);
3227 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3228
3229 // [jk] not needed currently
3230 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3231 // Assembler::StoreStore));
3232
3233 Label notVolatile, Done;
3234
3235 // Check for volatile store
3236 __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
3237 __ testl(flags, flags);
3238 __ jcc(Assembler::zero, notVolatile);
3239
3240 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
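  // A volatile store must be followed by a StoreLoad fence; on x86 that is
  // the only reordering the hardware can perform, so it is the one barrier
  // that costs a real fence.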
3241 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3242 Assembler::StoreStore));
3243 __ jmp(Done);
3244 __ bind(notVolatile);
3245
3246 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
3247
3248 __ bind(Done);
3249 }
3250
3251 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3252 Register obj, Register off, Register tos_state) {
3253
3254 // field addresses
3255 const Address field(obj, off, Address::times_1, 0*wordSize);
3256 NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3257
3258 Label notByte, notBool, notInt, notShort, notChar,
3259 notLong, notFloat, notObj;
3260 Label Done;
3261
3262 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3263
3264 // Test TOS state
3265 __ testl(tos_state, tos_state);
3266 __ jcc(Assembler::notZero, notByte);
3267
3268 // btos
3269 {
3270 __ pop(btos);
3271 if (!is_static) pop_and_check_object(obj);
3272 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3273 if (!is_static && rc == may_rewrite) {
3274 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3275 }
3276 __ jmp(Done);
3277 }
3278
3279 __ bind(notByte);
3280 __ cmpl(tos_state, ztos);
3281 __ jcc(Assembler::notEqual, notBool);
3282
3283 // ztos
3284 {
3285 __ pop(ztos);
3286 if (!is_static) pop_and_check_object(obj);
3287 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3288 if (!is_static && rc == may_rewrite) {
3289 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3290 }
3291 __ jmp(Done);
3292 }
3293
3294 __ bind(notBool);
3295 __ cmpl(tos_state, atos);
3296 __ jcc(Assembler::notEqual, notObj);
3297
3298 // atos
3299 {
3300 __ pop(atos);
3301 if (!is_static) pop_and_check_object(obj);
3302 // Store into the field
3303 do_oop_store(_masm, field, rax);
3304 if (!is_static && rc == may_rewrite) {
3305 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3306 }
3307 __ jmp(Done);
3308 }
3309
3310 __ bind(notObj);
3311 __ cmpl(tos_state, itos);
3312 __ jcc(Assembler::notEqual, notInt);
3313
3314 // itos
3315 {
3316 __ pop(itos);
3317 if (!is_static) pop_and_check_object(obj);
3318 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3319 if (!is_static && rc == may_rewrite) {
3320 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3321 }
3322 __ jmp(Done);
3323 }
3324
3325 __ bind(notInt);
3326 __ cmpl(tos_state, ctos);
3327 __ jcc(Assembler::notEqual, notChar);
3426 }
3427
3428 void TemplateTable::jvmti_post_fast_field_mod() {
3429
3430 const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3431
3432 if (JvmtiExport::can_post_field_modification()) {
3433 // Check to see if a field modification watch has been set before
3434 // we take the time to call into the VM.
3435 Label L2;
3436 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3437 __ testl(scratch, scratch);
3438 __ jcc(Assembler::zero, L2);
3439 __ pop_ptr(rbx); // copy the object pointer from tos
3440 __ verify_oop(rbx);
3441 __ push_ptr(rbx); // put the object pointer back on tos
3442 // Save tos values before call_VM() clobbers them. Since we have
3443 // to do it for every data type, we use the saved values as the
3444 // jvalue object.
3445 switch (bytecode()) { // load values into the jvalue object
3446 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3447 case Bytecodes::_fast_bputfield: // fall through
3448 case Bytecodes::_fast_zputfield: // fall through
3449 case Bytecodes::_fast_sputfield: // fall through
3450 case Bytecodes::_fast_cputfield: // fall through
3451 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3452 case Bytecodes::_fast_dputfield: __ push(dtos); break;
3453 case Bytecodes::_fast_fputfield: __ push(ftos); break;
3454 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3455
3456 default:
3457 ShouldNotReachHere();
3458 }
3459 __ mov(scratch, rsp); // points to jvalue on the stack
3460 // access constant pool cache entry
3461 LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3462 NOT_LP64(__ load_field_entry(rax, rdx));
3463 __ verify_oop(rbx);
3464 // rbx: object pointer copied above
3465 // c_rarg2: cache entry pointer
3466 // c_rarg3: jvalue object on the stack
3467 LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3468 NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3469
3470 switch (bytecode()) { // restore tos values
3471 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3472 case Bytecodes::_fast_bputfield: // fall through
3473 case Bytecodes::_fast_zputfield: // fall through
3474 case Bytecodes::_fast_sputfield: // fall through
3475 case Bytecodes::_fast_cputfield: // fall through
3476 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3477 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3478 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3479 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3480 default: break;
3481 }
3482 __ bind(L2);
3483 }
3484 }
3485
3486 void TemplateTable::fast_storefield(TosState state) {
3487 transition(state, vtos);
3488
3489 Register cache = rcx;
3490
3491 Label notVolatile, Done;
3492
3493 jvmti_post_fast_field_mod();
3494
3495 __ push(rax);
3496 __ load_field_entry(rcx, rax);
3497 load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3498 // RBX: field offset, RAX: TOS, RDX: flags
3499 __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3500 __ pop(rax);
3501
3502 // Get object from stack
3503 pop_and_check_object(rcx);
3504
3505 // field address
3506 const Address field(rcx, rbx, Address::times_1);
3507
3508 // Check for volatile store
3509 __ testl(rdx, rdx);
3510 __ jcc(Assembler::zero, notVolatile);
3511
3512 fast_storefield_helper(field, rax);
3513 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3514 Assembler::StoreStore));
3515 __ jmp(Done);
3516 __ bind(notVolatile);
3517
3518 fast_storefield_helper(field, rax);
3519
3520 __ bind(Done);
3521 }
3522
3523 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3524
3525 // access field
3526 switch (bytecode()) {
3527 case Bytecodes::_fast_aputfield:
3528 do_oop_store(_masm, field, rax);
3529 break;
3530 case Bytecodes::_fast_lputfield:
3531 #ifdef _LP64
3532 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3533 #else
3534 __ stop("should not be rewritten");
3535 #endif
3536 break;
3537 case Bytecodes::_fast_iputfield:
3538 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3539 break;
3540 case Bytecodes::_fast_zputfield:
3541 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3542 break;
3543 case Bytecodes::_fast_bputfield:
3544 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3545 break;
3546 case Bytecodes::_fast_sputfield:
3547 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3548 break;
3570 Label L1;
3571 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3572 __ testl(rcx, rcx);
3573 __ jcc(Assembler::zero, L1);
3574 // access constant pool cache entry
3575 LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3576 NOT_LP64(__ load_field_entry(rcx, rdx));
3577 __ verify_oop(rax);
3578 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3579 LP64_ONLY(__ mov(c_rarg1, rax));
3580 // c_rarg1: object pointer copied above
3581 // c_rarg2: cache entry pointer
3582 LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3583 NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3584 __ pop_ptr(rax); // restore object pointer
3585 __ bind(L1);
3586 }
3587
3588 // access constant pool cache
3589 __ load_field_entry(rcx, rbx);
3590 __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3591
3592 // rax: object
3593 __ verify_oop(rax);
3594 __ null_check(rax);
3595 Address field(rax, rbx, Address::times_1);
3596
3597 // access field
3598 switch (bytecode()) {
3599 case Bytecodes::_fast_agetfield:
3600 do_oop_load(_masm, field, rax);
3601 __ verify_oop(rax);
3602 break;
3603 case Bytecodes::_fast_lgetfield:
3604 #ifdef _LP64
3605 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3606 #else
3607 __ stop("should not be rewritten");
3608 #endif
3609 break;
3610 case Bytecodes::_fast_igetfield:
3611 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3612 break;
3613 case Bytecodes::_fast_bgetfield:
3614 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3615 break;
3616 case Bytecodes::_fast_sgetfield:
3617 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3618 break;
4014
4015 // Note: rax_callsite is already pushed
4016
4017 // %%% should make a type profile for any invokedynamic that takes a ref argument
4018 // profile this call
4019 __ profile_call(rbcp);
4020 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4021
4022 __ verify_oop(rax_callsite);
4023
4024 __ jump_from_interpreted(rbx_method, rdx);
4025 }
4026
4027 //-----------------------------------------------------------------------------
4028 // Allocation
4029
4030 void TemplateTable::_new() {
4031 transition(vtos, atos);
4032 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4033 Label slow_case;
4034 Label slow_case_no_pop;
4035 Label done;
4036 Label initialize_header;
4037
4038 __ get_cpool_and_tags(rcx, rax);
4039
4040 // Make sure the class we're about to instantiate has been resolved.
4041 // This is done before loading InstanceKlass to be consistent with the order
4042 // in which the Constant Pool is updated (see ConstantPool::klass_at_put)
4043 const int tags_offset = Array<u1>::base_offset_in_bytes();
4044 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4045 __ jcc(Assembler::notEqual, slow_case_no_pop);
4046
4047 // get InstanceKlass
4048 __ load_resolved_klass_at_index(rcx, rcx, rdx);
4049 __ push(rcx); // save the klass pointer for initializing the header
4050
4051 // make sure klass is initialized
4052 #ifdef _LP64
4053 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4054 __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4055 #else
4056 __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4057 __ jcc(Assembler::notEqual, slow_case);
4058 #endif
4059
4060 // get instance_size in InstanceKlass (scaled to a count of bytes)
4061 __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
4062 // test to see if it is malformed in some way
4063 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
4064 __ jcc(Assembler::notZero, slow_case);
4065
4066 // Allocate the instance:
4067 // If TLAB is enabled:
4068 // Try to allocate in the TLAB.
4069 // If fails, go to the slow path.
4070 // Initialize the allocation.
4071 // Exit.
4072 //
4073 // Go to slow path.
4074
4075 const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4076
4077 if (UseTLAB) {
4078 NOT_LP64(__ get_thread(thread);)
4079 __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
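    // On success rax points at the new object and rdx still holds the
    // instance size in bytes; on TLAB overflow control goes to slow_case.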
4080 if (ZeroTLAB) {
4081 // the fields have been already cleared
4082 __ jmp(initialize_header);
4083 }
4084
4085 // The object is initialized before the header. If the object size is
4086 // zero, go directly to the header initialization.
4087 __ decrement(rdx, sizeof(oopDesc));
4088 __ jcc(Assembler::zero, initialize_header);
4089
4090 // Initialize topmost object field, divide rdx by 8, check if odd and
4091 // test if zero.
4092 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
4093 __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4094
4095 // rdx must have been a multiple of 8
4096 #ifdef ASSERT
4097 // make sure rdx was a multiple of 8
4098 Label L;
4099 // Ignore partial flag stall after shrl() since it is debug VM
4100 __ jcc(Assembler::carryClear, L);
4101 __ stop("object size is not a multiple of 2 - adjust this code");
4102 __ bind(L);
4103 // rdx must be > 0, no extra check needed here
4104 #endif
4105
4106 // initialize remaining object fields: rdx was a multiple of 8
4107 { Label loop;
4108 __ bind(loop);
4109 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4110 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4111 __ decrement(rdx);
4112 __ jcc(Assembler::notZero, loop);
4113 }
4114
4115 // initialize object header only.
4116 __ bind(initialize_header);
4117 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4118 (intptr_t)markWord::prototype().value()); // header
4119 __ pop(rcx); // get saved klass back in the register.
4120 #ifdef _LP64
4121 __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4122 __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
4123 #endif
4124 __ store_klass(rax, rcx, rscratch1); // klass
4125
4126 {
4127 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0, rscratch1);
4128 // Trigger dtrace event for fastpath
4129 __ push(atos);
4130 __ call_VM_leaf(
4131 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4132 __ pop(atos);
4133 }
4134
4135 __ jmp(done);
4136 }
4137
4138 // slow case
4139 __ bind(slow_case);
4140 __ pop(rcx); // restore stack pointer to what it was when we came in.
4141 __ bind(slow_case_no_pop);
4142
4143 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4144 Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4145
4146 __ get_constant_pool(rarg1);
4147 __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4148 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4149 __ verify_oop(rax);
4150
4151 // continue
4152 __ bind(done);
4153 }
4154
4155 void TemplateTable::newarray() {
4156 transition(itos, atos);
4157 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4158 __ load_unsigned_byte(rarg1, at_bcp(1));
4159 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4160 rarg1, rax);
4161 }
4170 __ get_constant_pool(rarg1);
4171 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4172 rarg1, rarg2, rax);
4173 }
4174
4175 void TemplateTable::arraylength() {
4176 transition(atos, itos);
4177 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4178 }
4179
4180 void TemplateTable::checkcast() {
4181 transition(atos, atos);
4182 Label done, is_null, ok_is_subtype, quicked, resolved;
4183 __ testptr(rax, rax); // object is in rax
4184 __ jcc(Assembler::zero, is_null);
4185
4186 // Get cpool & tags index
4187 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4188 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4189 // See if bytecode has already been quicked
4190 __ cmpb(Address(rdx, rbx,
4191 Address::times_1,
4192 Array<u1>::base_offset_in_bytes()),
4193 JVM_CONSTANT_Class);
4194 __ jcc(Assembler::equal, quicked);
4195 __ push(atos); // save receiver for result, and for GC
4196 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4197
4198 // vm_result_2 has metadata result
4199 #ifndef _LP64
4200 // borrow rdi from locals
4201 __ get_thread(rdi);
4202 __ get_vm_result_2(rax, rdi);
4203 __ restore_locals();
4204 #else
4205 __ get_vm_result_2(rax, r15_thread);
4206 #endif
4207
4208 __ pop_ptr(rdx); // restore receiver
4209 __ jmpb(resolved);
4210
4211 // Get superklass in rax and subklass in rbx
4212 __ bind(quicked);
4213 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4214 __ load_resolved_klass_at_index(rax, rcx, rbx);
4215
4216 __ bind(resolved);
4217 __ load_klass(rbx, rdx, rscratch1);
4218
4219 // Generate subtype check. Blows rcx, rdi. Object in rdx.
4220 // Superklass in rax. Subklass in rbx.
4221 __ gen_subtype_check(rbx, ok_is_subtype);
4222
4223 // Come here on failure
4224 __ push_ptr(rdx);
4225 // object is at TOS
4226 __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4227
4228 // Come here on success
4229 __ bind(ok_is_subtype);
4230 __ mov(rax, rdx); // Restore the object from rdx
4231
4232 // Collect counts on whether this check-cast sees nulls a lot or not.
4233 if (ProfileInterpreter) {
4234 __ jmp(done);
4235 __ bind(is_null);
4236 __ profile_null_seen(rcx);
4237 } else {
4238 __ bind(is_null); // same as 'done'
4239 }
4240 __ bind(done);
4241 }
4242
4243 void TemplateTable::instanceof() {
4244 transition(atos, itos);
4245 Label done, is_null, ok_is_subtype, quicked, resolved;
4246 __ testptr(rax, rax);
4247 __ jcc(Assembler::zero, is_null);
4248
4249 // Get cpool & tags index
4250 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4251 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4252 // See if bytecode has already been quicked
4253 __ cmpb(Address(rdx, rbx,
4254 Address::times_1,
4255 Array<u1>::base_offset_in_bytes()),
4256 JVM_CONSTANT_Class);
4257 __ jcc(Assembler::equal, quicked);
4258
4259 __ push(atos); // save receiver for result, and for GC
4260 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4261 // vm_result_2 has metadata result
4262
4263 #ifndef _LP64
4264 // borrow rdi from locals
4265 __ get_thread(rdi);
4266 __ get_vm_result_2(rax, rdi);
4267 __ restore_locals();
4268 #else
4269 __ get_vm_result_2(rax, r15_thread);
4270 #endif
4271
4272 __ pop_ptr(rdx); // restore receiver
4273 __ verify_oop(rdx);
4274 __ load_klass(rdx, rdx, rscratch1);
4275 __ jmpb(resolved);
4276
4288 // Come here on failure
4289 __ xorl(rax, rax);
4290 __ jmpb(done);
4291 // Come here on success
4292 __ bind(ok_is_subtype);
4293 __ movl(rax, 1);
4294
4295 // Collect counts on whether this test sees nulls a lot or not.
4296 if (ProfileInterpreter) {
4297 __ jmp(done);
4298 __ bind(is_null);
4299 __ profile_null_seen(rcx);
4300 } else {
4301 __ bind(is_null); // same as 'done'
4302 }
4303 __ bind(done);
4304 // rax = 0: obj == nullptr or obj is not an instance of the specified klass
4305 // rax = 1: obj != nullptr and obj is an instance of the specified klass
4306 }
4307
4308
4309 //----------------------------------------------------------------------------------------------------
4310 // Breakpoints
4311 void TemplateTable::_breakpoint() {
4312 // Note: We get here even if we are single stepping.
4313 // jbug insists on setting breakpoints at every bytecode
4314 // even if we are in single step mode.
4315
4316 transition(vtos, vtos);
4317
4318 Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4319
4320 // get the unpatched byte code
4321 __ get_method(rarg);
4322 __ call_VM(noreg,
4323 CAST_FROM_FN_PTR(address,
4324 InterpreterRuntime::get_original_bytecode_at),
4325 rarg, rbcp);
4326 __ mov(rbx, rax); // keep the original bytecode in rbx; it is dispatched below
4327
4328 // post the breakpoint event
4350 // Note: monitorenter & exit are symmetric routines, which is reflected
4351 // in the assembly code structure as well
4352 //
4353 // Stack layout:
4354 //
4355 // [expressions ] <--- rsp = expression stack top
4356 // ..
4357 // [expressions ]
4358 // [monitor entry] <--- monitor block top = expression stack bot
4359 // ..
4360 // [monitor entry]
4361 // [frame data ] <--- monitor block bot
4362 // ...
4363 // [saved rbp ] <--- rbp
4364 void TemplateTable::monitorenter() {
4365 transition(atos, vtos);
4366
4367 // check for null object
4368 __ null_check(rax);
4369
4370 const Address monitor_block_top(
4371 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4372 const Address monitor_block_bot(
4373 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4374 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4375
4376 Label allocated;
4377
4378 Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4379 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4380 Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4381
4382 // initialize entry pointer
4383 __ xorl(rmon, rmon); // points to free slot or null
4384
4385 // find a free slot in the monitor block (result in rmon)
4386 {
4387 Label entry, loop, exit;
4388 __ movptr(rtop, monitor_block_top); // derelativize pointer
4389 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
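    // monitor_block_top stores an rbp-relative offset; the lea above turns
    // it back into an absolute entry pointer.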
4442 // rmon: points to monitor entry
4443 __ bind(allocated);
4444
4445 // Increment bcp to point to the next bytecode, so exception
4446 // handling for async. exceptions work correctly.
4447 // The object has already been popped from the stack, so the
4448 // expression stack looks correct.
4449 __ increment(rbcp);
4450
4451 // store object
4452 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4453 __ lock_object(rmon);
4454
4455 // check to make sure this monitor doesn't cause stack overflow after locking
4456 __ save_bcp(); // in case of exception
4457 __ generate_stack_overflow_check(0);
4458
4459 // The bcp has already been incremented. Just need to dispatch to
4460 // next instruction.
4461 __ dispatch_next(vtos);
4462 }
4463
4464 void TemplateTable::monitorexit() {
4465 transition(atos, vtos);
4466
4467 // check for null object
4468 __ null_check(rax);
4469
4470 const Address monitor_block_top(
4471 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4472 const Address monitor_block_bot(
4473 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4474 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4475
4476 Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4477 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4478
4479 Label found;
4480
4481 // find matching slot
4482 {
4483 Label entry, loop;
4484 __ movptr(rtop, monitor_block_top); // derelativize pointer
4485 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4486 // rtop points to current entry, starting with top-most entry
4487
4488 __ lea(rbot, monitor_block_bot); // points to word before bottom
4489 // of monitor block
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "compiler/disassembler.hpp"
28 #include "gc/shared/collectedHeap.hpp"
29 #include "gc/shared/gc_globals.hpp"
30 #include "gc/shared/tlab_globals.hpp"
31 #include "interpreter/interpreter.hpp"
32 #include "interpreter/interpreterRuntime.hpp"
33 #include "interpreter/interp_masm.hpp"
34 #include "interpreter/templateTable.hpp"
35 #include "memory/universe.hpp"
36 #include "oops/methodCounters.hpp"
37 #include "oops/methodData.hpp"
38 #include "oops/objArrayKlass.hpp"
39 #include "oops/oop.inline.hpp"
40 #include "oops/inlineKlass.hpp"
41 #include "oops/resolvedFieldEntry.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/methodHandles.hpp"
46 #include "runtime/frame.inline.hpp"
47 #include "runtime/safepointMechanism.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/stubRoutines.hpp"
50 #include "runtime/synchronizer.hpp"
51 #include "utilities/macros.hpp"
52
53 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
54
55 // Global Register Names
56 static const Register rbcp = LP64_ONLY(r13) NOT_LP64(rsi);
57 static const Register rlocals = LP64_ONLY(r14) NOT_LP64(rdi);
58
59 // Address Computation: local variables
60 static inline Address iaddress(int n) {
167 static void do_oop_load(InterpreterMacroAssembler* _masm,
168 Address src,
169 Register dst,
170 DecoratorSet decorators = 0) {
171 __ load_heap_oop(dst, src, rdx, rbx, decorators);
172 }
173
174 Address TemplateTable::at_bcp(int offset) {
175 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
176 return Address(rbcp, offset);
177 }
178
179
180 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
181 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
182 int byte_no) {
183 if (!RewriteBytecodes) return;
184 Label L_patch_done;
185
186 switch (bc) {
187 case Bytecodes::_fast_vputfield:
188 case Bytecodes::_fast_aputfield:
189 case Bytecodes::_fast_bputfield:
190 case Bytecodes::_fast_zputfield:
191 case Bytecodes::_fast_cputfield:
192 case Bytecodes::_fast_dputfield:
193 case Bytecodes::_fast_fputfield:
194 case Bytecodes::_fast_iputfield:
195 case Bytecodes::_fast_lputfield:
196 case Bytecodes::_fast_sputfield:
197 {
198 // We skip bytecode quickening for putfield instructions when
199 // the put_code written to the constant pool cache is zero.
200 // This is required so that every execution of this instruction
201 // calls out to InterpreterRuntime::resolve_get_put to do
202 // additional, required work.
203 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
204 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
205 __ load_field_entry(temp_reg, bc_reg);
206 if (byte_no == f1_byte) {
207 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
815 Address(rdx, rax,
816 Address::times_4,
817 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
818 noreg, noreg);
819 }
820
821 void TemplateTable::daload() {
822 transition(itos, dtos);
823 // rax: index
824 // rdx: array
825 index_check(rdx, rax); // kills rbx
826 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
827 Address(rdx, rax,
828 Address::times_8,
829 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
830 noreg, noreg);
831 }
832
833 void TemplateTable::aaload() {
834 transition(itos, atos);
835 Register array = rdx;
836 Register index = rax;
837
838 index_check(array, index); // kills rbx
839 __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
840 if (UseFlatArray) {
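    // A flat array stores its elements' field payloads inline instead of as
    // oops, so the element must be read out and buffered as a heap object.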
841 Label is_flat_array, done;
842 __ test_flat_array_oop(array, rbx, is_flat_array);
843 do_oop_load(_masm,
844 Address(array, index,
845 UseCompressedOops ? Address::times_4 : Address::times_ptr,
846 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
847 rax,
848 IS_ARRAY);
849 __ jmp(done);
850 __ bind(is_flat_array);
851 __ read_flat_element(array, index, rbx, rcx, rax);
852 __ bind(done);
853 } else {
854 do_oop_load(_masm,
855 Address(array, index,
856 UseCompressedOops ? Address::times_4 : Address::times_ptr,
857 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
858 rax,
859 IS_ARRAY);
860 }
861 __ profile_element_type(rbx, rax, rcx);
862 }
863
864 void TemplateTable::baload() {
865 transition(itos, itos);
866 // rax: index
867 // rdx: array
868 index_check(rdx, rax); // kills rbx
869 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
870 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
871 noreg, noreg);
872 }
873
874 void TemplateTable::caload() {
875 transition(itos, itos);
876 // rax: index
877 // rdx: array
878 index_check(rdx, rax); // kills rbx
879 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
880 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
881 noreg, noreg);
1127 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1128 Address(rdx, rbx, Address::times_4,
1129 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1130 noreg /* ftos */, noreg, noreg, noreg);
1131 }
1132
1133 void TemplateTable::dastore() {
1134 transition(dtos, vtos);
1135 __ pop_i(rbx);
1136 // value is in UseSSE >= 2 ? xmm0 : ST(0)
1137 // rbx: index
1138 // rdx: array
1139 index_check(rdx, rbx); // prefer index in rbx
1140 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1141 Address(rdx, rbx, Address::times_8,
1142 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1143 noreg /* dtos */, noreg, noreg, noreg);
1144 }
1145
1146 void TemplateTable::aastore() {
1147 Label is_null, is_flat_array, ok_is_subtype, done;
1148 transition(vtos, vtos);
1149 // stack: ..., array, index, value
1150 __ movptr(rax, at_tos()); // value
1151 __ movl(rcx, at_tos_p1()); // index
1152 __ movptr(rdx, at_tos_p2()); // array
1153
1154 Address element_address(rdx, rcx,
1155 UseCompressedOops? Address::times_4 : Address::times_ptr,
1156 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1157
1158 index_check_without_pop(rdx, rcx); // kills rbx
1159
1160 __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1161 __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1162
1163 __ testptr(rax, rax);
1164 __ jcc(Assembler::zero, is_null);
1165
1166 // Move array class to rdi
1167 __ load_klass(rdi, rdx, rscratch1);
1168 if (UseFlatArray) {
1169 __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1170 __ test_flat_array_layout(rbx, is_flat_array);
1171 }
1172
1173 // Move subklass into rbx
1174 __ load_klass(rbx, rax, rscratch1);
1175 // Move array element superklass into rax
1176 __ movptr(rax, Address(rdi,
1177 ObjArrayKlass::element_klass_offset()));
1178
1179 // Generate subtype check. Blows rcx, rdi
1180 // Superklass in rax. Subklass in rbx.
1181 // is "rbx <: rax" ? (value subclass <: array element superclass)
1182 __ gen_subtype_check(rbx, ok_is_subtype, false);
1183
1184 // Come here on failure
1185 // object is at TOS
1186 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1187
1188 // Come here on success
1189 __ bind(ok_is_subtype);
1190
1191 // Get the value we will store
1192 __ movptr(rax, at_tos());
1193 __ movl(rcx, at_tos_p1()); // index
1194 // Now store using the appropriate barrier
1195 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1196 __ jmp(done);
1197
1198 // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
1199 __ bind(is_null);
1200 if (EnableValhalla) {
1201 Label is_null_into_value_array_npe, store_null;
1202
1203 // No way to store null in null-free array
1204 __ test_null_free_array_oop(rdx, rbx, is_null_into_value_array_npe);
1205 __ jmp(store_null);
1206
1207 __ bind(is_null_into_value_array_npe);
1208 __ jump(ExternalAddress(Interpreter::_throw_NullPointerException_entry));
1209
1210 __ bind(store_null);
1211 }
1212 // Store a null
1213 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1214 __ jmp(done);
1215
1216 if (UseFlatArray) {
1217 Label is_type_ok;
1218 __ bind(is_flat_array); // Store non-null value to flat
1219
1220 // Simplistic type check...
1221
1222 // Profile the not-null value's klass.
1223 __ load_klass(rbx, rax, rscratch1);
1224 // Move element klass into rax
1225 __ movptr(rax, Address(rdi, ArrayKlass::element_klass_offset()));
1226 // flat value array needs exact type match
1227 // is "rax == rbx" (value subclass == array element superclass)
1228 __ cmpptr(rax, rbx);
1229 __ jccb(Assembler::equal, is_type_ok);
1230
1231 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1232
1233 __ bind(is_type_ok);
1234 // rbx: value's klass
1235 // rdx: array
1236 // rdi: array klass
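    // An empty inline type has no payload, so there is nothing to copy.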
1237 __ test_klass_is_empty_inline_type(rbx, rax, done);
1238
1239 // calc dst for copy
1240 __ movl(rax, at_tos_p1()); // index
1241 __ data_for_value_array_index(rdx, rdi, rax, rax);
1242
1243 // ...and src for copy
1244 __ movptr(rcx, at_tos()); // value
1245 __ data_for_oop(rcx, rcx, rbx);
1246
1247 __ access_value_copy(IN_HEAP, rcx, rax, rbx);
1248 }
1249 // Pop stack arguments
1250 __ bind(done);
1251 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1252 }
1253
1254 void TemplateTable::bastore() {
1255 transition(itos, vtos);
1256 __ pop_i(rbx);
1257 // rax: value
1258 // rbx: index
1259 // rdx: array
1260 index_check(rdx, rbx); // prefer index in rbx
1261 // Need to check whether array is boolean or byte
1262 // since both types share the bastore bytecode.
1263 __ load_klass(rcx, rdx, rscratch1);
1264 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1265 int diffbit = Klass::layout_helper_boolean_diffbit();
1266 __ testl(rcx, diffbit);
1267 Label L_skip;
1268 __ jccb(Assembler::zero, L_skip);
2397 __ jcc(j_not(cc), not_taken);
2398 branch(false, false);
2399 __ bind(not_taken);
2400 __ profile_not_taken_branch(rax);
2401 }
2402
2403 void TemplateTable::if_nullcmp(Condition cc) {
2404 transition(atos, vtos);
2405 // assume branch is more often taken than not (loops use backward branches)
2406 Label not_taken;
2407 __ testptr(rax, rax);
2408 __ jcc(j_not(cc), not_taken);
2409 branch(false, false);
2410 __ bind(not_taken);
2411 __ profile_not_taken_branch(rax);
2412 }
2413
2414 void TemplateTable::if_acmp(Condition cc) {
2415 transition(atos, vtos);
2416 // assume branch is more often taken than not (loops use backward branches)
2417 Label taken, not_taken;
2418 __ pop_ptr(rdx);
2419
2420 __ profile_acmp(rbx, rdx, rax, rcx);
2421
2422 const int is_inline_type_mask = markWord::inline_type_pattern;
2423 if (EnableValhalla) {
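    // Under Valhalla, acmp on two value objects must test substitutability
    // rather than reference identity.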
2424 __ cmpoop(rdx, rax);
2425 __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
2426
2427 // might be substitutable, test if either rax or rdx is null
2428 __ testptr(rax, rax);
2429 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2430 __ testptr(rdx, rdx);
2431 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2432
2433 // and both are values ?
2434 __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2435 __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2436 __ andptr(rbx, is_inline_type_mask);
2437 __ cmpptr(rbx, is_inline_type_mask);
2438 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2439
2440 // same value klass ?
2441 __ load_metadata(rbx, rdx);
2442 __ load_metadata(rcx, rax);
2443 __ cmpptr(rbx, rcx);
2444 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2445
2446 // Both are known to be the same type; test for substitutability...
2447 if (cc == equal) {
2448 invoke_is_substitutable(rax, rdx, taken, not_taken);
2449 } else {
2450 invoke_is_substitutable(rax, rdx, not_taken, taken);
2451 }
2452 __ stop("Not reachable");
2453 }
2454
2455 __ cmpoop(rdx, rax);
2456 __ jcc(j_not(cc), not_taken);
2457 __ bind(taken);
2458 branch(false, false);
2459 __ bind(not_taken);
2460 __ profile_not_taken_branch(rax, true);
2461 }
2462
2463 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2464 Label& is_subst, Label& not_subst) {
2465 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2466 // The answer is in rax; jump to the corresponding outcome
2467 __ testl(rax, rax);
2468 __ jcc(Assembler::zero, not_subst);
2469 __ jmp(is_subst);
2470 }
2471
2472 void TemplateTable::ret() {
2473 transition(vtos, vtos);
2474 locals_index(rbx);
2475 LP64_ONLY(__ movslq(rbx, iaddress(rbx))); // get return bci, compute return bcp
2476 NOT_LP64(__ movptr(rbx, iaddress(rbx)));
2477 __ profile_ret(rbx, rcx);
2478 __ get_method(rax);
2479 __ movptr(rbcp, Address(rax, Method::const_offset()));
2480 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2481 ConstMethod::codes_offset()));
2482 __ dispatch_next(vtos, 0, true);
2483 }
2484
2485 void TemplateTable::wide_ret() {
2486 transition(vtos, vtos);
2487 locals_index_wide(rbx);
2488 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2489 __ profile_ret(rbx, rcx);
2719 const Register thread = rdi;
2720 __ get_thread(thread);
2721 __ testb(Address(thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2722 #endif
2723 __ jcc(Assembler::zero, no_safepoint);
2724 __ push(state);
2725 __ push_cont_fastpath();
2726 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2727 InterpreterRuntime::at_safepoint));
2728 __ pop_cont_fastpath();
2729 __ pop(state);
2730 __ bind(no_safepoint);
2731 }
2732
2733 // Narrow result if state is itos but result type is smaller.
2734 // Need to narrow in the return bytecode rather than in generate_return_entry
2735 // since compiled code callers expect the result to already be narrowed.
2736 if (state == itos) {
2737 __ narrow(rax);
2738 }
2739
2740 __ remove_activation(state, rbcp, true, true, true);
2741
2742 __ jmp(rbcp);
2743 }
2744
2745 // ----------------------------------------------------------------------------
2746 // Volatile variables demand their effects be made known to all CPUs
2747 // in order. Store buffers on most chips allow reads & writes to
2748 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2749 // without some kind of memory barrier (i.e., it's not sufficient that
2750 // the interpreter does not reorder volatile references, the hardware
2751 // also must not reorder them).
2752 //
2753 // According to the new Java Memory Model (JMM):
2754 // (1) All volatiles are serialized wrt each other. ALSO reads &
2755 // writes act as acquire & release, so:
2756 // (2) A read cannot let unrelated NON-volatile memory refs that
2757 // happen after the read float up to before the read. It's OK for
2758 // non-volatile memory refs that happen before the volatile read to
2759 // float down below it.
2760 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
3086 }
3087 // rax: object pointer or null
3088 // cache: cache entry pointer
3089 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
3090 rax, cache);
3091
3092 __ load_field_entry(cache, index);
3093 __ bind(L1);
3094 }
3095 }
3096
3097 void TemplateTable::pop_and_check_object(Register r) {
3098 __ pop_ptr(r);
3099 __ null_check(r); // for field access must check obj.
3100 __ verify_oop(r);
3101 }
3102
3103 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3104 transition(vtos, vtos);
3105
3106 const Register obj = LP64_ONLY(r9) NOT_LP64(rcx);
3107 const Register cache = rcx;
3108 const Register index = rdx;
3109 const Register off = rbx;
3110 const Register tos_state = rax;
3111 const Register flags = rdx;
3112 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
3113
3114 resolve_cache_and_index_for_field(byte_no, cache, index);
3115 jvmti_post_field_access(cache, index, is_static, false);
3116 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3117
3118 const Address field(obj, off, Address::times_1, 0*wordSize);
3119
3120 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
3121
3122 // Dispatch on tos_state; btos is zero, so it can be tested directly
3123 assert(btos == 0, "change code, btos != 0");
3124 __ testl(tos_state, tos_state);
3125 __ jcc(Assembler::notZero, notByte);
3126
3127 // btos
3128 if (!is_static) pop_and_check_object(obj);
3129 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3130 __ push(btos);
3131 // Rewrite bytecode to be faster
3132 if (!is_static && rc == may_rewrite) {
3133 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3134 }
3135 __ jmp(Done);
3136
3137 __ bind(notByte);
3138 __ cmpl(tos_state, ztos);
3139 __ jcc(Assembler::notEqual, notBool);
3140 if (!is_static) pop_and_check_object(obj);
3141 // ztos (same code as btos)
3142 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
3143 __ push(ztos);
3144 // Rewrite bytecode to be faster
3145 if (!is_static && rc == may_rewrite) {
3146 // use btos rewriting, no truncating to t/f bit is needed for getfield.
3147 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
3148 }
3149 __ jmp(Done);
3150
3151 __ bind(notBool);
3152 __ cmpl(tos_state, atos);
3153 __ jcc(Assembler::notEqual, notObj);
3154 // atos
3155 if (!EnableValhalla) {
3156 if (!is_static) pop_and_check_object(obj);
3157 do_oop_load(_masm, field, rax);
3158 __ push(atos);
3159 if (!is_static && rc == may_rewrite) {
3160 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3161 }
3162 __ jmp(Done);
3163 } else {
3164 if (is_static) {
3165 __ load_heap_oop(rax, field);
3166 Label is_null_free_inline_type, uninitialized;
3167 // The uninitialized case is handled below if the static field has not been set yet
3168 __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3169 // field is not a null free inline type
3170 __ push(atos);
3171 __ jmp(Done);
3172 // field is a null free inline type, must not return null even if uninitialized
3173 __ bind(is_null_free_inline_type);
3174 __ testptr(rax, rax);
3175 __ jcc(Assembler::zero, uninitialized);
3176 __ push(atos);
3177 __ jmp(Done);
3178 __ bind(uninitialized);
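       // The holder class may still be initializing: on 64-bit, if it has in fact
       // reached fully_initialized we can materialize the default value inline;
       // otherwise we must call into the runtime.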
3179 #ifdef _LP64
3180 Label slow_case, finish;
3181 __ movptr(rbx, Address(obj, java_lang_Class::klass_offset()));
3182 __ cmpb(Address(rbx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
3183 __ jcc(Assembler::notEqual, slow_case);
3184 __ get_default_value_oop(rbx, rscratch1, rax);
3185 __ jmp(finish);
3186 __ bind(slow_case);
3187 #endif // _LP64
3188 __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_static_inline_type_field),
3189 obj, cache);
3190 #ifdef _LP64
3191 __ bind(finish);
3192 #endif // _LP64
3193 __ verify_oop(rax);
3194 __ push(atos);
3195 __ jmp(Done);
3196 } else {
3197 Label is_flat, nonnull, is_inline_type, rewrite_inline, has_null_marker;
3198 __ test_field_is_null_free_inline_type(flags, rscratch1, is_inline_type);
3199 __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3200 // field is not a null free inline type
3201 pop_and_check_object(obj);
3202 __ load_heap_oop(rax, field);
3203 __ push(atos);
3204 if (rc == may_rewrite) {
3205 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
3206 }
3207 __ jmp(Done);
3208 __ bind(is_inline_type);
3209 __ test_field_is_flat(flags, rscratch1, is_flat);
3210 // field is not flat
3211 pop_and_check_object(obj);
3212 __ load_heap_oop(rax, field);
3213 __ testptr(rax, rax);
3214 __ jcc(Assembler::notZero, nonnull);
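       // A null here means the null-free field has never been written:
       // substitute the default value of the field's klass.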
3215 __ load_unsigned_short(flags, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3216 __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3217 __ get_inline_type_field_klass(rcx, flags, rbx);
3218 __ get_default_value_oop(rbx, rcx, rax);
3219 __ bind(nonnull);
3220 __ verify_oop(rax);
3221 __ push(atos);
3222 __ jmp(rewrite_inline);
3223 __ bind(is_flat);
3224 pop_and_check_object(rax);
3225 __ load_unsigned_short(rdx, Address(cache, in_bytes(ResolvedFieldEntry::field_index_offset())));
3226 __ movptr(rcx, Address(cache, ResolvedFieldEntry::field_holder_offset()));
3227 __ read_flat_field(rcx, rdx, rbx, rax);
3228 __ verify_oop(rax);
3229 __ push(atos);
3230 __ jmp(rewrite_inline);
3231 __ bind(has_null_marker);
3232 pop_and_check_object(rax);
3233 __ load_field_entry(rcx, rbx);
3234 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3235 __ get_vm_result(rax, r15_thread);
3236 __ push(atos);
3237 __ bind(rewrite_inline);
3238 if (rc == may_rewrite) {
3239 patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
3240 }
3241 __ jmp(Done);
3242 }
3243 }
3244
3245 __ bind(notObj);
3246
3247 if (!is_static) pop_and_check_object(obj);
3248
3249 __ cmpl(tos_state, itos);
3250 __ jcc(Assembler::notEqual, notInt);
3251 // itos
3252 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3253 __ push(itos);
3254 // Rewrite bytecode to be faster
3255 if (!is_static && rc == may_rewrite) {
3256 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
3257 }
3258 __ jmp(Done);
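     // Quickening example: after the first execution of getfield on an int field,
     // the rewritten _fast_igetfield skips the tos_state dispatch above on every
     // subsequent execution.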
3259
3260 __ bind(notInt);
3261 __ cmpl(tos_state, ctos);
3262 __ jcc(Assembler::notEqual, notChar);
3263 // ctos
3264 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3265 __ push(ctos);
3266 // Rewrite bytecode to be faster
3267 if (!is_static && rc == may_rewrite) {
3268 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
3328 #endif
3329
3330 __ bind(Done);
3331 // [jk] not needed currently
3332 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3333 // Assembler::LoadStore));
3334 }
3335
3336 void TemplateTable::getfield(int byte_no) {
3337 getfield_or_static(byte_no, false);
3338 }
3339
3340 void TemplateTable::nofast_getfield(int byte_no) {
3341 getfield_or_static(byte_no, false, may_not_rewrite);
3342 }
3343
3344 void TemplateTable::getstatic(int byte_no) {
3345 getfield_or_static(byte_no, true);
3346 }
3347
3348 // The registers cache and index are expected to be set before the call.
3349 // The function may destroy various registers, just not the cache and index registers.
3350 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
3351 // Cache is rcx and index is rdx
3352 const Register entry = LP64_ONLY(c_rarg2) NOT_LP64(rax); // ResolvedFieldEntry
3353 const Register obj = LP64_ONLY(c_rarg1) NOT_LP64(rbx); // Object pointer
3354 const Register value = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // JValue object
3355
3356 if (JvmtiExport::can_post_field_modification()) {
3357 // Check to see if a field modification watch has been set before
3358 // we take the time to call into the VM.
3359 Label L1;
3360 assert_different_registers(cache, obj, rax);
3361 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3362 __ testl(rax, rax);
3363 __ jcc(Assembler::zero, L1);
3364
3365 __ mov(entry, cache);
3366
3367 if (is_static) {
3409 // cache: field entry pointer
3410 // value: jvalue object on the stack
3411 __ call_VM(noreg,
3412 CAST_FROM_FN_PTR(address,
3413 InterpreterRuntime::post_field_modification),
3414 obj, entry, value);
3415 // Reload field entry
3416 __ load_field_entry(cache, index);
3417 __ bind(L1);
3418 }
3419 }
3420
3421 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3422 transition(vtos, vtos);
3423
3424 const Register obj = rcx;
3425 const Register cache = rcx;
3426 const Register index = rdx;
3427 const Register tos_state = rdx;
3428 const Register off = rbx;
3429 const Register flags = r9;
3430
3431 resolve_cache_and_index_for_field(byte_no, cache, index);
3432 jvmti_post_field_mod(cache, index, is_static);
3433 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
3434
3435 // [jk] not needed currently
3436 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3437 // Assembler::StoreStore));
3438
3439 Label notVolatile, Done;
3440
3441 // Check for volatile store
3442 __ movl(rscratch1, flags);
3443 __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
3444 __ testl(rscratch1, rscratch1);
3445 __ jcc(Assembler::zero, notVolatile);
3446
3447 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
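     // JMM: a volatile store must not be reordered with subsequent volatile
     // accesses, hence the StoreLoad | StoreStore barrier after the store.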
3448 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3449 Assembler::StoreStore));
3450 __ jmp(Done);
3451 __ bind(notVolatile);
3452
3453 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
3454
3455 __ bind(Done);
3456 }
3457
3458 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3459 Register obj, Register off, Register tos_state, Register flags) {
3460
3461 // field addresses
3462 const Address field(obj, off, Address::times_1, 0*wordSize);
3463 NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
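     // On 32-bit, 'hi' addresses the high word of a long/double field (offset + wordSize).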
3464
3465 Label notByte, notBool, notInt, notShort, notChar,
3466 notLong, notFloat, notObj, notInlineType;
3467 Label Done;
3468
3469 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3470
3471 // Test TOS state
3472 __ testl(tos_state, tos_state);
3473 __ jcc(Assembler::notZero, notByte);
3474
3475 // btos
3476 {
3477 __ pop(btos);
3478 if (!is_static) pop_and_check_object(obj);
3479 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3480 if (!is_static && rc == may_rewrite) {
3481 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3482 }
3483 __ jmp(Done);
3484 }
3485
3486 __ bind(notByte);
3487 __ cmpl(tos_state, ztos);
3488 __ jcc(Assembler::notEqual, notBool);
3489
3490 // ztos
3491 {
3492 __ pop(ztos);
3493 if (!is_static) pop_and_check_object(obj);
3494 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3495 if (!is_static && rc == may_rewrite) {
3496 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3497 }
3498 __ jmp(Done);
3499 }
3500
3501 __ bind(notBool);
3502 __ cmpl(tos_state, atos);
3503 __ jcc(Assembler::notEqual, notObj);
3504
3505 // atos
3506 {
3507 if (!EnableValhalla) {
3508 __ pop(atos);
3509 if (!is_static) pop_and_check_object(obj);
3510 // Store into the field
3511 do_oop_store(_masm, field, rax);
3512 if (!is_static && rc == may_rewrite) {
3513 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3514 }
3515 __ jmp(Done);
3516 } else {
3517 __ pop(atos);
3518 if (is_static) {
3519 Label is_inline_type;
3520 __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
3521 __ null_check(rax);
3522 __ bind(is_inline_type);
3523 do_oop_store(_masm, field, rax);
3524 __ jmp(Done);
3525 } else {
3526 Label is_null_free_inline_type, is_flat, has_null_marker,
3527 write_null, rewrite_not_inline, rewrite_inline;
3528 __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3529 __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3530 // Not an inline type
3531 pop_and_check_object(obj);
3532 // Store into the field
3533 do_oop_store(_masm, field, rax);
3534 __ bind(rewrite_not_inline);
3535 if (rc == may_rewrite) {
3536 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3537 }
3538 __ jmp(Done);
3539 // Implementation of the inline type semantics
3540 __ bind(is_null_free_inline_type);
3541 __ null_check(rax);
3542 __ test_field_is_flat(flags, rscratch1, is_flat);
3543 // field is not flat
3544 pop_and_check_object(obj);
3545 // Store into the field
3546 do_oop_store(_masm, field, rax);
3547 __ jmp(rewrite_inline);
3548 __ bind(is_flat);
3549 // field is flat
3550 pop_and_check_object(obj);
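         // Copy the inline object's payload directly into the holder:
         // rax = payload source address, obj + off = destination inside the holder.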
3551 assert_different_registers(rax, rdx, obj, off);
3552 __ load_klass(rdx, rax, rscratch1);
3553 __ data_for_oop(rax, rax, rdx);
3554 __ addptr(obj, off);
3555 __ access_value_copy(IN_HEAP, rax, obj, rdx);
3556 __ jmp(rewrite_inline);
3557 __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3558 pop_and_check_object(rbx);
3559 __ load_field_entry(rcx, rdx);
3560 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3561 __ bind(rewrite_inline);
3562 if (rc == may_rewrite) {
3563 patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
3564 }
3565 __ jmp(Done);
3566 }
3567 }
3568 }
3569
3570 __ bind(notObj);
3571 __ cmpl(tos_state, itos);
3572 __ jcc(Assembler::notEqual, notInt);
3573
3574 // itos
3575 {
3576 __ pop(itos);
3577 if (!is_static) pop_and_check_object(obj);
3578 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3579 if (!is_static && rc == may_rewrite) {
3580 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3581 }
3582 __ jmp(Done);
3583 }
3584
3585 __ bind(notInt);
3586 __ cmpl(tos_state, ctos);
3587 __ jcc(Assembler::notEqual, notChar);
3686 }
3687
3688 void TemplateTable::jvmti_post_fast_field_mod() {
3689
3690 const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3691
3692 if (JvmtiExport::can_post_field_modification()) {
3693 // Check to see if a field modification watch has been set before
3694 // we take the time to call into the VM.
3695 Label L2;
3696 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3697 __ testl(scratch, scratch);
3698 __ jcc(Assembler::zero, L2);
3699 __ pop_ptr(rbx); // copy the object pointer from tos
3700 __ verify_oop(rbx);
3701 __ push_ptr(rbx); // put the object pointer back on tos
3702 // Save tos values before call_VM() clobbers them. Since we have
3703 // to do it for every data type, we use the saved values as the
3704 // jvalue object.
3705 switch (bytecode()) { // load values into the jvalue object
3706 case Bytecodes::_fast_vputfield: // fall through
3707 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3708 case Bytecodes::_fast_bputfield: // fall through
3709 case Bytecodes::_fast_zputfield: // fall through
3710 case Bytecodes::_fast_sputfield: // fall through
3711 case Bytecodes::_fast_cputfield: // fall through
3712 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3713 case Bytecodes::_fast_dputfield: __ push(dtos); break;
3714 case Bytecodes::_fast_fputfield: __ push(ftos); break;
3715 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3716
3717 default:
3718 ShouldNotReachHere();
3719 }
3720 __ mov(scratch, rsp); // points to jvalue on the stack
3721 // access constant pool cache entry
3722 LP64_ONLY(__ load_field_entry(c_rarg2, rax));
3723 NOT_LP64(__ load_field_entry(rax, rdx));
3724 __ verify_oop(rbx);
3725 // rbx: object pointer copied above
3726 // c_rarg2: cache entry pointer
3727 // c_rarg3: jvalue object on the stack
3728 LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3729 NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3730
3731 switch (bytecode()) { // restore tos values
3732 case Bytecodes::_fast_vputfield: // fall through
3733 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3734 case Bytecodes::_fast_bputfield: // fall through
3735 case Bytecodes::_fast_zputfield: // fall through
3736 case Bytecodes::_fast_sputfield: // fall through
3737 case Bytecodes::_fast_cputfield: // fall through
3738 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3739 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3740 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3741 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3742 default: break;
3743 }
3744 __ bind(L2);
3745 }
3746 }
3747
3748 void TemplateTable::fast_storefield(TosState state) {
3749 transition(state, vtos);
3750
3751 Label notVolatile, Done;
3752
3753 jvmti_post_fast_field_mod();
3754
3755 __ push(rax);
3756 __ load_field_entry(rcx, rax);
3757 load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
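     // noreg: the object is not loaded here; it is still on the expression
     // stack and is popped into rcx below.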
3758 __ pop(rax);
3759 // RBX: field offset, RCX: cache entry, RAX: TOS value, RDX: flags
3760
3761 // Get object from stack
3762 pop_and_check_object(rcx);
3763
3764 // field address
3765 const Address field(rcx, rbx, Address::times_1);
3766
3767 // Check for volatile store
3768 __ movl(rscratch2, rdx); // copy the flags so rdx survives for the is_flat test
3769 __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3770 __ testl(rscratch2, rscratch2);
3771 __ jcc(Assembler::zero, notVolatile);
3772
3773 fast_storefield_helper(field, rax, rdx);
3774 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3775 Assembler::StoreStore));
3776 __ jmp(Done);
3777 __ bind(notVolatile);
3778
3779 fast_storefield_helper(field, rax, rdx);
3780
3781 __ bind(Done);
3782 }
3783
3784 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3785
3786 // DANGER: 'field' argument depends on rcx and rbx
3787
3788 // access field
3789 switch (bytecode()) {
3790 case Bytecodes::_fast_vputfield:
3791 {
3792 Label is_flat, has_null_marker, write_null, done;
3793 __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3794 // Null free field cases: flat or not flat
3795 __ null_check(rax);
3796 __ test_field_is_flat(flags, rscratch1, is_flat);
3797 // field is not flat
3798 do_oop_store(_masm, field, rax);
3799 __ jmp(done);
3800 __ bind(is_flat);
3801 // field is flat
3802 __ load_klass(rdx, rax, rscratch1);
3803 __ data_for_oop(rax, rax, rdx);
3804 __ lea(rcx, field);
3805 __ access_value_copy(IN_HEAP, rax, rcx, rdx);
3806 __ jmp(done);
3807 __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3808 __ movptr(rbx, rcx);
3809 __ load_field_entry(rcx, rdx);
3810 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3811 __ bind(done);
3812 }
3813 break;
3814 case Bytecodes::_fast_aputfield:
3815 {
3816 do_oop_store(_masm, field, rax);
3817 }
3818 break;
3819 case Bytecodes::_fast_lputfield:
3820 #ifdef _LP64
3821 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3822 #else
3823 __ stop("should not be rewritten");
3824 #endif
3825 break;
3826 case Bytecodes::_fast_iputfield:
3827 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3828 break;
3829 case Bytecodes::_fast_zputfield:
3830 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3831 break;
3832 case Bytecodes::_fast_bputfield:
3833 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3834 break;
3835 case Bytecodes::_fast_sputfield:
3836 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3837 break;
3859 Label L1;
3860 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3861 __ testl(rcx, rcx);
3862 __ jcc(Assembler::zero, L1);
3863 // access constant pool cache entry
3864 LP64_ONLY(__ load_field_entry(c_rarg2, rcx));
3865 NOT_LP64(__ load_field_entry(rcx, rdx));
3866 __ verify_oop(rax);
3867 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3868 LP64_ONLY(__ mov(c_rarg1, rax));
3869 // c_rarg1: object pointer copied above
3870 // c_rarg2: cache entry pointer
3871 LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2));
3872 NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx));
3873 __ pop_ptr(rax); // restore object pointer
3874 __ bind(L1);
3875 }
3876
3877 // access constant pool cache
3878 __ load_field_entry(rcx, rbx);
3879 __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3880
3881 // rax: object
3882 __ verify_oop(rax);
3883 __ null_check(rax);
3884 Address field(rax, rdx, Address::times_1);
3885
3886 // access field
3887 switch (bytecode()) {
3888 case Bytecodes::_fast_vgetfield:
3889 {
3890 Label is_flat, nonnull, Done, has_null_marker;
3891 __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
3892 __ test_field_has_null_marker(rscratch1, rscratch2, has_null_marker);
3893 __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3894 // field is not flat
3895 __ load_heap_oop(rax, field);
3896 __ testptr(rax, rax);
3897 __ jcc(Assembler::notZero, nonnull);
3898 __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3899 __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3900 __ get_inline_type_field_klass(rcx, rdx, rbx);
3901 __ get_default_value_oop(rbx, rcx, rax);
3902 __ bind(nonnull);
3903 __ verify_oop(rax);
3904 __ jmp(Done);
3905 __ bind(is_flat);
3906 // field is flat
3907 __ push(rdx); // save offset
3908 __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3909 __ movptr(rcx, Address(rcx, ResolvedFieldEntry::field_holder_offset()));
3910 __ pop(rbx); // restore offset
3911 __ read_flat_field(rcx, rdx, rbx, rax);
3912 __ jmp(Done);
3913 __ bind(has_null_marker);
3914 // rax = instance, rcx = resolved entry
3915 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3916 __ get_vm_result(rax, r15_thread);
3917 __ bind(Done);
3918 __ verify_oop(rax);
3919 }
3920 break;
3921 case Bytecodes::_fast_agetfield:
3922 do_oop_load(_masm, field, rax);
3923 __ verify_oop(rax);
3924 break;
3925 case Bytecodes::_fast_lgetfield:
3926 #ifdef _LP64
3927 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3928 #else
3929 __ stop("should not be rewritten");
3930 #endif
3931 break;
3932 case Bytecodes::_fast_igetfield:
3933 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3934 break;
3935 case Bytecodes::_fast_bgetfield:
3936 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3937 break;
3938 case Bytecodes::_fast_sgetfield:
3939 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3940 break;
4336
4337 // Note: rax_callsite is already pushed
4338
4339 // %%% should make a type profile for any invokedynamic that takes a ref argument
4340 // profile this call
4341 __ profile_call(rbcp);
4342 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
4343
4344 __ verify_oop(rax_callsite);
4345
4346 __ jump_from_interpreted(rbx_method, rdx);
4347 }
4348
4349 //-----------------------------------------------------------------------------
4350 // Allocation
4351
4352 void TemplateTable::_new() {
4353 transition(vtos, atos);
4354 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
4355 Label slow_case;
4356 Label done;
4357
4358 __ get_cpool_and_tags(rcx, rax);
4359
4360 // Make sure the class we're about to instantiate has been resolved.
4361 // This is done before loading the InstanceKlass to be consistent with the order
4362 // in which the constant pool is updated (see ConstantPool::klass_at_put)
4363 const int tags_offset = Array<u1>::base_offset_in_bytes();
4364 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
4365 __ jcc(Assembler::notEqual, slow_case);
4366
4367 // get InstanceKlass
4368 __ load_resolved_klass_at_index(rcx, rcx, rdx);
4369
4370 // make sure klass is initialized
4371 #ifdef _LP64
4372 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
4373 __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
4374 #else
4375 __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
4376 __ jcc(Assembler::notEqual, slow_case);
4377 #endif
4378
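     // Fast path: allocate_instance attempts inline allocation (TLAB/eden)
     // and branches to slow_case when it cannot.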
4379 __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
4380 __ jmp(done);
4381
4382 // slow case
4383 __ bind(slow_case);
4384
4385 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4386 Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
4387
4388 __ get_constant_pool(rarg1);
4389 __ get_unsigned_2_byte_index_at_bcp(rarg2, 1);
4390 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rarg1, rarg2);
4391 __ verify_oop(rax);
4392
4393 // continue
4394 __ bind(done);
4395 }
4396
4397 void TemplateTable::newarray() {
4398 transition(itos, atos);
4399 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4400 __ load_unsigned_byte(rarg1, at_bcp(1));
4401 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
4402 rarg1, rax);
4403 }
4412 __ get_constant_pool(rarg1);
4413 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
4414 rarg1, rarg2, rax);
4415 }
4416
4417 void TemplateTable::arraylength() {
4418 transition(atos, itos);
4419 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
4420 }
4421
4422 void TemplateTable::checkcast() {
4423 transition(atos, atos);
4424 Label done, is_null, ok_is_subtype, quicked, resolved;
4425 __ testptr(rax, rax); // object is in rax
4426 __ jcc(Assembler::zero, is_null);
4427
4428 // Get cpool & tags index
4429 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4430 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4431 // See if the bytecode has already been quickened
4432 __ movzbl(rdx, Address(rdx, rbx,
4433 Address::times_1,
4434 Array<u1>::base_offset_in_bytes()));
4435 __ cmpl(rdx, JVM_CONSTANT_Class);
4436 __ jcc(Assembler::equal, quicked);
4437 __ push(atos); // save receiver for result, and for GC
4438 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4439
4440 // vm_result_2 has metadata result
4441 #ifndef _LP64
4442 // borrow rdi from locals
4443 __ get_thread(rdi);
4444 __ get_vm_result_2(rax, rdi);
4445 __ restore_locals();
4446 #else
4447 __ get_vm_result_2(rax, r15_thread);
4448 #endif
4449
4450 __ pop_ptr(rdx); // restore receiver
4451 __ jmpb(resolved);
4452
4453 // Get superklass in rax and subklass in rbx
4454 __ bind(quicked);
4455 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
4456 __ load_resolved_klass_at_index(rax, rcx, rbx);
4457
4458 __ bind(resolved);
4459 __ load_klass(rbx, rdx, rscratch1);
4460
4461 // Generate subtype check. Blows rcx, rdi. Object in rdx.
4462 // Superklass in rax. Subklass in rbx.
4463 __ gen_subtype_check(rbx, ok_is_subtype);
4464
4465 // Come here on failure
4466 __ push_ptr(rdx);
4467 // object is at TOS
4468 __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
4469
4470 // Come here on success
4471 __ bind(ok_is_subtype);
4472 __ mov(rax, rdx); // Restore object from rdx
4473 __ jmp(done);
4474
4475 __ bind(is_null);
4476
4477 // Collect counts on whether this check-cast sees nulls a lot or not.
4478 if (ProfileInterpreter) {
4479 __ profile_null_seen(rcx);
4480 }
4481
4482 __ bind(done);
4483 }
4484
4485 void TemplateTable::instanceof() {
4486 transition(atos, itos);
4487 Label done, is_null, ok_is_subtype, quicked, resolved;
4488 __ testptr(rax, rax);
4489 __ jcc(Assembler::zero, is_null);
4490
4491 // Get cpool & tags index
4492 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
4493 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
4494 // See if the bytecode has already been quickened
4495 __ movzbl(rdx, Address(rdx, rbx,
4496 Address::times_1,
4497 Array<u1>::base_offset_in_bytes()));
4498 __ cmpl(rdx, JVM_CONSTANT_Class);
4499 __ jcc(Assembler::equal, quicked);
4500
4501 __ push(atos); // save receiver for result, and for GC
4502 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
4503 // vm_result_2 has metadata result
4504
4505 #ifndef _LP64
4506 // borrow rdi from locals
4507 __ get_thread(rdi);
4508 __ get_vm_result_2(rax, rdi);
4509 __ restore_locals();
4510 #else
4511 __ get_vm_result_2(rax, r15_thread);
4512 #endif
4513
4514 __ pop_ptr(rdx); // restore receiver
4515 __ verify_oop(rdx);
4516 __ load_klass(rdx, rdx, rscratch1);
4517 __ jmpb(resolved);
4518
4530 // Come here on failure
4531 __ xorl(rax, rax);
4532 __ jmpb(done);
4533 // Come here on success
4534 __ bind(ok_is_subtype);
4535 __ movl(rax, 1);
4536
4537 // Collect counts on whether this test sees nulls a lot or not.
4538 if (ProfileInterpreter) {
4539 __ jmp(done);
4540 __ bind(is_null);
4541 __ profile_null_seen(rcx);
4542 } else {
4543 __ bind(is_null); // same as 'done'
4544 }
4545 __ bind(done);
4546 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
4547 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
4548 }
4549
4550 //----------------------------------------------------------------------------------------------------
4551 // Breakpoints
4552 void TemplateTable::_breakpoint() {
4553 // Note: We get here even if we are single stepping.
4554 // jbug insists on setting breakpoints at every bytecode
4555 // even if we are in single step mode.
4556
4557 transition(vtos, vtos);
4558
4559 Register rarg = LP64_ONLY(c_rarg1) NOT_LP64(rcx);
4560
4561 // get the unpatched byte code
4562 __ get_method(rarg);
4563 __ call_VM(noreg,
4564 CAST_FROM_FN_PTR(address,
4565 InterpreterRuntime::get_original_bytecode_at),
4566 rarg, rbcp);
4567 __ mov(rbx, rax); // the dispatch below expects the original bytecode in rbx
4568
4569 // post the breakpoint event
4591 // Note: monitorenter & exit are symmetric routines; which is reflected
4592 // in the assembly code structure as well
4593 //
4594 // Stack layout:
4595 //
4596 // [expressions ] <--- rsp = expression stack top
4597 // ..
4598 // [expressions ]
4599 // [monitor entry] <--- monitor block top = expression stack bot
4600 // ..
4601 // [monitor entry]
4602 // [frame data ] <--- monitor block bot
4603 // ...
4604 // [saved rbp ] <--- rbp
4605 void TemplateTable::monitorenter() {
4606 transition(atos, vtos);
4607
4608 // check for null object
4609 __ null_check(rax);
4610
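     // Inline types have no identity and cannot be locked; detect them via the
     // mark word and throw IdentityException below.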
4611 Label is_inline_type;
4612 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4613 __ test_markword_is_inline_type(rbx, is_inline_type);
4614
4615 const Address monitor_block_top(
4616 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4617 const Address monitor_block_bot(
4618 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4619 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4620
4621 Label allocated;
4622
4623 Register rtop = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
4624 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4625 Register rmon = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4626
4627 // initialize entry pointer
4628 __ xorl(rmon, rmon); // points to free slot or null
4629
4630 // find a free slot in the monitor block (result in rmon)
4631 {
4632 Label entry, loop, exit;
4633 __ movptr(rtop, monitor_block_top); // derelativize pointer
4634 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4687 // rmon: points to monitor entry
4688 __ bind(allocated);
4689
4690 // Increment bcp to point to the next bytecode, so exception
4691 // handling for async. exceptions works correctly.
4692 // The object has already been popped from the stack, so the
4693 // expression stack looks correct.
4694 __ increment(rbcp);
4695
4696 // store object
4697 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4698 __ lock_object(rmon);
4699
4700 // check to make sure this monitor doesn't cause stack overflow after locking
4701 __ save_bcp(); // in case of exception
4702 __ generate_stack_overflow_check(0);
4703
4704 // The bcp has already been incremented. Just need to dispatch to
4705 // next instruction.
4706 __ dispatch_next(vtos);
4707
4708 __ bind(is_inline_type);
4709 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4710 InterpreterRuntime::throw_identity_exception), rax);
4711 __ should_not_reach_here();
4712 }
4713
4714 void TemplateTable::monitorexit() {
4715 transition(atos, vtos);
4716
4717 // check for null object
4718 __ null_check(rax);
4719
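     // Monitors cannot be held on inline types: if the mark word carries the
     // inline type pattern, throw IllegalMonitorStateException.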
4720 const int is_inline_type_mask = markWord::inline_type_pattern;
4721 Label has_identity;
4722 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4723 __ andptr(rbx, is_inline_type_mask);
4724 __ cmpl(rbx, is_inline_type_mask);
4725 __ jcc(Assembler::notEqual, has_identity);
4726 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4727 InterpreterRuntime::throw_illegal_monitor_state_exception));
4728 __ should_not_reach_here();
4729 __ bind(has_identity);
4730
4731 const Address monitor_block_top(
4732 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4733 const Address monitor_block_bot(
4734 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4735 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4736
4737 Register rtop = LP64_ONLY(c_rarg1) NOT_LP64(rdx);
4738 Register rbot = LP64_ONLY(c_rarg2) NOT_LP64(rbx);
4739
4740 Label found;
4741
4742 // find matching slot
4743 {
4744 Label entry, loop;
4745 __ movptr(rtop, monitor_block_top); // derelativize pointer
4746 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4747 // rtop points to current entry, starting with top-most entry
4748
4749 __ lea(rbot, monitor_block_bot); // points to word before bottom
4750 // of monitor block