  Bytecodes::Code code = bytecode();
  __ load_method_entry(Rcache, index);
  switch (byte_no) {
  case f1_byte:
    __ lea(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
    break;
  case f2_byte:
    __ lea(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
    break;
  }
  // Load-acquire the bytecode to match store-release in InterpreterRuntime
  __ ldarb(temp, temp);
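  // subs with zr as the destination acts as a plain cmp: it sets the
  // condition flags without clobbering a general-purpose register.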
  __ subs(zr, temp, (int) code);  // have we resolved this bytecode?
  __ br(Assembler::EQ, resolved);

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  __ bind(clinit_barrier_slow);
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(temp, (int) code);
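  // resolve_from_cache may block (e.g. on class loading or initialization),
  // so use the preemptable variant, which allows a mounted virtual thread
  // to be unmounted while the call is in progress.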
  __ call_VM_preemptable(noreg, entry, temp);

  // Update registers with resolved info
  __ load_method_entry(Rcache, index);
  // n.b. unlike x86, Rcache is now rcpool plus the indexed offset,
  // so all clients of this method must be modified accordingly
  __ bind(resolved);

  // Class initialization barrier for static methods
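  // If the holder class is still being initialized by another thread,
  // branch back to clinit_barrier_slow and rerun the resolution path,
  // which blocks until initialization completes.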
  if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
    __ ldr(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
    __ load_method_holder(temp, temp);
    __ clinit_barrier(temp, rscratch1, nullptr, &clinit_barrier_slow);
  }
}

void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
                                                      Register Rcache,
                                                      Register index) {
  const Register temp = r19;
  assert_different_registers(Rcache, index, temp);

  Label resolved;

  Bytecodes::Code code = bytecode();
  switch (code) {
  case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
  case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
  default: break;
  }
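  // The _nofast variants are used when bytecode rewriting is suppressed
  // (e.g. under JVMTI); they resolve exactly like the plain bytecodes.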

  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
  __ load_field_entry(Rcache, index);
  if (byte_no == f1_byte) {
    __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::get_code_offset())));
  } else {
    __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::put_code_offset())));
  }
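  // f1 selects the get_code slot, f2 the put_code slot: the get and put
  // directions of a field entry are resolved independently.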
  // Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
  __ ldarb(temp, temp);
  __ subs(zr, temp, (int) code);  // have we resolved this bytecode?
  __ br(Assembler::EQ, resolved);

  // resolve first time through
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(temp, (int) code);
  __ call_VM_preemptable(noreg, entry, temp);

  // Update registers with resolved info
  __ load_field_entry(Rcache, index);
  __ bind(resolved);
}

void TemplateTable::load_resolved_field_entry(Register obj,
                                              Register cache,
                                              Register tos_state,
                                              Register offset,
                                              Register flags,
                                              bool is_static = false) {
  assert_different_registers(cache, tos_state, flags, offset);

  // Field offset
  __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // Flags
  __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
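  // (the flags byte packs the field's is_final / is_volatile bits)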

// ...

  const Register index = r3;
  assert_different_registers(method, appendix, cache, index, rcpool);

  __ save_bcp();
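  // (save_bcp stores the bytecode pointer into the interpreter frame so
  // the runtime can locate the current bytecode across the call below)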

  Label resolved;

  __ load_resolved_indy_entry(cache, index);
  // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
  __ lea(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
  __ ldar(method, method);

  // A non-null adapter method means the call site is already resolved
  __ cbnz(method, resolved);

  Bytecodes::Code code = bytecode();

  // Call to the interpreter runtime to resolve invokedynamic
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(method, code);  // this is essentially Bytecodes::_invokedynamic
  __ call_VM_preemptable(noreg, entry, method);
  // Update registers with resolved info
  __ load_resolved_indy_entry(cache, index);
  // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
  __ lea(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
  __ ldar(method, method);

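  // In debug builds, verify that the runtime call actually produced a
  // non-null adapter method before falling through.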
#ifdef ASSERT
  __ cbnz(method, resolved);
  __ stop("Should be resolved by now");
#endif // ASSERT
  __ bind(resolved);

  Label L_no_push;
  // Check if there is an appendix
  __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
  __ tbz(index, ResolvedIndyEntry::has_appendix_shift, L_no_push);
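  // (tbz tests the has_appendix bit directly and branches when it is
  // clear, so no separate mask-and-compare sequence is needed)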

  // Get appendix
  __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
  // Push the appendix as a trailing parameter

// ...

      __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
      __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
      __ store_klass(r0, r4);      // store klass last
    }

    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      __ push(atos);  // save the return value
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
      __ pop(atos);  // restore the return value
    }
    __ b(done);
  }

  // slow case
  __ bind(slow_case);
  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(r0);

  // continue
  __ bind(done);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
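  // (a StoreStore barrier suffices: it orders the initializing stores
  // ahead of any subsequent store that publishes the new reference)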
  __ membar(Assembler::StoreStore);
}

void TemplateTable::newarray() {
  transition(itos, atos);
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  __ mov(c_rarg2, r0);
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}

void TemplateTable::anewarray() {
  transition(itos, atos);