  }
  // Load-acquire the bytecode to match store-release in InterpreterRuntime
  __ ldarb(temp, temp);
  __ subs(zr, temp, (int) code); // have we resolved this bytecode?

  // Class initialization barrier for static methods
  if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
    __ br(Assembler::NE, L_clinit_barrier_slow);
    __ ldr(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
    __ load_method_holder(temp, temp);
    __ clinit_barrier(temp, rscratch1, &L_done, /*L_slow_path*/ nullptr);
    __ bind(L_clinit_barrier_slow);
  } else {
    __ br(Assembler::EQ, L_done);
  }
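  // Even a resolved invokestatic is not runnable until the holder class is
  // initialized: clinit_barrier branches to L_done only when the holder is
  // initialized (or being initialized by this thread) and otherwise falls
  // through to the slow path below.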

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(temp, (int) code);
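  // Use the preemptable VM call: resolution can block on class loading and
  // initialization, and this variant lets a virtual thread unmount while
  // waiting instead of pinning its carrier thread.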
  __ call_VM_preemptable(noreg, entry, temp);

  // Update registers with resolved info
  __ load_method_entry(Rcache, index);
  // n.b. unlike x86, Rcache is now rcpool plus the indexed offset,
  // so all clients of this method must be modified accordingly
  __ bind(L_done);
}

void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
                                                      Register Rcache,
                                                      Register index) {
  const Register temp = r19;
  assert_different_registers(Rcache, index, temp);

  Label L_clinit_barrier_slow, L_done;

  Bytecodes::Code code = bytecode();
  switch (code) {
  case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
  case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
  default: break;
  }

  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
  __ load_field_entry(Rcache, index);
  if (byte_no == f1_byte) {
    __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::get_code_offset())));
  } else {
    __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::put_code_offset())));
  }
  // Load-acquire the bytecode to match store-release in InterpreterRuntime
  __ ldarb(temp, temp);
  __ subs(zr, temp, (int) code); // have we resolved this bytecode?

  // Class initialization barrier for static fields
  if (VM_Version::supports_fast_class_init_checks() &&
      (bytecode() == Bytecodes::_getstatic || bytecode() == Bytecodes::_putstatic)) {
    const Register field_holder = temp;

    __ br(Assembler::NE, L_clinit_barrier_slow);
    __ ldr(field_holder, Address(Rcache, in_bytes(ResolvedFieldEntry::field_holder_offset())));
    __ clinit_barrier(field_holder, rscratch1, &L_done, /*L_slow_path*/ nullptr);
    __ bind(L_clinit_barrier_slow);
  } else {
    __ br(Assembler::EQ, L_done);
  }
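  // As above: a resolved static field access is only usable once the holder
  // class is initialized, so the barrier re-checks before taking the fast exit.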

  // resolve first time through
  // Class initialization barrier slow path lands here as well.
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(temp, (int) code);
  __ call_VM_preemptable(noreg, entry, temp);

  // Update registers with resolved info
  __ load_field_entry(Rcache, index);
  __ bind(L_done);
}

void TemplateTable::load_resolved_field_entry(Register obj,
                                              Register cache,
                                              Register tos_state,
                                              Register offset,
                                              Register flags,
                                              bool is_static = false) {
  assert_different_registers(cache, tos_state, flags, offset);

  // Field offset
  __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);

  // Flags
  __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
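  // The flags byte carries the per-field attributes (e.g. is_final/is_volatile)
  // recorded in the ResolvedFieldEntry.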

// [... intervening code omitted; the excerpt resumes inside TemplateTable::_new,
//  in the header-initialization tail of the fast allocation path ...]
      __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
      __ store_klass_gap(r0, zr); // zero klass gap for compressed oops
      __ store_klass(r0, r4);     // store klass last
    }

    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      __ push(atos); // save the return value
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
      __ pop(atos); // restore the return value
    }
    __ b(done);
  }

  // slow case
  __ bind(slow_case);
  __ get_constant_pool(c_rarg1);
  __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
  __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
  __ verify_oop(r0);

  // continue
  __ bind(done);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}

void TemplateTable::newarray() {
  transition(itos, atos);
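  // The unsigned byte at bcp + 1 is the element-type tag of the newarray
  // bytecode; the itos value in r0 is the requested array length.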
  __ load_unsigned_byte(c_rarg1, at_bcp(1));
  __ mov(c_rarg2, r0);
  call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
          c_rarg1, c_rarg2);
  // Must prevent reordering of stores for object initialization with stores that publish the new object.
  __ membar(Assembler::StoreStore);
}

void TemplateTable::anewarray() {
  transition(itos, atos);