< prev index next >

src/hotspot/cpu/aarch64/templateTable_aarch64.cpp

Print this page

2270   Bytecodes::Code code = bytecode();
2271   __ load_method_entry(Rcache, index);
2272   switch(byte_no) {
2273     case f1_byte:
2274       __ lea(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
2275       break;
2276     case f2_byte:
2277       __ lea(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
2278       break;
2279   }
2280   // Load-acquire the bytecode to match store-release in InterpreterRuntime
2281   __ ldarb(temp, temp);
2282   __ subs(zr, temp, (int) code);  // have we resolved this bytecode?
2283   __ br(Assembler::EQ, resolved);
2284 
2285   // resolve first time through
2286   // Class initialization barrier slow path lands here as well.
2287   __ bind(clinit_barrier_slow);
2288   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2289   __ mov(temp, (int) code);
2290   __ call_VM(noreg, entry, temp);
2291 
2292   // Update registers with resolved info
2293   __ load_method_entry(Rcache, index);
2294   // n.b. unlike x86 Rcache is now rcpool plus the indexed offset
2295   // so all clients of this method must be modified accordingly
2296   __ bind(resolved);
2297 
2298   // Class initialization barrier for static methods
2299   if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2300     __ ldr(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
2301     __ load_method_holder(temp, temp);
2302     __ clinit_barrier(temp, rscratch1, nullptr, &clinit_barrier_slow);
2303   }
2304 }
2305 
2306 void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
2307                                             Register Rcache,
2308                                             Register index) {
2309   const Register temp = r19;
2310   assert_different_registers(Rcache, index, temp);

2316   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2317   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2318   default: break;
2319   }
2320 
2321   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2322   __ load_field_entry(Rcache, index);
2323   if (byte_no == f1_byte) {
2324     __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::get_code_offset())));
2325   } else {
2326     __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::put_code_offset())));
2327   }
2328   // Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
2329   __ ldarb(temp, temp);
2330   __ subs(zr, temp, (int) code);  // have we resolved this bytecode?
2331   __ br(Assembler::EQ, resolved);
2332 
2333   // resolve first time through
2334   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2335   __ mov(temp, (int) code);
2336   __ call_VM(noreg, entry, temp);
2337 
2338   // Update registers with resolved info
2339   __ load_field_entry(Rcache, index);
2340   __ bind(resolved);
2341 }
2342 
2343 void TemplateTable::load_resolved_field_entry(Register obj,
2344                                               Register cache,
2345                                               Register tos_state,
2346                                               Register offset,
2347                                               Register flags,
2348                                               bool is_static = false) {
2349   assert_different_registers(cache, tos_state, flags, offset);
2350 
2351   // Field offset
2352   __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
2353 
2354   // Flags
2355   __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
2356 

2477   const Register index = r3;
2478   assert_different_registers(method, appendix, cache, index, rcpool);
2479 
2480   __ save_bcp();
2481 
2482   Label resolved;
2483 
2484   __ load_resolved_indy_entry(cache, index);
2485   // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
2486   __ lea(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2487   __ ldar(method, method);
2488 
2489   // Compare the method to zero
2490   __ cbnz(method, resolved);
2491 
2492   Bytecodes::Code code = bytecode();
2493 
2494   // Call to the interpreter runtime to resolve invokedynamic
2495   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2496   __ mov(method, code); // this is essentially Bytecodes::_invokedynamic
2497   __ call_VM(noreg, entry, method);
2498   // Update registers with resolved info
2499   __ load_resolved_indy_entry(cache, index);
2500   // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
2501   __ lea(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2502   __ ldar(method, method);
2503 
2504 #ifdef ASSERT
2505   __ cbnz(method, resolved);
2506   __ stop("Should be resolved by now");
2507 #endif // ASSERT
2508   __ bind(resolved);
2509 
2510   Label L_no_push;
2511   // Check if there is an appendix
2512   __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
2513   __ tbz(index, ResolvedIndyEntry::has_appendix_shift, L_no_push);
2514 
2515   // Get appendix
2516   __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
2517   // Push the appendix as a trailing parameter

3653       __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3654       __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3655       __ store_klass(r0, r4);      // store klass last
3656     }
3657 
3658     if (DTraceAllocProbes) {
3659       // Trigger dtrace event for fastpath
3660       __ push(atos); // save the return value
3661       __ call_VM_leaf(
3662            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3663       __ pop(atos); // restore the return value
3664 
3665     }
3666     __ b(done);
3667   }
3668 
3669   // slow case
3670   __ bind(slow_case);
3671   __ get_constant_pool(c_rarg1);
3672   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3673   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3674   __ verify_oop(r0);
3675 
3676   // continue
3677   __ bind(done);
3678   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3679   __ membar(Assembler::StoreStore);
3680 }
3681 
// newarray: allocate a one-dimensional primitive array. The array length
// arrives on the int tos (in r0, per itos convention visible in the mov
// below); the resulting array oop is left in r0 (atos).
3682 void TemplateTable::newarray() {
3683   transition(itos, atos);
       // First argument: the unsigned byte at bcp+1 — presumably the
       // newarray element-type (atype) operand; verify against
       // InterpreterRuntime::newarray's expected first parameter.
3684   __ load_unsigned_byte(c_rarg1, at_bcp(1));
       // Second argument: the array length, taken from the int tos in r0.
3685   __ mov(c_rarg2, r0);
       // Call into the runtime to do the allocation; result returned in r0.
3686   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3687           c_rarg1, c_rarg2);
3688   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3689   __ membar(Assembler::StoreStore);
3690 }
3691 
3692 void TemplateTable::anewarray() {
3693   transition(itos, atos);

2270   Bytecodes::Code code = bytecode();
2271   __ load_method_entry(Rcache, index);
2272   switch(byte_no) {
2273     case f1_byte:
2274       __ lea(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::bytecode1_offset())));
2275       break;
2276     case f2_byte:
2277       __ lea(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::bytecode2_offset())));
2278       break;
2279   }
2280   // Load-acquire the bytecode to match store-release in InterpreterRuntime
2281   __ ldarb(temp, temp);
2282   __ subs(zr, temp, (int) code);  // have we resolved this bytecode?
2283   __ br(Assembler::EQ, resolved);
2284 
2285   // resolve first time through
2286   // Class initialization barrier slow path lands here as well.
2287   __ bind(clinit_barrier_slow);
2288   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2289   __ mov(temp, (int) code);
2290   __ call_VM_preemptable(noreg, entry, temp);
2291 
2292   // Update registers with resolved info
2293   __ load_method_entry(Rcache, index);
2294   // n.b. unlike x86 Rcache is now rcpool plus the indexed offset
2295   // so all clients of this method must be modified accordingly
2296   __ bind(resolved);
2297 
2298   // Class initialization barrier for static methods
2299   if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2300     __ ldr(temp, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
2301     __ load_method_holder(temp, temp);
2302     __ clinit_barrier(temp, rscratch1, nullptr, &clinit_barrier_slow);
2303   }
2304 }
2305 
2306 void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
2307                                             Register Rcache,
2308                                             Register index) {
2309   const Register temp = r19;
2310   assert_different_registers(Rcache, index, temp);

2316   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2317   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2318   default: break;
2319   }
2320 
2321   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2322   __ load_field_entry(Rcache, index);
2323   if (byte_no == f1_byte) {
2324     __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::get_code_offset())));
2325   } else {
2326     __ lea(temp, Address(Rcache, in_bytes(ResolvedFieldEntry::put_code_offset())));
2327   }
2328   // Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
2329   __ ldarb(temp, temp);
2330   __ subs(zr, temp, (int) code);  // have we resolved this bytecode?
2331   __ br(Assembler::EQ, resolved);
2332 
2333   // resolve first time through
2334   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2335   __ mov(temp, (int) code);
2336   __ call_VM_preemptable(noreg, entry, temp);
2337 
2338   // Update registers with resolved info
2339   __ load_field_entry(Rcache, index);
2340   __ bind(resolved);
2341 }
2342 
2343 void TemplateTable::load_resolved_field_entry(Register obj,
2344                                               Register cache,
2345                                               Register tos_state,
2346                                               Register offset,
2347                                               Register flags,
2348                                               bool is_static = false) {
2349   assert_different_registers(cache, tos_state, flags, offset);
2350 
2351   // Field offset
2352   __ load_sized_value(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
2353 
2354   // Flags
2355   __ load_unsigned_byte(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));
2356 

2477   const Register index = r3;
2478   assert_different_registers(method, appendix, cache, index, rcpool);
2479 
2480   __ save_bcp();
2481 
2482   Label resolved;
2483 
2484   __ load_resolved_indy_entry(cache, index);
2485   // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
2486   __ lea(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2487   __ ldar(method, method);
2488 
2489   // Compare the method to zero
2490   __ cbnz(method, resolved);
2491 
2492   Bytecodes::Code code = bytecode();
2493 
2494   // Call to the interpreter runtime to resolve invokedynamic
2495   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2496   __ mov(method, code); // this is essentially Bytecodes::_invokedynamic
2497   __ call_VM_preemptable(noreg, entry, method);
2498   // Update registers with resolved info
2499   __ load_resolved_indy_entry(cache, index);
2500   // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
2501   __ lea(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
2502   __ ldar(method, method);
2503 
2504 #ifdef ASSERT
2505   __ cbnz(method, resolved);
2506   __ stop("Should be resolved by now");
2507 #endif // ASSERT
2508   __ bind(resolved);
2509 
2510   Label L_no_push;
2511   // Check if there is an appendix
2512   __ load_unsigned_byte(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
2513   __ tbz(index, ResolvedIndyEntry::has_appendix_shift, L_no_push);
2514 
2515   // Get appendix
2516   __ load_unsigned_short(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
2517   // Push the appendix as a trailing parameter

3653       __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3654       __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3655       __ store_klass(r0, r4);      // store klass last
3656     }
3657 
3658     if (DTraceAllocProbes) {
3659       // Trigger dtrace event for fastpath
3660       __ push(atos); // save the return value
3661       __ call_VM_leaf(
3662            CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), r0);
3663       __ pop(atos); // restore the return value
3664 
3665     }
3666     __ b(done);
3667   }
3668 
3669   // slow case
3670   __ bind(slow_case);
3671   __ get_constant_pool(c_rarg1);
3672   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3673   __ call_VM_preemptable(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3674   __ verify_oop(r0);
3675 
3676   // continue
3677   __ bind(done);
3678   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3679   __ membar(Assembler::StoreStore);
3680 }
3681 
// newarray: emit interpreter code to allocate a primitive array.
// On entry the element count is the int tos value (r0, see the mov below);
// on exit r0 holds the new array oop (atos).
3682 void TemplateTable::newarray() {
3683   transition(itos, atos);
       // c_rarg1 <- unsigned byte following the opcode at bcp+1 — presumably
       // the array element-type (atype) tag; TODO confirm against the
       // InterpreterRuntime::newarray signature.
3684   __ load_unsigned_byte(c_rarg1, at_bcp(1));
       // c_rarg2 <- requested length from the int tos register r0.
3685   __ mov(c_rarg2, r0);
       // Runtime call performs the allocation; array oop comes back in r0.
3686   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3687           c_rarg1, c_rarg2);
3688   // Must prevent reordering of stores for object initialization with stores that publish the new object.
3689   __ membar(Assembler::StoreStore);
3690 }
3691 
3692 void TemplateTable::anewarray() {
3693   transition(itos, atos);
< prev index next >