src/hotspot/cpu/ppc/templateTable_ppc_64.cpp

2215 //   - _f12_oop
2216 // acquired, because they are inspected to determine whether the cache entry
2217 // is already resolved. We must not let subsequent loads float above this check.
2218 // See also the comments in ConstantPoolCacheEntry::bytecode_1(),
2219 // ConstantPoolCacheEntry::bytecode_2() and ConstantPoolCacheEntry::f1().
2220 
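// --- Illustration only, not part of this file ----------------------------
// A minimal C++ analogue of the acquire requirement stated above, using
// std::atomic. All names below are invented for this sketch; the real
// fields live in ConstantPoolCacheEntry.
#include <atomic>

struct CacheEntrySketch {
  std::atomic<int> indices;  // stands in for _indices
  void*            f1;       // stands in for _f12_oop
};

void* get_if_resolved(CacheEntrySketch* e, int expected_bytecode) {
  // Acquire load: the read of e->f1 below cannot float above this check,
  // which is what the cmp-br-isync sequence achieves in the assembly.
  if (e->indices.load(std::memory_order_acquire) == expected_bytecode) {
    return e->f1;
  }
  return nullptr;  // not yet resolved
}
// --------------------------------------------------------------------------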
2221 // Call into the VM if the call site is not yet resolved.
2222 //
2223 // Input regs:
2224 //   - None, all passed regs are outputs.
2225 //
2226 // Returns:
2227 //   - Rcache: The constant pool cache entry that contains the resolved
2228 //             result.
2229 //
2230 // Kills:
2231 //   - Rscratch
2232 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register Rscratch, size_t index_size) {
2233 
2234   __ get_cache_and_index_at_bcp(Rcache, 1, index_size);
2235   Label Lresolved, Ldone, L_clinit_barrier_slow;
2236 
2237   Bytecodes::Code code = bytecode();
2238   switch (code) {
2239     case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2240     case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2241     default:
2242       break;
2243   }
2244 
2245   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2246   // We are resolved if the byte of _indices selected by byte_no contains the current bytecode.
2247 #if defined(VM_LITTLE_ENDIAN)
2248   __ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + byte_no + 1, Rcache);
2249 #else
2250   __ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + 7 - (byte_no + 1), Rcache);
2251 #endif
2252   // Acquire by cmp-br-isync (see below).
2253   __ cmpdi(CCR0, Rscratch, (int)code);
2254   __ beq(CCR0, Lresolved);
2255 
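// --- Illustration only, not part of this file ----------------------------
// Hedged sketch of the byte selection above, assuming the documented
// ConstantPoolCacheEntry layout: _indices keeps the constant pool index in
// its low 16 bits and the two resolved-bytecode bytes right above it.
// Byte (byte_no + 1) of the little-endian word is therefore the bytecode
// byte; on big-endian the same byte sits at offset 7 - (byte_no + 1).
#include <cstdint>

static inline uint8_t resolved_bytecode_sketch(uint64_t indices, int byte_no) {
  // byte_no is 1 (f1_byte) or 2 (f2_byte); the shift selects the same byte
  // the lbz above loads, independent of endianness.
  return (uint8_t)(indices >> (8 * (byte_no + 1)));
}
// --------------------------------------------------------------------------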
2256   // Class initialization barrier slow path lands here as well.
2257   __ bind(L_clinit_barrier_slow);
2258 
2259   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2260   __ li(R4_ARG2, code);
2261   __ call_VM(noreg, entry, R4_ARG2, true);
2262 
2263   // Update registers with resolved info.
2264   __ get_cache_and_index_at_bcp(Rcache, 1, index_size);
2265   __ b(Ldone);
2266 
2267   __ bind(Lresolved);
2268   __ isync(); // Order load wrt. succeeding loads.
2269 
2270   // Class initialization barrier for static methods
2271   if (VM_Version::supports_fast_class_init_checks() && bytecode() == Bytecodes::_invokestatic) {
2272     const Register method = Rscratch;
2273     const Register klass  = Rscratch;
2274 
2275     __ load_resolved_method_at_index(byte_no, Rcache, method);
2276     __ load_method_holder(klass, method);
2277     __ clinit_barrier(klass, R16_thread, NULL /*L_fast_path*/, &L_clinit_barrier_slow);
2278   }
2279 
2280   __ bind(Ldone);
2281 }
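// --- Illustration only, not part of this file ----------------------------
// Hedged sketch of the fast-path predicate clinit_barrier() implements: a
// class may be used without taking the slow path if it is fully
// initialized, or if it is currently being initialized by this very thread
// (the reentrant <clinit> case). The slow path re-enters the VM, which
// blocks until initialization by another thread completes.
static bool klass_usable_sketch(InstanceKlass* klass, Thread* current) {
  return klass->is_initialized() ||
         klass->is_reentrant_initialization(current);
}
// --------------------------------------------------------------------------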
2282 
2283 // Load the constant pool cache entry for field accesses into registers.
2284 // The Rcache and Rindex registers must be set before the call.
2285 // Input:
2286 //   - Rcache, Rindex
2287 // Output:
2288 //   - Robj, Roffset, Rflags
2289 void TemplateTable::load_field_cp_cache_entry(Register Robj,
2290                                               Register Rcache,
2291                                               Register Rindex /* unused on PPC64 */,
2292                                               Register Roffset,
2293                                               Register Rflags,
2294                                               bool is_static = false) {
2295   assert_different_registers(Rcache, Rflags, Roffset);
2296   // assert(Rindex == noreg, "parameter not used on PPC64");
2297 
2298   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2299   __ ld(Rflags, in_bytes(cp_base_offset) + in_bytes(ConstantPoolCacheEntry::flags_offset()), Rcache);


2326                                                Register Rflags,
2327                                                bool is_invokevirtual,
2328                                                bool is_invokevfinal,
2329                                                bool is_invokedynamic) {
2330 
2331   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2332   // Determine constant pool cache field offsets.
2333   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
2334   const int method_offset = in_bytes(cp_base_offset + (is_invokevirtual ? ConstantPoolCacheEntry::f2_offset() : ConstantPoolCacheEntry::f1_offset()));
2335   const int flags_offset  = in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset());
2336   // Access constant pool cache fields.
2337   const int index_offset  = in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset());
2338 
2339   Register Rcache = R21_tmp1; // Note: same register as R21_sender_SP.
2340 
2341   if (is_invokevfinal) {
2342     assert(Ritable_index == noreg, "register not used");
2343     // Already resolved.
2344     __ get_cache_and_index_at_bcp(Rcache, 1);
2345   } else {
2346     resolve_cache_and_index(byte_no, Rcache, /* temp */ Rmethod, is_invokedynamic ? sizeof(u4) : sizeof(u2));
2347   }
2348 
2349   __ ld(Rmethod, method_offset, Rcache);
2350   __ ld(Rflags, flags_offset, Rcache);
2351 
2352   if (Ritable_index != noreg) {
2353     __ ld(Ritable_index, index_offset, Rcache);
2354   }
2355 }
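// --- Illustration only, not part of this file ----------------------------
// Conceptual view of the slot read via method_offset/index_offset above;
// the real field is a single intptr_t in ConstantPoolCacheEntry, reused
// depending on the invoke bytecode.
union F2SlotSketch {
  intptr_t vtable_index;  // invokevirtual, non-final target
  Method*  final_method;  // invokevirtual of a final method (vfinal)
  intptr_t itable_index;  // invokeinterface
};
// --------------------------------------------------------------------------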
2356 
2357 // ============================================================================
2358 // Field access
2359 
2360 // Volatile variables demand their effects be made known to all CPUs
2361 // in order. Store buffers on most chips allow reads & writes to
2362 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2363 // without some kind of memory barrier (i.e., it's not sufficient that
2364 // the interpreter does not reorder volatile references; the hardware
2365 // also must not reorder them).
2366 //
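// --- Illustration only, not part of this file ----------------------------
// A portable analogue of the requirement stated above: std::atomic seq_cst
// accesses make the hardware, not just the interpreter, order the
// operations, comparable to the sync/isync fences the PPC interpreter
// emits around volatile field accesses.
#include <atomic>

std::atomic<int> volatile_field_sketch{0};

void store_volatile_sketch(int v) {
  volatile_field_sketch.store(v, std::memory_order_seq_cst);  // fenced store
}

int load_volatile_sketch() {
  return volatile_field_sketch.load(std::memory_order_seq_cst);  // fenced load
}
// --------------------------------------------------------------------------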


3631 
3632   // do the call
3633 
3634   Register Rscratch = Rflags; // Rflags is dead now.
3635 
3636   __ profile_final_call(Rscratch1, Rscratch);
3637   __ profile_arguments_type(Rmethod, Rscratch, Rrecv_klass /* scratch */, true);
3638 
3639   __ call_from_interpreter(Rmethod, Rret_addr, Rscratch, Rrecv_klass /* scratch */);
3640 
3641   __ bind(LnotVFinal);
3642 
3643   __ lookup_interface_method(Rrecv_klass, Rinterface_klass, noreg, noreg, Rscratch1, Rscratch2,
3644                              L_no_such_interface, /*return_method=*/false);
3645 
3646   __ profile_virtual_call(Rrecv_klass, Rscratch1, Rscratch2, false);
3647 
3648   // Find entry point to call.
3649 
3650   // Get declaring interface class from method
3651   __ load_method_holder(Rinterface_klass, Rmethod);


3652 
3653   // Get itable index from method
3654   __ lwa(Rindex, in_bytes(Method::itable_index_offset()), Rmethod);
3655   __ subfic(Rindex, Rindex, Method::itable_index_max);
3656 
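// --- Illustration only, not part of this file ----------------------------
// The subfic above undoes the itable-index encoding used by Method: the
// field holds itable_index_max - index, so subtracting it from
// itable_index_max again recovers the real itable slot.
static inline int decode_itable_index_sketch(int stored) {
  return Method::itable_index_max - stored;  // == subfic Rindex, Rindex, itable_index_max
}
// --------------------------------------------------------------------------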
3657   __ lookup_interface_method(Rrecv_klass, Rinterface_klass, Rindex, Rmethod2, Rscratch1, Rscratch2,
3658                              L_no_such_interface);
3659 
3660   __ cmpdi(CCR0, Rmethod2, 0);
3661   __ beq(CCR0, Lthrow_ame);
3662   // Found entry. Jump off!
3663   // Argument and return type profiling.
3664   __ profile_arguments_type(Rmethod2, Rscratch1, Rscratch2, true);
3665   //__ profile_called_method(Rindex, Rscratch1);
3666   __ call_from_interpreter(Rmethod2, Rret_addr, Rscratch1, Rscratch2);
3667 
3668   // Itable entry was NULL => throw AbstractMethodError.
3669   __ bind(Lthrow_ame);
3670   // Pass arguments for generating a verbose error message.
3671   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorVerbose),




2215 //   - _f12_oop
2216 // acquired, because they are inspected to determine whether the cache entry
2217 // is already resolved. We must not let subsequent loads float above this check.
2218 // See also the comments in ConstantPoolCacheEntry::bytecode_1(),
2219 // ConstantPoolCacheEntry::bytecode_2() and ConstantPoolCacheEntry::f1().
2220 
2221 // Call into the VM if the call site is not yet resolved.
2222 //
2223 // Input regs:
2224 //   - None, all passed regs are outputs.
2225 //
2226 // Returns:
2227 //   - Rcache: The constant pool cache entry that contains the resolved
2228 //             result.
2229 //
2230 // Kills:
2231 //   - Rscratch
2232 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register Rscratch, size_t index_size) {
2233 
2234   __ get_cache_and_index_at_bcp(Rcache, 1, index_size);
2235   Label Lresolved, Ldone;
2236 
2237   Bytecodes::Code code = bytecode();
2238   switch (code) {
2239     case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2240     case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2241     default:
2242       break;
2243   }
2244 
2245   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2246   // We are resolved if the byte of _indices selected by byte_no contains the current bytecode.
2247 #if defined(VM_LITTLE_ENDIAN)
2248   __ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + byte_no + 1, Rcache);
2249 #else
2250   __ lbz(Rscratch, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::indices_offset()) + 7 - (byte_no + 1), Rcache);
2251 #endif
2252   // Acquire by cmp-br-isync (see below).
2253   __ cmpdi(CCR0, Rscratch, (int)code);
2254   __ beq(CCR0, Lresolved);
2255 
2256   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2257   __ li(R4_ARG2, code);
2258   __ call_VM(noreg, entry, R4_ARG2, true);
2259 
2260   // Update registers with resolved info.
2261   __ get_cache_and_index_at_bcp(Rcache, 1, index_size);
2262   __ b(Ldone);
2263 
2264   __ bind(Lresolved);
2265   __ isync(); // Order load wrt. succeeding loads.
2266   __ bind(Ldone);
2267 }
2268 
2269 // Load the constant pool cache entry for field accesses into registers.
2270 // The Rcache and Rindex registers must be set before the call.
2271 // Input:
2272 //   - Rcache, Rindex
2273 // Output:
2274 //   - Robj, Roffset, Rflags
2275 void TemplateTable::load_field_cp_cache_entry(Register Robj,
2276                                               Register Rcache,
2277                                               Register Rindex /* unused on PPC64 */,
2278                                               Register Roffset,
2279                                               Register Rflags,
2280                                               bool is_static = false) {
2281   assert_different_registers(Rcache, Rflags, Roffset);
2282   // assert(Rindex == noreg, "parameter not used on PPC64");
2283 
2284   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2285   __ ld(Rflags, in_bytes(cp_base_offset) + in_bytes(ConstantPoolCacheEntry::flags_offset()), Rcache);


2312                                                Register Rflags,
2313                                                bool is_invokevirtual,
2314                                                bool is_invokevfinal,
2315                                                bool is_invokedynamic) {
2316 
2317   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2318   // Determine constant pool cache field offsets.
2319   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
2320   const int method_offset = in_bytes(cp_base_offset + (is_invokevirtual ? ConstantPoolCacheEntry::f2_offset() : ConstantPoolCacheEntry::f1_offset()));
2321   const int flags_offset  = in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset());
2322   // Access constant pool cache fields.
2323   const int index_offset  = in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset());
2324 
2325   Register Rcache = R21_tmp1; // Note: same register as R21_sender_SP.
2326 
2327   if (is_invokevfinal) {
2328     assert(Ritable_index == noreg, "register not used");
2329     // Already resolved.
2330     __ get_cache_and_index_at_bcp(Rcache, 1);
2331   } else {
2332     resolve_cache_and_index(byte_no, Rcache, R0, is_invokedynamic ? sizeof(u4) : sizeof(u2));
2333   }
2334 
2335   __ ld(Rmethod, method_offset, Rcache);
2336   __ ld(Rflags, flags_offset, Rcache);
2337 
2338   if (Ritable_index != noreg) {
2339     __ ld(Ritable_index, index_offset, Rcache);
2340   }
2341 }
2342 
2343 // ============================================================================
2344 // Field access
2345 
2346 // Volatile variables demand their effects be made known to all CPUs
2347 // in order. Store buffers on most chips allow reads & writes to
2348 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2349 // without some kind of memory barrier (i.e., it's not sufficient that
2350 // the interpreter does not reorder volatile references; the hardware
2351 // also must not reorder them).
2352 //


3617 
3618   // do the call
3619 
3620   Register Rscratch = Rflags; // Rflags is dead now.
3621 
3622   __ profile_final_call(Rscratch1, Rscratch);
3623   __ profile_arguments_type(Rmethod, Rscratch, Rrecv_klass /* scratch */, true);
3624 
3625   __ call_from_interpreter(Rmethod, Rret_addr, Rscratch, Rrecv_klass /* scratch */);
3626 
3627   __ bind(LnotVFinal);
3628 
3629   __ lookup_interface_method(Rrecv_klass, Rinterface_klass, noreg, noreg, Rscratch1, Rscratch2,
3630                              L_no_such_interface, /*return_method=*/false);
3631 
3632   __ profile_virtual_call(Rrecv_klass, Rscratch1, Rscratch2, false);
3633 
3634   // Find entry point to call.
3635 
3636   // Get declaring interface class from method
3637   __ ld(Rinterface_klass, in_bytes(Method::const_offset()), Rmethod);
3638   __ ld(Rinterface_klass, in_bytes(ConstMethod::constants_offset()), Rinterface_klass);
3639   __ ld(Rinterface_klass, ConstantPool::pool_holder_offset_in_bytes(), Rinterface_klass);
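// --- Illustration only, not part of this file ----------------------------
// The three loads above perform the pointer chase
// Method -> ConstMethod -> ConstantPool -> holder Klass, i.e. in C++ terms
// (using the corresponding HotSpot accessors):
static Klass* method_holder_sketch(Method* m) {
  return m->constMethod()->constants()->pool_holder();
}
// --------------------------------------------------------------------------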
3640 
3641   // Get itable index from method
3642   __ lwa(Rindex, in_bytes(Method::itable_index_offset()), Rmethod);
3643   __ subfic(Rindex, Rindex, Method::itable_index_max);
3644 
3645   __ lookup_interface_method(Rrecv_klass, Rinterface_klass, Rindex, Rmethod2, Rscratch1, Rscratch2,
3646                              L_no_such_interface);
3647 
3648   __ cmpdi(CCR0, Rmethod2, 0);
3649   __ beq(CCR0, Lthrow_ame);
3650   // Found entry. Jump off!
3651   // Argument and return type profiling.
3652   __ profile_arguments_type(Rmethod2, Rscratch1, Rscratch2, true);
3653   //__ profile_called_method(Rindex, Rscratch1);
3654   __ call_from_interpreter(Rmethod2, Rret_addr, Rscratch1, Rscratch2);
3655 
3656   // Itable entry was NULL => throw AbstractMethodError.
3657   __ bind(Lthrow_ame);
3658   // Pass arguments for generating a verbose error message.
3659   call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorVerbose),

