< prev index next >

src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

Print this page

2228 
2229     // r0 is -1^K where K == partial copied count
2230     __ eonw(rscratch1, r0, zr);
2231     // adjust length down and src/end pos up by partial copied count
2232     __ subw(length, length, rscratch1);
2233     __ addw(src_pos, src_pos, rscratch1);
2234     __ addw(dst_pos, dst_pos, rscratch1);
2235     __ b(*stub->entry());
2236 
2237     __ bind(*stub->continuation());
2238     return;
2239   }
2240 
2241   assert(default_type != nullptr && default_type->is_array_klass() && default_type->is_loaded(), "must be true at this point");
2242 
2243   int elem_size = type2aelembytes(basic_type);
2244   int scale = exact_log2(elem_size);
2245 
2246   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
2247   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
2248   Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
2249   Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
2250 
2251   // test for null
2252   if (flags & LIR_OpArrayCopy::src_null_check) {
2253     __ cbz(src, *stub->entry());
2254   }
2255   if (flags & LIR_OpArrayCopy::dst_null_check) {
2256     __ cbz(dst, *stub->entry());
2257   }
2258 
2259   // If the compiler was not able to prove that exact type of the source or the destination
2260   // of the arraycopy is an array type, check at runtime if the source or the destination is
2261   // an instance type.
2262   if (flags & LIR_OpArrayCopy::type_check) {
2263     if (!(flags & LIR_OpArrayCopy::LIR_OpArrayCopy::dst_objarray)) {
2264       __ load_klass(tmp, dst);
2265       __ ldrw(rscratch1, Address(tmp, in_bytes(Klass::layout_helper_offset())));
2266       __ cmpw(rscratch1, Klass::_lh_neutral_value);
2267       __ br(Assembler::GE, *stub->entry());
2268     }
2269 

2290     __ br(Assembler::LT, *stub->entry());
2291   }
2292 
2293   if (flags & LIR_OpArrayCopy::src_range_check) {
2294     __ addw(tmp, src_pos, length);
2295     __ ldrw(rscratch1, src_length_addr);
2296     __ cmpw(tmp, rscratch1);
2297     __ br(Assembler::HI, *stub->entry());
2298   }
2299   if (flags & LIR_OpArrayCopy::dst_range_check) {
2300     __ addw(tmp, dst_pos, length);
2301     __ ldrw(rscratch1, dst_length_addr);
2302     __ cmpw(tmp, rscratch1);
2303     __ br(Assembler::HI, *stub->entry());
2304   }
2305 
2306   if (flags & LIR_OpArrayCopy::type_check) {
2307     // We don't know the array types are compatible
2308     if (basic_type != T_OBJECT) {
2309       // Simple test for basic type arrays
2310       if (UseCompressedClassPointers) {
2311         __ ldrw(tmp, src_klass_addr);
2312         __ ldrw(rscratch1, dst_klass_addr);
2313         __ cmpw(tmp, rscratch1);
2314       } else {
2315         __ ldr(tmp, src_klass_addr);
2316         __ ldr(rscratch1, dst_klass_addr);
2317         __ cmp(tmp, rscratch1);
2318       }
2319       __ br(Assembler::NE, *stub->entry());
2320     } else {
2321       // For object arrays, if src is a sub class of dst then we can
2322       // safely do the copy.
2323       Label cont, slow;
2324 
2325 #define PUSH(r1, r2)                                    \
2326       stp(r1, r2, __ pre(sp, -2 * wordSize));
2327 
2328 #define POP(r1, r2)                                     \
2329       ldp(r1, r2, __ post(sp, 2 * wordSize));
2330 
2331       __ PUSH(src, dst);
2332 
2333       __ load_klass(src, src);
2334       __ load_klass(dst, dst);
2335 
2336       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
2337 
2338       __ PUSH(src, dst);

2420       }
2421 
2422       __ b(*stub->entry());
2423 
2424       __ bind(cont);
2425       __ POP(src, dst);
2426     }
2427   }
2428 
2429 #ifdef ASSERT
2430   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2431     // Sanity check the known type with the incoming class.  For the
2432     // primitive case the types must match exactly with src.klass and
2433     // dst.klass each exactly matching the default type.  For the
2434     // object array case, if no type check is needed then either the
2435     // dst type is exactly the expected type and the src type is a
2436     // subtype which we can't check or src is the same array as dst
2437     // but not necessarily exactly of type default_type.
2438     Label known_ok, halt;
2439     __ mov_metadata(tmp, default_type->constant_encoding());
2440     if (UseCompressedClassPointers) {
2441       __ encode_klass_not_null(tmp);
2442     }
2443 
2444     if (basic_type != T_OBJECT) {
2445 
2446       if (UseCompressedClassPointers) {
2447         __ ldrw(rscratch1, dst_klass_addr);
2448         __ cmpw(tmp, rscratch1);
2449       } else {
2450         __ ldr(rscratch1, dst_klass_addr);
2451         __ cmp(tmp, rscratch1);
2452       }
2453       __ br(Assembler::NE, halt);
2454       if (UseCompressedClassPointers) {
2455         __ ldrw(rscratch1, src_klass_addr);
2456         __ cmpw(tmp, rscratch1);
2457       } else {
2458         __ ldr(rscratch1, src_klass_addr);
2459         __ cmp(tmp, rscratch1);
2460       }
2461       __ br(Assembler::EQ, known_ok);
2462     } else {
2463       if (UseCompressedClassPointers) {
2464         __ ldrw(rscratch1, dst_klass_addr);
2465         __ cmpw(tmp, rscratch1);
2466       } else {
2467         __ ldr(rscratch1, dst_klass_addr);
2468         __ cmp(tmp, rscratch1);
2469       }
2470       __ br(Assembler::EQ, known_ok);
2471       __ cmp(src, dst);
2472       __ br(Assembler::EQ, known_ok);
2473     }
2474     __ bind(halt);
2475     __ stop("incorrect type information in arraycopy");
2476     __ bind(known_ok);
2477   }
2478 #endif
2479 
2480 #ifndef PRODUCT
2481   if (PrintC1Statistics) {
2482     __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2483   }
2484 #endif
2485 
2486   __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2487   __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2488   assert_different_registers(c_rarg0, dst, dst_pos, length);
2489   __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));

2531     // done
2532   } else if (op->code() == lir_unlock) {
2533     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2534     __ unlock_object(hdr, obj, lock, temp, *op->stub()->entry());
2535   } else {
2536     Unimplemented();
2537   }
2538   __ bind(*op->stub()->continuation());
2539 }
2540 
2541 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2542   Register obj = op->obj()->as_pointer_register();
2543   Register result = op->result_opr()->as_pointer_register();
2544 
2545   CodeEmitInfo* info = op->info();
2546   if (info != nullptr) {
2547     add_debug_info_for_null_check_here(info);
2548   }
2549 
2550   if (UseCompressedClassPointers) {
2551     __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));











2552     __ decode_klass_not_null(result);
2553   } else {
2554     __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2555   }
2556 }
2557 
2558 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2559   ciMethod* method = op->profiled_method();
2560   int bci          = op->profiled_bci();
2561   ciMethod* callee = op->profiled_callee();
2562 
2563   // Update counter for all call types
2564   ciMethodData* md = method->method_data_or_null();
2565   assert(md != nullptr, "Sanity");
2566   ciProfileData* data = md->bci_to_data(bci);
2567   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
2568   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2569   Register mdo  = op->mdo()->as_register();
2570   __ mov_metadata(mdo, md->constant_encoding());
2571   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));

2228 
2229     // r0 is -1^K where K == partial copied count
2230     __ eonw(rscratch1, r0, zr);
2231     // adjust length down and src/end pos up by partial copied count
2232     __ subw(length, length, rscratch1);
2233     __ addw(src_pos, src_pos, rscratch1);
2234     __ addw(dst_pos, dst_pos, rscratch1);
2235     __ b(*stub->entry());
2236 
2237     __ bind(*stub->continuation());
2238     return;
2239   }
2240 
2241   assert(default_type != nullptr && default_type->is_array_klass() && default_type->is_loaded(), "must be true at this point");
2242 
2243   int elem_size = type2aelembytes(basic_type);
2244   int scale = exact_log2(elem_size);
2245 
2246   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
2247   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());


2248 
2249   // test for null
2250   if (flags & LIR_OpArrayCopy::src_null_check) {
2251     __ cbz(src, *stub->entry());
2252   }
2253   if (flags & LIR_OpArrayCopy::dst_null_check) {
2254     __ cbz(dst, *stub->entry());
2255   }
2256 
2257   // If the compiler was not able to prove that exact type of the source or the destination
2258   // of the arraycopy is an array type, check at runtime if the source or the destination is
2259   // an instance type.
2260   if (flags & LIR_OpArrayCopy::type_check) {
2261     if (!(flags & LIR_OpArrayCopy::LIR_OpArrayCopy::dst_objarray)) {
2262       __ load_klass(tmp, dst);
2263       __ ldrw(rscratch1, Address(tmp, in_bytes(Klass::layout_helper_offset())));
2264       __ cmpw(rscratch1, Klass::_lh_neutral_value);
2265       __ br(Assembler::GE, *stub->entry());
2266     }
2267 

2288     __ br(Assembler::LT, *stub->entry());
2289   }
2290 
2291   if (flags & LIR_OpArrayCopy::src_range_check) {
2292     __ addw(tmp, src_pos, length);
2293     __ ldrw(rscratch1, src_length_addr);
2294     __ cmpw(tmp, rscratch1);
2295     __ br(Assembler::HI, *stub->entry());
2296   }
2297   if (flags & LIR_OpArrayCopy::dst_range_check) {
2298     __ addw(tmp, dst_pos, length);
2299     __ ldrw(rscratch1, dst_length_addr);
2300     __ cmpw(tmp, rscratch1);
2301     __ br(Assembler::HI, *stub->entry());
2302   }
2303 
2304   if (flags & LIR_OpArrayCopy::type_check) {
2305     // We don't know the array types are compatible
2306     if (basic_type != T_OBJECT) {
2307       // Simple test for basic type arrays
2308       __ cmp_klass(src, dst, tmp, rscratch1);








2309       __ br(Assembler::NE, *stub->entry());
2310     } else {
2311       // For object arrays, if src is a sub class of dst then we can
2312       // safely do the copy.
2313       Label cont, slow;
2314 
2315 #define PUSH(r1, r2)                                    \
2316       stp(r1, r2, __ pre(sp, -2 * wordSize));
2317 
2318 #define POP(r1, r2)                                     \
2319       ldp(r1, r2, __ post(sp, 2 * wordSize));
2320 
2321       __ PUSH(src, dst);
2322 
2323       __ load_klass(src, src);
2324       __ load_klass(dst, dst);
2325 
2326       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
2327 
2328       __ PUSH(src, dst);

2410       }
2411 
2412       __ b(*stub->entry());
2413 
2414       __ bind(cont);
2415       __ POP(src, dst);
2416     }
2417   }
2418 
2419 #ifdef ASSERT
2420   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2421     // Sanity check the known type with the incoming class.  For the
2422     // primitive case the types must match exactly with src.klass and
2423     // dst.klass each exactly matching the default type.  For the
2424     // object array case, if no type check is needed then either the
2425     // dst type is exactly the expected type and the src type is a
2426     // subtype which we can't check or src is the same array as dst
2427     // but not necessarily exactly of type default_type.
2428     Label known_ok, halt;
2429     __ mov_metadata(tmp, default_type->constant_encoding());



2430 
2431     if (basic_type != T_OBJECT) {
2432       __ cmp_klass(dst, tmp, rscratch1);







2433       __ br(Assembler::NE, halt);
2434       __ cmp_klass(src, tmp, rscratch1);






2435       __ br(Assembler::EQ, known_ok);
2436     } else {
2437       __ cmp_klass(dst, tmp, rscratch1);






2438       __ br(Assembler::EQ, known_ok);
2439       __ cmp(src, dst);
2440       __ br(Assembler::EQ, known_ok);
2441     }
2442     __ bind(halt);
2443     __ stop("incorrect type information in arraycopy");
2444     __ bind(known_ok);
2445   }
2446 #endif
2447 
2448 #ifndef PRODUCT
2449   if (PrintC1Statistics) {
2450     __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2451   }
2452 #endif
2453 
2454   __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2455   __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2456   assert_different_registers(c_rarg0, dst, dst_pos, length);
2457   __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));

2499     // done
2500   } else if (op->code() == lir_unlock) {
2501     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2502     __ unlock_object(hdr, obj, lock, temp, *op->stub()->entry());
2503   } else {
2504     Unimplemented();
2505   }
2506   __ bind(*op->stub()->continuation());
2507 }
2508 
void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
  // Emit code that loads the (decoded) Klass* of the operand object into
  // the result register.
  Register obj = op->obj()->as_pointer_register();
  Register result = op->result_opr()->as_pointer_register();

  // When debug info is attached, the first load below acts as an implicit
  // null check; record the PC so the fault can be mapped back to this site.
  CodeEmitInfo* info = op->info();
  if (info != nullptr) {
    add_debug_info_for_null_check_here(info);
  }

  if (UseCompressedClassPointers) {
    if (UseCompactObjectHeaders) {
      // With compact headers the narrow klass lives in the upper bits of the
      // mark word (see the klass_shift extraction below), so load the mark
      // word rather than a separate klass field.
      // Check if we can take the (common) fast path, if obj is unlocked.
      __ ldr(result, Address(obj, oopDesc::mark_offset_in_bytes()));
      __ tst(result, markWord::monitor_value);
      // Monitor bit set: the mark word is displaced by an inflated monitor,
      // so take the stub slow path.
      // NOTE(review): the stub is expected to leave a usable mark-word value
      // in `result` at the continuation — confirm against the stub code.
      __ br(Assembler::NE, *op->stub()->entry());
      __ bind(*op->stub()->continuation());

      // Shift to get proper narrow Klass*.
      __ lsr(result, result, markWord::klass_shift);
    } else {
      // Classic compressed-klass layout: 32-bit load from the klass field.
      __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
    }
    // Decompress the narrow klass obtained on either path above.
    __ decode_klass_not_null(result);
  } else {
    // Uncompressed: a single full-width Klass* load.
    __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
  }
}
2536 
2537 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2538   ciMethod* method = op->profiled_method();
2539   int bci          = op->profiled_bci();
2540   ciMethod* callee = op->profiled_callee();
2541 
2542   // Update counter for all call types
2543   ciMethodData* md = method->method_data_or_null();
2544   assert(md != nullptr, "Sanity");
2545   ciProfileData* data = md->bci_to_data(bci);
2546   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
2547   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2548   Register mdo  = op->mdo()->as_register();
2549   __ mov_metadata(mdo, md->constant_encoding());
2550   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
< prev index next >