1213 (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1214 (!UseFastNewTypeArray && !is_reference_type(op->type()))) {
1215 __ b(*op->stub()->entry());
1216 } else {
1217 Register tmp1 = op->tmp1()->as_register();
1218 Register tmp2 = op->tmp2()->as_register();
1219 Register tmp3 = op->tmp3()->as_register();
1220 if (len == tmp1) {
1221 tmp1 = tmp3;
1222 } else if (len == tmp2) {
1223 tmp2 = tmp3;
1224 } else if (len == tmp3) {
1225 // everything is ok
1226 } else {
1227 __ mov(tmp3, len);
1228 }
1229 __ allocate_array(op->obj()->as_register(),
1230 len,
1231 tmp1,
1232 tmp2,
1233 arrayOopDesc::header_size(op->type()),
1234 array_element_size(op->type()),
1235 op->klass()->as_register(),
1236 *op->stub()->entry());
1237 }
1238 __ bind(*op->stub()->continuation());
1239 }
1240
1241 void LIR_Assembler::type_profile_helper(Register mdo,
1242 ciMethodData *md, ciProfileData *data,
1243 Register recv, Label* update_done) {
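// A ReceiverTypeData entry in the MDO holds a fixed number of rows, each pairing a
// recorded receiver klass with a hit counter. Scan the rows: on a match, bump that
// row's counter and jump to update_done. The remainder of the helper (not shown
// here) claims an empty row for a previously unseen receiver.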
1244 for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1245 Label next_test;
1246 // See if the receiver is receiver[n].
1247 __ lea(rscratch2, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1248 __ ldr(rscratch1, Address(rscratch2));
1249 __ cmp(recv, rscratch1);
1250 __ br(Assembler::NE, next_test);
1251 Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1252 __ addptr(data_addr, DataLayout::counter_increment);
1253 __ b(*update_done);
2273
2274 // r0 is -1^K (XOR, i.e. r0 == ~K) where K == partial copied count
2275 __ eonw(rscratch1, r0, zr);   // rscratch1 = r0 ^ ~0 = ~r0 = K
2276 // adjust length down and src/dst pos up by partial copied count
2277 __ subw(length, length, rscratch1);
2278 __ addw(src_pos, src_pos, rscratch1);
2279 __ addw(dst_pos, dst_pos, rscratch1);
2280 __ b(*stub->entry());
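// Illustration: if the copy stub managed K == 3 of 10 elements before failing, r0
// holds -1^3 == ~3 == 0xfffffffc; the eonw above recovers 3, length becomes 7, and
// src_pos/dst_pos advance by 3, so the slow-path stub picks up at the first
// uncopied element.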
2281
2282 __ bind(*stub->continuation());
2283 return;
2284 }
2285
2286 assert(default_type != nullptr && default_type->is_array_klass() && default_type->is_loaded(), "must be true at this point");
2287
2288 int elem_size = type2aelembytes(basic_type);
2289 int scale = exact_log2(elem_size);
2290
2291 Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
2292 Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
2293 Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
2294 Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
2295
2296 // test for null
2297 if (flags & LIR_OpArrayCopy::src_null_check) {
2298 __ cbz(src, *stub->entry());
2299 }
2300 if (flags & LIR_OpArrayCopy::dst_null_check) {
2301 __ cbz(dst, *stub->entry());
2302 }
2303
2304 // If the compiler was not able to prove that the exact type of the source or the destination
2305 // of the arraycopy is an array type, check at runtime whether the source or the destination is
2306 // an instance type.
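// (An instance klass has a non-negative layout_helper, while array klasses encode a
// negative value, so comparing against Klass::_lh_neutral_value == 0 and branching on
// GE sends any non-array operand to the slow path.)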
2307 if (flags & LIR_OpArrayCopy::type_check) {
2308 if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
2309 __ load_klass(tmp, dst);
2310 __ ldrw(rscratch1, Address(tmp, in_bytes(Klass::layout_helper_offset())));
2311 __ cmpw(rscratch1, Klass::_lh_neutral_value);
2312 __ br(Assembler::GE, *stub->entry());
2313 }
2314
2335 __ br(Assembler::LT, *stub->entry());
2336 }
2337
2338 if (flags & LIR_OpArrayCopy::src_range_check) {
2339 __ addw(tmp, src_pos, length);
2340 __ ldrw(rscratch1, src_length_addr);
2341 __ cmpw(tmp, rscratch1);
2342 __ br(Assembler::HI, *stub->entry());
2343 }
2344 if (flags & LIR_OpArrayCopy::dst_range_check) {
2345 __ addw(tmp, dst_pos, length);
2346 __ ldrw(rscratch1, dst_length_addr);
2347 __ cmpw(tmp, rscratch1);
2348 __ br(Assembler::HI, *stub->entry());
2349 }
2350
2351 if (flags & LIR_OpArrayCopy::type_check) {
2352 // We don't know the array types are compatible
2353 if (basic_type != T_OBJECT) {
2354 // Simple test for basic type arrays
2355 if (UseCompressedClassPointers) {
2356 __ ldrw(tmp, src_klass_addr);
2357 __ ldrw(rscratch1, dst_klass_addr);
2358 __ cmpw(tmp, rscratch1);
2359 } else {
2360 __ ldr(tmp, src_klass_addr);
2361 __ ldr(rscratch1, dst_klass_addr);
2362 __ cmp(tmp, rscratch1);
2363 }
2364 __ br(Assembler::NE, *stub->entry());
2365 } else {
2366 // For object arrays, if src is a sub class of dst then we can
2367 // safely do the copy.
2368 Label cont, slow;
2369
2370 #define PUSH(r1, r2) \
2371 stp(r1, r2, __ pre(sp, -2 * wordSize));
2372
2373 #define POP(r1, r2) \
2374 ldp(r1, r2, __ post(sp, 2 * wordSize));
2375
2376 __ PUSH(src, dst);
2377
2378 __ load_klass(src, src);
2379 __ load_klass(dst, dst);
2380
2381 __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
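// check_klass_subtype_fast_path branches to 'cont' when src's klass is provably a
// subtype of dst's, to 'slow' on definite failure, and falls through when it cannot
// decide inline; the elided code below completes the full subtype check.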
2382
2383 __ PUSH(src, dst);
2465 }
2466
2467 __ b(*stub->entry());
2468
2469 __ bind(cont);
2470 __ POP(src, dst);
2471 }
2472 }
2473
2474 #ifdef ASSERT
2475 if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2476 // Sanity check the known type with the incoming class. For the
2477 // primitive case the types must match exactly with src.klass and
2478 // dst.klass each exactly matching the default type. For the
2479 // object array case, if no type check is needed then either the
2480 // dst type is exactly the expected type and the src type is a
2481 // subtype which we can't check or src is the same array as dst
2482 // but not necessarily exactly of type default_type.
2483 Label known_ok, halt;
2484 __ mov_metadata(tmp, default_type->constant_encoding());
2485 if (UseCompressedClassPointers) {
2486 __ encode_klass_not_null(tmp);
2487 }
2488
2489 if (basic_type != T_OBJECT) {
2490
2491 if (UseCompressedClassPointers) {
2492 __ ldrw(rscratch1, dst_klass_addr);
2493 __ cmpw(tmp, rscratch1);
2494 } else {
2495 __ ldr(rscratch1, dst_klass_addr);
2496 __ cmp(tmp, rscratch1);
2497 }
2498 __ br(Assembler::NE, halt);
2499 if (UseCompressedClassPointers) {
2500 __ ldrw(rscratch1, src_klass_addr);
2501 __ cmpw(tmp, rscratch1);
2502 } else {
2503 __ ldr(rscratch1, src_klass_addr);
2504 __ cmp(tmp, rscratch1);
2505 }
2506 __ br(Assembler::EQ, known_ok);
2507 } else {
2508 if (UseCompressedClassPointers) {
2509 __ ldrw(rscratch1, dst_klass_addr);
2510 __ cmpw(tmp, rscratch1);
2511 } else {
2512 __ ldr(rscratch1, dst_klass_addr);
2513 __ cmp(tmp, rscratch1);
2514 }
2515 __ br(Assembler::EQ, known_ok);
2516 __ cmp(src, dst);
2517 __ br(Assembler::EQ, known_ok);
2518 }
2519 __ bind(halt);
2520 __ stop("incorrect type information in arraycopy");
2521 __ bind(known_ok);
2522 }
2523 #endif
2524
2525 #ifndef PRODUCT
2526 if (PrintC1Statistics) {
2527 __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2528 }
2529 #endif
2530
2531 __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2532 __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2533 assert_different_registers(c_rarg0, dst, dst_pos, length);
2534 __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));
2576 // done
2577 } else if (op->code() == lir_unlock) {
2578 assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2579 __ unlock_object(hdr, obj, lock, temp, *op->stub()->entry());
2580 } else {
2581 Unimplemented();
2582 }
2583 __ bind(*op->stub()->continuation());
2584 }
2585
2586 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2587 Register obj = op->obj()->as_pointer_register();
2588 Register result = op->result_opr()->as_pointer_register();
2589
2590 CodeEmitInfo* info = op->info();
2591 if (info != nullptr) {
2592 add_debug_info_for_null_check_here(info);
2593 }
2594
2595 if (UseCompressedClassPointers) {
2596 __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2597 __ decode_klass_not_null(result);
2598 } else {
2599 __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2600 }
2601 }
2602
2603 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2604 ciMethod* method = op->profiled_method();
2605 int bci = op->profiled_bci();
2606 ciMethod* callee = op->profiled_callee();
2607
2608 // Update counter for all call types
2609 ciMethodData* md = method->method_data_or_null();
2610 assert(md != nullptr, "Sanity");
2611 ciProfileData* data = md->bci_to_data(bci);
2612 assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
2613 assert(op->mdo()->is_single_cpu(), "mdo must be allocated");
2614 Register mdo = op->mdo()->as_register();
2615 __ mov_metadata(mdo, md->constant_encoding());
2616 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
|
1213 (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1214 (!UseFastNewTypeArray && !is_reference_type(op->type()))) {
1215 __ b(*op->stub()->entry());
1216 } else {
1217 Register tmp1 = op->tmp1()->as_register();
1218 Register tmp2 = op->tmp2()->as_register();
1219 Register tmp3 = op->tmp3()->as_register();
1220 if (len == tmp1) {
1221 tmp1 = tmp3;
1222 } else if (len == tmp2) {
1223 tmp2 = tmp3;
1224 } else if (len == tmp3) {
1225 // everything is ok
1226 } else {
1227 __ mov(tmp3, len);
1228 }
1229 __ allocate_array(op->obj()->as_register(),
1230 len,
1231 tmp1,
1232 tmp2,
1233 arrayOopDesc::base_offset_in_bytes(op->type()),
1234 array_element_size(op->type()),
1235 op->klass()->as_register(),
1236 *op->stub()->entry());
1237 }
1238 __ bind(*op->stub()->continuation());
1239 }
1240
1241 void LIR_Assembler::type_profile_helper(Register mdo,
1242 ciMethodData *md, ciProfileData *data,
1243 Register recv, Label* update_done) {
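// A ReceiverTypeData entry in the MDO holds a fixed number of rows, each pairing a
// recorded receiver klass with a hit counter. Scan the rows: on a match, bump that
// row's counter and jump to update_done. The remainder of the helper (not shown
// here) claims an empty row for a previously unseen receiver.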
1244 for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1245 Label next_test;
1246 // See if the receiver is receiver[n].
1247 __ lea(rscratch2, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1248 __ ldr(rscratch1, Address(rscratch2));
1249 __ cmp(recv, rscratch1);
1250 __ br(Assembler::NE, next_test);
1251 Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1252 __ addptr(data_addr, DataLayout::counter_increment);
1253 __ b(*update_done);
2273
2274 // r0 is -1^K (XOR, i.e. r0 == ~K) where K == partial copied count
2275 __ eonw(rscratch1, r0, zr);   // rscratch1 = r0 ^ ~0 = ~r0 = K
2276 // adjust length down and src/dst pos up by partial copied count
2277 __ subw(length, length, rscratch1);
2278 __ addw(src_pos, src_pos, rscratch1);
2279 __ addw(dst_pos, dst_pos, rscratch1);
2280 __ b(*stub->entry());
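// Illustration: if the copy stub managed K == 3 of 10 elements before failing, r0
// holds -1^3 == ~3 == 0xfffffffc; the eonw above recovers 3, length becomes 7, and
// src_pos/dst_pos advance by 3, so the slow-path stub picks up at the first
// uncopied element.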
2281
2282 __ bind(*stub->continuation());
2283 return;
2284 }
2285
2286 assert(default_type != nullptr && default_type->is_array_klass() && default_type->is_loaded(), "must be true at this point");
2287
2288 int elem_size = type2aelembytes(basic_type);
2289 int scale = exact_log2(elem_size);
2290
2291 Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
2292 Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
2293
2294 // test for null
2295 if (flags & LIR_OpArrayCopy::src_null_check) {
2296 __ cbz(src, *stub->entry());
2297 }
2298 if (flags & LIR_OpArrayCopy::dst_null_check) {
2299 __ cbz(dst, *stub->entry());
2300 }
2301
2302 // If the compiler was not able to prove that the exact type of the source or the destination
2303 // of the arraycopy is an array type, check at runtime whether the source or the destination is
2304 // an instance type.
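// (An instance klass has a non-negative layout_helper, while array klasses encode a
// negative value, so comparing against Klass::_lh_neutral_value == 0 and branching on
// GE sends any non-array operand to the slow path.)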
2305 if (flags & LIR_OpArrayCopy::type_check) {
2306 if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
2307 __ load_klass(tmp, dst);
2308 __ ldrw(rscratch1, Address(tmp, in_bytes(Klass::layout_helper_offset())));
2309 __ cmpw(rscratch1, Klass::_lh_neutral_value);
2310 __ br(Assembler::GE, *stub->entry());
2311 }
2312
2333 __ br(Assembler::LT, *stub->entry());
2334 }
2335
2336 if (flags & LIR_OpArrayCopy::src_range_check) {
2337 __ addw(tmp, src_pos, length);
2338 __ ldrw(rscratch1, src_length_addr);
2339 __ cmpw(tmp, rscratch1);
2340 __ br(Assembler::HI, *stub->entry());
2341 }
2342 if (flags & LIR_OpArrayCopy::dst_range_check) {
2343 __ addw(tmp, dst_pos, length);
2344 __ ldrw(rscratch1, dst_length_addr);
2345 __ cmpw(tmp, rscratch1);
2346 __ br(Assembler::HI, *stub->entry());
2347 }
2348
2349 if (flags & LIR_OpArrayCopy::type_check) {
2350 // We don't know the array types are compatible
2351 if (basic_type != T_OBJECT) {
2352 // Simple test for basic type arrays
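// cmp_klass compares the two objects' klass words, presumably handling compressed
// class pointers (and, with compact object headers, the klass bits stored in the
// mark word) internally.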
2353 __ cmp_klass(src, dst, tmp, rscratch1);
2354 __ br(Assembler::NE, *stub->entry());
2355 } else {
2356 // For object arrays, if src is a sub class of dst then we can
2357 // safely do the copy.
2358 Label cont, slow;
2359
2360 #define PUSH(r1, r2) \
2361 stp(r1, r2, __ pre(sp, -2 * wordSize));
2362
2363 #define POP(r1, r2) \
2364 ldp(r1, r2, __ post(sp, 2 * wordSize));
2365
2366 __ PUSH(src, dst);
2367
2368 __ load_klass(src, src);
2369 __ load_klass(dst, dst);
2370
2371 __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
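// check_klass_subtype_fast_path branches to 'cont' when src's klass is provably a
// subtype of dst's, to 'slow' on definite failure, and falls through when it cannot
// decide inline; the elided code below completes the full subtype check.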
2372
2373 __ PUSH(src, dst);
2455 }
2456
2457 __ b(*stub->entry());
2458
2459 __ bind(cont);
2460 __ POP(src, dst);
2461 }
2462 }
2463
2464 #ifdef ASSERT
2465 if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2466 // Sanity check the known type with the incoming class. For the
2467 // primitive case the types must match exactly with src.klass and
2468 // dst.klass each exactly matching the default type. For the
2469 // object array case, if no type check is needed then either the
2470 // dst type is exactly the expected type and the src type is a
2471 // subtype which we can't check or src is the same array as dst
2472 // but not necessarily exactly of type default_type.
2473 Label known_ok, halt;
2474 __ mov_metadata(tmp, default_type->constant_encoding());
2475
2476 if (basic_type != T_OBJECT) {
2477 __ cmp_klass(dst, tmp, rscratch1);
2478 __ br(Assembler::NE, halt);
2479 __ cmp_klass(src, tmp, rscratch1);
2480 __ br(Assembler::EQ, known_ok);
2481 } else {
2482 __ cmp_klass(dst, tmp, rscratch1);
2483 __ br(Assembler::EQ, known_ok);
2484 __ cmp(src, dst);
2485 __ br(Assembler::EQ, known_ok);
2486 }
2487 __ bind(halt);
2488 __ stop("incorrect type information in arraycopy");
2489 __ bind(known_ok);
2490 }
2491 #endif
2492
2493 #ifndef PRODUCT
2494 if (PrintC1Statistics) {
2495 __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2496 }
2497 #endif
2498
2499 __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2500 __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2501 assert_different_registers(c_rarg0, dst, dst_pos, length);
2502 __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));
2544 // done
2545 } else if (op->code() == lir_unlock) {
2546 assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2547 __ unlock_object(hdr, obj, lock, temp, *op->stub()->entry());
2548 } else {
2549 Unimplemented();
2550 }
2551 __ bind(*op->stub()->continuation());
2552 }
2553
2554 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2555 Register obj = op->obj()->as_pointer_register();
2556 Register result = op->result_opr()->as_pointer_register();
2557
2558 CodeEmitInfo* info = op->info();
2559 if (info != nullptr) {
2560 add_debug_info_for_null_check_here(info);
2561 }
2562
2563 if (UseCompressedClassPointers) {
2564 if (UseCompactObjectHeaders) {
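// With compact object headers the narrow Klass* occupies the upper bits of the mark
// word (hence the markWord::klass_shift below). A monitor-locked object has its mark
// replaced by a pointer to the ObjectMonitor, so that case is sent to the stub, which
// presumably recovers the header before the continuation point.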
2565 // Check if we can take the (common) fast path, if obj is unlocked.
2566 __ ldr(result, Address(obj, oopDesc::mark_offset_in_bytes()));
2567 __ tst(result, markWord::monitor_value);
2568 __ br(Assembler::NE, *op->stub()->entry());
2569 __ bind(*op->stub()->continuation());
2570
2571 // Shift to get proper narrow Klass*.
2572 __ lsr(result, result, markWord::klass_shift);
2573 } else {
2574 __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2575 }
2576 __ decode_klass_not_null(result);
2577 } else {
2578 __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2579 }
2580 }
2581
2582 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2583 ciMethod* method = op->profiled_method();
2584 int bci = op->profiled_bci();
2585 ciMethod* callee = op->profiled_callee();
2586
2587 // Update counter for all call types
2588 ciMethodData* md = method->method_data_or_null();
2589 assert(md != nullptr, "Sanity");
2590 ciProfileData* data = md->bci_to_data(bci);
2591 assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
2592 assert(op->mdo()->is_single_cpu(), "mdo must be allocated");
2593 Register mdo = op->mdo()->as_register();
2594 __ mov_metadata(mdo, md->constant_encoding());
2595 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
|