src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp

1226       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1227       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1228     __ b(*op->stub()->entry());
1229   } else {
1230     Register tmp1 = op->tmp1()->as_register();
1231     Register tmp2 = op->tmp2()->as_register();
1232     Register tmp3 = op->tmp3()->as_register();
1233     if (len == tmp1) {
1234       tmp1 = tmp3;
1235     } else if (len == tmp2) {
1236       tmp2 = tmp3;
1237     } else if (len == tmp3) {
1238       // everything is ok
1239     } else {
1240       __ mov(tmp3, len);
1241     }
1242     __ allocate_array(op->obj()->as_register(),
1243                       len,
1244                       tmp1,
1245                       tmp2,
1246                       arrayOopDesc::header_size(op->type()),
1247                       array_element_size(op->type()),
1248                       op->klass()->as_register(),
1249                       *op->stub()->entry());
1250   }
1251   __ bind(*op->stub()->continuation());
1252 }
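
Note on the temp shuffle above: the three-way if/else only resolves aliasing between len and the temps, so that the registers handed to allocate_array never overlap the one holding the length (and, if len lives elsewhere, its value is copied into tmp3). A standalone sketch of the same selection logic, using hypothetical register ids rather than HotSpot's Register type:

    #include <cassert>

    // Hypothetical register ids standing in for HotSpot's Register type.
    enum Register { r0, r1, r2, r3 };

    struct Picked { Register tmp1, tmp2, tmp3; bool need_move; };

    // Mirror of the aliasing resolution above: if len already occupies one of the
    // temps, retarget that temp to tmp3; otherwise len must be moved into tmp3.
    Picked pick_temps(Register len, Register tmp1, Register tmp2, Register tmp3) {
      bool need_move = false;
      if (len == tmp1)        tmp1 = tmp3;     // len lives in tmp1: use tmp3 as the spare temp
      else if (len == tmp2)   tmp2 = tmp3;     // len lives in tmp2: same idea
      else if (len == tmp3)   { }              // everything is ok
      else                    need_move = true;  // len is elsewhere: copy it into tmp3
      return { tmp1, tmp2, tmp3, need_move };
    }

    int main() {
      Picked p = pick_temps(/*len=*/r1, /*tmp1=*/r1, /*tmp2=*/r2, /*tmp3=*/r3);
      assert(p.tmp1 == r3 && !p.need_move);    // len kept in place, tmp1 retargeted to tmp3
    }
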
1253 
1254 void LIR_Assembler::type_profile_helper(Register mdo,
1255                                         ciMethodData *md, ciProfileData *data,
1256                                         Register recv, Label* update_done) {
1257   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1258     Label next_test;
1259     // See if the receiver is receiver[n].
1260     __ lea(rscratch2, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1261     __ ldr(rscratch1, Address(rscratch2));
1262     __ cmp(recv, rscratch1);
1263     __ br(Assembler::NE, next_test);
1264     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1265     __ addptr(data_addr, DataLayout::counter_increment);
1266     __ b(*update_done);

2282 
2283     // r0 is -1^K where K == partial copied count
2284     __ eonw(rscratch1, r0, zr);
2285     // adjust length down and src/dst pos up by partial copied count
2286     __ subw(length, length, rscratch1);
2287     __ addw(src_pos, src_pos, rscratch1);
2288     __ addw(dst_pos, dst_pos, rscratch1);
2289     __ b(*stub->entry());
2290 
2291     __ bind(*stub->continuation());
2292     return;
2293   }
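
Per the comment above, the generic arraycopy stub reports a partial copy by returning the bitwise complement of the number of elements it did copy, so eonw(rscratch1, r0, zr) recovers K before the window is adjusted. A minimal standalone illustration of the same arithmetic, with plain C++ variables instead of HotSpot registers:

    #include <cassert>
    #include <cstdint>

    // r0 is -1^K, i.e. ~K, where K elements were copied before the stub bailed out.
    // Recover K and slide the copy window forward, exactly as the subw/addw sequence does.
    void adjust_after_partial_copy(int32_t r0, int32_t& length, int32_t& src_pos, int32_t& dst_pos) {
      int32_t k = ~r0;       // eonw(rscratch1, r0, zr): XOR with all-ones == bitwise NOT
      length  -= k;          // fewer elements remain
      src_pos += k;          // both positions advance past what was copied
      dst_pos += k;
    }

    int main() {
      int32_t length = 100, src_pos = 0, dst_pos = 10;
      adjust_after_partial_copy(/*r0=*/~25, length, src_pos, dst_pos);  // stub copied 25 elements
      assert(length == 75 && src_pos == 25 && dst_pos == 35);
    }
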
2294 
2295   assert(default_type != NULL && default_type->is_array_klass() && default_type->is_loaded(), "must be true at this point");
2296 
2297   int elem_size = type2aelembytes(basic_type);
2298   int scale = exact_log2(elem_size);
2299 
2300   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
2301   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
2302   Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
2303   Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
2304 
2305   // test for NULL
2306   if (flags & LIR_OpArrayCopy::src_null_check) {
2307     __ cbz(src, *stub->entry());
2308   }
2309   if (flags & LIR_OpArrayCopy::dst_null_check) {
2310     __ cbz(dst, *stub->entry());
2311   }
2312 
2313   // If the compiler was not able to prove that exact type of the source or the destination
2314   // of the arraycopy is an array type, check at runtime if the source or the destination is
2315   // an instance type.
2316   if (flags & LIR_OpArrayCopy::type_check) {
2317     if (!(flags & LIR_OpArrayCopy::LIR_OpArrayCopy::dst_objarray)) {
2318       __ load_klass(tmp, dst);
2319       __ ldrw(rscratch1, Address(tmp, in_bytes(Klass::layout_helper_offset())));
2320       __ cmpw(rscratch1, Klass::_lh_neutral_value);
2321       __ br(Assembler::GE, *stub->entry());
2322     }
2323 

2344     __ br(Assembler::LT, *stub->entry());
2345   }
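
The instance-type filter above (load_klass, then a layout_helper compare that branches GE to the stub) relies on the sign convention of Klass::layout_helper: array klasses carry a tag that makes the value negative, while instance klasses and the neutral slow-path value are non-negative. A rough standalone model of that test; the exact encoding is an assumption here, only the sign convention matters:

    #include <cassert>
    #include <cstdint>

    // Illustrative sign convention: negative values tag array klasses, zero is the
    // "neutral" (go slow) value, positive values are instance sizes.
    constexpr int32_t lh_neutral_value = 0;

    bool definitely_an_array(int32_t layout_helper) {
      // Mirrors: cmpw(lh, _lh_neutral_value); br(GE, stub)  -- GE means "not an array"
      return layout_helper < lh_neutral_value;
    }

    int main() {
      assert(definitely_an_array(-0x01000008));        // made-up negative (array-tagged) helper
      assert(!definitely_an_array(16));                // instance of 16 bytes: take the stub
      assert(!definitely_an_array(lh_neutral_value));  // neutral value: take the stub
    }
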
2346 
2347   if (flags & LIR_OpArrayCopy::src_range_check) {
2348     __ addw(tmp, src_pos, length);
2349     __ ldrw(rscratch1, src_length_addr);
2350     __ cmpw(tmp, rscratch1);
2351     __ br(Assembler::HI, *stub->entry());
2352   }
2353   if (flags & LIR_OpArrayCopy::dst_range_check) {
2354     __ addw(tmp, dst_pos, length);
2355     __ ldrw(rscratch1, dst_length_addr);
2356     __ cmpw(tmp, rscratch1);
2357     __ br(Assembler::HI, *stub->entry());
2358   }
2359 
2360   if (flags & LIR_OpArrayCopy::type_check) {
2361     // We don't know the array types are compatible
2362     if (basic_type != T_OBJECT) {
2363       // Simple test for basic type arrays
2364       if (UseCompressedClassPointers) {
2365         __ ldrw(tmp, src_klass_addr);
2366         __ ldrw(rscratch1, dst_klass_addr);
2367         __ cmpw(tmp, rscratch1);
2368       } else {
2369         __ ldr(tmp, src_klass_addr);
2370         __ ldr(rscratch1, dst_klass_addr);
2371         __ cmp(tmp, rscratch1);
2372       }
2373       __ br(Assembler::NE, *stub->entry());
2374     } else {
2375       // For object arrays, if src is a sub class of dst then we can
2376       // safely do the copy.
2377       Label cont, slow;
2378 
2379 #define PUSH(r1, r2)                                    \
2380       stp(r1, r2, __ pre(sp, -2 * wordSize));
2381 
2382 #define POP(r1, r2)                                     \
2383       ldp(r1, r2, __ post(sp, 2 * wordSize));
2384 
2385       __ PUSH(src, dst);
2386 
2387       __ load_klass(src, src);
2388       __ load_klass(dst, dst);


2389 
2390       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);
2391 
2392       __ PUSH(src, dst);
2393       __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
2394       __ POP(src, dst);
2395 
2396       __ cbnz(src, cont);
2397 
2398       __ bind(slow);
2399       __ POP(src, dst);
2400 
2401       address copyfunc_addr = StubRoutines::checkcast_arraycopy();
2402       if (copyfunc_addr != NULL) { // use stub if available
2403         // src is not a sub class of dst so we have to do a
2404         // per-element check.
2405 
2406         int mask = LIR_OpArrayCopy::src_objarray|LIR_OpArrayCopy::dst_objarray;
2407         if ((flags & mask) != mask) {
2408           // Check that both of them are object arrays.

2478       __ bind(cont);
2479       __ POP(src, dst);
2480     }
2481   }
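
For the object-array case the code above (together with the elided lines) follows a tiered strategy: an inline fast subtype check, then the Runtime1 slow_subtype_check call, and finally, when StubRoutines::checkcast_arraycopy() is available, a per-element checked copy; only if all of that fails does it fall back to the shared slow-path stub. A compact, purely illustrative C++ sketch of that decision ladder, with invented names that only mirror the control flow:

    #include <cassert>

    // Invented stand-ins for the real checks; only the (simplified) control flow
    // mirrors the emitted code above.
    struct ArrayCopyChecks {
      bool fast_subtype_ok;      // outcome of check_klass_subtype_fast_path
      bool slow_subtype_ok;      // outcome of the slow_subtype_check runtime call
      bool have_checkcast_stub;  // StubRoutines::checkcast_arraycopy() != NULL
    };

    enum class CopyPlan { BulkCopy, PerElementCheckedCopy, SlowPathStub };

    CopyPlan choose_copy_plan(const ArrayCopyChecks& c) {
      if (c.fast_subtype_ok || c.slow_subtype_ok) return CopyPlan::BulkCopy;
      if (c.have_checkcast_stub)                  return CopyPlan::PerElementCheckedCopy;
      return CopyPlan::SlowPathStub;
    }

    int main() {
      assert(choose_copy_plan({true,  false, false}) == CopyPlan::BulkCopy);
      assert(choose_copy_plan({false, false, true }) == CopyPlan::PerElementCheckedCopy);
      assert(choose_copy_plan({false, false, false}) == CopyPlan::SlowPathStub);
    }
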
2482 
2483 #ifdef ASSERT
2484   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2485     // Sanity check the known type with the incoming class.  For the
2486     // primitive case the types must match exactly with src.klass and
2487     // dst.klass each exactly matching the default type.  For the
2488     // object array case, if no type check is needed then either the
2489     // dst type is exactly the expected type and the src type is a
2490     // subtype which we can't check or src is the same array as dst
2491     // but not necessarily exactly of type default_type.
2492     Label known_ok, halt;
2493     __ mov_metadata(tmp, default_type->constant_encoding());
2494     if (UseCompressedClassPointers) {
2495       __ encode_klass_not_null(tmp);
2496     }
2497 

2498     if (basic_type != T_OBJECT) {
2499 
2500       if (UseCompressedClassPointers) {
2501         __ ldrw(rscratch1, dst_klass_addr);
2502         __ cmpw(tmp, rscratch1);
2503       } else {
2504         __ ldr(rscratch1, dst_klass_addr);
2505         __ cmp(tmp, rscratch1);
2506       }
2507       __ br(Assembler::NE, halt);
2508       if (UseCompressedClassPointers) {
2509         __ ldrw(rscratch1, src_klass_addr);
2510         __ cmpw(tmp, rscratch1);
2511       } else {
2512         __ ldr(rscratch1, src_klass_addr);
2513         __ cmp(tmp, rscratch1);
2514       }
2515       __ br(Assembler::EQ, known_ok);
2516     } else {
2517       if (UseCompressedClassPointers) {
2518         __ ldrw(rscratch1, dst_klass_addr);
2519         __ cmpw(tmp, rscratch1);
2520       } else {
2521         __ ldr(rscratch1, dst_klass_addr);
2522         __ cmp(tmp, rscratch1);
2523       }
2524       __ br(Assembler::EQ, known_ok);
2525       __ cmp(src, dst);
2526       __ br(Assembler::EQ, known_ok);
2527     }
2528     __ bind(halt);
2529     __ stop("incorrect type information in arraycopy");
2530     __ bind(known_ok);
2531   }
2532 #endif
2533 
2534 #ifndef PRODUCT
2535   if (PrintC1Statistics) {
2536     __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2537   }
2538 #endif
2539 
2540   __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2541   __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2542   assert_different_registers(c_rarg0, dst, dst_pos, length);
2543   __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));
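
The lea/add sequences above compute the first source and destination element addresses as array + base_offset_in_bytes(type) + (pos << scale). The same arithmetic as a tiny standalone helper; the 16-byte base offset in the example is only an assumption for illustration:

    #include <cassert>
    #include <cstdint>

    // addr = array_base + base_offset + (pos << scale), matching lea(..., uxtw(scale)) + add above.
    uintptr_t element_address(uintptr_t array_base, int base_offset_in_bytes,
                              uint32_t pos, int scale) {
      return array_base + base_offset_in_bytes + (uintptr_t(pos) << scale);
    }

    int main() {
      // Illustrative numbers only: an int[] (element size 4 => scale 2) with an
      // assumed 16-byte array base offset.
      uintptr_t base = 0x1000;
      assert(element_address(base, 16, /*pos=*/3, /*scale=*/2) == base + 16 + 12);
    }
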

2573   } else if (op->code() == lir_lock) {
2574     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2575     // add debug info for NullPointerException only if one is possible
2576     int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
2577     if (op->info() != NULL) {
2578       add_debug_info_for_null_check(null_check_offset, op->info());
2579     }
2580     // done
2581   } else if (op->code() == lir_unlock) {
2582     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2583     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2584   } else {
2585     Unimplemented();
2586   }
2587   __ bind(*op->stub()->continuation());
2588 }
2589 
2590 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2591   Register obj = op->obj()->as_pointer_register();
2592   Register result = op->result_opr()->as_pointer_register();

2593 
2594   CodeEmitInfo* info = op->info();
2595   if (info != NULL) {
2596     add_debug_info_for_null_check_here(info);
2597   }
2598 
2599   if (UseCompressedClassPointers) {
2600     __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2601     __ decode_klass_not_null(result);
2602   } else {
2603     __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
2604   }








2605 }
2606 
2607 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2608   ciMethod* method = op->profiled_method();
2609   int bci          = op->profiled_bci();
2610   ciMethod* callee = op->profiled_callee();
2611 
2612   // Update counter for all call types
2613   ciMethodData* md = method->method_data_or_null();
2614   assert(md != NULL, "Sanity");
2615   ciProfileData* data = md->bci_to_data(bci);
2616   assert(data != NULL && data->is_CounterData(), "need CounterData for calls");
2617   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2618   Register mdo  = op->mdo()->as_register();
2619   __ mov_metadata(mdo, md->constant_encoding());
2620   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
2621   // Perform additional virtual call profiling for invokevirtual and
2622   // invokeinterface bytecodes
2623   if (op->should_profile_receiver_type()) {
2624     assert(op->recv()->is_single_cpu(), "recv must be allocated");

2645       }
2646 
2647       // Receiver type not found in profile data; select an empty slot
2648 
2649       // Note that this is less efficient than it should be because it
2650       // always does a write to the receiver part of the
2651       // VirtualCallData rather than just the first time
2652       for (i = 0; i < VirtualCallData::row_limit(); i++) {
2653         ciKlass* receiver = vc_data->receiver(i);
2654         if (receiver == NULL) {
2655           Address recv_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_offset(i)));
2656           __ mov_metadata(rscratch1, known_klass->constant_encoding());
2657           __ lea(rscratch2, recv_addr);
2658           __ str(rscratch1, Address(rscratch2));
2659           Address data_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_count_offset(i)));
2660           __ addptr(data_addr, DataLayout::counter_increment);
2661           return;
2662         }
2663       }
2664     } else {
2665       __ load_klass(recv, recv);

2666       Label update_done;
2667       type_profile_helper(mdo, md, data, recv, &update_done);
2668       // Receiver did not match any saved receiver and there is no empty row for it.
2669       // Increment total counter to indicate polymorphic case.
2670       __ addptr(counter_addr, DataLayout::counter_increment);
2671 
2672       __ bind(update_done);
2673     }
2674   } else {
2675     // Static call
2676     __ addptr(counter_addr, DataLayout::counter_increment);
2677   }
2678 }
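
Taken together, type_profile_helper and the receiver-profiling code above implement a small fixed-size receiver cache per call site: match an existing row and bump its counter, otherwise claim an empty row, otherwise bump the shared counter for the polymorphic case. A self-contained sketch of that policy; the row count and types are placeholders, not the real ReceiverTypeData layout:

    #include <array>
    #include <cassert>
    #include <cstdint>

    // Placeholder model of a ReceiverTypeData-style row table; not the real MDO layout.
    struct ReceiverRows {
      static constexpr int row_limit = 2;               // the real row limit is similarly small
      std::array<const void*, row_limit> receiver{};    // nullptr == empty row
      std::array<uint64_t,    row_limit> count{};
      uint64_t polymorphic_count = 0;                   // the shared CounterData slot

      void profile(const void* recv_klass) {
        for (int i = 0; i < row_limit; i++) {
          if (receiver[i] == recv_klass) { count[i]++; return; }   // matched an existing row
        }
        for (int i = 0; i < row_limit; i++) {
          if (receiver[i] == nullptr) {                            // claim an empty row
            receiver[i] = recv_klass; count[i] = 1; return;
          }
        }
        polymorphic_count++;                                       // table full: polymorphic case
      }
    };

    int main() {
      int a, b, c;                     // three distinct "klasses" for illustration
      ReceiverRows rows;
      rows.profile(&a); rows.profile(&a); rows.profile(&b); rows.profile(&c);
      assert(rows.count[0] == 2 && rows.count[1] == 1 && rows.polymorphic_count == 1);
    }
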
2679 
2680 
2681 void LIR_Assembler::emit_delay(LIR_OpDelay*) {
2682   Unimplemented();
2683 }
2684 
2685 

2739     }
2740     if (do_update) {
2741 #ifndef ASSERT
2742       __ b(next);
2743     }
2744 #else
2745       __ b(next);
2746     }
2747   } else {
2748     __ cbnz(tmp, update);
2749     __ stop("unexpected null obj");
2750 #endif
2751   }
2752 
2753   __ bind(update);
2754 
2755   if (do_update) {
2756 #ifdef ASSERT
2757     if (exact_klass != NULL) {
2758       Label ok;
2759       __ load_klass(tmp, tmp);

2760       __ mov_metadata(rscratch1, exact_klass->constant_encoding());
2761       __ eor(rscratch1, tmp, rscratch1);
2762       __ cbz(rscratch1, ok);
2763       __ stop("exact klass and actual klass differ");
2764       __ bind(ok);
2765     }
2766 #endif
2767     if (!no_conflict) {
2768       if (exact_klass == NULL || TypeEntries::is_type_none(current_klass)) {
2769         if (exact_klass != NULL) {
2770           __ mov_metadata(tmp, exact_klass->constant_encoding());
2771         } else {
2772           __ load_klass(tmp, tmp);

2773         }
2774 
2775         __ ldr(rscratch2, mdo_addr);
2776         __ eor(tmp, tmp, rscratch2);
2777         __ andr(rscratch1, tmp, TypeEntries::type_klass_mask);
2778         // klass seen before, nothing to do. The unknown bit may have been
2779         // set already but no need to check.
2780         __ cbz(rscratch1, next);
2781 
2782         __ tbnz(tmp, exact_log2(TypeEntries::type_unknown), next); // already unknown. Nothing to do anymore.
2783 
2784         if (TypeEntries::is_type_none(current_klass)) {
2785           __ cbz(rscratch2, none);
2786           __ cmp(rscratch2, (u1)TypeEntries::null_seen);
2787           __ br(Assembler::EQ, none);
2788           // There is a chance that the checks above (re-reading profiling
2789           // data from memory) fail if another thread has just set the
2790           // profiling to this obj's klass
2791           __ dmb(Assembler::ISHLD);
2792           __ ldr(rscratch2, mdo_addr);
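
The eor/andr/cbz/tbnz sequence above treats the profile cell as a klass pointer with status bits packed into its low bits: XOR-ing the observed klass with the cell and masking with type_klass_mask is zero exactly when the same klass is already recorded, and the type_unknown bit short-circuits any further update. The reload after dmb ISHLD guards against another thread having just published its klass into the cell, as the comment notes. A hedged standalone model of that encoding and test; the bit positions here are assumptions, not the real TypeEntries constants:

    #include <cassert>
    #include <cstdint>

    // Assumed encoding, for illustration only: low two bits of the profile cell
    // are status flags, the rest is the klass pointer.
    constexpr uintptr_t null_seen       = 1u << 0;
    constexpr uintptr_t type_unknown    = 1u << 1;
    constexpr uintptr_t type_klass_mask = ~(null_seen | type_unknown);

    // Mirrors: eor(tmp, klass, cell); andr(rscratch1, tmp, type_klass_mask); cbz -> already recorded.
    bool same_klass_already_recorded(uintptr_t cell, uintptr_t klass) {
      return ((cell ^ klass) & type_klass_mask) == 0;
    }

    bool marked_unknown(uintptr_t cell) { return (cell & type_unknown) != 0; }

    int main() {
      uintptr_t k = 0x7f00ULL;                                  // made-up, aligned "klass pointer"
      assert(same_klass_already_recorded(k | null_seen, k));    // flag bits don't disturb the match
      assert(!same_klass_already_recorded(0, k));               // empty cell: not recorded yet
      assert(marked_unknown(k | type_unknown));                 // unknown bit: nothing more to do
    }
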

1226       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1227       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1228     __ b(*op->stub()->entry());
1229   } else {
1230     Register tmp1 = op->tmp1()->as_register();
1231     Register tmp2 = op->tmp2()->as_register();
1232     Register tmp3 = op->tmp3()->as_register();
1233     if (len == tmp1) {
1234       tmp1 = tmp3;
1235     } else if (len == tmp2) {
1236       tmp2 = tmp3;
1237     } else if (len == tmp3) {
1238       // everything is ok
1239     } else {
1240       __ mov(tmp3, len);
1241     }
1242     __ allocate_array(op->obj()->as_register(),
1243                       len,
1244                       tmp1,
1245                       tmp2,
1246                       arrayOopDesc::base_offset_in_bytes(op->type()),
1247                       array_element_size(op->type()),
1248                       op->klass()->as_register(),
1249                       *op->stub()->entry());
1250   }
1251   __ bind(*op->stub()->continuation());
1252 }
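
The new-array path above now passes arrayOopDesc::base_offset_in_bytes(op->type()) where the old code passed arrayOopDesc::header_size(op->type()), which appears to move allocate_array to a byte-based first-element offset consistent with the smaller object header. A small illustrative sizing helper matching the parameters handed to allocate_array; the 8-byte alignment and the 12-byte base offset in the example are assumptions, not the JVM's actual values:

    #include <cassert>
    #include <cstdint>
    #include <cstddef>

    constexpr int object_alignment = 8;   // assumed alignment, for illustration only

    // size = base offset of the first element, plus len scaled by the element size,
    // rounded up to the assumed object alignment.
    size_t array_size_in_bytes(int base_offset_in_bytes, int elem_size, uint32_t len) {
      size_t raw = size_t(base_offset_in_bytes) + size_t(len) * elem_size;
      return (raw + object_alignment - 1) & ~size_t(object_alignment - 1);
    }

    int main() {
      // e.g. a boolean[5] with an assumed 12-byte base offset: 12 + 5 = 17 -> 24 after alignment.
      assert(array_size_in_bytes(12, 1, 5) == 24);
    }
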
1253 
1254 void LIR_Assembler::type_profile_helper(Register mdo,
1255                                         ciMethodData *md, ciProfileData *data,
1256                                         Register recv, Label* update_done) {
1257   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1258     Label next_test;
1259     // See if the receiver is receiver[n].
1260     __ lea(rscratch2, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1261     __ ldr(rscratch1, Address(rscratch2));
1262     __ cmp(recv, rscratch1);
1263     __ br(Assembler::NE, next_test);
1264     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1265     __ addptr(data_addr, DataLayout::counter_increment);
1266     __ b(*update_done);

2282 
2283     // r0 is -1^K where K == partial copied count
2284     __ eonw(rscratch1, r0, zr);
2285     // adjust length down and src/dst pos up by partial copied count
2286     __ subw(length, length, rscratch1);
2287     __ addw(src_pos, src_pos, rscratch1);
2288     __ addw(dst_pos, dst_pos, rscratch1);
2289     __ b(*stub->entry());
2290 
2291     __ bind(*stub->continuation());
2292     return;
2293   }
2294 
2295   assert(default_type != NULL && default_type->is_array_klass() && default_type->is_loaded(), "must be true at this point");
2296 
2297   int elem_size = type2aelembytes(basic_type);
2298   int scale = exact_log2(elem_size);
2299 
2300   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
2301   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());


2302 
2303   // test for NULL
2304   if (flags & LIR_OpArrayCopy::src_null_check) {
2305     __ cbz(src, *stub->entry());
2306   }
2307   if (flags & LIR_OpArrayCopy::dst_null_check) {
2308     __ cbz(dst, *stub->entry());
2309   }
2310 
2311   // If the compiler was not able to prove that exact type of the source or the destination
2312   // of the arraycopy is an array type, check at runtime if the source or the destination is
2313   // an instance type.
2314   if (flags & LIR_OpArrayCopy::type_check) {
2315     if (!(flags & LIR_OpArrayCopy::LIR_OpArrayCopy::dst_objarray)) {
2316       __ load_klass(tmp, dst);
2317       __ ldrw(rscratch1, Address(tmp, in_bytes(Klass::layout_helper_offset())));
2318       __ cmpw(rscratch1, Klass::_lh_neutral_value);
2319       __ br(Assembler::GE, *stub->entry());
2320     }
2321 

2342     __ br(Assembler::LT, *stub->entry());
2343   }
2344 
2345   if (flags & LIR_OpArrayCopy::src_range_check) {
2346     __ addw(tmp, src_pos, length);
2347     __ ldrw(rscratch1, src_length_addr);
2348     __ cmpw(tmp, rscratch1);
2349     __ br(Assembler::HI, *stub->entry());
2350   }
2351   if (flags & LIR_OpArrayCopy::dst_range_check) {
2352     __ addw(tmp, dst_pos, length);
2353     __ ldrw(rscratch1, dst_length_addr);
2354     __ cmpw(tmp, rscratch1);
2355     __ br(Assembler::HI, *stub->entry());
2356   }
2357 
2358   if (flags & LIR_OpArrayCopy::type_check) {
2359     // We don't know the array types are compatible
2360     if (basic_type != T_OBJECT) {
2361       // Simple test for basic type arrays
2362       assert(UseCompressedClassPointers, "Lilliput");
2363       __ load_nklass(tmp, src);
2364       __ load_nklass(rscratch1, dst);
2365       __ cmpw(tmp, rscratch1);





2366       __ br(Assembler::NE, *stub->entry());
2367     } else {
2368       // For object arrays, if src is a sub class of dst then we can
2369       // safely do the copy.
2370       Label cont, slow;
2371 
2372 #define PUSH(r1, r2)                                    \
2373       stp(r1, r2, __ pre(sp, -2 * wordSize));
2374 
2375 #define POP(r1, r2)                                     \
2376       ldp(r1, r2, __ post(sp, 2 * wordSize));
2377 
2378       __ PUSH(src, dst);
2379 
2380       __ load_klass(tmp, src);
2381       __ mov(src, tmp);
2382       __ load_klass(tmp, dst);
2383       __ mov(dst, tmp);
2384 
2385       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);
2386 
2387       __ PUSH(src, dst);
2388       __ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
2389       __ POP(src, dst);
2390 
2391       __ cbnz(src, cont);
2392 
2393       __ bind(slow);
2394       __ POP(src, dst);
2395 
2396       address copyfunc_addr = StubRoutines::checkcast_arraycopy();
2397       if (copyfunc_addr != NULL) { // use stub if available
2398         // src is not a sub class of dst so we have to do a
2399         // per-element check.
2400 
2401         int mask = LIR_OpArrayCopy::src_objarray|LIR_OpArrayCopy::dst_objarray;
2402         if ((flags & mask) != mask) {
2403           // Check that both of them are object arrays.

2473       __ bind(cont);
2474       __ POP(src, dst);
2475     }
2476   }
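
In the new code the primitive-array compatibility test asserts UseCompressedClassPointers and compares the two narrow class words directly with load_nklass/cmpw, replacing the old branch that loaded full or narrow klass words from a fixed klass offset. A minimal model of why comparing narrow words is enough; the base and shift here are illustrative, not the JVM's actual encoding parameters:

    #include <cassert>
    #include <cstdint>

    // Sketch of compressed class pointers as assumed by the cmpw above: a Klass*
    // is representable as a 32-bit value relative to a base with a shift, so two
    // arrays have the same element klass iff their narrow klass words are equal.
    constexpr uintptr_t klass_base  = 0x80000000ULL;   // illustrative only
    constexpr int       klass_shift = 3;               // illustrative only

    uint32_t  encode_klass(uintptr_t k) { return uint32_t((k - klass_base) >> klass_shift); }
    uintptr_t decode_klass(uint32_t nk) { return klass_base + (uintptr_t(nk) << klass_shift); }

    int main() {
      uintptr_t k = klass_base + 0x12340;            // some 8-byte-aligned "Klass*"
      assert(decode_klass(encode_klass(k)) == k);    // encoding round-trips
      assert(encode_klass(k) == encode_klass(k));    // equal narrow words => same klass
    }
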
2477 
2478 #ifdef ASSERT
2479   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
2480     // Sanity check the known type with the incoming class.  For the
2481     // primitive case the types must match exactly with src.klass and
2482     // dst.klass each exactly matching the default type.  For the
2483     // object array case, if no type check is needed then either the
2484     // dst type is exactly the expected type and the src type is a
2485     // subtype which we can't check or src is the same array as dst
2486     // but not necessarily exactly of type default_type.
2487     Label known_ok, halt;
2488     __ mov_metadata(tmp, default_type->constant_encoding());
2489     if (UseCompressedClassPointers) {
2490       __ encode_klass_not_null(tmp);
2491     }
2492 
2493     assert(UseCompressedClassPointers, "Lilliput");
2494     if (basic_type != T_OBJECT) {
2495       __ load_nklass(rscratch1, dst);
2496       __ cmpw(tmp, rscratch1);






2497       __ br(Assembler::NE, halt);
2498       __ load_nklass(rscratch1, src);
2499       __ cmpw(tmp, rscratch1);





2500       __ br(Assembler::EQ, known_ok);
2501     } else {
2502       __ load_nklass(rscratch1, dst);
2503       __ cmpw(tmp, rscratch1);





2504       __ br(Assembler::EQ, known_ok);
2505       __ cmp(src, dst);
2506       __ br(Assembler::EQ, known_ok);
2507     }
2508     __ bind(halt);
2509     __ stop("incorrect type information in arraycopy");
2510     __ bind(known_ok);
2511   }
2512 #endif
2513 
2514 #ifndef PRODUCT
2515   if (PrintC1Statistics) {
2516     __ incrementw(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)));
2517   }
2518 #endif
2519 
2520   __ lea(c_rarg0, Address(src, src_pos, Address::uxtw(scale)));
2521   __ add(c_rarg0, c_rarg0, arrayOopDesc::base_offset_in_bytes(basic_type));
2522   assert_different_registers(c_rarg0, dst, dst_pos, length);
2523   __ lea(c_rarg1, Address(dst, dst_pos, Address::uxtw(scale)));

2553   } else if (op->code() == lir_lock) {
2554     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2555     // add debug info for NullPointerException only if one is possible
2556     int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
2557     if (op->info() != NULL) {
2558       add_debug_info_for_null_check(null_check_offset, op->info());
2559     }
2560     // done
2561   } else if (op->code() == lir_unlock) {
2562     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
2563     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
2564   } else {
2565     Unimplemented();
2566   }
2567   __ bind(*op->stub()->continuation());
2568 }
2569 
2570 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
2571   Register obj = op->obj()->as_pointer_register();
2572   Register result = op->result_opr()->as_pointer_register();
2573   Register tmp = rscratch1;
2574 
2575   CodeEmitInfo* info = op->info();
2576   if (info != NULL) {
2577     add_debug_info_for_null_check_here(info);
2578   }
2579 
2580   assert(UseCompressedClassPointers, "expects UseCompressedClassPointers");
2581 
2582   // Check if we can take the (common) fast path, if obj is unlocked.
2583   __ ldr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
2584   __ eor(tmp, tmp, markWord::unlocked_value);
2585   __ tst(tmp, markWord::lock_mask_in_place);
2586   __ br(Assembler::NE, *op->stub()->entry());
2587 
2588   // Fast-path: shift and decode Klass*.
2589   __ mov(result, tmp);
2590   __ lsr(result, result, markWord::klass_shift);
2591 
2592   __ bind(*op->stub()->continuation());
2593   __ decode_klass_not_null(result);
2594 }
2595 
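
The rewritten emit_load_klass above is the heart of this change: the compressed class pointer now lives in the upper bits of the object's mark word, so when the mark is in its unlocked state the klass is recovered with a shift plus decode_klass_not_null, and only locked/inflated marks take the stub. A standalone sketch of that fast path; the field layout, shift, and lock-bit values here are assumptions for illustration, not the exact markWord constants:

    #include <cassert>
    #include <cstdint>

    // Assumed mark-word layout for illustration: low 2 bits are the lock bits
    // (binary 01 == unlocked), and the narrow klass sits in the upper 32 bits.
    constexpr uint64_t lock_mask_in_place = 0b11;
    constexpr uint64_t unlocked_value     = 0b01;
    constexpr int      klass_shift        = 32;

    // Fast path of emit_load_klass: if the mark is unlocked, the narrow klass is
    // just mark >> klass_shift; otherwise the caller must fall back to a slow path
    // (the stub) that can look through the displaced/inflated mark.
    bool try_fast_narrow_klass(uint64_t mark, uint32_t& narrow_klass_out) {
      if (((mark ^ unlocked_value) & lock_mask_in_place) != 0) {
        return false;                          // locked or inflated: take the stub
      }
      narrow_klass_out = uint32_t(mark >> klass_shift);
      return true;
    }

    int main() {
      uint32_t nk = 0;
      uint64_t unlocked_mark = (uint64_t(0xCAFE) << klass_shift) | unlocked_value;
      assert(try_fast_narrow_klass(unlocked_mark, nk) && nk == 0xCAFE);

      uint64_t locked_mark = uint64_t(0xCAFE) << klass_shift;   // lock bits 00: stack-locked
      assert(!try_fast_narrow_klass(locked_mark, nk));
    }
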
2596 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
2597   ciMethod* method = op->profiled_method();
2598   int bci          = op->profiled_bci();
2599   ciMethod* callee = op->profiled_callee();
2600 
2601   // Update counter for all call types
2602   ciMethodData* md = method->method_data_or_null();
2603   assert(md != NULL, "Sanity");
2604   ciProfileData* data = md->bci_to_data(bci);
2605   assert(data != NULL && data->is_CounterData(), "need CounterData for calls");
2606   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
2607   Register mdo  = op->mdo()->as_register();
2608   __ mov_metadata(mdo, md->constant_encoding());
2609   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
2610   // Perform additional virtual call profiling for invokevirtual and
2611   // invokeinterface bytecodes
2612   if (op->should_profile_receiver_type()) {
2613     assert(op->recv()->is_single_cpu(), "recv must be allocated");

2634       }
2635 
2636       // Receiver type not found in profile data; select an empty slot
2637 
2638       // Note that this is less efficient than it should be because it
2639       // always does a write to the receiver part of the
2640       // VirtualCallData rather than just the first time
2641       for (i = 0; i < VirtualCallData::row_limit(); i++) {
2642         ciKlass* receiver = vc_data->receiver(i);
2643         if (receiver == NULL) {
2644           Address recv_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_offset(i)));
2645           __ mov_metadata(rscratch1, known_klass->constant_encoding());
2646           __ lea(rscratch2, recv_addr);
2647           __ str(rscratch1, Address(rscratch2));
2648           Address data_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_count_offset(i)));
2649           __ addptr(data_addr, DataLayout::counter_increment);
2650           return;
2651         }
2652       }
2653     } else {
2654       __ load_klass(rscratch1, recv);
2655       __ mov(recv, rscratch1);
2656       Label update_done;
2657       type_profile_helper(mdo, md, data, recv, &update_done);
2658       // Receiver did not match any saved receiver and there is no empty row for it.
2659       // Increment total counter to indicate polymorphic case.
2660       __ addptr(counter_addr, DataLayout::counter_increment);
2661 
2662       __ bind(update_done);
2663     }
2664   } else {
2665     // Static call
2666     __ addptr(counter_addr, DataLayout::counter_increment);
2667   }
2668 }
2669 
2670 
2671 void LIR_Assembler::emit_delay(LIR_OpDelay*) {
2672   Unimplemented();
2673 }
2674 
2675 

2729     }
2730     if (do_update) {
2731 #ifndef ASSERT
2732       __ b(next);
2733     }
2734 #else
2735       __ b(next);
2736     }
2737   } else {
2738     __ cbnz(tmp, update);
2739     __ stop("unexpected null obj");
2740 #endif
2741   }
2742 
2743   __ bind(update);
2744 
2745   if (do_update) {
2746 #ifdef ASSERT
2747     if (exact_klass != NULL) {
2748       Label ok;
2749       __ load_klass(rscratch1, tmp);
2750       __ mov(tmp, rscratch1);
2751       __ mov_metadata(rscratch1, exact_klass->constant_encoding());
2752       __ eor(rscratch1, tmp, rscratch1);
2753       __ cbz(rscratch1, ok);
2754       __ stop("exact klass and actual klass differ");
2755       __ bind(ok);
2756     }
2757 #endif
2758     if (!no_conflict) {
2759       if (exact_klass == NULL || TypeEntries::is_type_none(current_klass)) {
2760         if (exact_klass != NULL) {
2761           __ mov_metadata(tmp, exact_klass->constant_encoding());
2762         } else {
2763           __ load_klass(rscratch1, tmp);
2764           __ mov(tmp, rscratch1);
2765         }
2766 
2767         __ ldr(rscratch2, mdo_addr);
2768         __ eor(tmp, tmp, rscratch2);
2769         __ andr(rscratch1, tmp, TypeEntries::type_klass_mask);
2770         // klass seen before, nothing to do. The unknown bit may have been
2771         // set already but no need to check.
2772         __ cbz(rscratch1, next);
2773 
2774         __ tbnz(tmp, exact_log2(TypeEntries::type_unknown), next); // already unknown. Nothing to do anymore.
2775 
2776         if (TypeEntries::is_type_none(current_klass)) {
2777           __ cbz(rscratch2, none);
2778           __ cmp(rscratch2, (u1)TypeEntries::null_seen);
2779           __ br(Assembler::EQ, none);
2780           // There is a chance that the checks above (re-reading profiling
2781           // data from memory) fail if another thread has just set the
2782           // profiling to this obj's klass
2783           __ dmb(Assembler::ISHLD);
2784           __ ldr(rscratch2, mdo_addr);