
src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp


1618       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1619       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1620     __ jmp(*op->stub()->entry());
1621   } else {
1622     Register tmp1 = op->tmp1()->as_register();
1623     Register tmp2 = op->tmp2()->as_register();
1624     Register tmp3 = op->tmp3()->as_register();
1625     if (len == tmp1) {
1626       tmp1 = tmp3;
1627     } else if (len == tmp2) {
1628       tmp2 = tmp3;
1629     } else if (len == tmp3) {
1630       // len is already in tmp3; nothing to do
1631     } else {
1632       __ mov(tmp3, len);
1633     }
1634     __ allocate_array(op->obj()->as_register(),
1635                       len,
1636                       tmp1,
1637                       tmp2,
1638                       arrayOopDesc::header_size(op->type()),
1639                       array_element_size(op->type()),
1640                       op->klass()->as_register(),
1641                       *op->stub()->entry());
1642   }
1643   __ bind(*op->stub()->continuation());
1644 }
1645 
1646 void LIR_Assembler::type_profile_helper(Register mdo,
1647                                         ciMethodData *md, ciProfileData *data,
1648                                         Register recv, Label* update_done) {
1649   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1650     Label next_test;
1651     // See if the receiver is receiver[n].
1652     __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1653     __ jccb(Assembler::notEqual, next_test);
1654     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1655     __ addptr(data_addr, DataLayout::counter_increment);
1656     __ jmp(*update_done);
1657     __ bind(next_test);
1658   }
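Reviewer note: the loop above walks the fixed receiver rows of a ReceiverTypeData cell, comparing the receiver's klass against each recorded receiver and bumping the matching counter. A minimal, self-contained C++ sketch of that search (the row count and struct layout are assumptions, not HotSpot's ciProfileData layout):

    #include <cstddef>
    #include <cstdint>

    struct ReceiverRow { const void* receiver; uint64_t count; };   // assumed row layout

    constexpr size_t kRowLimit = 2;   // stands in for ReceiverTypeData::row_limit()

    // Returns true if a row matched and its counter was incremented;
    // the caller would otherwise try to claim an empty row or fall back.
    bool profile_receiver(ReceiverRow (&rows)[kRowLimit], const void* recv) {
      for (size_t i = 0; i < kRowLimit; i++) {
        if (rows[i].receiver == recv) {     // "is the receiver receiver[i]?"
          rows[i].count += 1;               // analogue of DataLayout::counter_increment
          return true;                      // analogue of jmp(*update_done)
        }
      }
      return false;
    }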

3053 void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
3054   assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
3055   int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
3056   assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
3057   __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
3058 }
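Reviewer note: store_parameter writes an outgoing argument into the reserved area just above rsp, indexed in words. A freestanding sketch of the same addressing; the word size and area size are assumptions, not values taken from frame_map():

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    constexpr int kBytesPerWord        = 8;                     // assumed LP64 word size
    constexpr int kReservedArgAreaSize = 6 * kBytesPerWord;     // assumed reserved area size

    // Store a 64-bit value into outgoing-argument slot `offset_in_words`.
    void store_parameter_sketch(uint8_t* sp, int offset_in_words, uint64_t value) {
      assert(offset_in_words >= 0 && "invalid offset from rsp");
      int offset_in_bytes = offset_in_words * kBytesPerWord;
      assert(offset_in_bytes < kReservedArgAreaSize && "invalid offset");
      std::memcpy(sp + offset_in_bytes, &value, sizeof value);  // mov [rsp + off], value
    }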
3059 
3060 
3061 // This code replaces a call to arraycopy; no exceptions may be thrown
3062 // in this code: they must be thrown in the System.arraycopy activation
3063 // frame instead. We could save some checks if this were not the case.
3064 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3065   ciArrayKlass* default_type = op->expected_type();
3066   Register src = op->src()->as_register();
3067   Register dst = op->dst()->as_register();
3068   Register src_pos = op->src_pos()->as_register();
3069   Register dst_pos = op->dst_pos()->as_register();
3070   Register length  = op->length()->as_register();
3071   Register tmp = op->tmp()->as_register();
3072   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3073 
3074   CodeStub* stub = op->stub();
3075   int flags = op->flags();
3076   BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3077   if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3078 
3079   // if we don't know anything, just go through the generic arraycopy
3080   if (default_type == nullptr) {
3081     // save outgoing arguments on stack in case call to System.arraycopy is needed
3082     // HACK ALERT. This code used to push the parameters in a hardwired fashion
3083     // for interpreter calling conventions. Now we have to do it in new style conventions.
3084     // For the moment until C1 gets the new register allocator I just force all the
3085     // args to the right place (except the register args) and then on the back side
3086     // reload the register args properly if we go slow path. Yuck
3087 
3088     // These are proper for the calling convention
3089     store_parameter(length, 2);
3090     store_parameter(dst_pos, 1);
3091     store_parameter(dst, 0);
3092 

3177   switch (elem_size) {
3178     case 1 :
3179       scale = Address::times_1;
3180       break;
3181     case 2 :
3182       scale = Address::times_2;
3183       break;
3184     case 4 :
3185       scale = Address::times_4;
3186       break;
3187     case 8 :
3188       scale = Address::times_8;
3189       break;
3190     default:
3191       scale = Address::no_scale;
3192       ShouldNotReachHere();
3193   }
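Reviewer note: the switch above picks the index scale matching the element size so copy addresses can be formed as base + pos * scale + base offset. A small sketch of that address arithmetic (the 16-byte base offset is an assumed placeholder for arrayOopDesc::base_offset_in_bytes):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kArrayBaseOffset = 16;    // assumed array base (header) offset in bytes

    // Byte address of element `pos` in an array whose data starts at
    // array + kArrayBaseOffset and whose elements are elem_size bytes wide.
    uint64_t element_address(uint64_t array, size_t pos, size_t elem_size) {
      return array + kArrayBaseOffset + pos * elem_size;  // lea(tmp, Address(base, pos, scale, disp))
    }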
3194 
3195   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
3196   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
3197   Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
3198   Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
3199 
3200   // length and the positions are all sign-extended at this point on 64-bit
3201 
3202   // test for null
3203   if (flags & LIR_OpArrayCopy::src_null_check) {
3204     __ testptr(src, src);
3205     __ jcc(Assembler::zero, *stub->entry());
3206   }
3207   if (flags & LIR_OpArrayCopy::dst_null_check) {
3208     __ testptr(dst, dst);
3209     __ jcc(Assembler::zero, *stub->entry());
3210   }
3211 
3212   // If the compiler was not able to prove that the exact type of the source or the destination
3213   // of the arraycopy is an array type, check at runtime whether the source or the destination is
3214   // an instance type.
3215   if (flags & LIR_OpArrayCopy::type_check) {
3216     if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3217       __ load_klass(tmp, dst, tmp_load_klass);
3218       __ cmpl(Address(tmp, in_bytes(Klass::layout_helper_offset())), Klass::_lh_neutral_value);

3244   if (flags & LIR_OpArrayCopy::dst_range_check) {
3245     __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3246     __ cmpl(tmp, dst_length_addr);
3247     __ jcc(Assembler::above, *stub->entry());
3248   }
3249 
3250   if (flags & LIR_OpArrayCopy::length_positive_check) {
3251     __ testl(length, length);
3252     __ jcc(Assembler::less, *stub->entry());
3253   }
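Reviewer note: together, the range and positivity checks guard the condition System.arraycopy specifies, namely that a copy of `length` elements starting at `pos` lies inside the array. A sketch of that condition, with the sum widened to 64 bits so it cannot wrap (illustrative only; the emitted code instead relies on the values being sign-extended, non-negative 32-bit ints):

    #include <cstdint>

    // True if [pos, pos + length) fits inside an array of array_length elements.
    bool copy_range_ok(int32_t pos, int32_t length, int32_t array_length) {
      if (length < 0 || pos < 0) return false;                    // length_positive_check and pos checks
      return static_cast<int64_t>(pos) + length <= array_length;  // src/dst_range_check
    }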
3254 
3255 #ifdef _LP64
3256   __ movl2ptr(src_pos, src_pos); // upper 32 bits must be zero
3257   __ movl2ptr(dst_pos, dst_pos); // upper 32 bits must be zero
3258 #endif
3259 
3260   if (flags & LIR_OpArrayCopy::type_check) {
3261     // We don't know the array types are compatible
3262     if (basic_type != T_OBJECT) {
3263       // Simple test for basic type arrays
3264       if (UseCompressedClassPointers) {
3265         __ movl(tmp, src_klass_addr);
3266         __ cmpl(tmp, dst_klass_addr);
3267       } else {
3268         __ movptr(tmp, src_klass_addr);
3269         __ cmpptr(tmp, dst_klass_addr);
3270       }
3271       __ jcc(Assembler::notEqual, *stub->entry());
3272     } else {
3273       // For object arrays, if src is a sub class of dst then we can
3274       // safely do the copy.
3275       Label cont, slow;
3276 
3277       __ push(src);
3278       __ push(dst);
3279 
3280       __ load_klass(src, src, tmp_load_klass);
3281       __ load_klass(dst, dst, tmp_load_klass);
3282 
3283       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3284 
3285       __ push(src);
3286       __ push(dst);
3287       __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3288       __ pop(dst);
3289       __ pop(src);
3290 
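Reviewer note: for object arrays the copy is only unconditionally safe if the source element class is a subtype of the destination element class, which is what the fast path plus the slow_subtype_check_id stub decide above. A toy illustration of the question being asked (a plain superclass walk, not HotSpot's cached secondary-supers scheme):

    // Hypothetical klass with only a superclass link.
    struct KlassLike { const KlassLike* super; };

    // Is `s` the same as or a subclass of `t`?
    bool is_subtype_of(const KlassLike* s, const KlassLike* t) {
      for (const KlassLike* k = s; k != nullptr; k = k->super) {
        if (k == t) return true;
      }
      return false;
    }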

3309             __ load_klass(tmp, src, tmp_load_klass);
3310           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3311             __ load_klass(tmp, dst, tmp_load_klass);
3312           }
3313           int lh_offset = in_bytes(Klass::layout_helper_offset());
3314           Address klass_lh_addr(tmp, lh_offset);
3315           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3316           __ cmpl(klass_lh_addr, objArray_lh);
3317           __ jcc(Assembler::notEqual, *stub->entry());
3318         }
3319 
3320        // Spill because stubs can use any register they like and it's
3321        // easier to restore just those that we care about.
3322        store_parameter(dst, 0);
3323        store_parameter(dst_pos, 1);
3324        store_parameter(length, 2);
3325        store_parameter(src_pos, 3);
3326        store_parameter(src, 4);
3327 
3328 #ifndef _LP64
3329         __ movptr(tmp, dst_klass_addr);
3330         __ movptr(tmp, Address(tmp, ObjArrayKlass::element_klass_offset()));
3331         __ push(tmp);
3332         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3333         __ push(tmp);
3334         __ push(length);
3335         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3336         __ push(tmp);
3337         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3338         __ push(tmp);
3339 
3340         __ call_VM_leaf(copyfunc_addr, 5);
3341 #else
3342         __ movl2ptr(length, length); // upper 32 bits must be zero
3343 
3344         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3345         assert_different_registers(c_rarg0, dst, dst_pos, length);
3346         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3347         assert_different_registers(c_rarg1, dst, length);
3348 

3412   }
3413 
3414 #ifdef ASSERT
3415   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3416     // Sanity check the known type with the incoming class.  For the
3417     // primitive case the types must match exactly, with src.klass and
3418     // dst.klass each exactly matching the default type.  For the
3419     // object array case, if no type check is needed then either the
3420     // dst type is exactly the expected type and the src type is a
3421     // subtype which we can't check, or src is the same array as dst
3422     // but not necessarily exactly of type default_type.
3423     Label known_ok, halt;
3424     __ mov_metadata(tmp, default_type->constant_encoding());
3425 #ifdef _LP64
3426     if (UseCompressedClassPointers) {
3427       __ encode_klass_not_null(tmp, rscratch1);
3428     }
3429 #endif
3430 
3431     if (basic_type != T_OBJECT) {
3432 
3433       if (UseCompressedClassPointers)          __ cmpl(tmp, dst_klass_addr);
3434       else                   __ cmpptr(tmp, dst_klass_addr);
3435       __ jcc(Assembler::notEqual, halt);
3436       if (UseCompressedClassPointers)          __ cmpl(tmp, src_klass_addr);
3437       else                   __ cmpptr(tmp, src_klass_addr);
3438       __ jcc(Assembler::equal, known_ok);
3439     } else {
3440       if (UseCompressedClassPointers)          __ cmpl(tmp, dst_klass_addr);
3441       else                   __ cmpptr(tmp, dst_klass_addr);
3442       __ jcc(Assembler::equal, known_ok);
3443       __ cmpptr(src, dst);
3444       __ jcc(Assembler::equal, known_ok);
3445     }
3446     __ bind(halt);
3447     __ stop("incorrect type information in arraycopy");
3448     __ bind(known_ok);
3449   }
3450 #endif
3451 
3452 #ifndef PRODUCT
3453   if (PrintC1Statistics) {
3454     __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3455   }
3456 #endif
3457 
3458 #ifdef _LP64
3459   assert_different_registers(c_rarg0, dst, dst_pos, length);
3460   __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3461   assert_different_registers(c_rarg1, length);

3517     // done
3518   } else if (op->code() == lir_unlock) {
3519     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3520     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3521   } else {
3522     Unimplemented();
3523   }
3524   __ bind(*op->stub()->continuation());
3525 }
3526 
3527 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3528   Register obj = op->obj()->as_pointer_register();
3529   Register result = op->result_opr()->as_pointer_register();
3530 
3531   CodeEmitInfo* info = op->info();
3532   if (info != nullptr) {
3533     add_debug_info_for_null_check_here(info);
3534   }
3535 
3536 #ifdef _LP64
3537   if (UseCompressedClassPointers) {
3538     __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3539     __ decode_klass_not_null(result, rscratch1);
3540   } else
3541 #endif
3542     __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3543 }
3544 
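Reviewer note: in this pre-patch version the klass is always read from the fixed klass field, as a 32-bit narrow klass when UseCompressedClassPointers is set, otherwise as a full pointer. A standalone sketch of narrow-klass decoding; the base and shift values are assumptions, the real encoding comes from CompressedKlassPointers:

    #include <cstdint>

    constexpr uint64_t kKlassBase  = 0x800000000ULL;  // assumed encoding base
    constexpr unsigned kKlassShift = 3;               // assumed encoding shift

    // decode_klass_not_null analogue: narrow klass id -> Klass* address.
    uint64_t decode_klass(uint32_t narrow_klass) {
      return kKlassBase + (static_cast<uint64_t>(narrow_klass) << kKlassShift);
    }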
3545 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3546   ciMethod* method = op->profiled_method();
3547   int bci          = op->profiled_bci();
3548   ciMethod* callee = op->profiled_callee();
3549   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3550 
3551   // Update counter for all call types
3552   ciMethodData* md = method->method_data_or_null();
3553   assert(md != nullptr, "Sanity");
3554   ciProfileData* data = md->bci_to_data(bci);
3555   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3556   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
3557   Register mdo  = op->mdo()->as_register();
3558   __ mov_metadata(mdo, md->constant_encoding());
3559   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
3560   // Perform additional virtual call profiling for invokevirtual and
3561   // invokeinterface bytecodes
3562   if (op->should_profile_receiver_type()) {

1618       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1619       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1620     __ jmp(*op->stub()->entry());
1621   } else {
1622     Register tmp1 = op->tmp1()->as_register();
1623     Register tmp2 = op->tmp2()->as_register();
1624     Register tmp3 = op->tmp3()->as_register();
1625     if (len == tmp1) {
1626       tmp1 = tmp3;
1627     } else if (len == tmp2) {
1628       tmp2 = tmp3;
1629     } else if (len == tmp3) {
1630       // len is already in tmp3; nothing to do
1631     } else {
1632       __ mov(tmp3, len);
1633     }
1634     __ allocate_array(op->obj()->as_register(),
1635                       len,
1636                       tmp1,
1637                       tmp2,
1638                       arrayOopDesc::base_offset_in_bytes(op->type()),
1639                       array_element_size(op->type()),
1640                       op->klass()->as_register(),
1641                       *op->stub()->entry());
1642   }
1643   __ bind(*op->stub()->continuation());
1644 }
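Reviewer note: the patched call passes arrayOopDesc::base_offset_in_bytes(op->type()) instead of a header size in words, presumably because with compact object headers the array base offset need not be a whole number of words, so allocate_array sizes the object directly in bytes. A sketch of the size computation in those terms; the alignment value is illustrative:

    #include <cstddef>

    constexpr size_t kHeapAlign = 8;   // assumed heap object alignment in bytes

    inline size_t align_up(size_t x, size_t a) { return (x + a - 1) & ~(a - 1); }

    // Bytes to allocate for an array: base offset (header plus length field)
    // plus len elements, rounded up to the heap alignment.
    size_t array_allocation_size(size_t base_offset_in_bytes, size_t elem_size, size_t len) {
      return align_up(base_offset_in_bytes + len * elem_size, kHeapAlign);
    }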
1645 
1646 void LIR_Assembler::type_profile_helper(Register mdo,
1647                                         ciMethodData *md, ciProfileData *data,
1648                                         Register recv, Label* update_done) {
1649   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1650     Label next_test;
1651     // See if the receiver is receiver[n].
1652     __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1653     __ jccb(Assembler::notEqual, next_test);
1654     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1655     __ addptr(data_addr, DataLayout::counter_increment);
1656     __ jmp(*update_done);
1657     __ bind(next_test);
1658   }

3053 void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
3054   assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
3055   int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
3056   assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
3057   __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
3058 }
3059 
3060 
3061 // This code replaces a call to arraycopy; no exceptions may be thrown
3062 // in this code: they must be thrown in the System.arraycopy activation
3063 // frame instead. We could save some checks if this were not the case.
3064 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3065   ciArrayKlass* default_type = op->expected_type();
3066   Register src = op->src()->as_register();
3067   Register dst = op->dst()->as_register();
3068   Register src_pos = op->src_pos()->as_register();
3069   Register dst_pos = op->dst_pos()->as_register();
3070   Register length  = op->length()->as_register();
3071   Register tmp = op->tmp()->as_register();
3072   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3073   Register tmp2 = UseCompactObjectHeaders ? rscratch2 : noreg;
3074 
3075   CodeStub* stub = op->stub();
3076   int flags = op->flags();
3077   BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3078   if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3079 
3080   // if we don't know anything, just go through the generic arraycopy
3081   if (default_type == nullptr) {
3082     // save outgoing arguments on stack in case call to System.arraycopy is needed
3083     // HACK ALERT. This code used to push the parameters in a hardwired fashion
3084     // for interpreter calling conventions. Now we have to do it in new style conventions.
3085     // For the moment until C1 gets the new register allocator I just force all the
3086     // args to the right place (except the register args) and then on the back side
3087     // reload the register args properly if we go slow path. Yuck
3088 
3089     // These are proper for the calling convention
3090     store_parameter(length, 2);
3091     store_parameter(dst_pos, 1);
3092     store_parameter(dst, 0);
3093 

3178   switch (elem_size) {
3179     case 1 :
3180       scale = Address::times_1;
3181       break;
3182     case 2 :
3183       scale = Address::times_2;
3184       break;
3185     case 4 :
3186       scale = Address::times_4;
3187       break;
3188     case 8 :
3189       scale = Address::times_8;
3190       break;
3191     default:
3192       scale = Address::no_scale;
3193       ShouldNotReachHere();
3194   }
3195 
3196   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
3197   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
3198 
3199   // length and the positions are all sign-extended at this point on 64-bit
3200 
3201   // test for null
3202   if (flags & LIR_OpArrayCopy::src_null_check) {
3203     __ testptr(src, src);
3204     __ jcc(Assembler::zero, *stub->entry());
3205   }
3206   if (flags & LIR_OpArrayCopy::dst_null_check) {
3207     __ testptr(dst, dst);
3208     __ jcc(Assembler::zero, *stub->entry());
3209   }
3210 
3211   // If the compiler was not able to prove that the exact type of the source or the
3212   // destination of the arraycopy is an array type, check at runtime whether the source
3213   // or the destination is an instance type.
3214   if (flags & LIR_OpArrayCopy::type_check) {
3215     if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3216       __ load_klass(tmp, dst, tmp_load_klass);
3217       __ cmpl(Address(tmp, in_bytes(Klass::layout_helper_offset())), Klass::_lh_neutral_value);
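Reviewer note: this guard relies on the layout-helper encoding: instance klasses store a non-negative value there, while array klasses store a negative, tagged value, so comparing against Klass::_lh_neutral_value answers "is this an array klass?" with one signed compare. A tiny sketch of that test (the encoding details beyond the sign are not modeled):

    #include <cstdint>

    constexpr int32_t kLayoutHelperNeutralValue = 0;   // analogue of Klass::_lh_neutral_value

    // Array klasses encode a negative layout helper; instance klasses do not.
    bool is_array_klass(int32_t layout_helper) {
      return layout_helper < kLayoutHelperNeutralValue;
    }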

3243   if (flags & LIR_OpArrayCopy::dst_range_check) {
3244     __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3245     __ cmpl(tmp, dst_length_addr);
3246     __ jcc(Assembler::above, *stub->entry());
3247   }
3248 
3249   if (flags & LIR_OpArrayCopy::length_positive_check) {
3250     __ testl(length, length);
3251     __ jcc(Assembler::less, *stub->entry());
3252   }
3253 
3254 #ifdef _LP64
3255   __ movl2ptr(src_pos, src_pos); // upper 32 bits must be zero
3256   __ movl2ptr(dst_pos, dst_pos); // upper 32 bits must be zero
3257 #endif
3258 
3259   if (flags & LIR_OpArrayCopy::type_check) {
3260     // We don't know the array types are compatible
3261     if (basic_type != T_OBJECT) {
3262       // Simple test for basic type arrays
3263       __ cmp_klass(src, dst, tmp, tmp2);
3264       __ jcc(Assembler::notEqual, *stub->entry());
3265     } else {
3266       // For object arrays, if src is a sub class of dst then we can
3267       // safely do the copy.
3268       Label cont, slow;
3269 
3270       __ push(src);
3271       __ push(dst);
3272 
3273       __ load_klass(src, src, tmp_load_klass);
3274       __ load_klass(dst, dst, tmp_load_klass);
3275 
3276       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3277 
3278       __ push(src);
3279       __ push(dst);
3280       __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3281       __ pop(dst);
3282       __ pop(src);
3283 

3302             __ load_klass(tmp, src, tmp_load_klass);
3303           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3304             __ load_klass(tmp, dst, tmp_load_klass);
3305           }
3306           int lh_offset = in_bytes(Klass::layout_helper_offset());
3307           Address klass_lh_addr(tmp, lh_offset);
3308           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3309           __ cmpl(klass_lh_addr, objArray_lh);
3310           __ jcc(Assembler::notEqual, *stub->entry());
3311         }
3312 
3313        // Spill because stubs can use any register they like and it's
3314        // easier to restore just those that we care about.
3315        store_parameter(dst, 0);
3316        store_parameter(dst_pos, 1);
3317        store_parameter(length, 2);
3318        store_parameter(src_pos, 3);
3319        store_parameter(src, 4);
3320 
3321 #ifndef _LP64
3322        Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
3323         __ movptr(tmp, dst_klass_addr);
3324         __ movptr(tmp, Address(tmp, ObjArrayKlass::element_klass_offset()));
3325         __ push(tmp);
3326         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3327         __ push(tmp);
3328         __ push(length);
3329         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3330         __ push(tmp);
3331         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3332         __ push(tmp);
3333 
3334         __ call_VM_leaf(copyfunc_addr, 5);
3335 #else
3336         __ movl2ptr(length, length); // upper 32 bits must be zero
3337 
3338         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3339         assert_different_registers(c_rarg0, dst, dst_pos, length);
3340         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3341         assert_different_registers(c_rarg1, dst, length);
3342 

3406   }
3407 
3408 #ifdef ASSERT
3409   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3410     // Sanity check the known type with the incoming class.  For the
3411     // primitive case the types must match exactly, with src.klass and
3412     // dst.klass each exactly matching the default type.  For the
3413     // object array case, if no type check is needed then either the
3414     // dst type is exactly the expected type and the src type is a
3415     // subtype which we can't check, or src is the same array as dst
3416     // but not necessarily exactly of type default_type.
3417     Label known_ok, halt;
3418     __ mov_metadata(tmp, default_type->constant_encoding());
3419 #ifdef _LP64
3420     if (UseCompressedClassPointers) {
3421       __ encode_klass_not_null(tmp, rscratch1);
3422     }
3423 #endif
3424 
3425     if (basic_type != T_OBJECT) {
3426       __ cmp_klass(tmp, dst, tmp2);
3427       __ jcc(Assembler::notEqual, halt);
3428       __ cmp_klass(tmp, src, tmp2);
3429       __ jcc(Assembler::equal, known_ok);
3430     } else {
3431       __ cmp_klass(tmp, dst, tmp2);
3432       __ jcc(Assembler::equal, known_ok);
3433       __ cmpptr(src, dst);
3434       __ jcc(Assembler::equal, known_ok);
3435     }
3436     __ bind(halt);
3437     __ stop("incorrect type information in arraycopy");
3438     __ bind(known_ok);
3439   }
3440 #endif
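Reviewer note: the debug-only block above boils down to a simple predicate over the klasses involved; a sketch of that predicate in plain C++ terms (raw pointers stand in for compressed or mark-word-encoded klasses):

    // Expected-type sanity condition checked under ASSERT.
    struct ArrayRef { const void* klass; };

    bool arraycopy_types_look_sane(bool object_copy, const ArrayRef* src,
                                   const ArrayRef* dst, const void* expected_klass) {
      if (!object_copy) {
        // Primitive copy: both arrays must have exactly the expected klass.
        return src->klass == expected_klass && dst->klass == expected_klass;
      }
      // Object copy with no type check: dst must be exactly the expected type,
      // or src and dst are literally the same array.
      return dst->klass == expected_klass || src == dst;
    }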
3441 
3442 #ifndef PRODUCT
3443   if (PrintC1Statistics) {
3444     __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3445   }
3446 #endif
3447 
3448 #ifdef _LP64
3449   assert_different_registers(c_rarg0, dst, dst_pos, length);
3450   __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3451   assert_different_registers(c_rarg1, length);

3507     // done
3508   } else if (op->code() == lir_unlock) {
3509     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3510     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3511   } else {
3512     Unimplemented();
3513   }
3514   __ bind(*op->stub()->continuation());
3515 }
3516 
3517 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3518   Register obj = op->obj()->as_pointer_register();
3519   Register result = op->result_opr()->as_pointer_register();
3520 
3521   CodeEmitInfo* info = op->info();
3522   if (info != nullptr) {
3523     add_debug_info_for_null_check_here(info);
3524   }
3525 
3526 #ifdef _LP64
3527   if (UseCompactObjectHeaders) {
3528     Register tmp = rscratch1;
3529     assert_different_registers(tmp, obj);
3530     assert_different_registers(tmp, result);
3531 
3532     // Check if we can take the (common) fast path, if obj is unlocked.
3533     __ movq(result, Address(obj, oopDesc::mark_offset_in_bytes()));
3534     __ testb(result, markWord::monitor_value);
3535     __ jcc(Assembler::notZero, *op->stub()->entry());
3536     __ bind(*op->stub()->continuation());
3537     // Fast-path: shift and decode Klass*.
3538     __ shrq(result, markWord::klass_shift);
3539     __ decode_klass_not_null(result, tmp);
3540   } else if (UseCompressedClassPointers) {
3541     __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3542     __ decode_klass_not_null(result, rscratch1);
3543   } else
3544 #endif
3545   {
3546     __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3547   }
3548 }
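Reviewer note: with UseCompactObjectHeaders the narrow klass lives in the upper bits of the mark word, which is why the new path first checks for an inflated monitor (where the header is displaced) and only then shifts and decodes; the cmp_klass calls elsewhere in this change rely on the same extraction. A self-contained sketch of that extraction; the bit position and monitor pattern below are assumptions for illustration, not the actual markWord constants:

    #include <cstdint>
    #include <cstdio>

    constexpr uint64_t kMonitorValue = 0x2;   // assumed "inflated monitor" bit in the mark word
    constexpr unsigned kKlassShift   = 42;    // assumed position of the narrow klass field

    // Fast path: if the header is not displaced by a monitor, the narrow
    // klass id can be read straight out of the mark word.
    bool narrow_klass_from_mark(uint64_t mark, uint32_t* narrow_klass) {
      if (mark & kMonitorValue) {
        return false;                               // mark points to an ObjectMonitor: slow path
      }
      *narrow_klass = static_cast<uint32_t>(mark >> kKlassShift);
      return true;
    }

    int main() {
      uint64_t mark = (uint64_t{0x1234} << kKlassShift) | 0x1;  // unlocked header with a klass id
      uint32_t nk = 0;
      if (narrow_klass_from_mark(mark, &nk)) {
        std::printf("narrow klass id = 0x%x\n", static_cast<unsigned>(nk));  // prints 0x1234
      }
      return 0;
    }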
3549 
3550 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3551   ciMethod* method = op->profiled_method();
3552   int bci          = op->profiled_bci();
3553   ciMethod* callee = op->profiled_callee();
3554   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3555 
3556   // Update counter for all call types
3557   ciMethodData* md = method->method_data_or_null();
3558   assert(md != nullptr, "Sanity");
3559   ciProfileData* data = md->bci_to_data(bci);
3560   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3561   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
3562   Register mdo  = op->mdo()->as_register();
3563   __ mov_metadata(mdo, md->constant_encoding());
3564   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
3565   // Perform additional virtual call profiling for invokevirtual and
3566   // invokeinterface bytecodes
3567   if (op->should_profile_receiver_type()) {