
src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp


1618       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1619       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1620     __ jmp(*op->stub()->entry());
1621   } else {
1622     Register tmp1 = op->tmp1()->as_register();
1623     Register tmp2 = op->tmp2()->as_register();
1624     Register tmp3 = op->tmp3()->as_register();
1625     if (len == tmp1) {
1626       tmp1 = tmp3;
1627     } else if (len == tmp2) {
1628       tmp2 = tmp3;
1629     } else if (len == tmp3) {
1630       // everything is ok
1631     } else {
1632       __ mov(tmp3, len);
1633     }
1634     __ allocate_array(op->obj()->as_register(),
1635                       len,
1636                       tmp1,
1637                       tmp2,
1638                       arrayOopDesc::header_size(op->type()),
1639                       array_element_size(op->type()),
1640                       op->klass()->as_register(),
1641                       *op->stub()->entry());
1642   }
1643   __ bind(*op->stub()->continuation());
1644 }
1645 
1646 void LIR_Assembler::type_profile_helper(Register mdo,
1647                                         ciMethodData *md, ciProfileData *data,
1648                                         Register recv, Label* update_done) {
1649   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1650     Label next_test;
1651     // See if the receiver is receiver[n].
1652     __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1653     __ jccb(Assembler::notEqual, next_test);
1654     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1655     __ addptr(data_addr, DataLayout::counter_increment);
1656     __ jmp(*update_done);
1657     __ bind(next_test);
1658   }
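The loop above emits one compare-and-branch per profile row; at runtime the generated code performs a linear scan over the ReceiverTypeData rows, bumps the counter of the matching row, and jumps to *update_done on a hit. A standalone restatement of that runtime effect (a sketch, not HotSpot code; the row layout here is illustrative):

    #include <cstddef>
    #include <cstdint>
    struct ProfileRow { const void* receiver; uint64_t count; };
    // Mirrors the emitted scan: compare recv against each recorded receiver,
    // increment the matching counter, and stop at the first hit.
    static bool record_receiver(ProfileRow* rows, size_t row_limit,
                                const void* recv, uint64_t counter_increment) {
      for (size_t i = 0; i < row_limit; i++) {
        if (rows[i].receiver == recv) {
          rows[i].count += counter_increment;
          return true;   // corresponds to the jmp to *update_done
        }
      }
      return false;      // the miss case is handled by code after this hunk (elided)
    }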

3058 void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
3059   assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
3060   int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
3061   assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
3062   __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
3063 }
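store_parameter spills an outgoing argument into the reserved argument area at a word-granular offset from rsp, so with 8-byte words store_parameter(length, 2) writes to [rsp + 16]. A minimal restatement of the offset computation (a sketch, assuming BytesPerWord == 8 on x86_64):

    // Words -> bytes, matching the assert that the result stays inside
    // frame_map()->reserved_argument_area_size().
    constexpr int offset_from_rsp_in_bytes(int offset_from_rsp_in_words,
                                           int bytes_per_word = 8) {
      return offset_from_rsp_in_words * bytes_per_word;   // e.g. slot 2 -> rsp + 16
    }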
3064 
3065 
3066 // This code replaces a call to arraycopy; no exception may
3067 // be thrown in this code, they must be thrown in the System.arraycopy
3068 // activation frame; we could save some checks if this would not be the case
3069 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3070   ciArrayKlass* default_type = op->expected_type();
3071   Register src = op->src()->as_register();
3072   Register dst = op->dst()->as_register();
3073   Register src_pos = op->src_pos()->as_register();
3074   Register dst_pos = op->dst_pos()->as_register();
3075   Register length  = op->length()->as_register();
3076   Register tmp = op->tmp()->as_register();
3077   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);

3078 
3079   CodeStub* stub = op->stub();
3080   int flags = op->flags();
3081   BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3082   if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3083 
3084   // if we don't know anything, just go through the generic arraycopy
3085   if (default_type == nullptr) {
3086     // save outgoing arguments on stack in case call to System.arraycopy is needed
3087     // HACK ALERT. This code used to push the parameters in a hardwired fashion
3088     // for interpreter calling conventions. Now we have to do it in new style conventions.
3089     // For the moment until C1 gets the new register allocator I just force all the
3090     // args to the right place (except the register args) and then on the back side
3091     // reload the register args properly if we go slow path. Yuck
3092 
3093     // These are proper for the calling convention
3094     store_parameter(length, 2);
3095     store_parameter(dst_pos, 1);
3096     store_parameter(dst, 0);
3097 

3182   switch (elem_size) {
3183     case 1 :
3184       scale = Address::times_1;
3185       break;
3186     case 2 :
3187       scale = Address::times_2;
3188       break;
3189     case 4 :
3190       scale = Address::times_4;
3191       break;
3192     case 8 :
3193       scale = Address::times_8;
3194       break;
3195     default:
3196       scale = Address::no_scale;
3197       ShouldNotReachHere();
3198   }
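The scale selected here is simply log2 of the element size, so the later lea instructions form base + base_offset_in_bytes(basic_type) + pos * elem_size. A standalone sketch of that address computation (illustrative, not the MacroAssembler API):

    #include <cstddef>
    // element_address(a, 16, i, 2) == a + 16 + i*4, i.e. the i-th 4-byte element of an
    // array whose elements start 16 bytes past the object start (offsets illustrative).
    static char* element_address(char* array_base, size_t base_offset_in_bytes,
                                 size_t pos, unsigned log2_elem_size) {
      return array_base + base_offset_in_bytes + (pos << log2_elem_size);
    }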
3199 
3200   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
3201   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
3202   Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
3203   Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
3204 
3205   // length and pos's are all sign extended at this point on 64bit
3206 
3207   // test for null
3208   if (flags & LIR_OpArrayCopy::src_null_check) {
3209     __ testptr(src, src);
3210     __ jcc(Assembler::zero, *stub->entry());
3211   }
3212   if (flags & LIR_OpArrayCopy::dst_null_check) {
3213     __ testptr(dst, dst);
3214     __ jcc(Assembler::zero, *stub->entry());
3215   }
3216 
3217   // If the compiler was not able to prove that exact type of the source or the destination
3218   // of the arraycopy is an array type, check at runtime if the source or the destination is
3219   // an instance type.
3220   if (flags & LIR_OpArrayCopy::type_check) {
3221     if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3222       __ load_klass(tmp, dst, tmp_load_klass);
3223       __ cmpl(Address(tmp, in_bytes(Klass::layout_helper_offset())), Klass::_lh_neutral_value);

3249   if (flags & LIR_OpArrayCopy::dst_range_check) {
3250     __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3251     __ cmpl(tmp, dst_length_addr);
3252     __ jcc(Assembler::above, *stub->entry());
3253   }
3254 
3255   if (flags & LIR_OpArrayCopy::length_positive_check) {
3256     __ testl(length, length);
3257     __ jcc(Assembler::less, *stub->entry());
3258   }
3259 
3260 #ifdef _LP64
3261   __ movl2ptr(src_pos, src_pos); //higher 32bits must be null
3262   __ movl2ptr(dst_pos, dst_pos); //higher 32bits must be null
3263 #endif
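movl2ptr widens the 32-bit positions to 64 bits; for the non-negative positions this code expects, sign extension leaves the upper 32 bits zero, which is what the "higher 32bits must be null" comments require and what the scaled address arithmetic below relies on. Equivalent effect in plain C++ (sketch):

    #include <cstdint>
    // For pos >= 0, sign extension and zero extension coincide,
    // so the upper 32 bits of the widened value are zero.
    static int64_t widen_pos(int32_t pos) { return (int64_t)pos; }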
3264 
3265   if (flags & LIR_OpArrayCopy::type_check) {
3266     // We don't know the array types are compatible
3267     if (basic_type != T_OBJECT) {
3268       // Simple test for basic type arrays
3269       if (UseCompressedClassPointers) {
3270         __ movl(tmp, src_klass_addr);
3271         __ cmpl(tmp, dst_klass_addr);
3272       } else {
3273         __ movptr(tmp, src_klass_addr);
3274         __ cmpptr(tmp, dst_klass_addr);
3275       }
3276       __ jcc(Assembler::notEqual, *stub->entry());
3277     } else {
3278       // For object arrays, if src is a sub class of dst then we can
3279       // safely do the copy.
3280       Label cont, slow;
3281 
3282       __ push(src);
3283       __ push(dst);
3284 
3285       __ load_klass(src, src, tmp_load_klass);
3286       __ load_klass(dst, dst, tmp_load_klass);
3287 
3288       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3289 
3290       __ push(src);
3291       __ push(dst);
3292       __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3293       __ pop(dst);
3294       __ pop(src);
3295 
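The check_klass_subtype_fast_path sequence plus the Runtime1 slow_subtype_check_id call answer one question: is the source array klass a subtype of the destination array klass, in which case the copy needs no per-element store checks. A deliberately simplified standalone model of that question (the real check uses super_check_offset and a secondary-supers table, not a plain super-chain walk):

    // Standalone model, not HotSpot's Klass: walk the super chain.
    struct K { const K* super; };
    static bool is_subtype_of(const K* sub, const K* sup) {
      for (const K* k = sub; k != nullptr; k = k->super) {
        if (k == sup) return true;   // copy is safe without element checks
      }
      return false;                  // fall back to the element-checking copy path
    }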

3314             __ load_klass(tmp, src, tmp_load_klass);
3315           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3316             __ load_klass(tmp, dst, tmp_load_klass);
3317           }
3318           int lh_offset = in_bytes(Klass::layout_helper_offset());
3319           Address klass_lh_addr(tmp, lh_offset);
3320           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3321           __ cmpl(klass_lh_addr, objArray_lh);
3322           __ jcc(Assembler::notEqual, *stub->entry());
3323         }
3324 
3325        // Spill because stubs can use any register they like and it's
3326        // easier to restore just those that we care about.
3327        store_parameter(dst, 0);
3328        store_parameter(dst_pos, 1);
3329        store_parameter(length, 2);
3330        store_parameter(src_pos, 3);
3331        store_parameter(src, 4);
3332 
3333 #ifndef _LP64
3334         __ movptr(tmp, dst_klass_addr);
3335         __ movptr(tmp, Address(tmp, ObjArrayKlass::element_klass_offset()));
3336         __ push(tmp);
3337         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3338         __ push(tmp);
3339         __ push(length);
3340         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3341         __ push(tmp);
3342         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3343         __ push(tmp);
3344 
3345         __ call_VM_leaf(copyfunc_addr, 5);
3346 #else
3347         __ movl2ptr(length, length); //higher 32bits must be null
3348 
3349         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3350         assert_different_registers(c_rarg0, dst, dst_pos, length);
3351         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3352         assert_different_registers(c_rarg1, dst, length);
3353 

3417   }
3418 
3419 #ifdef ASSERT
3420   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3421     // Sanity check the known type with the incoming class.  For the
3422     // primitive case the types must match exactly with src.klass and
3423     // dst.klass each exactly matching the default type.  For the
3424     // object array case, if no type check is needed then either the
3425     // dst type is exactly the expected type and the src type is a
3426     // subtype which we can't check or src is the same array as dst
3427     // but not necessarily exactly of type default_type.
3428     Label known_ok, halt;
3429     __ mov_metadata(tmp, default_type->constant_encoding());
3430 #ifdef _LP64
3431     if (UseCompressedClassPointers) {
3432       __ encode_klass_not_null(tmp, rscratch1);
3433     }
3434 #endif
3435 
3436     if (basic_type != T_OBJECT) {
3437 
3438       if (UseCompressedClassPointers)          __ cmpl(tmp, dst_klass_addr);
3439       else                   __ cmpptr(tmp, dst_klass_addr);
3440       __ jcc(Assembler::notEqual, halt);
3441       if (UseCompressedClassPointers)          __ cmpl(tmp, src_klass_addr);
3442       else                   __ cmpptr(tmp, src_klass_addr);
3443       __ jcc(Assembler::equal, known_ok);
3444     } else {
3445       if (UseCompressedClassPointers)          __ cmpl(tmp, dst_klass_addr);
3446       else                   __ cmpptr(tmp, dst_klass_addr);
3447       __ jcc(Assembler::equal, known_ok);
3448       __ cmpptr(src, dst);
3449       __ jcc(Assembler::equal, known_ok);
3450     }
3451     __ bind(halt);
3452     __ stop("incorrect type information in arraycopy");
3453     __ bind(known_ok);
3454   }
3455 #endif
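The debug-only block boils down to the two shapes of check the comment describes; restated as a standalone predicate (a sketch, with pointer identity standing in for the klass compares):

    // Primitive arrays: both src and dst klasses must equal the expected array klass.
    // Object arrays: dst must match exactly, or src and dst must be the same array.
    static bool arraycopy_types_look_sane(bool is_object_array,
                                          const void* src_klass, const void* dst_klass,
                                          const void* expected_klass,
                                          const void* src, const void* dst) {
      if (!is_object_array) {
        return src_klass == expected_klass && dst_klass == expected_klass;
      }
      return dst_klass == expected_klass || src == dst;
    }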
3456 
3457 #ifndef PRODUCT
3458   if (PrintC1Statistics) {
3459     __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3460   }
3461 #endif
3462 
3463 #ifdef _LP64
3464   assert_different_registers(c_rarg0, dst, dst_pos, length);
3465   __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3466   assert_different_registers(c_rarg1, length);

3522     // done
3523   } else if (op->code() == lir_unlock) {
3524     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3525     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3526   } else {
3527     Unimplemented();
3528   }
3529   __ bind(*op->stub()->continuation());
3530 }
3531 
3532 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3533   Register obj = op->obj()->as_pointer_register();
3534   Register result = op->result_opr()->as_pointer_register();
3535 
3536   CodeEmitInfo* info = op->info();
3537   if (info != nullptr) {
3538     add_debug_info_for_null_check_here(info);
3539   }
3540 
3541 #ifdef _LP64
3542   if (UseCompressedClassPointers) {
3543     __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3544     __ decode_klass_not_null(result, rscratch1);
3545   } else
3546 #endif
3547     __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3548 }
3549 
3550 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3551   ciMethod* method = op->profiled_method();
3552   int bci          = op->profiled_bci();
3553   ciMethod* callee = op->profiled_callee();
3554   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3555 
3556   // Update counter for all call types
3557   ciMethodData* md = method->method_data_or_null();
3558   assert(md != nullptr, "Sanity");
3559   ciProfileData* data = md->bci_to_data(bci);
3560   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3561   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
3562   Register mdo  = op->mdo()->as_register();
3563   __ mov_metadata(mdo, md->constant_encoding());
3564   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
3565   // Perform additional virtual call profiling for invokevirtual and
3566   // invokeinterface bytecodes
3567   if (op->should_profile_receiver_type()) {

1618       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1619       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1620     __ jmp(*op->stub()->entry());
1621   } else {
1622     Register tmp1 = op->tmp1()->as_register();
1623     Register tmp2 = op->tmp2()->as_register();
1624     Register tmp3 = op->tmp3()->as_register();
1625     if (len == tmp1) {
1626       tmp1 = tmp3;
1627     } else if (len == tmp2) {
1628       tmp2 = tmp3;
1629     } else if (len == tmp3) {
1630       // everything is ok
1631     } else {
1632       __ mov(tmp3, len);
1633     }
1634     __ allocate_array(op->obj()->as_register(),
1635                       len,
1636                       tmp1,
1637                       tmp2,
1638                       arrayOopDesc::base_offset_in_bytes(op->type()),
1639                       array_element_size(op->type()),
1640                       op->klass()->as_register(),
1641                       *op->stub()->entry());
1642   }
1643   __ bind(*op->stub()->continuation());
1644 }
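In this hunk the allocation helper's array-header argument changes from arrayOopDesc::header_size(op->type()), a word count, to arrayOopDesc::base_offset_in_bytes(op->type()), a byte offset to the first element; the byte form is the more general quantity if the element base is no longer guaranteed to be a whole number of words past the object start, as with compact object headers. When the base offset is word-aligned the two are related by BytesPerWord (a sketch, assuming 8-byte words):

    // Illustrative relation only: header size in words -> element base offset in bytes.
    constexpr int base_offset_in_bytes_from_header_size(int header_size_in_words,
                                                        int bytes_per_word = 8) {
      return header_size_in_words * bytes_per_word;
    }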
1645 
1646 void LIR_Assembler::type_profile_helper(Register mdo,
1647                                         ciMethodData *md, ciProfileData *data,
1648                                         Register recv, Label* update_done) {
1649   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1650     Label next_test;
1651     // See if the receiver is receiver[n].
1652     __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1653     __ jccb(Assembler::notEqual, next_test);
1654     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1655     __ addptr(data_addr, DataLayout::counter_increment);
1656     __ jmp(*update_done);
1657     __ bind(next_test);
1658   }

3058 void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
3059   assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
3060   int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
3061   assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
3062   __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
3063 }
3064 
3065 
3066 // This code replaces a call to arraycopy; no exception may
3067 // be thrown in this code, they must be thrown in the System.arraycopy
3068 // activation frame; we could save some checks if this would not be the case
3069 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3070   ciArrayKlass* default_type = op->expected_type();
3071   Register src = op->src()->as_register();
3072   Register dst = op->dst()->as_register();
3073   Register src_pos = op->src_pos()->as_register();
3074   Register dst_pos = op->dst_pos()->as_register();
3075   Register length  = op->length()->as_register();
3076   Register tmp = op->tmp()->as_register();
3077   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3078   Register tmp2 = UseCompactObjectHeaders ? rscratch2 : noreg;
3079 
3080   CodeStub* stub = op->stub();
3081   int flags = op->flags();
3082   BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3083   if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3084 
3085   // if we don't know anything, just go through the generic arraycopy
3086   if (default_type == nullptr) {
3087     // save outgoing arguments on stack in case call to System.arraycopy is needed
3088     // HACK ALERT. This code used to push the parameters in a hardwired fashion
3089     // for interpreter calling conventions. Now we have to do it in new style conventions.
3090     // For the moment until C1 gets the new register allocator I just force all the
3091     // args to the right place (except the register args) and then on the back side
3092     // reload the register args properly if we go slow path. Yuck
3093 
3094     // These are proper for the calling convention
3095     store_parameter(length, 2);
3096     store_parameter(dst_pos, 1);
3097     store_parameter(dst, 0);
3098 

3183   switch (elem_size) {
3184     case 1 :
3185       scale = Address::times_1;
3186       break;
3187     case 2 :
3188       scale = Address::times_2;
3189       break;
3190     case 4 :
3191       scale = Address::times_4;
3192       break;
3193     case 8 :
3194       scale = Address::times_8;
3195       break;
3196     default:
3197       scale = Address::no_scale;
3198       ShouldNotReachHere();
3199   }
3200 
3201   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
3202   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
3203 
3204   // length and pos's are all sign extended at this point on 64bit
3205 
3206   // test for null
3207   if (flags & LIR_OpArrayCopy::src_null_check) {
3208     __ testptr(src, src);
3209     __ jcc(Assembler::zero, *stub->entry());
3210   }
3211   if (flags & LIR_OpArrayCopy::dst_null_check) {
3212     __ testptr(dst, dst);
3213     __ jcc(Assembler::zero, *stub->entry());
3214   }
3215 
3216   // If the compiler was not able to prove that exact type of the source or the destination
3217   // of the arraycopy is an array type, check at runtime if the source or the destination is
3218   // an instance type.
3219   if (flags & LIR_OpArrayCopy::type_check) {
3220     if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3221       __ load_klass(tmp, dst, tmp_load_klass);
3222       __ cmpl(Address(tmp, in_bytes(Klass::layout_helper_offset())), Klass::_lh_neutral_value);

3248   if (flags & LIR_OpArrayCopy::dst_range_check) {
3249     __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3250     __ cmpl(tmp, dst_length_addr);
3251     __ jcc(Assembler::above, *stub->entry());
3252   }
3253 
3254   if (flags & LIR_OpArrayCopy::length_positive_check) {
3255     __ testl(length, length);
3256     __ jcc(Assembler::less, *stub->entry());
3257   }
3258 
3259 #ifdef _LP64
3260   __ movl2ptr(src_pos, src_pos); //higher 32bits must be null
3261   __ movl2ptr(dst_pos, dst_pos); //higher 32bits must be null
3262 #endif
3263 
3264   if (flags & LIR_OpArrayCopy::type_check) {
3265     // We don't know the array types are compatible
3266     if (basic_type != T_OBJECT) {
3267       // Simple test for basic type arrays
3268       __ cmp_klass(src, dst, tmp, tmp2);
3269       __ jcc(Assembler::notEqual, *stub->entry());
3270     } else {
3271       // For object arrays, if src is a sub class of dst then we can
3272       // safely do the copy.
3273       Label cont, slow;
3274 
3275       __ push(src);
3276       __ push(dst);
3277 
3278       __ load_klass(src, src, tmp_load_klass);
3279       __ load_klass(dst, dst, tmp_load_klass);
3280 
3281       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3282 
3283       __ push(src);
3284       __ push(dst);
3285       __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3286       __ pop(dst);
3287       __ pop(src);
3288 
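The cmp_klass(src, dst, tmp, tmp2) call at the top of this type-check block folds the header-format dispatch that the old code spelled out inline (cmpl of the narrow klass fields under compressed class pointers, cmpptr of full Klass* pointers otherwise); with UseCompactObjectHeaders it presumably compares the narrow klass bits held in the two mark words, which is why the extra tmp2 scratch register is threaded through above. A standalone model of the three modes (a sketch; the field layout and klass shift are illustrative, not the real markWord constants):

    #include <cstdint>
    struct ObjModel { uint64_t mark; uint32_t narrow_klass; const void* klass; };
    enum class HeaderMode { Compact, CompressedClassPointers, FullPointers };
    // Compare the klass identity of two objects under each header mode.
    static bool same_klass(const ObjModel& a, const ObjModel& b,
                           HeaderMode mode, unsigned klass_shift) {
      switch (mode) {
        case HeaderMode::Compact:                  // narrow klass lives in the mark word
          return (a.mark >> klass_shift) == (b.mark >> klass_shift);
        case HeaderMode::CompressedClassPointers:  // 32-bit narrow klass field
          return a.narrow_klass == b.narrow_klass;
        default:                                   // full-width Klass* field
          return a.klass == b.klass;
      }
    }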

3307             __ load_klass(tmp, src, tmp_load_klass);
3308           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3309             __ load_klass(tmp, dst, tmp_load_klass);
3310           }
3311           int lh_offset = in_bytes(Klass::layout_helper_offset());
3312           Address klass_lh_addr(tmp, lh_offset);
3313           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3314           __ cmpl(klass_lh_addr, objArray_lh);
3315           __ jcc(Assembler::notEqual, *stub->entry());
3316         }
3317 
3318        // Spill because stubs can use any register they like and it's
3319        // easier to restore just those that we care about.
3320        store_parameter(dst, 0);
3321        store_parameter(dst_pos, 1);
3322        store_parameter(length, 2);
3323        store_parameter(src_pos, 3);
3324        store_parameter(src, 4);
3325 
3326 #ifndef _LP64
3327        Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
3328         __ movptr(tmp, dst_klass_addr);
3329         __ movptr(tmp, Address(tmp, ObjArrayKlass::element_klass_offset()));
3330         __ push(tmp);
3331         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3332         __ push(tmp);
3333         __ push(length);
3334         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3335         __ push(tmp);
3336         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3337         __ push(tmp);
3338 
3339         __ call_VM_leaf(copyfunc_addr, 5);
3340 #else
3341         __ movl2ptr(length, length); //higher 32bits must be null
3342 
3343         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3344         assert_different_registers(c_rarg0, dst, dst_pos, length);
3345         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3346         assert_different_registers(c_rarg1, dst, length);
3347 
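Judging by the order in which the five outgoing values are materialized (source element address, destination element address, length, the destination element klass's super_check_offset, and the destination element klass itself), copyfunc_addr is the checkcast arraycopy entry that re-checks each element as it copies. Assumed shape of that entry point (a sketch; the authoritative signature lives with the stub generator, and the return-value convention here is an assumption):

    #include <cstddef>
    // (src_addr, dst_addr, length, super_check_offset, dst_element_klass)
    typedef int (*checkcast_copy_fn)(void* src_addr, void* dst_addr, size_t length,
                                     int super_check_offset, void* dst_element_klass);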

3411   }
3412 
3413 #ifdef ASSERT
3414   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3415     // Sanity check the known type with the incoming class.  For the
3416     // primitive case the types must match exactly with src.klass and
3417     // dst.klass each exactly matching the default type.  For the
3418     // object array case, if no type check is needed then either the
3419     // dst type is exactly the expected type and the src type is a
3420     // subtype which we can't check or src is the same array as dst
3421     // but not necessarily exactly of type default_type.
3422     Label known_ok, halt;
3423     __ mov_metadata(tmp, default_type->constant_encoding());
3424 #ifdef _LP64
3425     if (UseCompressedClassPointers) {
3426       __ encode_klass_not_null(tmp, rscratch1);
3427     }
3428 #endif
3429 
3430     if (basic_type != T_OBJECT) {
3431       __ cmp_klass(tmp, dst, tmp2);
3432       __ jcc(Assembler::notEqual, halt);
3433       __ cmp_klass(tmp, src, tmp2);
3434       __ jcc(Assembler::equal, known_ok);
3435     } else {
3436       __ cmp_klass(tmp, dst, tmp2);
3437       __ jcc(Assembler::equal, known_ok);
3438       __ cmpptr(src, dst);
3439       __ jcc(Assembler::equal, known_ok);
3440     }
3441     __ bind(halt);
3442     __ stop("incorrect type information in arraycopy");
3443     __ bind(known_ok);
3444   }
3445 #endif
3446 
3447 #ifndef PRODUCT
3448   if (PrintC1Statistics) {
3449     __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3450   }
3451 #endif
3452 
3453 #ifdef _LP64
3454   assert_different_registers(c_rarg0, dst, dst_pos, length);
3455   __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3456   assert_different_registers(c_rarg1, length);

3512     // done
3513   } else if (op->code() == lir_unlock) {
3514     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3515     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3516   } else {
3517     Unimplemented();
3518   }
3519   __ bind(*op->stub()->continuation());
3520 }
3521 
3522 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3523   Register obj = op->obj()->as_pointer_register();
3524   Register result = op->result_opr()->as_pointer_register();
3525 
3526   CodeEmitInfo* info = op->info();
3527   if (info != nullptr) {
3528     add_debug_info_for_null_check_here(info);
3529   }
3530 
3531 #ifdef _LP64
3532   if (UseCompactObjectHeaders) {
3533     Register tmp = rscratch1;
3534     assert_different_registers(tmp, obj);
3535     assert_different_registers(tmp, result);
3536 
3537     // Check if we can take the (common) fast path, if obj is unlocked.
3538     __ movq(result, Address(obj, oopDesc::mark_offset_in_bytes()));
3539     __ testb(result, markWord::monitor_value);
3540     __ jcc(Assembler::notZero, *op->stub()->entry());
3541     __ bind(*op->stub()->continuation());
3542     // Fast-path: shift and decode Klass*.
3543     __ shrq(result, markWord::klass_shift);
3544     __ decode_klass_not_null(result, tmp);
3545   } else if (UseCompressedClassPointers) {
3546     __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3547     __ decode_klass_not_null(result, rscratch1);
3548   } else
3549 #endif
3550   {
3551     __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3552   }
3553 }
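Under UseCompactObjectHeaders the Klass* is no longer a separate header field: the narrow klass sits in the upper bits of the mark word, so the fast path loads the mark, bails out to the stub when the lock bits indicate an inflated monitor (the klass bits cannot be read directly from such a mark word), and otherwise shifts and decodes in place. A minimal model of that fast path (a sketch; bit positions are illustrative, not the real markWord constants):

    #include <cstdint>
    // Returns true and produces the narrow klass when the fast path applies;
    // returns false when the object is monitor-locked and the stub must run.
    static bool narrow_klass_from_mark(uint64_t mark, unsigned klass_shift,
                                       uint64_t monitor_value, uint32_t* narrow_klass) {
      if ((mark & monitor_value) != 0) return false;   // inflated: take the slow path
      *narrow_klass = (uint32_t)(mark >> klass_shift);
      return true;
    }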
3554 
3555 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3556   ciMethod* method = op->profiled_method();
3557   int bci          = op->profiled_bci();
3558   ciMethod* callee = op->profiled_callee();
3559   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3560 
3561   // Update counter for all call types
3562   ciMethodData* md = method->method_data_or_null();
3563   assert(md != nullptr, "Sanity");
3564   ciProfileData* data = md->bci_to_data(bci);
3565   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3566   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
3567   Register mdo  = op->mdo()->as_register();
3568   __ mov_metadata(mdo, md->constant_encoding());
3569   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
3570   // Perform additional virtual call profiling for invokevirtual and
3571   // invokeinterface bytecodes
3572   if (op->should_profile_receiver_type()) {