< prev index next >

src/hotspot/cpu/x86/c1_LIRAssembler_x86.cpp

Print this page

1601       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1602       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1603     __ jmp(*op->stub()->entry());
1604   } else {
1605     Register tmp1 = op->tmp1()->as_register();
1606     Register tmp2 = op->tmp2()->as_register();
1607     Register tmp3 = op->tmp3()->as_register();
1608     if (len == tmp1) {
1609       tmp1 = tmp3;
1610     } else if (len == tmp2) {
1611       tmp2 = tmp3;
1612     } else if (len == tmp3) {
1613       // everything is ok
1614     } else {
1615       __ mov(tmp3, len);
1616     }
1617     __ allocate_array(op->obj()->as_register(),
1618                       len,
1619                       tmp1,
1620                       tmp2,
1621                       arrayOopDesc::header_size(op->type()),
1622                       array_element_size(op->type()),
1623                       op->klass()->as_register(),
1624                       *op->stub()->entry());
1625   }
1626   __ bind(*op->stub()->continuation());
1627 }
1628 
1629 void LIR_Assembler::type_profile_helper(Register mdo,
1630                                         ciMethodData *md, ciProfileData *data,
1631                                         Register recv, Label* update_done) {
1632   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1633     Label next_test;
1634     // See if the receiver is receiver[n].
1635     __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1636     __ jccb(Assembler::notEqual, next_test);
1637     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1638     __ addptr(data_addr, DataLayout::counter_increment);
1639     __ jmp(*update_done);
1640     __ bind(next_test);
1641   }

// Spill a Metadata* into the reserved outgoing-argument area of the current
// frame, at the given word offset from rsp.  Used to pass metadata arguments
// to runtime stubs; rscratch1 is handed to mov_metadata as a temporary.
3030 void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
3031   assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
3032   int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
     // The byte offset must stay inside the area the frame map reserved for
     // outgoing stub arguments.
3033   assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
3034   __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
3035 }
3036 
3037 
3038 // This code replaces a call to arraycopy; no exception may
3039 // be thrown in this code, they must be thrown in the System.arraycopy
3040 // activation frame; we could save some checks if this would not be the case
3041 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3042   ciArrayKlass* default_type = op->expected_type();
3043   Register src = op->src()->as_register();
3044   Register dst = op->dst()->as_register();
3045   Register src_pos = op->src_pos()->as_register();
3046   Register dst_pos = op->dst_pos()->as_register();
3047   Register length  = op->length()->as_register();
3048   Register tmp = op->tmp()->as_register();
3049   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);

3050 
3051   CodeStub* stub = op->stub();
3052   int flags = op->flags();
3053   BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3054   if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3055 
3056   // if we don't know anything, just go through the generic arraycopy
3057   if (default_type == nullptr) {
3058     // save outgoing arguments on stack in case call to System.arraycopy is needed
3059     // HACK ALERT. This code used to push the parameters in a hardwired fashion
3060     // for interpreter calling conventions. Now we have to do it in new style conventions.
3061     // For the moment until C1 gets the new register allocator I just force all the
3062     // args to the right place (except the register args) and then on the back side
3063     // reload the register args properly if we go slow path. Yuck
3064 
3065     // These are proper for the calling convention
3066     store_parameter(length, 2);
3067     store_parameter(dst_pos, 1);
3068     store_parameter(dst, 0);
3069 

3154   switch (elem_size) {
3155     case 1 :
3156       scale = Address::times_1;
3157       break;
3158     case 2 :
3159       scale = Address::times_2;
3160       break;
3161     case 4 :
3162       scale = Address::times_4;
3163       break;
3164     case 8 :
3165       scale = Address::times_8;
3166       break;
3167     default:
3168       scale = Address::no_scale;
3169       ShouldNotReachHere();
3170   }
3171 
3172   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
3173   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
3174   Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
3175   Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
3176 
3177   // length and pos's are all sign extended at this point on 64bit
3178 
3179   // test for null
3180   if (flags & LIR_OpArrayCopy::src_null_check) {
3181     __ testptr(src, src);
3182     __ jcc(Assembler::zero, *stub->entry());
3183   }
3184   if (flags & LIR_OpArrayCopy::dst_null_check) {
3185     __ testptr(dst, dst);
3186     __ jcc(Assembler::zero, *stub->entry());
3187   }
3188 
3189   // If the compiler was not able to prove that exact type of the source or the destination
3190   // of the arraycopy is an array type, check at runtime if the source or the destination is
3191   // an instance type.
3192   if (flags & LIR_OpArrayCopy::type_check) {
3193     if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3194       __ load_klass(tmp, dst, tmp_load_klass);
3195       __ cmpl(Address(tmp, in_bytes(Klass::layout_helper_offset())), Klass::_lh_neutral_value);

3221   if (flags & LIR_OpArrayCopy::dst_range_check) {
3222     __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3223     __ cmpl(tmp, dst_length_addr);
3224     __ jcc(Assembler::above, *stub->entry());
3225   }
3226 
3227   if (flags & LIR_OpArrayCopy::length_positive_check) {
3228     __ testl(length, length);
3229     __ jcc(Assembler::less, *stub->entry());
3230   }
3231 
3232 #ifdef _LP64
3233   __ movl2ptr(src_pos, src_pos); //higher 32bits must be null
3234   __ movl2ptr(dst_pos, dst_pos); //higher 32bits must be null
3235 #endif
3236 
3237   if (flags & LIR_OpArrayCopy::type_check) {
3238     // We don't know the array types are compatible
3239     if (basic_type != T_OBJECT) {
3240       // Simple test for basic type arrays
3241       if (UseCompressedClassPointers) {
3242         __ movl(tmp, src_klass_addr);
3243         __ cmpl(tmp, dst_klass_addr);
3244       } else {
3245         __ movptr(tmp, src_klass_addr);
3246         __ cmpptr(tmp, dst_klass_addr);
3247       }
3248       __ jcc(Assembler::notEqual, *stub->entry());
3249     } else {
3250       // For object arrays, if src is a sub class of dst then we can
3251       // safely do the copy.
3252       Label cont, slow;
3253 
3254       __ push(src);
3255       __ push(dst);
3256 
3257       __ load_klass(src, src, tmp_load_klass);
3258       __ load_klass(dst, dst, tmp_load_klass);
3259 
3260       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3261 
3262       __ push(src);
3263       __ push(dst);
3264       __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3265       __ pop(dst);
3266       __ pop(src);
3267 

3286             __ load_klass(tmp, src, tmp_load_klass);
3287           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3288             __ load_klass(tmp, dst, tmp_load_klass);
3289           }
3290           int lh_offset = in_bytes(Klass::layout_helper_offset());
3291           Address klass_lh_addr(tmp, lh_offset);
3292           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3293           __ cmpl(klass_lh_addr, objArray_lh);
3294           __ jcc(Assembler::notEqual, *stub->entry());
3295         }
3296 
3297        // Spill because stubs can use any register they like and it's
3298        // easier to restore just those that we care about.
3299        store_parameter(dst, 0);
3300        store_parameter(dst_pos, 1);
3301        store_parameter(length, 2);
3302        store_parameter(src_pos, 3);
3303        store_parameter(src, 4);
3304 
3305 #ifndef _LP64

3306         __ movptr(tmp, dst_klass_addr);
3307         __ movptr(tmp, Address(tmp, ObjArrayKlass::element_klass_offset()));
3308         __ push(tmp);
3309         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3310         __ push(tmp);
3311         __ push(length);
3312         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3313         __ push(tmp);
3314         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3315         __ push(tmp);
3316 
3317         __ call_VM_leaf(copyfunc_addr, 5);
3318 #else
3319         __ movl2ptr(length, length); //higher 32bits must be null
3320 
3321         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3322         assert_different_registers(c_rarg0, dst, dst_pos, length);
3323         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3324         assert_different_registers(c_rarg1, dst, length);
3325 

3387       __ pop(src);
3388     }
3389   }
3390 
3391 #ifdef ASSERT
3392   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3393     // Sanity check the known type with the incoming class.  For the
3394     // primitive case the types must match exactly with src.klass and
3395     // dst.klass each exactly matching the default type.  For the
3396     // object array case, if no type check is needed then either the
3397     // dst type is exactly the expected type and the src type is a
3398     // subtype which we can't check or src is the same array as dst
3399     // but not necessarily exactly of type default_type.
3400     Label known_ok, halt;
3401     __ mov_metadata(tmp, default_type->constant_encoding());
3402 #ifdef _LP64
3403     if (UseCompressedClassPointers) {
3404       __ encode_klass_not_null(tmp, rscratch1);
3405     }
3406 #endif
3407 
3408     if (basic_type != T_OBJECT) {
3409 
3410       if (UseCompressedClassPointers)          __ cmpl(tmp, dst_klass_addr);
3411       else                   __ cmpptr(tmp, dst_klass_addr);
3412       __ jcc(Assembler::notEqual, halt);
3413       if (UseCompressedClassPointers)          __ cmpl(tmp, src_klass_addr);
3414       else                   __ cmpptr(tmp, src_klass_addr);
3415       __ jcc(Assembler::equal, known_ok);
3416     } else {
3417       if (UseCompressedClassPointers)          __ cmpl(tmp, dst_klass_addr);
3418       else                   __ cmpptr(tmp, dst_klass_addr);
3419       __ jcc(Assembler::equal, known_ok);
3420       __ cmpptr(src, dst);
3421       __ jcc(Assembler::equal, known_ok);
3422     }
3423     __ bind(halt);
3424     __ stop("incorrect type information in arraycopy");
3425     __ bind(known_ok);
3426   }
3427 #endif
3428 
3429 #ifndef PRODUCT
3430   if (PrintC1Statistics) {
3431     __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3432   }
3433 #endif
3434 
3435 #ifdef _LP64
3436   assert_different_registers(c_rarg0, dst, dst_pos, length);
3437   __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3438   assert_different_registers(c_rarg1, length);

3494     // done
3495   } else if (op->code() == lir_unlock) {
3496     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3497     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3498   } else {
3499     Unimplemented();
3500   }
3501   __ bind(*op->stub()->continuation());
3502 }
3503 
// Emit code that loads the Klass* of 'obj' into 'result'.  When debug info
// is attached, the klass-word load also serves as an implicit null check of
// 'obj' (see add_debug_info_for_null_check_here below).
3504 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3505   Register obj = op->obj()->as_pointer_register();
3506   Register result = op->result_opr()->as_pointer_register();
3507 
3508   CodeEmitInfo* info = op->info();
3509   if (info != nullptr) {
       // Record debug info here so the klass load below can double as the
       // null check for 'obj'.
3510     add_debug_info_for_null_check_here(info);
3511   }
3512 
3513 #ifdef _LP64
3514   if (UseCompressedClassPointers) {
       // Narrow (32-bit) class pointer: load it, then decode to a full Klass*.
3515     __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3516     __ decode_klass_not_null(result, rscratch1);
3517   } else
3518 #endif
       // Full-width Klass* (32-bit VM, or compressed class pointers disabled).
3519     __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3520 }
3521 
3522 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3523   ciMethod* method = op->profiled_method();
3524   int bci          = op->profiled_bci();
3525   ciMethod* callee = op->profiled_callee();
3526   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3527 
3528   // Update counter for all call types
3529   ciMethodData* md = method->method_data_or_null();
3530   assert(md != nullptr, "Sanity");
3531   ciProfileData* data = md->bci_to_data(bci);
3532   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3533   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
3534   Register mdo  = op->mdo()->as_register();
3535   __ mov_metadata(mdo, md->constant_encoding());
3536   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
3537   // Perform additional virtual call profiling for invokevirtual and
3538   // invokeinterface bytecodes
3539   if (op->should_profile_receiver_type()) {

1601       (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1602       (!UseFastNewTypeArray   && !is_reference_type(op->type()))) {
1603     __ jmp(*op->stub()->entry());
1604   } else {
1605     Register tmp1 = op->tmp1()->as_register();
1606     Register tmp2 = op->tmp2()->as_register();
1607     Register tmp3 = op->tmp3()->as_register();
1608     if (len == tmp1) {
1609       tmp1 = tmp3;
1610     } else if (len == tmp2) {
1611       tmp2 = tmp3;
1612     } else if (len == tmp3) {
1613       // everything is ok
1614     } else {
1615       __ mov(tmp3, len);
1616     }
1617     __ allocate_array(op->obj()->as_register(),
1618                       len,
1619                       tmp1,
1620                       tmp2,
1621                       arrayOopDesc::base_offset_in_bytes(op->type()),
1622                       array_element_size(op->type()),
1623                       op->klass()->as_register(),
1624                       *op->stub()->entry());
1625   }
1626   __ bind(*op->stub()->continuation());
1627 }
1628 
1629 void LIR_Assembler::type_profile_helper(Register mdo,
1630                                         ciMethodData *md, ciProfileData *data,
1631                                         Register recv, Label* update_done) {
1632   for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1633     Label next_test;
1634     // See if the receiver is receiver[n].
1635     __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1636     __ jccb(Assembler::notEqual, next_test);
1637     Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1638     __ addptr(data_addr, DataLayout::counter_increment);
1639     __ jmp(*update_done);
1640     __ bind(next_test);
1641   }

// Spill a Metadata* into the reserved outgoing-argument area of the current
// frame, at the given word offset from rsp.  Used to pass metadata arguments
// to runtime stubs; rscratch1 is handed to mov_metadata as a temporary.
3030 void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
3031   assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
3032   int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
     // The byte offset must stay inside the area the frame map reserved for
     // outgoing stub arguments.
3033   assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
3034   __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
3035 }
3036 
3037 
3038 // This code replaces a call to arraycopy; no exception may
3039 // be thrown in this code, they must be thrown in the System.arraycopy
3040 // activation frame; we could save some checks if this would not be the case
3041 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3042   ciArrayKlass* default_type = op->expected_type();
3043   Register src = op->src()->as_register();
3044   Register dst = op->dst()->as_register();
3045   Register src_pos = op->src_pos()->as_register();
3046   Register dst_pos = op->dst_pos()->as_register();
3047   Register length  = op->length()->as_register();
3048   Register tmp = op->tmp()->as_register();
3049   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3050   Register tmp2 = UseCompactObjectHeaders ? rscratch2 : noreg;
3051 
3052   CodeStub* stub = op->stub();
3053   int flags = op->flags();
3054   BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3055   if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3056 
3057   // if we don't know anything, just go through the generic arraycopy
3058   if (default_type == nullptr) {
3059     // save outgoing arguments on stack in case call to System.arraycopy is needed
3060     // HACK ALERT. This code used to push the parameters in a hardwired fashion
3061     // for interpreter calling conventions. Now we have to do it in new style conventions.
3062     // For the moment until C1 gets the new register allocator I just force all the
3063     // args to the right place (except the register args) and then on the back side
3064     // reload the register args properly if we go slow path. Yuck
3065 
3066     // These are proper for the calling convention
3067     store_parameter(length, 2);
3068     store_parameter(dst_pos, 1);
3069     store_parameter(dst, 0);
3070 

3155   switch (elem_size) {
3156     case 1 :
3157       scale = Address::times_1;
3158       break;
3159     case 2 :
3160       scale = Address::times_2;
3161       break;
3162     case 4 :
3163       scale = Address::times_4;
3164       break;
3165     case 8 :
3166       scale = Address::times_8;
3167       break;
3168     default:
3169       scale = Address::no_scale;
3170       ShouldNotReachHere();
3171   }
3172 
3173   Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
3174   Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());


3175 
3176   // length and pos's are all sign extended at this point on 64bit
3177 
3178   // test for null
3179   if (flags & LIR_OpArrayCopy::src_null_check) {
3180     __ testptr(src, src);
3181     __ jcc(Assembler::zero, *stub->entry());
3182   }
3183   if (flags & LIR_OpArrayCopy::dst_null_check) {
3184     __ testptr(dst, dst);
3185     __ jcc(Assembler::zero, *stub->entry());
3186   }
3187 
3188   // If the compiler was not able to prove that exact type of the source or the destination
3189   // of the arraycopy is an array type, check at runtime if the source or the destination is
3190   // an instance type.
3191   if (flags & LIR_OpArrayCopy::type_check) {
3192     if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3193       __ load_klass(tmp, dst, tmp_load_klass);
3194       __ cmpl(Address(tmp, in_bytes(Klass::layout_helper_offset())), Klass::_lh_neutral_value);

3220   if (flags & LIR_OpArrayCopy::dst_range_check) {
3221     __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3222     __ cmpl(tmp, dst_length_addr);
3223     __ jcc(Assembler::above, *stub->entry());
3224   }
3225 
3226   if (flags & LIR_OpArrayCopy::length_positive_check) {
3227     __ testl(length, length);
3228     __ jcc(Assembler::less, *stub->entry());
3229   }
3230 
3231 #ifdef _LP64
3232   __ movl2ptr(src_pos, src_pos); //higher 32bits must be null
3233   __ movl2ptr(dst_pos, dst_pos); //higher 32bits must be null
3234 #endif
3235 
3236   if (flags & LIR_OpArrayCopy::type_check) {
3237     // We don't know the array types are compatible
3238     if (basic_type != T_OBJECT) {
3239       // Simple test for basic type arrays
3240       __ cmp_klass(src, dst, tmp, tmp2);






3241       __ jcc(Assembler::notEqual, *stub->entry());
3242     } else {
3243       // For object arrays, if src is a sub class of dst then we can
3244       // safely do the copy.
3245       Label cont, slow;
3246 
3247       __ push(src);
3248       __ push(dst);
3249 
3250       __ load_klass(src, src, tmp_load_klass);
3251       __ load_klass(dst, dst, tmp_load_klass);
3252 
3253       __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3254 
3255       __ push(src);
3256       __ push(dst);
3257       __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3258       __ pop(dst);
3259       __ pop(src);
3260 

3279             __ load_klass(tmp, src, tmp_load_klass);
3280           } else if (!(flags & LIR_OpArrayCopy::dst_objarray)) {
3281             __ load_klass(tmp, dst, tmp_load_klass);
3282           }
3283           int lh_offset = in_bytes(Klass::layout_helper_offset());
3284           Address klass_lh_addr(tmp, lh_offset);
3285           jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
3286           __ cmpl(klass_lh_addr, objArray_lh);
3287           __ jcc(Assembler::notEqual, *stub->entry());
3288         }
3289 
3290        // Spill because stubs can use any register they like and it's
3291        // easier to restore just those that we care about.
3292        store_parameter(dst, 0);
3293        store_parameter(dst_pos, 1);
3294        store_parameter(length, 2);
3295        store_parameter(src_pos, 3);
3296        store_parameter(src, 4);
3297 
3298 #ifndef _LP64
3299         Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
3300         __ movptr(tmp, dst_klass_addr);
3301         __ movptr(tmp, Address(tmp, ObjArrayKlass::element_klass_offset()));
3302         __ push(tmp);
3303         __ movl(tmp, Address(tmp, Klass::super_check_offset_offset()));
3304         __ push(tmp);
3305         __ push(length);
3306         __ lea(tmp, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3307         __ push(tmp);
3308         __ lea(tmp, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3309         __ push(tmp);
3310 
3311         __ call_VM_leaf(copyfunc_addr, 5);
3312 #else
3313         __ movl2ptr(length, length); //higher 32bits must be null
3314 
3315         __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3316         assert_different_registers(c_rarg0, dst, dst_pos, length);
3317         __ lea(c_rarg1, Address(dst, dst_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3318         assert_different_registers(c_rarg1, dst, length);
3319 

3381       __ pop(src);
3382     }
3383   }
3384 
3385 #ifdef ASSERT
3386   if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3387     // Sanity check the known type with the incoming class.  For the
3388     // primitive case the types must match exactly with src.klass and
3389     // dst.klass each exactly matching the default type.  For the
3390     // object array case, if no type check is needed then either the
3391     // dst type is exactly the expected type and the src type is a
3392     // subtype which we can't check or src is the same array as dst
3393     // but not necessarily exactly of type default_type.
3394     Label known_ok, halt;
3395     __ mov_metadata(tmp, default_type->constant_encoding());
3396 #ifdef _LP64
3397     if (UseCompressedClassPointers) {
3398       __ encode_klass_not_null(tmp, rscratch1);
3399     }
3400 #endif

3401     if (basic_type != T_OBJECT) {
3402       __ cmp_klass(tmp, dst, tmp2);


3403       __ jcc(Assembler::notEqual, halt);
3404       __ cmp_klass(tmp, src, tmp2);

3405       __ jcc(Assembler::equal, known_ok);
3406     } else {
3407       __ cmp_klass(tmp, dst, tmp2);

3408       __ jcc(Assembler::equal, known_ok);
3409       __ cmpptr(src, dst);
3410       __ jcc(Assembler::equal, known_ok);
3411     }
3412     __ bind(halt);
3413     __ stop("incorrect type information in arraycopy");
3414     __ bind(known_ok);
3415   }
3416 #endif
3417 
3418 #ifndef PRODUCT
3419   if (PrintC1Statistics) {
3420     __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3421   }
3422 #endif
3423 
3424 #ifdef _LP64
3425   assert_different_registers(c_rarg0, dst, dst_pos, length);
3426   __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3427   assert_different_registers(c_rarg1, length);

3483     // done
3484   } else if (op->code() == lir_unlock) {
3485     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3486     __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3487   } else {
3488     Unimplemented();
3489   }
3490   __ bind(*op->stub()->continuation());
3491 }
3492 
// Emit code that loads the Klass* of 'obj' into 'result'.  When debug info
// is attached, the first load from 'obj' (mark word or klass word) also
// serves as an implicit null check.  With compact object headers the
// compressed class pointer lives in the upper bits of the mark word.
3493 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3494   Register obj = op->obj()->as_pointer_register();
3495   Register result = op->result_opr()->as_pointer_register();
3496 
3497   CodeEmitInfo* info = op->info();
3498   if (info != nullptr) {
       // Record debug info here so the load below can double as the null
       // check for 'obj'.
3499     add_debug_info_for_null_check_here(info);
3500   }
3501 
3502 #ifdef _LP64
3503   if (UseCompactObjectHeaders) {
3504     Register tmp = rscratch1;
       // tmp is only a scratch for decode_klass_not_null; it must not alias
       // the registers still carrying live values.
3505     assert_different_registers(tmp, obj);
3506     assert_different_registers(tmp, result);
3507 
3508     // Check if we can take the (common) fast path, if obj is unlocked.
3509     __ movq(result, Address(obj, oopDesc::mark_offset_in_bytes()));
       // If the monitor bit is set the mark word points to an ObjectMonitor
       // instead of holding the header, so take the slow-path stub; the stub
       // is expected to leave the displaced mark in 'result' before falling
       // through to the continuation — TODO confirm against the stub code.
3510     __ testb(result, markWord::monitor_value);
3511     __ jcc(Assembler::notZero, *op->stub()->entry());
3512     __ bind(*op->stub()->continuation());
3513     // Fast-path: shift and decode Klass*.
3514     __ shrq(result, markWord::klass_shift);
3515     __ decode_klass_not_null(result, tmp);
3516   } else if (UseCompressedClassPointers) {
       // Narrow (32-bit) class pointer: load it, then decode to a full Klass*.
3517     __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3518     __ decode_klass_not_null(result, rscratch1);
3519   } else
3520 #endif
3521   {
       // Full-width Klass* (32-bit VM, or compressed class pointers disabled).
3522     __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3523   }
3524 }
3525 
3526 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3527   ciMethod* method = op->profiled_method();
3528   int bci          = op->profiled_bci();
3529   ciMethod* callee = op->profiled_callee();
3530   Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3531 
3532   // Update counter for all call types
3533   ciMethodData* md = method->method_data_or_null();
3534   assert(md != nullptr, "Sanity");
3535   ciProfileData* data = md->bci_to_data(bci);
3536   assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3537   assert(op->mdo()->is_single_cpu(),  "mdo must be allocated");
3538   Register mdo  = op->mdo()->as_register();
3539   __ mov_metadata(mdo, md->constant_encoding());
3540   Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset()));
3541   // Perform additional virtual call profiling for invokevirtual and
3542   // invokeinterface bytecodes
3543   if (op->should_profile_receiver_type()) {
< prev index next >