1618 (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1619 (!UseFastNewTypeArray && !is_reference_type(op->type()))) {
1620 __ jmp(*op->stub()->entry());
1621 } else {
1622 Register tmp1 = op->tmp1()->as_register();
1623 Register tmp2 = op->tmp2()->as_register();
1624 Register tmp3 = op->tmp3()->as_register();
1625 if (len == tmp1) {
1626 tmp1 = tmp3;
1627 } else if (len == tmp2) {
1628 tmp2 = tmp3;
1629 } else if (len == tmp3) {
1630 // everything is ok
1631 } else {
1632 __ mov(tmp3, len);
1633 }
1634 __ allocate_array(op->obj()->as_register(),
1635 len,
1636 tmp1,
1637 tmp2,
1638 arrayOopDesc::header_size(op->type()),
1639 array_element_size(op->type()),
1640 op->klass()->as_register(),
1641 *op->stub()->entry());
1642 }
1643 __ bind(*op->stub()->continuation());
1644 }
1645
1646 void LIR_Assembler::type_profile_helper(Register mdo,
1647 ciMethodData *md, ciProfileData *data,
1648 Register recv, Label* update_done) {
1649 for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1650 Label next_test;
1651 // See if the receiver is receiver[n].
1652 __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1653 __ jccb(Assembler::notEqual, next_test);
1654 Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1655 __ addptr(data_addr, DataLayout::counter_increment);
1656 __ jmp(*update_done);
1657 __ bind(next_test);
1658 }
// Store the metadata pointer `m` into the outgoing-argument area of the
// current frame, `offset_from_rsp_in_words` machine words above rsp.
// Used to pass a Metadata* argument to a runtime stub on the stack.
// rscratch1 is handed to mov_metadata as a temp — presumably clobbered.
void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
  assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
  int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
  // The slot must lie within the area the frame map reserved for
  // outgoing (stub/runtime-call) arguments.
  assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
  __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
}
3059
3060
3061 // This code replaces a call to arraycopy; no exception may
3062 // be thrown in this code, they must be thrown in the System.arraycopy
3063 // activation frame; we could save some checks if this would not be the case
3064 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3065 ciArrayKlass* default_type = op->expected_type();
3066 Register src = op->src()->as_register();
3067 Register dst = op->dst()->as_register();
3068 Register src_pos = op->src_pos()->as_register();
3069 Register dst_pos = op->dst_pos()->as_register();
3070 Register length = op->length()->as_register();
3071 Register tmp = op->tmp()->as_register();
3072 Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3073
3074 CodeStub* stub = op->stub();
3075 int flags = op->flags();
3076 BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3077 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3078
3079 // if we don't know anything, just go through the generic arraycopy
3080 if (default_type == nullptr) {
3081 // save outgoing arguments on stack in case call to System.arraycopy is needed
3082 // HACK ALERT. This code used to push the parameters in a hardwired fashion
3083 // for interpreter calling conventions. Now we have to do it in new style conventions.
3084 // For the moment until C1 gets the new register allocator I just force all the
3085 // args to the right place (except the register args) and then on the back side
3086 // reload the register args properly if we go slow path. Yuck
3087
3088 // These are proper for the calling convention
3089 store_parameter(length, 2);
3090 store_parameter(dst_pos, 1);
3091 store_parameter(dst, 0);
3092
3244 if (flags & LIR_OpArrayCopy::dst_range_check) {
3245 __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3246 __ cmpl(tmp, dst_length_addr);
3247 __ jcc(Assembler::above, *stub->entry());
3248 }
3249
3250 if (flags & LIR_OpArrayCopy::length_positive_check) {
3251 __ testl(length, length);
3252 __ jcc(Assembler::less, *stub->entry());
3253 }
3254
3255 #ifdef _LP64
3256 __ movl2ptr(src_pos, src_pos); //higher 32bits must be null
3257 __ movl2ptr(dst_pos, dst_pos); //higher 32bits must be null
3258 #endif
3259
3260 if (flags & LIR_OpArrayCopy::type_check) {
3261 // We don't know the array types are compatible
3262 if (basic_type != T_OBJECT) {
3263 // Simple test for basic type arrays
3264 if (UseCompressedClassPointers) {
3265 __ movl(tmp, src_klass_addr);
3266 __ cmpl(tmp, dst_klass_addr);
3267 } else {
3268 __ movptr(tmp, src_klass_addr);
3269 __ cmpptr(tmp, dst_klass_addr);
3270 }
3271 __ jcc(Assembler::notEqual, *stub->entry());
3272 } else {
3273 // For object arrays, if src is a sub class of dst then we can
3274 // safely do the copy.
3275 Label cont, slow;
3276
3277 __ push(src);
3278 __ push(dst);
3279
3280 __ load_klass(src, src, tmp_load_klass);
3281 __ load_klass(dst, dst, tmp_load_klass);
3282
3283 __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3284
3285 __ push(src);
3286 __ push(dst);
3287 __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3288 __ pop(dst);
3289 __ pop(src);
3290
3410 __ pop(src);
3411 }
3412 }
3413
3414 #ifdef ASSERT
3415 if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3416 // Sanity check the known type with the incoming class. For the
3417 // primitive case the types must match exactly with src.klass and
3418 // dst.klass each exactly matching the default type. For the
3419 // object array case, if no type check is needed then either the
3420 // dst type is exactly the expected type and the src type is a
3421 // subtype which we can't check or src is the same array as dst
3422 // but not necessarily exactly of type default_type.
3423 Label known_ok, halt;
3424 __ mov_metadata(tmp, default_type->constant_encoding());
3425 #ifdef _LP64
3426 if (UseCompressedClassPointers) {
3427 __ encode_klass_not_null(tmp, rscratch1);
3428 }
3429 #endif
3430
3431 if (basic_type != T_OBJECT) {
3432
3433 if (UseCompressedClassPointers) __ cmpl(tmp, dst_klass_addr);
3434 else __ cmpptr(tmp, dst_klass_addr);
3435 __ jcc(Assembler::notEqual, halt);
3436 if (UseCompressedClassPointers) __ cmpl(tmp, src_klass_addr);
3437 else __ cmpptr(tmp, src_klass_addr);
3438 __ jcc(Assembler::equal, known_ok);
3439 } else {
3440 if (UseCompressedClassPointers) __ cmpl(tmp, dst_klass_addr);
3441 else __ cmpptr(tmp, dst_klass_addr);
3442 __ jcc(Assembler::equal, known_ok);
3443 __ cmpptr(src, dst);
3444 __ jcc(Assembler::equal, known_ok);
3445 }
3446 __ bind(halt);
3447 __ stop("incorrect type information in arraycopy");
3448 __ bind(known_ok);
3449 }
3450 #endif
3451
3452 #ifndef PRODUCT
3453 if (PrintC1Statistics) {
3454 __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3455 }
3456 #endif
3457
3458 #ifdef _LP64
3459 assert_different_registers(c_rarg0, dst, dst_pos, length);
3460 __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3461 assert_different_registers(c_rarg1, length);
3517 // done
3518 } else if (op->code() == lir_unlock) {
3519 assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3520 __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3521 } else {
3522 Unimplemented();
3523 }
3524 __ bind(*op->stub()->continuation());
3525 }
3526
3527 void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
3528 Register obj = op->obj()->as_pointer_register();
3529 Register result = op->result_opr()->as_pointer_register();
3530
3531 CodeEmitInfo* info = op->info();
3532 if (info != nullptr) {
3533 add_debug_info_for_null_check_here(info);
3534 }
3535
3536 #ifdef _LP64
3537 if (UseCompressedClassPointers) {
3538 __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3539 __ decode_klass_not_null(result, rscratch1);
3540 } else
3541 #endif
3542 __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
3543 }
3544
3545 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3546 ciMethod* method = op->profiled_method();
3547 int bci = op->profiled_bci();
3548 ciMethod* callee = op->profiled_callee();
3549 Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3550
3551 // Update counter for all call types
3552 ciMethodData* md = method->method_data_or_null();
3553 assert(md != nullptr, "Sanity");
3554 ciProfileData* data = md->bci_to_data(bci);
3555 assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3556 assert(op->mdo()->is_single_cpu(), "mdo must be allocated");
|
1618 (!UseFastNewObjectArray && is_reference_type(op->type())) ||
1619 (!UseFastNewTypeArray && !is_reference_type(op->type()))) {
1620 __ jmp(*op->stub()->entry());
1621 } else {
1622 Register tmp1 = op->tmp1()->as_register();
1623 Register tmp2 = op->tmp2()->as_register();
1624 Register tmp3 = op->tmp3()->as_register();
1625 if (len == tmp1) {
1626 tmp1 = tmp3;
1627 } else if (len == tmp2) {
1628 tmp2 = tmp3;
1629 } else if (len == tmp3) {
1630 // everything is ok
1631 } else {
1632 __ mov(tmp3, len);
1633 }
1634 __ allocate_array(op->obj()->as_register(),
1635 len,
1636 tmp1,
1637 tmp2,
1638 arrayOopDesc::base_offset_in_bytes(op->type()),
1639 array_element_size(op->type()),
1640 op->klass()->as_register(),
1641 *op->stub()->entry());
1642 }
1643 __ bind(*op->stub()->continuation());
1644 }
1645
1646 void LIR_Assembler::type_profile_helper(Register mdo,
1647 ciMethodData *md, ciProfileData *data,
1648 Register recv, Label* update_done) {
1649 for (uint i = 0; i < ReceiverTypeData::row_limit(); i++) {
1650 Label next_test;
1651 // See if the receiver is receiver[n].
1652 __ cmpptr(recv, Address(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_offset(i))));
1653 __ jccb(Assembler::notEqual, next_test);
1654 Address data_addr(mdo, md->byte_offset_of_slot(data, ReceiverTypeData::receiver_count_offset(i)));
1655 __ addptr(data_addr, DataLayout::counter_increment);
1656 __ jmp(*update_done);
1657 __ bind(next_test);
1658 }
// Store the metadata pointer `m` into the outgoing-argument area of the
// current frame, `offset_from_rsp_in_words` machine words above rsp.
// Used to pass a Metadata* argument to a runtime stub on the stack.
// rscratch1 is handed to mov_metadata as a temp — presumably clobbered.
void LIR_Assembler::store_parameter(Metadata* m, int offset_from_rsp_in_words) {
  assert(offset_from_rsp_in_words >= 0, "invalid offset from rsp");
  int offset_from_rsp_in_bytes = offset_from_rsp_in_words * BytesPerWord;
  // The slot must lie within the area the frame map reserved for
  // outgoing (stub/runtime-call) arguments.
  assert(offset_from_rsp_in_bytes < frame_map()->reserved_argument_area_size(), "invalid offset");
  __ mov_metadata(Address(rsp, offset_from_rsp_in_bytes), m, rscratch1);
}
3059
3060
3061 // This code replaces a call to arraycopy; no exception may
3062 // be thrown in this code, they must be thrown in the System.arraycopy
3063 // activation frame; we could save some checks if this would not be the case
3064 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
3065 ciArrayKlass* default_type = op->expected_type();
3066 Register src = op->src()->as_register();
3067 Register dst = op->dst()->as_register();
3068 Register src_pos = op->src_pos()->as_register();
3069 Register dst_pos = op->dst_pos()->as_register();
3070 Register length = op->length()->as_register();
3071 Register tmp = op->tmp()->as_register();
3072 Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3073 Register tmp2 = UseCompactObjectHeaders ? rscratch2 : noreg;
3074
3075 CodeStub* stub = op->stub();
3076 int flags = op->flags();
3077 BasicType basic_type = default_type != nullptr ? default_type->element_type()->basic_type() : T_ILLEGAL;
3078 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3079
3080 // if we don't know anything, just go through the generic arraycopy
3081 if (default_type == nullptr) {
3082 // save outgoing arguments on stack in case call to System.arraycopy is needed
3083 // HACK ALERT. This code used to push the parameters in a hardwired fashion
3084 // for interpreter calling conventions. Now we have to do it in new style conventions.
3085 // For the moment until C1 gets the new register allocator I just force all the
3086 // args to the right place (except the register args) and then on the back side
3087 // reload the register args properly if we go slow path. Yuck
3088
3089 // These are proper for the calling convention
3090 store_parameter(length, 2);
3091 store_parameter(dst_pos, 1);
3092 store_parameter(dst, 0);
3093
3245 if (flags & LIR_OpArrayCopy::dst_range_check) {
3246 __ lea(tmp, Address(dst_pos, length, Address::times_1, 0));
3247 __ cmpl(tmp, dst_length_addr);
3248 __ jcc(Assembler::above, *stub->entry());
3249 }
3250
3251 if (flags & LIR_OpArrayCopy::length_positive_check) {
3252 __ testl(length, length);
3253 __ jcc(Assembler::less, *stub->entry());
3254 }
3255
3256 #ifdef _LP64
3257 __ movl2ptr(src_pos, src_pos); //higher 32bits must be null
3258 __ movl2ptr(dst_pos, dst_pos); //higher 32bits must be null
3259 #endif
3260
3261 if (flags & LIR_OpArrayCopy::type_check) {
3262 // We don't know the array types are compatible
3263 if (basic_type != T_OBJECT) {
3264 // Simple test for basic type arrays
3265 __ cmp_klass(src, dst, tmp, tmp2);
3266 __ jcc(Assembler::notEqual, *stub->entry());
3267 } else {
3268 // For object arrays, if src is a sub class of dst then we can
3269 // safely do the copy.
3270 Label cont, slow;
3271
3272 __ push(src);
3273 __ push(dst);
3274
3275 __ load_klass(src, src, tmp_load_klass);
3276 __ load_klass(dst, dst, tmp_load_klass);
3277
3278 __ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, nullptr);
3279
3280 __ push(src);
3281 __ push(dst);
3282 __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
3283 __ pop(dst);
3284 __ pop(src);
3285
3405 __ pop(src);
3406 }
3407 }
3408
3409 #ifdef ASSERT
3410 if (basic_type != T_OBJECT || !(flags & LIR_OpArrayCopy::type_check)) {
3411 // Sanity check the known type with the incoming class. For the
3412 // primitive case the types must match exactly with src.klass and
3413 // dst.klass each exactly matching the default type. For the
3414 // object array case, if no type check is needed then either the
3415 // dst type is exactly the expected type and the src type is a
3416 // subtype which we can't check or src is the same array as dst
3417 // but not necessarily exactly of type default_type.
3418 Label known_ok, halt;
3419 __ mov_metadata(tmp, default_type->constant_encoding());
3420 #ifdef _LP64
3421 if (UseCompressedClassPointers) {
3422 __ encode_klass_not_null(tmp, rscratch1);
3423 }
3424 #endif
3425 if (basic_type != T_OBJECT) {
3426 __ cmp_klass(tmp, dst, tmp2);
3427 __ jcc(Assembler::notEqual, halt);
3428 __ cmp_klass(tmp, src, tmp2);
3429 __ jcc(Assembler::equal, known_ok);
3430 } else {
3431 __ cmp_klass(tmp, dst, tmp2);
3432 __ jcc(Assembler::equal, known_ok);
3433 __ cmpptr(src, dst);
3434 __ jcc(Assembler::equal, known_ok);
3435 }
3436 __ bind(halt);
3437 __ stop("incorrect type information in arraycopy");
3438 __ bind(known_ok);
3439 }
3440 #endif
3441
3442 #ifndef PRODUCT
3443 if (PrintC1Statistics) {
3444 __ incrementl(ExternalAddress(Runtime1::arraycopy_count_address(basic_type)), rscratch1);
3445 }
3446 #endif
3447
3448 #ifdef _LP64
3449 assert_different_registers(c_rarg0, dst, dst_pos, length);
3450 __ lea(c_rarg0, Address(src, src_pos, scale, arrayOopDesc::base_offset_in_bytes(basic_type)));
3451 assert_different_registers(c_rarg1, length);
3507 // done
3508 } else if (op->code() == lir_unlock) {
3509 assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
3510 __ unlock_object(hdr, obj, lock, *op->stub()->entry());
3511 } else {
3512 Unimplemented();
3513 }
3514 __ bind(*op->stub()->continuation());
3515 }
3516
// Emit code that loads the Klass* of op->obj() into the result register.
// When CodeEmitInfo is attached, the first memory access below acts as an
// implicit null check and is covered by a debug-info record.
void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
  Register obj = op->obj()->as_pointer_register();
  Register result = op->result_opr()->as_pointer_register();

  CodeEmitInfo* info = op->info();
  if (info != nullptr) {
    // A fault on the load below is then reported as a null check here.
    add_debug_info_for_null_check_here(info);
  }

#ifdef _LP64
  if (UseCompactObjectHeaders) {
    // Compact headers: the klass information is folded into the mark word,
    // so load the mark word rather than a separate klass field.
    Register tmp = rscratch1;
    assert_different_registers(tmp, obj);
    assert_different_registers(tmp, result);

    // Check if we can take the (common) fast path, if obj is unlocked.
    __ movq(result, Address(obj, oopDesc::mark_offset_in_bytes()));
    __ testb(result, markWord::monitor_value);
    // Monitor bits set — the mark word does not hold the klass bits
    // directly (object presumably inflated-locked); take the slow-path
    // stub, which rejoins at the continuation label below.
    __ jcc(Assembler::notZero, *op->stub()->entry());
    __ bind(*op->stub()->continuation());
    // Fast-path: shift and decode Klass*.
    __ shrq(result, markWord::klass_shift);
    __ decode_klass_not_null(result, tmp);
  } else
  if (UseCompressedClassPointers) {
    // Narrow class pointer: 32-bit load, then decode to a full Klass*.
    __ movl(result, Address(obj, oopDesc::klass_offset_in_bytes()));
    __ decode_klass_not_null(result, rscratch1);
  } else
#endif
  // Full-width Klass* (32-bit VM, or compressed class pointers disabled).
    __ movptr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
}
3548
3549 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
3550 ciMethod* method = op->profiled_method();
3551 int bci = op->profiled_bci();
3552 ciMethod* callee = op->profiled_callee();
3553 Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
3554
3555 // Update counter for all call types
3556 ciMethodData* md = method->method_data_or_null();
3557 assert(md != nullptr, "Sanity");
3558 ciProfileData* data = md->bci_to_data(bci);
3559 assert(data != nullptr && data->is_CounterData(), "need CounterData for calls");
3560 assert(op->mdo()->is_single_cpu(), "mdo must be allocated");
|