src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp
}
__ allocate_array(op->obj()->as_register(),
len,
tmp1,
tmp2,
! arrayOopDesc::header_size(op->type()),
array_element_size(op->type()),
op->klass()->as_register(),
*op->stub()->entry());
}
__ bind(*op->stub()->continuation());
}
__ allocate_array(op->obj()->as_register(),
len,
tmp1,
tmp2,
! arrayOopDesc::base_offset_in_bytes(op->type()),
array_element_size(op->type()),
op->klass()->as_register(),
*op->stub()->entry());
}
__ bind(*op->stub()->continuation());
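The new call passes arrayOopDesc::base_offset_in_bytes(op->type()) instead of a header size in words: with compact object headers the offset of element 0 depends on the element type and need not be a whole number of words. A minimal, self-contained sketch of the size computation this implies (the helper and its names are illustrative, not HotSpot's allocate_array):

#include <cstddef>

// Illustrative helper (not HotSpot code): the allocation size an array needs
// once the element base offset is given in bytes rather than as a header size
// in words.
static size_t array_allocation_size(size_t base_offset_in_bytes,
                                    size_t element_size,
                                    size_t length,
                                    size_t min_obj_alignment /* power of two, e.g. 8 */) {
  size_t size = base_offset_in_bytes + length * element_size;
  return (size + min_obj_alignment - 1) & ~(min_obj_alignment - 1);
}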
int elem_size = type2aelembytes(basic_type);
int scale = exact_log2(elem_size);
Address src_length_addr = Address(src, arrayOopDesc::length_offset_in_bytes());
Address dst_length_addr = Address(dst, arrayOopDesc::length_offset_in_bytes());
- Address src_klass_addr = Address(src, oopDesc::klass_offset_in_bytes());
- Address dst_klass_addr = Address(dst, oopDesc::klass_offset_in_bytes());
// test for NULL
if (flags & LIR_OpArrayCopy::src_null_check) {
__ cbz(src, *stub->entry());
}
if (flags & LIR_OpArrayCopy::type_check) {
// We don't know the array types are compatible
if (basic_type != T_OBJECT) {
// Simple test for basic type arrays
! if (UseCompressedClassPointers) {
! __ ldrw(tmp, src_klass_addr);
! __ ldrw(rscratch1, dst_klass_addr);
! __ cmpw(tmp, rscratch1);
- } else {
- __ ldr(tmp, src_klass_addr);
- __ ldr(rscratch1, dst_klass_addr);
- __ cmp(tmp, rscratch1);
- }
__ br(Assembler::NE, *stub->entry());
} else {
// For object arrays, if src is a sub class of dst then we can
// safely do the copy.
Label cont, slow;
if (flags & LIR_OpArrayCopy::type_check) {
// We don't know the array types are compatible
if (basic_type != T_OBJECT) {
// Simple test for basic type arrays
! assert(UseCompressedClassPointers, "Lilliput");
! __ load_nklass(tmp, src);
! __ load_nklass(rscratch1, dst);
! __ cmpw(tmp, rscratch1);
__ br(Assembler::NE, *stub->entry());
} else {
// For object arrays, if src is a sub class of dst then we can
// safely do the copy.
Label cont, slow;
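The rewritten check asserts UseCompressedClassPointers ("Lilliput") and compares the two narrow class words that load_nklass pulls out of the mark words, so the old compressed/uncompressed branch disappears. A hedged sketch of the idea (klass_shift is a placeholder; the locked-object case, which load_nklass has to handle separately, is ignored here):

#include <cstdint>

// Sketch only: with compact headers the narrow class pointer sits in the upper
// bits of the mark word, so the primitive-array compatibility test is a 32-bit
// compare of those fields.
static bool same_narrow_klass(uint64_t src_mark, uint64_t dst_mark,
                              unsigned klass_shift /* placeholder */) {
  uint32_t src_nk = (uint32_t)(src_mark >> klass_shift);
  uint32_t dst_nk = (uint32_t)(dst_mark >> klass_shift);
  return src_nk == dst_nk;   // mismatch -> arraycopy slow-path stub
}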
#define POP(r1, r2) \
ldp(r1, r2, __ post(sp, 2 * wordSize));
__ PUSH(src, dst);
! __ load_klass(src, src);
! __ load_klass(dst, dst);
__ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);
__ PUSH(src, dst);
__ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
#define POP(r1, r2) \
ldp(r1, r2, __ post(sp, 2 * wordSize));
__ PUSH(src, dst);
! __ load_klass(tmp, src);
! __ mov(src, tmp);
+ __ load_klass(tmp, dst);
+ __ mov(dst, tmp);
__ check_klass_subtype_fast_path(src, dst, tmp, &cont, &slow, NULL);
__ PUSH(src, dst);
__ far_call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id)));
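Note that load_klass(src, src) becomes load_klass(tmp, src) followed by mov(src, tmp): presumably, with the class pointer folded into the mark word, load_klass can no longer write its result over the source oop register, since the oop is still needed while the mark word is decoded. This reading is an assumption, not stated in the patch itself.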
__ mov_metadata(tmp, default_type->constant_encoding());
if (UseCompressedClassPointers) {
__ encode_klass_not_null(tmp);
}
if (basic_type != T_OBJECT) {
!
! if (UseCompressedClassPointers) {
- __ ldrw(rscratch1, dst_klass_addr);
- __ cmpw(tmp, rscratch1);
- } else {
- __ ldr(rscratch1, dst_klass_addr);
- __ cmp(tmp, rscratch1);
- }
__ br(Assembler::NE, halt);
! if (UseCompressedClassPointers) {
! __ ldrw(rscratch1, src_klass_addr);
- __ cmpw(tmp, rscratch1);
- } else {
- __ ldr(rscratch1, src_klass_addr);
- __ cmp(tmp, rscratch1);
- }
__ br(Assembler::EQ, known_ok);
} else {
! if (UseCompressedClassPointers) {
! __ ldrw(rscratch1, dst_klass_addr);
- __ cmpw(tmp, rscratch1);
- } else {
- __ ldr(rscratch1, dst_klass_addr);
- __ cmp(tmp, rscratch1);
- }
__ br(Assembler::EQ, known_ok);
__ cmp(src, dst);
__ br(Assembler::EQ, known_ok);
}
__ bind(halt);
__ mov_metadata(tmp, default_type->constant_encoding());
if (UseCompressedClassPointers) {
__ encode_klass_not_null(tmp);
}
+ assert(UseCompressedClassPointers, "Lilliput");
if (basic_type != T_OBJECT) {
! __ load_nklass(rscratch1, dst);
! __ cmpw(tmp, rscratch1);
__ br(Assembler::NE, halt);
! __ load_nklass(rscratch1, src);
! __ cmpw(tmp, rscratch1);
__ br(Assembler::EQ, known_ok);
} else {
! __ load_nklass(rscratch1, dst);
! __ cmpw(tmp, rscratch1);
__ br(Assembler::EQ, known_ok);
__ cmp(src, dst);
__ br(Assembler::EQ, known_ok);
}
__ bind(halt);
}
void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
Register obj = op->obj()->as_pointer_register();
Register result = op->result_opr()->as_pointer_register();
CodeEmitInfo* info = op->info();
if (info != NULL) {
add_debug_info_for_null_check_here(info);
}
! if (UseCompressedClassPointers) {
! __ ldrw(result, Address (obj, oopDesc::klass_offset_in_bytes()));
! __ decode_klass_not_null(result);
! } else {
! __ ldr(result, Address (obj, oopDesc::klass_offset_in_bytes()));
! }
}
void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
ciMethod* method = op->profiled_method();
int bci = op->profiled_bci();
}
void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
Register obj = op->obj()->as_pointer_register();
Register result = op->result_opr()->as_pointer_register();
+ Register tmp = rscratch1;
CodeEmitInfo* info = op->info();
if (info != NULL) {
add_debug_info_for_null_check_here(info);
}
! assert(UseCompressedClassPointers, "expects UseCompressedClassPointers");
!
! // Check if we can take the (common) fast path, if obj is unlocked.
! __ ldr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
! __ eor(tmp, tmp, markWord::unlocked_value);
! __ tst(tmp, markWord::lock_mask_in_place);
+ __ br(Assembler::NE, *op->stub()->entry());
+
+ // Fast-path: shift and decode Klass*.
+ __ mov(result, tmp);
+ __ lsr(result, result, markWord::klass_shift);
+
+ __ bind(*op->stub()->continuation());
+ __ decode_klass_not_null(result);
}
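emit_load_klass now decodes the Klass* straight from the mark word: if the lock bits show the object is unlocked, the narrow class is the mark shifted right by markWord::klass_shift; otherwise the stub entry is taken, and the continuation rejoins with the narrow class in result before decode_klass_not_null. A compilable sketch of that fast-path test, with placeholder constants standing in for the markWord values:

#include <cstdint>

// Placeholder constants standing in for markWord::unlocked_value,
// markWord::lock_mask_in_place and markWord::klass_shift; the real values are
// defined by HotSpot's markWord.
static const uint64_t kUnlockedValue   = 0x1;
static const uint64_t kLockMaskInPlace = 0x3;
static const unsigned kKlassShift      = 32;   // illustrative only

// Mirrors the emitted fast path: only an unlocked mark word can be decoded in
// place; anything else (stack-locked or monitor) must go to the slow-path stub.
static bool try_fast_narrow_klass(uint64_t mark, uint32_t* nklass_out) {
  if (((mark ^ kUnlockedValue) & kLockMaskInPlace) != 0) {
    return false;                                // -> op->stub()->entry()
  }
  *nklass_out = (uint32_t)(mark >> kKlassShift); // -> decode_klass_not_null
  return true;
}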
void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {
ciMethod* method = op->profiled_method();
int bci = op->profiled_bci();
__ addptr(data_addr, DataLayout::counter_increment);
return;
}
}
} else {
! __ load_klass(recv, recv);
Label update_done;
type_profile_helper(mdo, md, data, recv, &update_done);
// Receiver did not match any saved receiver and there is no empty row for it.
// Increment total counter to indicate polymorphic case.
__ addptr(counter_addr, DataLayout::counter_increment);
__ addptr(data_addr, DataLayout::counter_increment);
return;
}
}
} else {
! __ load_klass(rscratch1, recv);
+ __ mov(recv, rscratch1);
Label update_done;
type_profile_helper(mdo, md, data, recv, &update_done);
// Receiver did not match any saved receiver and there is no empty row for it.
// Increment total counter to indicate polymorphic case.
__ addptr(counter_addr, DataLayout::counter_increment);
if (do_update) {
#ifdef ASSERT
if (exact_klass != NULL) {
Label ok;
! __ load_klass(tmp, tmp);
__ mov_metadata(rscratch1, exact_klass->constant_encoding());
__ eor(rscratch1, tmp, rscratch1);
__ cbz(rscratch1, ok);
__ stop("exact klass and actual klass differ");
__ bind(ok);
if (do_update) {
#ifdef ASSERT
if (exact_klass != NULL) {
Label ok;
! __ load_klass(rscratch1, tmp);
+ __ mov(tmp, rscratch1);
__ mov_metadata(rscratch1, exact_klass->constant_encoding());
__ eor(rscratch1, tmp, rscratch1);
__ cbz(rscratch1, ok);
__ stop("exact klass and actual klass differ");
__ bind(ok);
if (!no_conflict) {
if (exact_klass == NULL || TypeEntries::is_type_none(current_klass)) {
if (exact_klass != NULL) {
__ mov_metadata(tmp, exact_klass->constant_encoding());
} else {
! __ load_klass(tmp, tmp);
}
__ ldr(rscratch2, mdo_addr);
__ eor(tmp, tmp, rscratch2);
__ andr(rscratch1, tmp, TypeEntries::type_klass_mask);
if (!no_conflict) {
if (exact_klass == NULL || TypeEntries::is_type_none(current_klass)) {
if (exact_klass != NULL) {
__ mov_metadata(tmp, exact_klass->constant_encoding());
} else {
! __ load_klass(rscratch1, tmp);
+ __ mov(tmp, rscratch1);
}
__ ldr(rscratch2, mdo_addr);
__ eor(tmp, tmp, rscratch2);
__ andr(rscratch1, tmp, TypeEntries::type_klass_mask);
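The eor/andr pair above implements the type-profile match test: a profile cell stores a Klass* with a couple of low status bits folded in, so XOR-ing the cell with the observed klass and masking the status bits away yields zero exactly when the recorded klass matches. A small sketch (the mask is symbolic; TypeEntries defines the real encoding):

#include <cstdint>

// Sketch of the profile-cell comparison; not HotSpot code.
static bool profiled_klass_matches(uint64_t cell, uint64_t observed_klass,
                                   uint64_t type_klass_mask) {
  return ((cell ^ observed_klass) & type_klass_mask) == 0;
}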