src/hotspot/cpu/x86/templateTable_x86.cpp
__ jump_from_interpreted(method, rax);
__ bind(notFinal);
// get receiver klass
- __ null_check(recv, oopDesc::klass_offset_in_bytes());
Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
- __ load_klass(rax, recv, tmp_load_klass);
+ __ load_klass(rax, recv, tmp_load_klass, true);
// profile this call
__ profile_virtual_call(rax, rlocals, rdx);
// get target Method* & entry point
__ lookup_virtual_method(rax, index, method);
__ movl(rlocals, rdx);
__ andl(rlocals, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
__ jcc(Assembler::zero, notVFinal);
// Get receiver klass into rlocals - also a null check
- __ null_check(rcx, oopDesc::klass_offset_in_bytes());
Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
- __ load_klass(rlocals, rcx, tmp_load_klass);
+ __ load_klass(rlocals, rcx, tmp_load_klass, true);
Label subtype;
__ check_klass_subtype(rlocals, rax, rbcp, subtype);
// If we get here the typecheck failed
recvKlass = rdx;
// no return from above
__ bind(notVFinal);
// Get receiver klass into rdx - also a null check
__ restore_locals(); // restore r14
- __ null_check(rcx, oopDesc::klass_offset_in_bytes());
- __ load_klass(rdx, rcx, tmp_load_klass);
+ __ load_klass(rdx, rcx, tmp_load_klass, true);
Label no_such_method;
// Preserve method for throw_AbstractMethodErrorVerbose.
__ mov(rcx, rbx);
// there is an initialize need. Otherwise, skip and go to the slow path.
if (UseTLAB || allow_shared_alloc) {
// The object is initialized before the header. If the object size is
// zero, go directly to the header initialization.
__ bind(initialize_object);
- __ decrement(rdx, sizeof(oopDesc));
+ int header_size = align_up(oopDesc::base_offset_in_bytes(), BytesPerLong);
+ __ decrement(rdx, header_size);
__ jcc(Assembler::zero, initialize_header);
// Initialize topmost object field, divide rdx by 8, check if odd and
// test if zero.
__ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
#endif
// initialize remaining object fields: rdx was a multiple of 8
{ Label loop;
__ bind(loop);
- __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
- NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
+ __ movptr(Address(rax, rdx, Address::times_8, header_size - 1*oopSize), rcx);
+ NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, header_size - 2*oopSize), rcx));
__ decrement(rdx);
__ jcc(Assembler::notZero, loop);
}
// initialize object header only.
__ bind(initialize_header);
- if (UseBiasedLocking) {
+ if (UseBiasedLocking || UseCompactObjectHeaders) {
__ pop(rcx); // get saved klass back in the register.
__ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
__ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
} else {
__ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
(intptr_t)markWord::prototype().value()); // header
__ pop(rcx); // get saved klass back in the register.
}
+ if (!UseCompactObjectHeaders) {
#ifdef _LP64
- __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
- __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
+ __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
+ __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
#endif
- Register tmp_store_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
- __ store_klass(rax, rcx, tmp_store_klass); // klass
+ __ store_klass(rax, rcx, rscratch1); // klass
+ }
{
SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
// Trigger dtrace event for fastpath
__ push(atos);