src/hotspot/cpu/x86/macroAssembler_x86.cpp
void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
  BLOCK_COMMENT("encode_and_move_klass_not_null {");
  assert_different_registers(src, dst);
  if (CompressedKlassPointers::base() != nullptr) {
!   movptr(dst, -(intptr_t)CompressedKlassPointers::base());
    addq(dst, src);
  } else {
    movptr(dst, src);
  }
  if (CompressedKlassPointers::shift() != 0) {
void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src) {
  BLOCK_COMMENT("encode_and_move_klass_not_null {");
  assert_different_registers(src, dst);
  if (CompressedKlassPointers::base() != nullptr) {
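+   // When dumping AOT code, do not bake the current narrow-klass base into the
+   // instruction stream as an immediate; load it through an ExternalAddress so
+   // the AOT code cache can relocate the reference when the cached code is loaded.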
!   if (AOTCodeCache::is_on_for_dump()) {
+     movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
+     negq(dst);
+   } else {
+     movptr(dst, -(intptr_t)CompressedKlassPointers::base());
+   }
    addq(dst, src);
  } else {
    movptr(dst, src);
  }
  if (CompressedKlassPointers::shift() != 0) {
    // a pointer that needs nothing but a register rename.
    movl(dst, src);
  } else {
    if (CompressedKlassPointers::shift() <= Address::times_8) {
      if (CompressedKlassPointers::base() != nullptr) {
!       movptr(dst, (intptr_t)CompressedKlassPointers::base());
      } else {
        xorq(dst, dst);
      }
      if (CompressedKlassPointers::shift() != 0) {
        assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
    // a pointer that needs nothing but a register rename.
    movl(dst, src);
  } else {
    if (CompressedKlassPointers::shift() <= Address::times_8) {
      if (CompressedKlassPointers::base() != nullptr) {
!       if (AOTCodeCache::is_on_for_dump()) {
+         movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
+       } else {
+         movptr(dst, (intptr_t)CompressedKlassPointers::base());
+       }
      } else {
        xorq(dst, dst);
      }
      if (CompressedKlassPointers::shift() != 0) {
        assert(CompressedKlassPointers::shift() == Address::times_8, "klass not aligned on 64bits?");
      } else {
        addq(dst, src);
      }
    } else {
      if (CompressedKlassPointers::base() != nullptr) {
!       const intptr_t base_right_shifted =
!         (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
!       movptr(dst, base_right_shifted);
      } else {
        xorq(dst, dst);
      }
      addq(dst, src);
      shlq(dst, CompressedKlassPointers::shift());
      } else {
        addq(dst, src);
      }
    } else {
      if (CompressedKlassPointers::base() != nullptr) {
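+       // For an AOT dump, load the base from its VM location and apply the shift
+       // here, so the value matches the pre-shifted immediate of the non-AOT path.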
!       if (AOTCodeCache::is_on_for_dump()) {
!         movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
!         shrq(dst, CompressedKlassPointers::shift());
+       } else {
+         const intptr_t base_right_shifted =
+           (intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
+         movptr(dst, base_right_shifted);
+       }
      } else {
        xorq(dst, dst);
      }
      addq(dst, src);
      shlq(dst, CompressedKlassPointers::shift());
  movq(rcx, Address(rsp, 14 * wordSize));
  movq(rax, Address(rsp, 15 * wordSize));
  addq(rsp, 16 * wordSize);
}
+ void MacroAssembler::load_aotrc_address(Register reg, address a) {
+ #if INCLUDE_CDS
+   assert(AOTRuntimeConstants::contains(a), "address out of range for data area");
+   if (AOTCodeCache::is_on_for_dump()) {
+     // all aotrc field addresses should be registered in the AOTCodeCache address table
+     lea(reg, ExternalAddress(a));
+   } else {
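+     // Not dumping AOT code: the address is a constant for this VM run, so it
+     // can simply be materialized as a 64-bit immediate.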
+     mov64(reg, (uint64_t)a);
+   }
+ #else
+   ShouldNotReachHere();
+ #endif
+ }
+
void MacroAssembler::setcc(Assembler::Condition comparison, Register dst) {
  if (VM_Version::supports_apx_f()) {
    esetzucc(comparison, dst);
  } else {
    setb(comparison, dst);