src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
#include "precompiled.hpp"
#include "asm/assembler.hpp"
#include "asm/assembler.inline.hpp"
#include "ci/ciEnv.hpp"
+ #include "ci/ciUtilities.hpp"
#include "code/compiledIC.hpp"
+ #if INCLUDE_CDS
+ #include "code/SCCache.hpp"
+ #endif
#include "compiler/compileTask.hpp"
#include "compiler/disassembler.hpp"
#include "compiler/oopMap.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/barrierSetAssembler.hpp"
dest = (dest & 0xffffffffULL) | (uintptr_t(insn_addr) & 0xffff00000000ULL);
target = address(dest);
return 2;
}
virtual int immediate(address insn_addr, address &target) {
+ // Metadata pointers are either narrow (32 bits) or wide (48 bits).
+ // We encode narrow ones by setting the upper 16 bits in the first
+ // instruction.
+ if (Instruction_aarch64::extract(_insn, 31, 21) == 0b11010010101) {
+ assert(nativeInstruction_at(insn_addr+4)->is_movk(), "wrong insns in patch");
+ narrowKlass nk = CompressedKlassPointers::encode((Klass*)target);
+ Instruction_aarch64::patch(insn_addr, 20, 5, nk >> 16);
+ Instruction_aarch64::patch(insn_addr+4, 20, 5, nk & 0xffff);
+ return 2;
+ }
assert(Instruction_aarch64::extract(_insn, 31, 21) == 0b11010010100, "must be");
uint64_t dest = (uint64_t)target;
// Move wide constant
assert(nativeInstruction_at(insn_addr+4)->is_movk(), "wrong insns in patch");
assert(nativeInstruction_at(insn_addr+8)->is_movk(), "wrong insns in patch");
return 2;
}
}
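
For context, here is a standalone sketch (plain C++, not HotSpot code) of the bit manipulation the narrow-klass branch performs: the 32-bit encoded klass is split across the 16-bit immediate fields, bits 20..5, of two consecutive instructions. The opcode constants and the sample value are illustrative assumptions.

#include <cassert>
#include <cstdint>

// Insert a 16-bit immediate into bits 20..5 of an instruction word.
static uint32_t patch_imm16(uint32_t insn, uint32_t imm16) {
  const uint32_t mask = 0xffffu << 5;
  return (insn & ~mask) | ((imm16 & 0xffffu) << 5);
}

// Extract the 16-bit immediate from bits 20..5.
static uint32_t extract_imm16(uint32_t insn) {
  return (insn >> 5) & 0xffffu;
}

int main() {
  const uint32_t movz_lsl16 = 0b11010010101u << 21; // movz Xd, #imm, lsl #16
  const uint32_t movk       = 0b11110010100u << 21; // movk Xd, #imm
  uint32_t nk = 0x00c0ffee;                         // sample narrow klass value
  uint32_t insn0 = patch_imm16(movz_lsl16, nk >> 16);  // upper 16 bits first
  uint32_t insn1 = patch_imm16(movk, nk & 0xffff);     // lower 16 bits second
  // Decoding reverses the split, exactly as the immediate() below does.
  assert(((extract_imm16(insn0) << 16) + extract_imm16(insn1)) == nk);
  return 0;
}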
virtual int immediate(address insn_addr, address &target) {
uint32_t *insns = (uint32_t *)insn_addr;
+ // Metadata pointers are either narrow (32 bits) or wide (48 bits).
+ // We encode narrow ones by setting the upper 16 bits in the first
+ // instruction.
+ if (Instruction_aarch64::extract(_insn, 31, 21) == 0b11010010101) {
+ assert(nativeInstruction_at(insn_addr+4)->is_movk(), "wrong insns in patch");
+ narrowKlass nk = (narrowKlass)((uint32_t(Instruction_aarch64::extract(_insn, 20, 5)) << 16)
+ + uint32_t(Instruction_aarch64::extract(insns[1], 20, 5)));
+ target = (address)CompressedKlassPointers::decode(nk);
+ return 2;
+ }
assert(Instruction_aarch64::extract(_insn, 31, 21) == 0b11010010100, "must be");
// Move wide constant: movz, movk, movk. See movptr().
assert(nativeInstruction_at(insns+1)->is_movk(), "wrong insns in patch");
assert(nativeInstruction_at(insns+2)->is_movk(), "wrong insns in patch");
target = address(uint64_t(Instruction_aarch64::extract(_insn, 20, 5))
set_last_Java_frame(last_java_sp, last_java_fp, pc() /* Patched later */, scratch);
}
}
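
The wide case follows the movptr() shape referenced above: three 16-bit chunks carried by movz/movk/movk, enough for a 48-bit address. A minimal sketch of that split and its reassembly (plain C++; the sample address is made up):

#include <cassert>
#include <cstdint>

int main() {
  uint64_t addr = 0x00007f3abcdef010ull;  // sample 48-bit address
  assert((addr >> 48) == 0);              // movptr() assumes it fits in 48 bits
  // Split as movz/movk/movk would carry it, 16 bits per instruction.
  uint16_t lo  = (uint16_t)(addr);
  uint16_t mid = (uint16_t)(addr >> 16);
  uint16_t hi  = (uint16_t)(addr >> 32);
  // The decoder above rebuilds the target the same way.
  uint64_t rebuilt = (uint64_t)lo + ((uint64_t)mid << 16) + ((uint64_t)hi << 32);
  assert(rebuilt == addr);
  return 0;
}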
static inline bool target_needs_far_branch(address addr) {
+ if (SCCache::is_on_for_write()) {
+ return true;
+ }
// codecache size <= 128M
if (!MacroAssembler::far_branches()) {
return false;
}
// codecache size > 240M
call_VM_base(oop_result, noreg, noreg, entry_point, number_of_arguments, check_exceptions);
}
// Check whether the entry target is always reachable from any branch.
static bool is_always_within_branch_range(Address entry) {
+ if (SCCache::is_on_for_write()) {
+ return false;
+ }
const address target = entry.target();
if (!CodeCache::contains(target)) {
// We always use trampolines for callees outside CodeCache.
assert(entry.rspec().type() == relocInfo::runtime_call_type, "non-runtime call of an external target");
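
Both helpers hinge on the reach of an AArch64 direct branch: b/bl carries a signed 26-bit word offset, so a single instruction reaches +/-128 MiB from the branch site. While the startup code cache is being written, the final load address of the code is unknown, so these helpers conservatively assume nothing is in range and route calls through far branches and trampolines. A sketch of the range test (plain C++):

#include <cstdint>

// b/bl hold a signed 26-bit immediate counted in 4-byte words,
// giving a reach of +/-2^27 bytes (128 MiB) from the branch site.
static bool reachable_by_direct_branch(uint64_t from, uint64_t to) {
  int64_t offset = (int64_t)(to - from);
  return offset >= -(INT64_C(1) << 27) && offset < (INT64_C(1) << 27);
}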
bind(done);
}
void MacroAssembler::stop(const char* msg) {
BLOCK_COMMENT(msg);
dcps1(0xdeae);
! emit_int64((uintptr_t)msg);
}
void MacroAssembler::unimplemented(const char* what) {
const char* buf = nullptr;
{
bind(done);
}
void MacroAssembler::stop(const char* msg) {
BLOCK_COMMENT(msg);
+ // Load msg into c_rarg0 (r0) so the signal handler can access it.
+ // Using ExternalAddress makes the reference relocatable, so the code
+ // can be saved to and restored from the startup code cache.
+ lea(c_rarg0, ExternalAddress((address) msg));
dcps1(0xdeae);
! SCCache::add_C_string(msg);
}
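
The change moves the message out of the instruction stream: the old version emitted the raw 64-bit msg pointer right after the dcps1 trap, baking a position-dependent value into code that the startup cache would otherwise have to store and relocate. A conceptual sketch of the two handler-side read patterns (assumed layout, not the actual HotSpot handler):

#include <cstdint>
#include <cstring>

// Old scheme: the handler read a literal pointer placed after the trap.
static const char* msg_after_trap(const uint32_t* trap_pc) {
  const char* msg;
  memcpy(&msg, trap_pc + 1, sizeof msg);  // the 8 bytes following dcps1
  return msg;
}

// New scheme: the pointer arrives in r0 (c_rarg0) and is read from the
// saved register state, so nothing position-dependent sits in the code.
static const char* msg_from_context(const uint64_t* saved_regs) {
  return (const char*)saved_regs[0];      // x0 in the signal context
}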
void MacroAssembler::unimplemented(const char* what) {
const char* buf = nullptr;
{
}
void MacroAssembler::reinit_heapbase()
{
if (UseCompressedOops) {
! if (Universe::is_fully_initialized()) {
mov(rheapbase, CompressedOops::base());
} else {
lea(rheapbase, ExternalAddress(CompressedOops::base_addr()));
ldr(rheapbase, Address(rheapbase));
}
}
void MacroAssembler::reinit_heapbase()
{
if (UseCompressedOops) {
! if (Universe::is_fully_initialized() && !SCCache::is_on_for_write()) {
mov(rheapbase, CompressedOops::base());
} else {
lea(rheapbase, ExternalAddress(CompressedOops::base_addr()));
ldr(rheapbase, Address(rheapbase));
}
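
rheapbase caches the value needed for compressed-oop decoding: a full oop is reconstructed as base + (narrow << shift). While dumping cached code, the base must come from a relocatable load of CompressedOops::base_addr() rather than an immediate, since the assembling VM's base need not match the loading VM's. A worked sketch of the decode (base and shift values assumed):

#include <cstdint>

static uint64_t heap_base = 0x800000000ull;  // what rheapbase would hold

// Decode a 32-bit narrow oop into a full address; a 3-bit shift
// (8-byte object alignment) is one common configuration.
static inline uint64_t decode_oop(uint32_t narrow, unsigned shift = 3) {
  return heap_base + ((uint64_t)narrow << shift);
}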
CardTable::CardValue* byte_map_base =
((CardTableBarrierSet*)(BarrierSet::barrier_set()))->card_table()->byte_map_base();
// Strictly speaking the byte_map_base isn't an address at all, and it might
// even be negative. It is thus materialised as a constant.
! mov(reg, (uint64_t)byte_map_base);
}
void MacroAssembler::build_frame(int framesize) {
assert(framesize >= 2 * wordSize, "framesize must include space for FP/LR");
assert(framesize % (2*wordSize) == 0, "must preserve 2*wordSize alignment");
CardTable::CardValue* byte_map_base =
((CardTableBarrierSet*)(BarrierSet::barrier_set()))->card_table()->byte_map_base();
// Strictly speaking the byte_map_base isn't an address at all, and it might
// even be negative. It is thus materialised as a constant.
! #if INCLUDE_CDS
+ if (SCCache::is_on_for_write()) {
+ // SCA needs relocation info for card table base
+ lea(reg, ExternalAddress(reinterpret_cast<address>(byte_map_base)));
+ } else {
+ #endif
+ mov(reg, (uint64_t)byte_map_base);
+ #if INCLUDE_CDS
+ }
+ #endif
+ }
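
This is why byte_map_base "isn't an address at all": it is pre-biased so the barrier can index a card slot directly as byte_map_base + (addr >> card_shift). Under SCCache the base is loaded via ExternalAddress so the cached code carries a relocation instead of a baked-in bias. A sketch of the barrier arithmetic (512-byte cards assumed, HotSpot's usual default):

#include <cstdint>

static uint8_t* byte_map_base;  // materialized by the code above

// Post-write barrier: dirty the card covering the updated field.
static void post_write_barrier(void* field_addr) {
  const unsigned card_shift = 9;  // log2(512-byte cards)
  byte_map_base[(uintptr_t)field_addr >> card_shift] = 0;  // 0 == dirty
}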
+
+ void MacroAssembler::load_aotrc_address(Register reg, address a) {
+ #if INCLUDE_CDS
+ assert(AOTRuntimeConstants::contains(a), "address out of range for data area");
+ if (SCCache::is_on_for_write()) {
+ // all aotrc field addresses should be registered in the SCC address table
+ lea(reg, ExternalAddress(a));
+ } else {
+ mov(reg, (uint64_t)a);
+ }
+ #else
+ ShouldNotReachHere();
+ #endif
}
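
The assert presumes the runtime constants live in one contiguous data area, so the containment test reduces to a range check. A minimal stand-in (names and layout assumed, not the actual AOTRuntimeConstants API):

#include <cstdint>

struct DataArea {  // assumed stand-in for the aotrc data area
  const uint8_t* begin;
  const uint8_t* end;
  bool contains(const void* a) const {
    const uint8_t* p = (const uint8_t*)a;
    return begin <= p && p < end;
  }
};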
void MacroAssembler::build_frame(int framesize) {
assert(framesize >= 2 * wordSize, "framesize must include space for FP/LR");
assert(framesize % (2*wordSize) == 0, "must preserve 2*wordSize alignment");