src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
#include "interp_masm_aarch64.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "logging/log.hpp"
#include "oops/arrayOop.hpp"
+ #include "oops/constMethodFlags.hpp"
#include "oops/markWord.hpp"
#include "oops/method.hpp"
#include "oops/methodData.hpp"
+ #include "oops/inlineKlass.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
ldr(mcs, Address(method, Method::method_counters_offset()));
cbz(mcs, skip); // No MethodCounters allocated, OutOfMemory
bind(has_counters);
}
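+ // Allocate an instance of 'klass' via the MacroAssembler fast path and, if
+ // DTrace allocation probes are enabled, fire the object-alloc probe for the
+ // newly allocated object.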
+ void InterpreterMacroAssembler::allocate_instance(Register klass, Register new_obj,
+ Register t1, Register t2,
+ bool clear_fields, Label& alloc_failed) {
+ MacroAssembler::allocate_instance(klass, new_obj, t1, t2, clear_fields, alloc_failed);
+ {
+ SkipIfEqual skip_if(this, &DTraceAllocProbes, 0);
+ // Trigger dtrace event for fastpath
+ push(atos);
+ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), new_obj);
+ pop(atos);
+ }
+ }
+
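+ // Read a flat field described by the ResolvedFieldEntry in 'entry' out of the
+ // holder object passed in 'obj', buffering it into a freshly allocated
+ // instance of the field's value class; the buffered oop is returned in 'obj'.
+ // An empty value class yields its pre-allocated default instance instead, and
+ // a failed fast-path allocation falls back to
+ // InterpreterRuntime::read_flat_field.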
+ void InterpreterMacroAssembler::read_flat_field(Register entry,
+ Register field_index, Register field_offset,
+ Register temp, Register obj) {
+ Label alloc_failed, empty_value, done;
+ const Register src = field_offset;
+ const Register alloc_temp = r10;
+ const Register dst_temp = field_index;
+ const Register layout_info = temp;
+ assert_different_registers(obj, entry, field_index, field_offset, temp, alloc_temp);
+
+ // Grab the inline field klass
+ ldr(rscratch1, Address(entry, in_bytes(ResolvedFieldEntry::field_holder_offset())));
+ inline_layout_info(rscratch1, field_index, layout_info);
+
+ const Register field_klass = dst_temp;
+ ldr(field_klass, Address(layout_info, in_bytes(InlineLayoutInfo::klass_offset())));
+
+ // check for empty value klass
+ test_klass_is_empty_inline_type(field_klass, rscratch1, empty_value);
+
+ // allocate buffer
+ push(obj); // save holder
+ allocate_instance(field_klass, obj, alloc_temp, rscratch2, false, alloc_failed);
+
+ // Have an oop instance buffer, copy into it
+ data_for_oop(obj, dst_temp, field_klass); // danger, uses rscratch1
+ pop(alloc_temp); // restore holder
+ lea(src, Address(alloc_temp, field_offset));
+ // call_VM_leaf clobbers a few regs; save/restore the new obj around the copy
+ push(obj);
+ flat_field_copy(IS_DEST_UNINITIALIZED, src, dst_temp, layout_info);
+ pop(obj);
+ b(done);
+
+ bind(empty_value);
+ get_empty_inline_type_oop(field_klass, alloc_temp, obj);
+ b(done);
+
+ bind(alloc_failed);
+ pop(obj);
+ call_VM(obj, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flat_field),
+ obj, entry);
+
+ bind(done);
+ membar(Assembler::StoreStore);
+ }
+
// Load object from cpool->resolved_references(index)
void InterpreterMacroAssembler::load_resolved_reference_at_index(
Register result, Register index, Register tmp) {
assert_different_registers(result, index);
// Rsub_klass: subklass
//
// Kills:
// r2, r5
void InterpreterMacroAssembler::gen_subtype_check(Register Rsub_klass,
- Label& ok_is_subtype) {
+ Label& ok_is_subtype,
+ bool profile) {
assert(Rsub_klass != r0, "r0 holds superklass");
assert(Rsub_klass != r2, "r2 holds 2ndary super array length");
assert(Rsub_klass != r5, "r5 holds 2ndary super array scan ptr");
// Profile the not-null value's klass.
- profile_typecheck(r2, Rsub_klass, r5); // blows r2, reloads r5
+ if (profile) {
+ profile_typecheck(r2, Rsub_klass, r5); // blows r2, reloads r5
+ }
// Do the check.
check_klass_subtype(Rsub_klass, r0, r2, ok_is_subtype); // blows r2
}
// remove activation
// get sender esp
ldr(rscratch2,
Address(rfp, frame::interpreter_frame_sender_sp_offset * wordSize));
+
if (StackReservedPages > 0) {
// testing if reserved zone needs to be re-enabled
Label no_reserved_zone_enabling;
// check if already enabled - if so no re-enabling needed
should_not_reach_here();
bind(no_reserved_zone_enabling);
}
+ if (state == atos && InlineTypeReturnedAsFields) {
+ // Check if we are returning a non-null inline type and load its fields into registers
+ Label skip;
+ test_oop_is_not_inline_type(r0, rscratch2, skip);
+
+ // Load fields from a buffered value with an inline class specific handler
+ load_klass(rscratch1 /*dst*/, r0 /*src*/);
+ ldr(rscratch1, Address(rscratch1, InstanceKlass::adr_inlineklass_fixed_block_offset()));
+ ldr(rscratch1, Address(rscratch1, InlineKlass::unpack_handler_offset()));
+ // Unpack handler can be null if inline type is not scalarizable in returns
+ cbz(rscratch1, skip);
+
+ blr(rscratch1);
+ #ifdef ASSERT
+ // TODO 8284443 Enable
+ if (StressCallingConvention && false) {
+ Label skip_stress;
+ ldr(rscratch1, Address(rfp, frame::interpreter_frame_method_offset * wordSize));
+ ldrw(rscratch1, Address(rscratch1, Method::flags_offset()));
+ tstw(rscratch1, MethodFlags::has_scalarized_return_flag());
+ br(Assembler::EQ, skip_stress);
+ load_klass(r0, r0);
+ orr(r0, r0, 1);
+ bind(skip_stress);
+ }
+ #endif
+ bind(skip);
+ // Check above kills sender esp in rscratch2. Reload it.
+ ldr(rscratch2, Address(rfp, frame::interpreter_frame_sender_sp_offset * wordSize));
+ }
+
// restore sender esp
mov(esp, rscratch2);
// remove frame anchor
leave();
// If we're returning to interpreted code we will shortly be
b(count);
} else if (LockingMode == LM_LEGACY) {
// Load (object->mark() | 1) into swap_reg
ldr(rscratch1, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
orr(swap_reg, rscratch1, 1);
+ if (EnableValhalla) {
+ // Mask inline_type bit such that we go to the slow path if object is an inline type
+ andr(swap_reg, swap_reg, ~((int) markWord::inline_type_bit_in_place));
+ }
// Save (object->mark() | 1) into BasicLock's displaced header
str(swap_reg, Address(lock_reg, mark_offset));
assert(lock_offset == 0,
bind(profile_continue);
}
}
- void InterpreterMacroAssembler::profile_not_taken_branch(Register mdp) {
+ void InterpreterMacroAssembler::profile_not_taken_branch(Register mdp, bool acmp) {
if (ProfileInterpreter) {
Label profile_continue;
// If no method data exists, go to profile_continue.
test_method_data_pointer(mdp, profile_continue);
// The branch was not taken. Increment the not taken count.
increment_mdp_data_at(mdp, in_bytes(BranchData::not_taken_offset()));
// The method data pointer needs to be updated to correspond to
// the next bytecode
- update_mdp_by_constant(mdp, in_bytes(BranchData::branch_data_size()));
+ update_mdp_by_constant(mdp, acmp ? in_bytes(ACmpData::acmp_data_size()) : in_bytes(BranchData::branch_data_size()));
bind(profile_continue);
}
}
bind(profile_continue);
}
}
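+ // Profile the array operand of an array access: record the array's klass in
+ // the ArrayData cell and set the MDO flags when the array is flat and/or
+ // null-free.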
+ template <class ArrayData> void InterpreterMacroAssembler::profile_array_type(Register mdp,
+ Register array,
+ Register tmp) {
+ if (ProfileInterpreter) {
+ Label profile_continue;
+
+ // If no method data exists, go to profile_continue.
+ test_method_data_pointer(mdp, profile_continue);
+
+ mov(tmp, array);
+ profile_obj_type(tmp, Address(mdp, in_bytes(ArrayData::array_offset())));
+
+ Label not_flat;
+ test_non_flat_array_oop(array, tmp, not_flat);
+
+ set_mdp_flag_at(mdp, ArrayData::flat_array_byte_constant());
+
+ bind(not_flat);
+
+ Label not_null_free;
+ test_non_null_free_array_oop(array, tmp, not_null_free);
+
+ set_mdp_flag_at(mdp, ArrayData::null_free_array_byte_constant());
+
+ bind(not_null_free);
+
+ bind(profile_continue);
+ }
+ }
+
+ template void InterpreterMacroAssembler::profile_array_type<ArrayLoadData>(Register mdp,
+ Register array,
+ Register tmp);
+ template void InterpreterMacroAssembler::profile_array_type<ArrayStoreData>(Register mdp,
+ Register array,
+ Register tmp);
+
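+ // Profile the element being stored into an array: set the null_seen flag for
+ // a null element, otherwise record the element's klass, then advance the
+ // method data pointer past the ArrayStoreData entry.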
+ void InterpreterMacroAssembler::profile_multiple_element_types(Register mdp, Register element, Register tmp, const Register tmp2) {
+ if (ProfileInterpreter) {
+ Label profile_continue;
+
+ // If no method data exists, go to profile_continue.
+ test_method_data_pointer(mdp, profile_continue);
+
+ Label done, update;
+ cbnz(element, update);
+ set_mdp_flag_at(mdp, BitData::null_seen_byte_constant());
+ b(done);
+
+ bind(update);
+ load_klass(tmp, element);
+
+ // Record the object type.
+ record_klass_in_profile(tmp, mdp, tmp2);
+
+ bind(done);
+
+ // The method data pointer needs to be updated.
+ update_mdp_by_constant(mdp, in_bytes(ArrayStoreData::array_store_data_size()));
+
+ bind(profile_continue);
+ }
+ }
+
+
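+ // Profile the element type produced by an array load and advance the method
+ // data pointer past the ArrayLoadData entry.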
+ void InterpreterMacroAssembler::profile_element_type(Register mdp,
+ Register element,
+ Register tmp) {
+ if (ProfileInterpreter) {
+ Label profile_continue;
+
+ // If no method data exists, go to profile_continue.
+ test_method_data_pointer(mdp, profile_continue);
+
+ mov(tmp, element);
+ profile_obj_type(tmp, Address(mdp, in_bytes(ArrayLoadData::element_offset())));
+
+ // The method data pointer needs to be updated.
+ update_mdp_by_constant(mdp, in_bytes(ArrayLoadData::array_load_data_size()));
+
+ bind(profile_continue);
+ }
+ }
+
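+ // Profile both operands of an acmp: record each operand's klass in the
+ // ACmpData cells and set the corresponding flag when an operand is an
+ // inline type.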
+ void InterpreterMacroAssembler::profile_acmp(Register mdp,
+ Register left,
+ Register right,
+ Register tmp) {
+ if (ProfileInterpreter) {
+ Label profile_continue;
+
+ // If no method data exists, go to profile_continue.
+ test_method_data_pointer(mdp, profile_continue);
+
+ mov(tmp, left);
+ profile_obj_type(tmp, Address(mdp, in_bytes(ACmpData::left_offset())));
+
+ Label left_not_inline_type;
+ test_oop_is_not_inline_type(left, tmp, left_not_inline_type);
+ set_mdp_flag_at(mdp, ACmpData::left_inline_type_byte_constant());
+ bind(left_not_inline_type);
+
+ mov(tmp, right);
+ profile_obj_type(tmp, Address(mdp, in_bytes(ACmpData::right_offset())));
+
+ Label right_not_inline_type;
+ test_oop_is_not_inline_type(right, tmp, right_not_inline_type);
+ set_mdp_flag_at(mdp, ACmpData::right_inline_type_byte_constant());
+ bind(right_not_inline_type);
+
+ bind(profile_continue);
+ }
+ }
+
void InterpreterMacroAssembler::_interp_verify_oop(Register reg, TosState state, const char* file, int line) {
if (state == atos) {
MacroAssembler::_verify_oop_checked(reg, "broken oop", file, line);
}
}
if (MethodData::profile_return()) {
// We're right after the type profile for the last
// argument. tmp is the number of cells left in the
// CallTypeData/VirtualCallTypeData to reach its end. Non null
// if there's a return to profile.
- assert(ReturnTypeEntry::static_cell_count() < TypeStackSlotEntries::per_arg_count(), "can't move past ret type");
+ assert(SingleTypeEntry::static_cell_count() < TypeStackSlotEntries::per_arg_count(), "can't move past ret type");
add(mdp, mdp, tmp, LSL, exact_log2(DataLayout::cell_size));
}
str(mdp, Address(rfp, frame::interpreter_frame_mdp_offset * wordSize));
} else {
assert(MethodData::profile_return(), "either profile call args or call ret");
br(Assembler::NE, profile_continue);
bind(do_profile);
}
- Address mdo_ret_addr(mdp, -in_bytes(ReturnTypeEntry::size()));
+ Address mdo_ret_addr(mdp, -in_bytes(SingleTypeEntry::size()));
mov(tmp, ret);
profile_obj_type(tmp, mdo_ret_addr);
bind(profile_continue);
}
< prev index next >