
src/hotspot/cpu/x86/interp_masm_x86.cpp


*** 29,38 ****
--- 29,39 ----
  #include "logging/log.hpp"
  #include "oops/arrayOop.hpp"
  #include "oops/markOop.hpp"
  #include "oops/methodData.hpp"
  #include "oops/method.hpp"
+ #include "oops/valueKlass.hpp"
  #include "prims/jvmtiExport.hpp"
  #include "prims/jvmtiThreadState.hpp"
  #include "runtime/basicLock.hpp"
  #include "runtime/biasedLocking.hpp"
  #include "runtime/frame.inline.hpp"
*** 991,1001 ****
    const Address do_not_unlock_if_synchronized(rthread,
      in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
    movbool(rbx, do_not_unlock_if_synchronized);
    movbool(do_not_unlock_if_synchronized, false); // reset the flag
  
!   // get method access flags
    movptr(rcx, Address(rbp, frame::interpreter_frame_method_offset * wordSize));
    movl(rcx, Address(rcx, Method::access_flags_offset()));
    testl(rcx, JVM_ACC_SYNCHRONIZED);
    jcc(Assembler::zero, unlocked);
--- 992,1002 ----
    const Address do_not_unlock_if_synchronized(rthread,
      in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
    movbool(rbx, do_not_unlock_if_synchronized);
    movbool(do_not_unlock_if_synchronized, false); // reset the flag
  
!   // get method access flags
    movptr(rcx, Address(rbp, frame::interpreter_frame_method_offset * wordSize));
    movl(rcx, Address(rcx, Method::access_flags_offset()));
    testl(rcx, JVM_ACC_SYNCHRONIZED);
    jcc(Assembler::zero, unlocked);
*** 1115,1129 ****
      notify_method_exit(state, NotifyJVMTI);     // preserve TOSCA
    } else {
      notify_method_exit(state, SkipNotifyJVMTI); // preserve TOSCA
    }
  
-   // remove activation
-   // get sender sp
-   movptr(rbx,
-          Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));
    if (StackReservedPages > 0) {
      // testing if reserved zone needs to be re-enabled
      Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
      Label no_reserved_zone_enabling;
  
      NOT_LP64(get_thread(rthread);)
--- 1116,1128 ----
      notify_method_exit(state, NotifyJVMTI);     // preserve TOSCA
    } else {
      notify_method_exit(state, SkipNotifyJVMTI); // preserve TOSCA
    }
  
    if (StackReservedPages > 0) {
+     movptr(rbx,
+            Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));
      // testing if reserved zone needs to be re-enabled
      Register rthread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
      Label no_reserved_zone_enabling;
  
      NOT_LP64(get_thread(rthread);)
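Note on this hunk: the old code loaded the sender sp into rbx unconditionally before the reserved-zone check; the new code loads it inside the StackReservedPages block, where the comparison against the thread's reserved-stack activation consumes it, and loads it again after the block (next hunk), since, as the diff's own comment says, the value-type unpack call introduced there kills rbx. A minimal C++ sketch of the decision the block makes, assuming reserved_stack_activation() is the accessor behind the reserved_stack_activation_offset() the generated code uses, and with a hypothetical helper name:

    #include "runtime/thread.hpp"

    // Sketch only: true when the interpreter epilogue must re-enable the
    // reserved stack zone and throw the delayed StackOverflowError.
    static bool needs_reserved_zone_reenable(JavaThread* thread, address sender_sp) {
      // The generated code compares the sender sp (rbx) against the thread's
      // reserved-stack activation; only frames above it re-enable the zone.
      return sender_sp > thread->reserved_stack_activation();
    }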
*** 1140,1149 ****
--- 1139,1181 ----
                     InterpreterRuntime::throw_delayed_StackOverflowError));
      should_not_reach_here();
  
      bind(no_reserved_zone_enabling);
    }
+ 
+   // remove activation
+   // get sender sp
+   movptr(rbx,
+          Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));
+ 
+   if (state == atos && ValueTypeReturnedAsFields) {
+     Label skip;
+     // Test if the return type is a value type
+     movptr(rdi, Address(rbp, frame::interpreter_frame_method_offset * wordSize));
+     movptr(rdi, Address(rdi, Method::const_offset()));
+     load_unsigned_byte(rdi, Address(rdi, ConstMethod::result_type_offset()));
+     cmpl(rdi, T_VALUETYPE);
+     jcc(Assembler::notEqual, skip);
+ 
+     // We are returning a value type, load its fields into registers
+ #ifndef _LP64
+     super_call_VM_leaf(StubRoutines::load_value_type_fields_in_regs());
+ #else
+     // Load fields from a buffered value with a value class specific handler
+     load_klass(rdi, rax);
+     movptr(rdi, Address(rdi, InstanceKlass::adr_valueklass_fixed_block_offset()));
+     movptr(rdi, Address(rdi, ValueKlass::unpack_handler_offset()));
+ 
+     testptr(rdi, rdi);
+     jcc(Assembler::equal, skip);
+ 
+     call(rdi);
+ #endif
+     // The call above kills the value in rbx. Reload it.
+     movptr(rbx, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize));
+     bind(skip);
+   }
  
    leave();                           // remove frame anchor
    pop(ret_addr);                     // get return address
    mov(rsp, rbx);                     // set sp to sender sp
  }
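In C++ terms, the new epilogue asks two questions before tearing down the frame: does the method return T_VALUETYPE, and (on LP64) does the returned value's klass provide an unpack handler that scatters the buffered value's fields into registers. A hedged sketch of that control flow, using the types named in the diff; result_type() and unpack_handler() are assumed accessors behind the result_type_offset() and unpack_handler_offset() the generated code reads, and the helper itself is hypothetical:

    #include "oops/method.hpp"
    #include "oops/valueKlass.hpp"

    // Sketch only: mirrors the cmpl/testptr decisions in the epilogue.
    static bool should_unpack_value(Method* m, oop result) {
      // ConstMethod caches the method's BasicType result; the generated code
      // loads it via ConstMethod::result_type_offset().
      if (m->constMethod()->result_type() != T_VALUETYPE) {
        return false;                  // not a value type: nothing to unpack
      }
      // A per-klass handler lives in the ValueKlass fixed block; a null
      // handler means fall through and return the buffered oop as usual.
      ValueKlass* vk = ValueKlass::cast(result->klass());
      return vk->unpack_handler() != NULL;
    }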
*** 1202,1211 ****
--- 1234,1247 ----
      // Load immediate 1 into swap_reg %rax
      movl(swap_reg, (int32_t)1);
  
      // Load (object->mark() | 1) into swap_reg %rax
      orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
+     if (EnableValhalla && !UseBiasedLocking) {
+       // The is_always_locked slow path uses the biased pattern, which never occurs naturally when !UseBiasedLocking
+       andptr(swap_reg, ~markOopDesc::biased_lock_bit_in_place);
+     }
  
      // Save (object->mark() | 1) into BasicLock's displaced header
      movptr(Address(lock_reg, mark_offset), swap_reg);
  
      assert(lock_offset == 0,
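The masking matters because, as the added comment indicates, Valhalla marks value-type instances as "always locked" by keeping the biased-lock bit set in their mark word, so a monitorenter on one must miss the fast-path CAS and reach the slow path. A sketch of the displaced-header computation this hunk changes, assuming the markOopDesc bit constants from markOop.hpp (unlocked_value and biased_lock_bit_in_place):

    #include "oops/markOop.hpp"

    // Sketch only: mirrors the orptr/andptr sequence on swap_reg.
    static intptr_t displaced_header_for(intptr_t mark) {
      intptr_t header = mark | markOopDesc::unlocked_value; // orptr: set the unlocked bit
      if (EnableValhalla && !UseBiasedLocking) {
        // Clear the biased bit: a value type's always-locked mark keeps it set,
        // so the later cmpxchg against the real mark fails and takes the slow path.
        header &= ~markOopDesc::biased_lock_bit_in_place;
      }
      return header;
    }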