
src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp

*** 53,62 ****
--- 53,66 ----
  #ifndef PRODUCT
  #include "oops/method.hpp"
  #endif // !PRODUCT
  
+ #ifdef BUILTIN_SIM
+ #include "../../../../../../simulator/simulator.hpp"
+ #endif
+ 
  // Size of interpreter code. Increase if too small. Interpreter will
  // fail with a guarantee ("not enough space for interpreter generation")
  // if too small.
  // Run with +PrintInterpreter to get the VM to print out the size.
  // Max size with JVMTI
*** 294,305 ****
      break;
    default:
      ShouldNotReachHere();
      fn = NULL;  // unreachable
    }
    __ mov(rscratch1, fn);
!   __ blr(rscratch1);
  }
  
  // Abstract method entry
  // Attempt to execute abstract method. Throw exception
  address TemplateInterpreterGenerator::generate_abstract_entry(void) {
--- 298,310 ----
      break;
    default:
      ShouldNotReachHere();
      fn = NULL;  // unreachable
    }
+   const int gpargs = 0, rtype = 3;
    __ mov(rscratch1, fn);
!   __ blrt(rscratch1, gpargs, fpargs, rtype);
  }
  
  // Abstract method entry
  // Attempt to execute abstract method. Throw exception
  address TemplateInterpreterGenerator::generate_abstract_entry(void) {
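Background for this hunk: in a BUILTIN_SIM build the generated interpreter code runs inside an AArch64 simulator hosted on another architecture, so a branch-and-link into real native code cannot simply execute. blrt therefore carries a descriptor of the callee's signature -- general-purpose argument count, floating-point argument count, and a return-type code -- which the simulator's call-out handler uses to marshal simulated registers into a real C call; on genuine hardware blrt reduces to a plain blr and the descriptor is ignored. The sketch below (not part of the diff) illustrates the marshalling idea only; the SimState type, the callout helper, and the RType values are assumptions, not HotSpot's actual encoding, though rtype 3 above plausibly means a double return for the math entries.

#include <cstdint>

struct SimState {           // hypothetical simulated CPU state
  uint64_t x[31];           // general-purpose registers x0..x30
  double   v[32];           // FP/SIMD registers v0..v31 (d-register view)
};

enum RType { RT_VOID = 0, RT_INT = 1, RT_LONG = 2, RT_DOUBLE = 3 };  // assumed codes

// Marshal simulated registers into a real native call and write the
// result back -- the job blrt's descriptor makes possible.
inline void callout(SimState& s, void* target, int gpargs, int fpargs, int rtype) {
  if (gpargs == 0 && fpargs == 1 && rtype == RT_DOUBLE) {
    // shape of the math entries above: double fn(double)
    double (*fn)(double) = reinterpret_cast<double (*)(double)>(target);
    s.v[0] = fn(s.v[0]);
  } else if (gpargs == 1 && fpargs == 0 && rtype == RT_VOID) {
    // shape of the hand-made runtime calls in later hunks: void fn(Thread*)
    void (*fn)(uint64_t) = reinterpret_cast<void (*)(uint64_t)>(target);
    fn(s.x[0]);
  }
  // the real handler would cover every signature shape the interpreter emits
}

Read the literal triples in the later hunks the same way: blrt(rscratch2, 1, 0, 0) plausibly describes one pointer argument (the thread) with a void return, and blrt(rscratch2, 0, 0, 0) a no-argument void call.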
*** 462,471 ****
--- 467,483 ----
    __ ldr(rscratch2, Address(rfp, frame::interpreter_frame_initial_sp_offset * wordSize));
    __ sub(rscratch1, rscratch2, rscratch1, ext::uxtw, 3);
    __ andr(sp, rscratch1, -16);
  
+ #ifndef PRODUCT
+   // tell the simulator that the method has been reentered
+   if (NotifySimulator) {
+     __ notify(Assembler::method_reentry);
+   }
+ #endif
+ 
    __ check_and_handle_popframe(rthread);
    __ check_and_handle_earlyret(rthread);
  
    __ get_dispatch();
    __ dispatch_next(state, step);
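The notify calls added here and in the later hunks exist only for the simulator: NotifySimulator is a sim-build flag, and Assembler::method_entry / method_reentry are markers the simulator recognizes so it can mirror the interpreter's call stack (entry pushes a shadow frame, reentry resynchronizes after a return or unwind). A minimal sketch of what a handler on the simulator side might do -- the SimNotifier type and its bookkeeping are hypothetical, not the simulator's actual mechanism:

#include <cstdio>

enum NotifyKind { method_entry, method_reentry };  // mirrors the Assembler:: markers

// Hypothetical simulator-side handler: keeps a shadow call depth so a
// debugger attached to the simulator can report where the interpreter is.
struct SimNotifier {
  int depth = 0;
  void on_notify(NotifyKind kind) {
    if (kind == method_entry) {
      ++depth;             // entering a new interpreted frame
    } else if (depth > 0) {
      --depth;             // method_reentry: a callee frame has been popped
    }
    std::printf("%s, depth=%d\n",
                kind == method_entry ? "entry" : "reentry", depth);
  }
};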
*** 500,521 ****
  #if INCLUDE_JVMCI
    // Check if we need to take lock at entry of synchronized method. This can
    // only occur on method entry so emit it only for vtos with step 0.
    if ((EnableJVMCI || UseAOT) && state == vtos && step == 0) {
      Label L;
!     __ ldrb(rscratch1, Address(rthread, JavaThread::pending_monitorenter_offset()));
      __ cbz(rscratch1, L);
      // Clear flag.
      __ strb(zr, Address(rthread, JavaThread::pending_monitorenter_offset()));
      // Take lock.
      lock_method();
      __ bind(L);
    } else {
  #ifdef ASSERT
      if (EnableJVMCI) {
        Label L;
!       __ ldrb(rscratch1, Address(rthread, JavaThread::pending_monitorenter_offset()));
        __ cbz(rscratch1, L);
        __ stop("unexpected pending monitor in deopt entry");
        __ bind(L);
      }
  #endif
--- 512,533 ----
  #if INCLUDE_JVMCI
    // Check if we need to take lock at entry of synchronized method. This can
    // only occur on method entry so emit it only for vtos with step 0.
    if ((EnableJVMCI || UseAOT) && state == vtos && step == 0) {
      Label L;
!     __ ldr(rscratch1, Address(rthread, Thread::pending_exception_offset()));
      __ cbz(rscratch1, L);
      // Clear flag.
      __ strb(zr, Address(rthread, JavaThread::pending_monitorenter_offset()));
      // Take lock.
      lock_method();
      __ bind(L);
    } else {
  #ifdef ASSERT
      if (EnableJVMCI) {
        Label L;
!       __ ldr(rscratch1, Address(rthread, Thread::pending_exception_offset()));
        __ cbz(rscratch1, L);
        __ stop("unexpected pending monitor in deopt entry");
        __ bind(L);
      }
  #endif
*** 872,883 ****
    } else {
      __ stp(zr, rmethod, Address(sp, 6 * wordSize)); // save Method* (no mdp)
    }
  
    // Get mirror and store it in the frame as GC root for this Method*
!   __ load_mirror(r10, rmethod);
!   __ stp(r10, zr, Address(sp, 4 * wordSize));
  
    __ ldr(rcpool, Address(rmethod, Method::const_offset()));
    __ ldr(rcpool, Address(rcpool, ConstMethod::constants_offset()));
    __ ldr(rcpool, Address(rcpool, ConstantPool::cache_offset_in_bytes()));
    __ stp(rlocals, rcpool, Address(sp, 2 * wordSize));
--- 884,895 ----
    } else {
      __ stp(zr, rmethod, Address(sp, 6 * wordSize)); // save Method* (no mdp)
    }
  
    // Get mirror and store it in the frame as GC root for this Method*
!   __ load_mirror(rscratch1, rmethod);
!   __ stp(rscratch1, zr, Address(sp, 4 * wordSize));
  
    __ ldr(rcpool, Address(rmethod, Method::const_offset()));
    __ ldr(rcpool, Address(rcpool, ConstMethod::constants_offset()));
    __ ldr(rcpool, Address(rcpool, ConstantPool::cache_offset_in_bytes()));
    __ stp(rlocals, rcpool, Address(sp, 2 * wordSize));
*** 1171,1180 ****
--- 1183,1198 ----
    // Pull SP back to minimum size: this avoids holes in the stack
    __ andr(sp, esp, -16);
  
    // initialize fixed part of activation frame
    generate_fixed_frame(true);
+ #ifndef PRODUCT
+   // tell the simulator that a method has been entered
+   if (NotifySimulator) {
+     __ notify(Assembler::method_entry);
+   }
+ #endif
  
    // make sure method is native & not abstract
  #ifdef ASSERT
    __ ldrw(r0, access_flags);
    {
*** 1355,1365 ****
    __ mov(rscratch1, _thread_in_native);
    __ lea(rscratch2, Address(rthread, JavaThread::thread_state_offset()));
    __ stlrw(rscratch1, rscratch2);
  
    // Call the native method.
!   __ blr(r10);
    __ bind(native_return);
    __ maybe_isb();
    __ get_method(rmethod);
    // result potentially in r0 or v0
  
--- 1373,1383 ----
    __ mov(rscratch1, _thread_in_native);
    __ lea(rscratch2, Address(rthread, JavaThread::thread_state_offset()));
    __ stlrw(rscratch1, rscratch2);
  
    // Call the native method.
!   __ blrt(r10, rscratch1);
    __ bind(native_return);
    __ maybe_isb();
    __ get_method(rmethod);
    // result potentially in r0 or v0
  
--- 1373,1383 ----
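Two details of this hunk are worth spelling out. First, the transition to _thread_in_native uses stlrw, a store-release, so the VM's safepoint machinery observes the new state before any effect of the native call can be observed; the C++ analogue below sketches that ordering (the two state values match HotSpot's JavaThreadState constants, the rest is illustrative). Second, blrt here takes its descriptor in a register (rscratch1) rather than as literals, presumably because this one stub serves native methods of every signature, so the descriptor can only be supplied at run time.

#include <atomic>

// HotSpot's actual values for these two states; everything else is a sketch.
enum JavaThreadState { _thread_in_native = 4, _thread_in_Java = 8 };

std::atomic<int> thread_state{_thread_in_Java};

void call_native(void (*fn)()) {
  // stlrw above: release store, so the state change is globally visible
  // before the native method runs.
  thread_state.store(_thread_in_native, std::memory_order_release);
  fn();  // __ blrt(r10, rscratch1) -- the native method itself
}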
*** 1395,1405 ****
    //   clearing _last_native_pc down below. So we do a runtime call by
    //   hand.
    //
    __ mov(c_rarg0, rthread);
    __ mov(rscratch2, CAST_FROM_FN_PTR(address, JavaThread::check_special_condition_for_native_trans));
!   __ blr(rscratch2);
    __ maybe_isb();
    __ get_method(rmethod);
    __ reinit_heapbase();
    __ bind(Continue);
  }
--- 1413,1423 ----
    //   clearing _last_native_pc down below. So we do a runtime call by
    //   hand.
    //
    __ mov(c_rarg0, rthread);
    __ mov(rscratch2, CAST_FROM_FN_PTR(address, JavaThread::check_special_condition_for_native_trans));
!   __ blrt(rscratch2, 1, 0, 0);
    __ maybe_isb();
    __ get_method(rmethod);
    __ reinit_heapbase();
    __ bind(Continue);
  }
*** 1446,1456 ****
    __ br(Assembler::NE, no_reguard);
  
    __ pusha(); // XXX only save smashed registers
    __ mov(c_rarg0, rthread);
    __ mov(rscratch2, CAST_FROM_FN_PTR(address, SharedRuntime::reguard_yellow_pages));
!   __ blr(rscratch2);
    __ popa(); // XXX only restore smashed registers
    __ bind(no_reguard);
  }
  
  // The method register is junk from after the thread_in_native transition
--- 1464,1474 ----
    __ br(Assembler::NE, no_reguard);
  
    __ pusha(); // XXX only save smashed registers
    __ mov(c_rarg0, rthread);
    __ mov(rscratch2, CAST_FROM_FN_PTR(address, SharedRuntime::reguard_yellow_pages));
!   __ blrt(rscratch2, 0, 0, 0);
    __ popa(); // XXX only restore smashed registers
    __ bind(no_reguard);
  }
  
  // The method register is junk from after the thread_in_native transition
*** 1601,1611 ****
    // And the base dispatch table
    __ get_dispatch();
  
    // initialize fixed part of activation frame
    generate_fixed_frame(false);
! 
    // make sure method is not native & not abstract
  #ifdef ASSERT
    __ ldrw(r0, access_flags);
    {
      Label L;
--- 1619,1634 ----
    // And the base dispatch table
    __ get_dispatch();
  
    // initialize fixed part of activation frame
    generate_fixed_frame(false);
! #ifndef PRODUCT
!   // tell the simulator that a method has been entered
!   if (NotifySimulator) {
!     __ notify(Assembler::method_entry);
!   }
! #endif
    // make sure method is not native & not abstract
  #ifdef ASSERT
    __ ldrw(r0, access_flags);
    {
      Label L;
*** 1737,1746 ****
--- 1760,1776 ----
    __ restore_locals();
    __ restore_constant_pool_cache();
    __ reinit_heapbase();  // restore rheapbase as heapbase.
    __ get_dispatch();
  
+ #ifndef PRODUCT
+   // tell the simulator that the caller method has been reentered
+   if (NotifySimulator) {
+     __ get_method(rmethod);
+     __ notify(Assembler::method_reentry);
+   }
+ #endif
    // Entry point for exceptions thrown within interpreter code
    Interpreter::_throw_exception_entry = __ pc();
  
    // If we came here via a NullPointerException on the receiver of a
    // method, rmethod may be corrupt.
    __ get_method(rmethod);
*** 2054,2059 ****
--- 2084,2206 ----
    __ brk(0);
    __ bind(L);
    __ pop(rscratch1);
  }
  
+ #ifdef BUILTIN_SIM
+ 
+ #include <sys/mman.h>
+ #include <unistd.h>
+ 
+ extern "C" {
+   static int PAGESIZE = getpagesize();
+   int is_mapped_address(u_int64_t address)
+   {
+     address = (address & ~((u_int64_t)PAGESIZE - 1));
+     if (msync((void *)address, PAGESIZE, MS_ASYNC) == 0) {
+       return true;
+     }
+     if (errno != ENOMEM) {
+       return true;
+     }
+     return false;
+   }
+ 
+   void bccheck1(u_int64_t pc, u_int64_t fp, char *method, int *bcidx, int *framesize, char *decode)
+   {
+     if (method != 0) {
+       method[0] = '\0';
+     }
+     if (bcidx != 0) {
+       *bcidx = -2;
+     }
+     if (decode != 0) {
+       decode[0] = 0;
+     }
+ 
+     if (framesize != 0) {
+       *framesize = -1;
+     }
+ 
+     if (Interpreter::contains((address)pc)) {
+       AArch64Simulator *sim = AArch64Simulator::get_current(UseSimulatorCache, DisableBCCheck);
+       Method* meth;
+       address bcp;
+       if (fp) {
+ #define FRAME_SLOT_METHOD 3
+ #define FRAME_SLOT_BCP 7
+         meth = (Method*)sim->getMemory()->loadU64(fp - (FRAME_SLOT_METHOD << 3));
+         bcp = (address)sim->getMemory()->loadU64(fp - (FRAME_SLOT_BCP << 3));
+ #undef FRAME_SLOT_METHOD
+ #undef FRAME_SLOT_BCP
+       } else {
+         meth = (Method*)sim->getCPUState().xreg(RMETHOD, 0);
+         bcp = (address)sim->getCPUState().xreg(RBCP, 0);
+       }
+       if (meth->is_native()) {
+         return;
+       }
+       if (method && meth->is_method()) {
+         ResourceMark rm;
+         method[0] = 'I';
+         method[1] = ' ';
+         meth->name_and_sig_as_C_string(method + 2, 398);
+       }
+       if (bcidx) {
+         if (meth->contains(bcp)) {
+           *bcidx = meth->bci_from(bcp);
+         } else {
+           *bcidx = -2;
+         }
+       }
+       if (decode) {
+         if (!BytecodeTracer::closure()) {
+           BytecodeTracer::set_closure(BytecodeTracer::std_closure());
+         }
+         stringStream str(decode, 400);
+         BytecodeTracer::trace(meth, bcp, &str);
+       }
+     } else {
+       if (method) {
+         CodeBlob *cb = CodeCache::find_blob((address)pc);
+         if (cb != NULL) {
+           if (cb->is_nmethod()) {
+             ResourceMark rm;
+             nmethod* nm = (nmethod*)cb;
+             method[0] = 'C';
+             method[1] = ' ';
+             nm->method()->name_and_sig_as_C_string(method + 2, 398);
+           } else if (cb->is_adapter_blob()) {
+             strcpy(method, "B adapter blob");
+           } else if (cb->is_runtime_stub()) {
+             strcpy(method, "B runtime stub");
+           } else if (cb->is_exception_stub()) {
+             strcpy(method, "B exception stub");
+           } else if (cb->is_deoptimization_stub()) {
+             strcpy(method, "B deoptimization stub");
+           } else if (cb->is_safepoint_stub()) {
+             strcpy(method, "B safepoint stub");
+           } else if (cb->is_uncommon_trap_stub()) {
+             strcpy(method, "B uncommon trap stub");
+           } else if (cb->contains((address)StubRoutines::call_stub())) {
+             strcpy(method, "B call stub");
+           } else {
+             strcpy(method, "B unknown blob : ");
+             strcat(method, cb->name());
+           }
+           if (framesize != NULL) {
+             *framesize = cb->frame_size();
+           }
+         }
+       }
+     }
+   }
+ 
+   JNIEXPORT void bccheck(u_int64_t pc, u_int64_t fp, char *method, int *bcidx, int *framesize, char *decode)
+   {
+     bccheck1(pc, fp, method, bcidx, framesize, decode);
+   }
+ }
+ 
+ #endif // BUILTIN_SIM
  
  #endif // !PRODUCT
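The probe at the top of the added block deserves a note: msync with MS_ASYNC on a page-aligned address is a cheap, fault-free way to ask the kernel whether a page is mapped -- it fails with ENOMEM exactly when it is not -- which lets bccheck follow possibly-bogus frame pointers safely. A standalone demonstration of the same trick (not part of the diff; the mapped helper is a hypothetical name):

#include <cerrno>
#include <cstdint>
#include <cstdio>
#include <sys/mman.h>
#include <unistd.h>

static bool mapped(uint64_t addr) {
  static const uint64_t page = (uint64_t)sysconf(_SC_PAGESIZE);
  addr &= ~(page - 1);                        // round down to a page boundary
  if (msync((void *)addr, page, MS_ASYNC) == 0) return true;
  return errno != ENOMEM;                     // any other errno: page exists
}

int main() {
  int on_stack = 0;
  std::printf("stack page mapped: %d\n", mapped((uint64_t)&on_stack));  // 1
  std::printf("null page mapped:  %d\n", mapped(0));                    // 0
  return 0;
}

The JNIEXPORT wrapper at the end keeps bccheck visible by name in the symbol table, which suggests it is intended to be called from a debugger attached to the simulator rather than from VM code.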