src/hotspot/cpu/x86/templateInterpreterGenerator_x86.cpp
}
// Interpreter stub for calling a native method. (asm interpreter)
// This sets up a somewhat different looking stack for calling the
// native method than the typical interpreter frame setup.
! address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
// determine code generation flags
bool inc_counter = UseCompiler || CountCompiledCalls;
// rbx: Method*
// rbcp: sender sp
}
// Interpreter stub for calling a native method. (asm interpreter)
// This sets up a somewhat different looking stack for calling the
// native method than the typical interpreter frame setup.
! address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
// determine code generation flags
bool inc_counter = UseCompiler || CountCompiledCalls;
// rbx: Method*
// rbcp: sender sp
#endif
// jvmti support
__ notify_method_entry();
+ // runtime upcalls
+ if (runtime_upcalls) {
+ __ generate_runtime_upcalls_on_method_entry();
+ }
+
// work registers
const Register method = rbx;
const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread);
const Register t = NOT_LP64(rcx) LP64_ONLY(r11);
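
The new runtime_upcalls hook above adds a single emission point right after the JVMTI method-entry notification. A minimal sketch of what the emitted helper might look like, assuming a registry that hands back one VM entry point for method-entry upcalls; the name RuntimeUpcalls::on_method_entry_upcall_address() and the call_VM plumbing are illustrative assumptions, not code from this patch:

// Hypothetical sketch, not this patch's implementation: emit a VM call for the
// registered method-entry upcall, passing the current Method* as the argument.
void InterpreterMacroAssembler::generate_runtime_upcalls_on_method_entry() {
  address upcall = RuntimeUpcalls::on_method_entry_upcall_address(); // assumed API
  if (upcall != nullptr) {
    get_method(rbx);               // reload Method*; earlier code may clobber rbx
    call_VM(noreg, upcall, rbx);   // standard interpreter runtime-call plumbing
  }
}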
}
//
// Generic interpreted method entry to (asm) interpreter
//
! address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
// determine code generation flags
bool inc_counter = UseCompiler || CountCompiledCalls;
// ebx: Method*
// rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
}
//
// Generic interpreted method entry to (asm) interpreter
//
! address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
// determine code generation flags
bool inc_counter = UseCompiler || CountCompiledCalls;
// ebx: Method*
// rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
#endif
// jvmti support
__ notify_method_entry();
+ if (runtime_upcalls) {
+ __ generate_runtime_upcalls_on_method_entry();
+ }
+
__ dispatch_next(vtos);
// invocation counter overflow
if (inc_counter) {
// Handle overflow of counter and compile method
generate_and_dispatch(t);
}
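
Both entry generators now carry the extra flag. A hedged sketch of how a call site in the shared generator might thread it through; the RuntimeUpcalls::enabled() predicate is an assumed name for illustration only:

// Illustrative call-site shape, not taken from this patch:
bool upcalls = RuntimeUpcalls::enabled();  // assumed predicate
address native_entry = generate_native_entry(/*synchronized=*/ false, upcalls);
address normal_entry = generate_normal_entry(/*synchronized=*/ false, upcalls);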
//-----------------------------------------------------------------------------
+ void TemplateInterpreterGenerator::count_bytecode() {
+ #ifdef _LP64
+ __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
+ #else
+ Unimplemented();
+ #endif
+ }
+
+ void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
+ __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
+ }
+
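
The relocated count_bytecode() switches from incrementl to incrementq on LP64, so the counter must be a 64-bit field; a sketch of the matching declaration follows, with the exact type in bytecodeHistogram.hpp being an assumption:

// Assumed declaration shape, matching the qword increment above:
class BytecodeCounter : AllStatic {
  friend class TemplateInterpreterGenerator;
  static jlong _counter_value;  // assumed 64-bit so long-running counts do not wrap
 public:
  static jlong counter_value() { return _counter_value; }
};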
// Non-product code
#ifndef PRODUCT
address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
address entry = __ pc();
#endif // _LP64
return entry;
}
- void TemplateInterpreterGenerator::count_bytecode() {
- __ incrementl(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
- }
-
- void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
- __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
- }
-
void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
__ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));
__ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);
__ orl(rbx,
((int) t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
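
For reference, the index update emitted above has this plain-C++ shape: the previous pair index is shifted down and the current bytecode shifted into the high bits, so each (previous, current) bytecode pair selects its own counter slot. Illustrative only:

// Equivalent of: mov32 rbx, _index; shrl rbx, log2; orl rbx, bytecode << log2
static int next_pair_index(int index, int bytecode, int log2_number_of_codes) {
  return (index >> log2_number_of_codes) | (bytecode << log2_number_of_codes);
}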