src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp
  generate_and_dispatch(t);
}
//-----------------------------------------------------------------------------
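+ // Count the number of bytecodes executed.
+ // The per-thread counter at Thread::bc_counter_offset() is thread-local, so a plain
+ // load/add/store is sufficient; the shared BytecodeCounter::_counter_value is bumped
+ // with an atomic add so concurrent threads do not lose updates.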
+ void TemplateInterpreterGenerator::count_bytecode() {
+   if (CountBytecodesPerThread) {
+     Address bc_counter_addr(rthread, Thread::bc_counter_offset());
+     __ ldr(r10, bc_counter_addr);
+     __ add(r10, r10, 1);
+     __ str(r10, bc_counter_addr);
+   }
+   if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
+     __ mov(r10, (address) &BytecodeCounter::_counter_value);
+     __ atomic_add(noreg, 1, r10);
+   }
+ }
+
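+ // Bump the shared, per-bytecode execution count in BytecodeHistogram::_counters.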
+ void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
+   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
+   __ atomic_addw(noreg, 1, r10);
+ }
+
// Non-product code
#ifndef PRODUCT
address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
  address entry = __ pc();
  __ ret(lr);                                 // return from result handler
  return entry;
}
- void TemplateInterpreterGenerator::count_bytecode() {
-   __ mov(r10, (address) &BytecodeCounter::_counter_value);
-   __ atomic_addw(noreg, 1, r10);
- }
-
- void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
-   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
-   __ atomic_addw(noreg, 1, r10);
- }
-
void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
  // Calculate new index for counter:
  //   _index = (_index >> log2_number_of_codes) |
  //            (bytecode << log2_number_of_codes);
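  // After the update, the previous bytecode sits in the low bits of _index and the
  // current bytecode in the high bits, so each ordered bytecode pair gets its own counter slot.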
  Register index_addr = rscratch1;