24 */
25
26 #include "asm/macroAssembler.inline.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "compiler/disassembler.hpp"
29 #include "compiler/compiler_globals.hpp"
30 #include "gc/shared/barrierSetAssembler.hpp"
31 #include "interpreter/bytecodeHistogram.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateInterpreterGenerator.hpp"
36 #include "interpreter/templateTable.hpp"
37 #include "interpreter/bytecodeTracer.hpp"
38 #include "memory/resourceArea.hpp"
39 #include "oops/arrayOop.hpp"
40 #include "oops/method.hpp"
41 #include "oops/methodCounters.hpp"
42 #include "oops/methodData.hpp"
43 #include "oops/oop.inline.hpp"
44 #include "oops/resolvedIndyEntry.hpp"
45 #include "oops/resolvedMethodEntry.hpp"
46 #include "prims/jvmtiExport.hpp"
47 #include "prims/jvmtiThreadState.hpp"
48 #include "runtime/arguments.hpp"
49 #include "runtime/deoptimization.hpp"
50 #include "runtime/frame.inline.hpp"
51 #include "runtime/globals.hpp"
52 #include "runtime/jniHandles.hpp"
53 #include "runtime/sharedRuntime.hpp"
54 #include "runtime/stubRoutines.hpp"
55 #include "runtime/synchronizer.hpp"
56 #include "runtime/timer.hpp"
57 #include "runtime/vframeArray.hpp"
58 #include "utilities/checkedCast.hpp"
59 #include "utilities/debug.hpp"
60 #include "utilities/powerOfTwo.hpp"
61 #include <sys/types.h>
62
63 // Size of interpreter code. Increase if too small. Interpreter will
450 } else {
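         // no detail message: pass null as the message argument to create_exception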
451 __ mov(c_rarg2, NULL_WORD);
452 }
453 __ call_VM(r0,
454 CAST_FROM_FN_PTR(address, InterpreterRuntime::create_exception),
455 c_rarg1, c_rarg2);
456 }
457 // throw exception
458 __ b(address(Interpreter::throw_exception_entry()));
459 return entry;
460 }
461
462 address TemplateInterpreterGenerator::generate_return_entry_for(TosState state, int step, size_t index_size) {
463 address entry = __ pc();
464
465 // Restore stack bottom in case i2c adjusted stack
466 __ ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
467 __ lea(esp, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
468   // and null it as a marker that esp is now tos until the next Java call
469 __ str(zr, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
470 __ restore_bcp();
471 __ restore_locals();
472 __ restore_constant_pool_cache();
473 __ get_method(rmethod);
474
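        // For reference returns, record the dynamic type of the returned oop in the MethodData (return-type profiling)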
475 if (state == atos) {
476 Register obj = r0;
477 Register mdp = r1;
478 Register tmp = r2;
479 __ profile_return_type(mdp, obj, tmp);
480 }
481
482 const Register cache = r1;
483 const Register index = r2;
484
485 if (index_size == sizeof(u4)) {
486 __ load_resolved_indy_entry(cache, index);
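          // pop the invokedynamic arguments: num_parameters stack slots of wordSize (8) bytes each, hence LSL #3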
487 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
488 __ add(esp, esp, cache, Assembler::LSL, 3);
489 } else {
1640
1641 JFR_ONLY(__ leave_jfr_critical_section();)
1642
1643 // restore sender sp
1644 __ mov(sp, esp);
1645
1646 __ ret(lr);
1647
1648 if (inc_counter) {
1649 // Handle overflow of counter and compile method
1650 __ bind(invocation_counter_overflow);
1651 generate_counter_overflow(continue_after_compile);
1652 }
1653
1654 return entry_point;
1655 }
1656
1657 //
1658 // Generic interpreted method entry to (asm) interpreter
1659 //
1660 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1661 // determine code generation flags
1662 bool inc_counter = UseCompiler || CountCompiledCalls;
1663
1664 // rscratch1: sender sp
1665 address entry_point = __ pc();
1666
1667 const Address constMethod(rmethod, Method::const_offset());
1668 const Address access_flags(rmethod, Method::access_flags_offset());
1669 const Address size_of_parameters(r3,
1670 ConstMethod::size_of_parameters_offset());
1671 const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1672
1673 // get parameter size (always needed)
1674 // need to load the const method first
1675 __ ldr(r3, constMethod);
1676 __ load_unsigned_short(r2, size_of_parameters);
1677
1678 // r2: size of parameters
1679
1680 __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1767 // check for synchronized methods
1768 // Must happen AFTER invocation_counter check and stack overflow check,
1769   // so the method is not locked if the counter overflows.
1770 if (synchronized) {
1771 // Allocate monitor and lock method
1772 lock_method();
1773 } else {
1774 // no synchronization necessary
1775 #ifdef ASSERT
1776 {
1777 Label L;
1778 __ ldrh(r0, access_flags);
1779 __ tst(r0, JVM_ACC_SYNCHRONIZED);
1780 __ br(Assembler::EQ, L);
1781 __ stop("method needs synchronization");
1782 __ bind(L);
1783 }
1784 #endif
1785 }
1786
1787 // start execution
1788 #ifdef ASSERT
1789 {
1790 Label L;
1791 const Address monitor_block_top (rfp,
1792 frame::interpreter_frame_monitor_block_top_offset * wordSize);
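          // the expression stack must be empty here, so esp should equal the saved monitor block top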
1793 __ ldr(rscratch1, monitor_block_top);
1794 __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1795 __ cmp(esp, rscratch1);
1796 __ br(Assembler::EQ, L);
1797 __ stop("broken stack frame setup in interpreter 2");
1798 __ bind(L);
1799 }
1800 #endif
1801
1802 // jvmti support
1803 __ notify_method_entry();
1804
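        // fetch the first bytecode at rbcp and jump to its handler through the vtos dispatch table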
1805 __ dispatch_next(vtos);
1806
24 */
25
26 #include "asm/macroAssembler.inline.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "compiler/disassembler.hpp"
29 #include "compiler/compiler_globals.hpp"
30 #include "gc/shared/barrierSetAssembler.hpp"
31 #include "interpreter/bytecodeHistogram.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "interpreter/templateInterpreterGenerator.hpp"
36 #include "interpreter/templateTable.hpp"
37 #include "interpreter/bytecodeTracer.hpp"
38 #include "memory/resourceArea.hpp"
39 #include "oops/arrayOop.hpp"
40 #include "oops/method.hpp"
41 #include "oops/methodCounters.hpp"
42 #include "oops/methodData.hpp"
43 #include "oops/oop.inline.hpp"
44 #include "oops/inlineKlass.hpp"
45 #include "oops/resolvedIndyEntry.hpp"
46 #include "oops/resolvedMethodEntry.hpp"
47 #include "prims/jvmtiExport.hpp"
48 #include "prims/jvmtiThreadState.hpp"
49 #include "runtime/arguments.hpp"
50 #include "runtime/deoptimization.hpp"
51 #include "runtime/frame.inline.hpp"
52 #include "runtime/globals.hpp"
53 #include "runtime/jniHandles.hpp"
54 #include "runtime/sharedRuntime.hpp"
55 #include "runtime/stubRoutines.hpp"
56 #include "runtime/synchronizer.hpp"
57 #include "runtime/timer.hpp"
58 #include "runtime/vframeArray.hpp"
59 #include "utilities/checkedCast.hpp"
60 #include "utilities/debug.hpp"
61 #include "utilities/powerOfTwo.hpp"
62 #include <sys/types.h>
63
64 // Size of interpreter code. Increase if too small. Interpreter will
451 } else {
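         // no detail message: pass null as the message argument to create_exception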
452 __ mov(c_rarg2, NULL_WORD);
453 }
454 __ call_VM(r0,
455 CAST_FROM_FN_PTR(address, InterpreterRuntime::create_exception),
456 c_rarg1, c_rarg2);
457 }
458 // throw exception
459 __ b(address(Interpreter::throw_exception_entry()));
460 return entry;
461 }
462
463 address TemplateInterpreterGenerator::generate_return_entry_for(TosState state, int step, size_t index_size) {
464 address entry = __ pc();
465
466 // Restore stack bottom in case i2c adjusted stack
467 __ ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
468 __ lea(esp, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
469 // and null it as marker that esp is now tos until next java call
470 __ str(zr, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
471
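        // A compiled callee may have returned an inline type's fields in registers; buffer them back into a heap object so r0 holds an ordinary oop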
472 if (state == atos && InlineTypeReturnedAsFields) {
473 __ store_inline_type_fields_to_buf(nullptr, true);
474 }
475
476 __ restore_bcp();
477 __ restore_locals();
478 __ restore_constant_pool_cache();
479 __ get_method(rmethod);
480
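        // For reference returns, record the dynamic type of the returned oop in the MethodData (return-type profiling)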
481 if (state == atos) {
482 Register obj = r0;
483 Register mdp = r1;
484 Register tmp = r2;
485 __ profile_return_type(mdp, obj, tmp);
486 }
487
488 const Register cache = r1;
489 const Register index = r2;
490
491 if (index_size == sizeof(u4)) {
492 __ load_resolved_indy_entry(cache, index);
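          // pop the invokedynamic arguments: num_parameters stack slots of wordSize (8) bytes each, hence LSL #3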
493 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
494 __ add(esp, esp, cache, Assembler::LSL, 3);
495 } else {
1646
1647 JFR_ONLY(__ leave_jfr_critical_section();)
1648
1649 // restore sender sp
1650 __ mov(sp, esp);
1651
1652 __ ret(lr);
1653
1654 if (inc_counter) {
1655 // Handle overflow of counter and compile method
1656 __ bind(invocation_counter_overflow);
1657 generate_counter_overflow(continue_after_compile);
1658 }
1659
1660 return entry_point;
1661 }
1662
1663 //
1664 // Generic interpreted method entry to (asm) interpreter
1665 //
1666 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool object_init) {
1667 // determine code generation flags
1668 bool inc_counter = UseCompiler || CountCompiledCalls;
1669
1670 // rscratch1: sender sp
1671 address entry_point = __ pc();
1672
1673 const Address constMethod(rmethod, Method::const_offset());
1674 const Address access_flags(rmethod, Method::access_flags_offset());
1675 const Address size_of_parameters(r3,
1676 ConstMethod::size_of_parameters_offset());
1677 const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1678
1679 // get parameter size (always needed)
1680 // need to load the const method first
1681 __ ldr(r3, constMethod);
1682 __ load_unsigned_short(r2, size_of_parameters);
1683
1684 // r2: size of parameters
1685
1686 __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1773 // check for synchronized methods
1774 // Must happen AFTER invocation_counter check and stack overflow check,
1775   // so the method is not locked if the counter overflows.
1776 if (synchronized) {
1777 // Allocate monitor and lock method
1778 lock_method();
1779 } else {
1780 // no synchronization necessary
1781 #ifdef ASSERT
1782 {
1783 Label L;
1784 __ ldrh(r0, access_flags);
1785 __ tst(r0, JVM_ACC_SYNCHRONIZED);
1786 __ br(Assembler::EQ, L);
1787 __ stop("method needs synchronization");
1788 __ bind(L);
1789 }
1790 #endif
1791 }
1792
1793 // Issue a StoreStore barrier on entry to Object_init if the
1794   // class has strict fields. Be lazy: always do it.
1795 if (object_init) {
1796 __ membar(MacroAssembler::StoreStore);
1797 }
1798
1799 // start execution
1800 #ifdef ASSERT
1801 {
1802 Label L;
1803 const Address monitor_block_top (rfp,
1804 frame::interpreter_frame_monitor_block_top_offset * wordSize);
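          // the expression stack must be empty here, so esp should equal the saved monitor block top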
1805 __ ldr(rscratch1, monitor_block_top);
1806 __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1807 __ cmp(esp, rscratch1);
1808 __ br(Assembler::EQ, L);
1809 __ stop("broken stack frame setup in interpreter 2");
1810 __ bind(L);
1811 }
1812 #endif
1813
1814 // jvmti support
1815 __ notify_method_entry();
1816
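        // fetch the first bytecode at rbcp and jump to its handler through the vtos dispatch table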
1817 __ dispatch_next(vtos);
1818