 */

#include "asm/macroAssembler.inline.hpp"
#include "classfile/javaClasses.hpp"
#include "compiler/compiler_globals.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/barrierSetAssembler.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "interpreter/bytecodeTracer.hpp"
#include "interpreter/interp_masm.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/templateInterpreterGenerator.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/resourceArea.hpp"
#include "oops/arrayOop.hpp"
#include "oops/inlineKlass.hpp"
#include "oops/method.hpp"
#include "oops/methodCounters.hpp"
#include "oops/methodData.hpp"
#include "oops/oop.inline.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "runtime/arguments.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/globals.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/timer.hpp"
#include "runtime/vframeArray.hpp"
#include "utilities/checkedCast.hpp"
#include "utilities/debug.hpp"
#include "utilities/powerOfTwo.hpp"
#include <sys/types.h>

// Size of interpreter code. Increase if too small. Interpreter will

// ...

    } else {
      __ mov(c_rarg2, NULL_WORD);
    }
    __ call_VM(r0,
               CAST_FROM_FN_PTR(address, InterpreterRuntime::create_exception),
               c_rarg1, c_rarg2);
  }
  // throw exception
  __ b(address(Interpreter::throw_exception_entry()));
  return entry;
}

address TemplateInterpreterGenerator::generate_return_entry_for(TosState state, int step, size_t index_size) {
  address entry = __ pc();

  // Restore stack bottom in case the i2c adapter adjusted the stack
  __ ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
  __ lea(esp, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
  // and null it as a marker that esp is now tos until the next Java call
  __ str(zr, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));

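  // If the callee returned a value object as scalarized fields in registers
  // (InlineTypeReturnedAsFields), pack the fields back into a buffered heap
  // instance so that r0 holds an ordinary oop again.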
  if (state == atos && InlineTypeReturnedAsFields) {
    __ store_inline_type_fields_to_buf(nullptr, true);
  }

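  // Reload the interpreter state registers (bcp, locals, constant pool cache
  // and method) from the current frame; they may have been clobbered across
  // the call.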
  __ restore_bcp();
  __ restore_locals();
  __ restore_constant_pool_cache();
  __ get_method(rmethod);

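  // When type profiling is enabled, record the klass of the returned oop in
  // the MethodData so the compilers can speculate on return types.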
  if (state == atos) {
    Register obj = r0;
    Register mdp = r1;
    Register tmp = r2;
    __ profile_return_type(mdp, obj, tmp);
  }

  const Register cache = r1;
  const Register index = r2;

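  // For invokedynamic call sites (index_size == sizeof(u4)) the parameter
  // count is stored in the ResolvedIndyEntry, so pop num_parameters stack
  // slots here (LSL #3 scales by wordSize).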
  if (index_size == sizeof(u4)) {
    __ load_resolved_indy_entry(cache, index);
    __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
    __ add(esp, esp, cache, Assembler::LSL, 3);
  } else {
// ...

  JFR_ONLY(__ leave_jfr_critical_section();)

  // restore sender sp
  __ mov(sp, esp);

  __ ret(lr);

  if (inc_counter) {
    // Handle overflow of counter and compile method
    __ bind(invocation_counter_overflow);
    generate_counter_overflow(continue_after_compile);
  }

  return entry_point;
}

//
// Generic interpreted method entry to (asm) interpreter
//
address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool object_init) {
  // determine code generation flags
  bool inc_counter = UseCompiler || CountCompiledCalls;

  // rscratch1: sender sp
  address entry_point = __ pc();

  const Address constMethod(rmethod, Method::const_offset());
  const Address access_flags(rmethod, Method::access_flags_offset());
  const Address size_of_parameters(r3,
                                   ConstMethod::size_of_parameters_offset());
  const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());

  // get parameter size (always needed)
  // need to load the const method first
  __ ldr(r3, constMethod);
  __ load_unsigned_short(r2, size_of_parameters);

  // r2: size of parameters

  __ load_unsigned_short(r3, size_of_locals); // get size of locals in words

// ...
  // check for synchronized methods
  // Must happen AFTER the invocation counter check and stack overflow check,
  // so the method is not locked if the counter overflows.
  if (synchronized) {
    // Allocate monitor and lock method
    lock_method();
  } else {
    // no synchronization necessary
#ifdef ASSERT
    {
      Label L;
      __ ldrh(r0, access_flags);
      __ tst(r0, JVM_ACC_SYNCHRONIZED);
      __ br(Assembler::EQ, L);
      __ stop("method needs synchronization");
      __ bind(L);
    }
#endif
  }

  // Issue a StoreStore barrier on entry to Object.<init> if the
  // class has strict fields. Be lazy: always do it.
  if (object_init) {
    __ membar(MacroAssembler::StoreStore);
  }

  // start execution
#ifdef ASSERT
  {
    Label L;
    const Address monitor_block_top(rfp,
        frame::interpreter_frame_monitor_block_top_offset * wordSize);
    __ ldr(rscratch1, monitor_block_top);
    __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
    __ cmp(esp, rscratch1);
    __ br(Assembler::EQ, L);
    __ stop("broken stack frame setup in interpreter 2");
    __ bind(L);
  }
#endif

  // jvmti support
  __ notify_method_entry();

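  // Load the first bytecode at rbcp and dispatch through the vtos table
  // entry to start executing the method body.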
  __ dispatch_next(vtos);
