< prev index next >

src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp

Print this page

 556   case T_VOID   : /* nothing to do */        break;
 557   case T_FLOAT  : /* nothing to do */        break;
 558   case T_DOUBLE : /* nothing to do */        break;
 559   case T_OBJECT :
 560     // retrieve result from frame
 561     __ ldr(r0, Address(rfp, frame::interpreter_frame_oop_temp_offset*wordSize));
 562     // and verify it
 563     __ verify_oop(r0);
 564     break;
 565   default       : ShouldNotReachHere();
 566   }
 567   __ ret(lr);                                  // return from result handler
 568   return entry;
 569 }
 570 
 571 address TemplateInterpreterGenerator::generate_safept_entry_for(
 572         TosState state,
 573         address runtime_entry) {
 574   address entry = __ pc();
      // Save the current top-of-stack value (sized/typed per 'state') onto the
      // expression stack so it survives the VM call below.
 575   __ push(state);

 576   __ call_VM(noreg, runtime_entry);

      // Full two-way barrier: make the runtime call's memory effects visible
      // before bytecode execution resumes.
 577   __ membar(Assembler::AnyAny);
      // Resume dispatch with an empty machine tos (vtos) — the value pushed
      // above is still on the expression stack, not in a register.
 578   __ dispatch_via(vtos, Interpreter::_normal_table.table_for(vtos));
 579   return entry;
 580 }
 581 
 582 // Helpers for commoning out cases in the various type of method entries.
 583 //
 584 
 585 
 586 // increment invocation count & check for overflow
 587 //
 588 // Note: checking for negative value instead of overflow
 589 //       so we have a 'sticky' overflow test
 590 //
 591 // rmethod: method
 592 //
 593 void TemplateInterpreterGenerator::generate_counter_incr(Label* overflow) {
 594   Label done;
 595   // Note: In tiered we increment either counters in Method* or in MDO depending if we're profiling or not.
 596   int increment = InvocationCounter::count_increment;

 769     {
 770       Label L;
 771       __ cbnz(r0, L);
 772       __ stop("synchronization object is NULL");
 773       __ bind(L);
 774     }
 775 #endif // ASSERT
 776 
 777     __ bind(done);
 778   }
 779 
 780   // add space for monitor & lock
 781   __ sub(sp, sp, entry_size); // add space for a monitor entry
 782   __ sub(esp, esp, entry_size);
 783   __ mov(rscratch1, esp);
 784   __ str(rscratch1, monitor_block_top);  // set new monitor block top
 785   // store object
 786   __ str(r0, Address(esp, BasicObjectLock::obj_offset_in_bytes()));
 787   __ mov(c_rarg1, esp); // object address
 788   __ lock_object(c_rarg1);

 789 }
 790 
 791 // Generate a fixed interpreter frame. This is identical setup for
 792 // interpreted methods and for native methods hence the shared code.
 793 //
 794 // Args:
 795 //      lr: return address
 796 //      rmethod: Method*
 797 //      rlocals: pointer to locals
 798 //      rcpool: cp cache
 799 //      stack_pointer: previous sp
 800 void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) {
 801   // initialize fixed part of activation frame
 802   if (native_call) {
 803     __ sub(esp, sp, 14 *  wordSize);
 804     __ mov(rbcp, zr);
 805     __ stp(esp, zr, Address(__ pre(sp, -14 * wordSize)));
 806     // add 2 zero-initialized slots for native calls
 807     __ stp(zr, zr, Address(sp, 12 * wordSize));
 808   } else {

 834 
 835   __ stp(rfp, lr, Address(sp, 10 * wordSize));
 836   __ lea(rfp, Address(sp, 10 * wordSize));
 837 
 838   // set sender sp
 839   // leave last_sp as null
 840   __ stp(zr, r13, Address(sp, 8 * wordSize));
 841 
 842   // Move SP out of the way
 843   if (! native_call) {
 844     __ ldr(rscratch1, Address(rmethod, Method::const_offset()));
 845     __ ldrh(rscratch1, Address(rscratch1, ConstMethod::max_stack_offset()));
 846     __ add(rscratch1, rscratch1, frame::interpreter_frame_monitor_size() + 2);
 847     __ sub(rscratch1, sp, rscratch1, ext::uxtw, 3);
 848     __ andr(sp, rscratch1, -16);
 849   }
 850 }
 851 
 852 // End of helpers
 853 










 854 // Various method entries
 855 //------------------------------------------------------------------------------------------------------------------------
 856 //
 857 //
 858 
 859 // Method entry for java.lang.ref.Reference.get.
 860 address TemplateInterpreterGenerator::generate_Reference_get_entry(void) {
 861   // Code: _aload_0, _getfield, _areturn
 862   // parameter size = 1
 863   //
 864   // The code that gets generated by this routine is split into 2 parts:
 865   //    1. The "intrinsified" code for G1 (or any SATB based GC),
 866   //    2. The slow path - which is an expansion of the regular method entry.
 867   //
 868   // Notes:-
 869   // * In the G1 code we do not check whether we need to block for
 870   //   a safepoint. If G1 is enabled then we must execute the specialized
 871   //   code for Reference.get (except when the Reference object is null)
 872   //   so that we can log the value in the referent field with an SATB
 873   //   update buffer.

1442       // BasicObjectLock will be first in list, since this is a
1443       // synchronized method. However, need to check that the object
1444       // has not been unlocked by an explicit monitorexit bytecode.
1445 
1446       // monitor expect in c_rarg1 for slow unlock path
1447       __ lea (c_rarg1, Address(rfp,   // address of first monitor
1448                                (intptr_t)(frame::interpreter_frame_initial_sp_offset *
1449                                           wordSize - sizeof(BasicObjectLock))));
1450 
1451       __ ldr(t, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
1452       __ cbnz(t, unlock);
1453 
1454       // Entry already unlocked, need to throw exception
1455       __ MacroAssembler::call_VM(noreg,
1456                                  CAST_FROM_FN_PTR(address,
1457                    InterpreterRuntime::throw_illegal_monitor_state_exception));
1458       __ should_not_reach_here();
1459 
1460       __ bind(unlock);
1461       __ unlock_object(c_rarg1);

1462     }
1463     __ bind(L);
1464   }
1465 
1466   // jvmti support
1467   // Note: This must happen _after_ handling/throwing any exceptions since
1468   //       the exception handler code notifies the runtime of method exits
1469   //       too. If this happens before, method entry/exit notifications are
1470   //       not properly paired (was bug - gri 11/22/99).
1471   __ notify_method_exit(vtos, InterpreterMacroAssembler::NotifyJVMTI);
1472 
1473   // restore potential result in r0:d0, call result handler to
1474   // restore potential result in ST0 & handle result
1475 
1476   __ pop(ltos);
1477   __ pop(dtos);
1478 
1479   __ blr(result_handler);
1480 
1481   // remove activation

 556   case T_VOID   : /* nothing to do */        break;
 557   case T_FLOAT  : /* nothing to do */        break;
 558   case T_DOUBLE : /* nothing to do */        break;
 559   case T_OBJECT :
 560     // retrieve result from frame
 561     __ ldr(r0, Address(rfp, frame::interpreter_frame_oop_temp_offset*wordSize));
 562     // and verify it
 563     __ verify_oop(r0);
 564     break;
 565   default       : ShouldNotReachHere();
 566   }
 567   __ ret(lr);                                  // return from result handler
 568   return entry;
 569 }
 570 
 571 address TemplateInterpreterGenerator::generate_safept_entry_for(
 572         TosState state,
 573         address runtime_entry) {
 574   address entry = __ pc();
      // Save the current top-of-stack value (sized/typed per 'state') onto the
      // expression stack so it survives the VM call below.
 575   __ push(state);
      // Bracket the VM call with cont_fastpath set/cleared on the current
      // thread — presumably so a continuation freeze during the call takes the
      // slow path (Loom); confirm against push_cont_fastpath's definition.
 576   __ push_cont_fastpath(rthread);
 577   __ call_VM(noreg, runtime_entry);
 578   __ pop_cont_fastpath(rthread);
      // Full two-way barrier: make the runtime call's memory effects visible
      // before bytecode execution resumes.
 579   __ membar(Assembler::AnyAny);
      // Resume dispatch with an empty machine tos (vtos) — the value pushed
      // above is still on the expression stack, not in a register.
 580   __ dispatch_via(vtos, Interpreter::_normal_table.table_for(vtos));
 581   return entry;
 582 }
 583 
 584 // Helpers for commoning out cases in the various type of method entries.
 585 //
 586 
 587 
 588 // increment invocation count & check for overflow
 589 //
 590 // Note: checking for negative value instead of overflow
 591 //       so we have a 'sticky' overflow test
 592 //
 593 // rmethod: method
 594 //
 595 void TemplateInterpreterGenerator::generate_counter_incr(Label* overflow) {
 596   Label done;
 597   // Note: In tiered we increment either counters in Method* or in MDO depending if we're profiling or not.
 598   int increment = InvocationCounter::count_increment;

 771     {
 772       Label L;
 773       __ cbnz(r0, L);
 774       __ stop("synchronization object is NULL");
 775       __ bind(L);
 776     }
 777 #endif // ASSERT
 778 
 779     __ bind(done);
 780   }
 781 
 782   // add space for monitor & lock
 783   __ sub(sp, sp, entry_size); // add space for a monitor entry
 784   __ sub(esp, esp, entry_size);
 785   __ mov(rscratch1, esp);
 786   __ str(rscratch1, monitor_block_top);  // set new monitor block top
 787   // store object
 788   __ str(r0, Address(esp, BasicObjectLock::obj_offset_in_bytes()));
 789   __ mov(c_rarg1, esp); // object address
 790   __ lock_object(c_rarg1);
 791   __ inc_held_monitor_count(rthread);
 792 }
 793 
 794 // Generate a fixed interpreter frame. This is identical setup for
 795 // interpreted methods and for native methods hence the shared code.
 796 //
 797 // Args:
 798 //      lr: return address
 799 //      rmethod: Method*
 800 //      rlocals: pointer to locals
 801 //      rcpool: cp cache
 802 //      stack_pointer: previous sp
 803 void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) {
 804   // initialize fixed part of activation frame
 805   if (native_call) {
 806     __ sub(esp, sp, 14 *  wordSize);
 807     __ mov(rbcp, zr);
 808     __ stp(esp, zr, Address(__ pre(sp, -14 * wordSize)));
 809     // add 2 zero-initialized slots for native calls
 810     __ stp(zr, zr, Address(sp, 12 * wordSize));
 811   } else {

 837 
 838   __ stp(rfp, lr, Address(sp, 10 * wordSize));
 839   __ lea(rfp, Address(sp, 10 * wordSize));
 840 
 841   // set sender sp
 842   // leave last_sp as null
 843   __ stp(zr, r13, Address(sp, 8 * wordSize));
 844 
 845   // Move SP out of the way
 846   if (! native_call) {
 847     __ ldr(rscratch1, Address(rmethod, Method::const_offset()));
 848     __ ldrh(rscratch1, Address(rscratch1, ConstMethod::max_stack_offset()));
 849     __ add(rscratch1, rscratch1, frame::interpreter_frame_monitor_size() + 2);
 850     __ sub(rscratch1, sp, rscratch1, ext::uxtw, 3);
 851     __ andr(sp, rscratch1, -16);
 852   }
 853 }
 854 
 855 // End of helpers
 856 
      // Special method entry for jdk.internal.vm.Continuation.doYield: instead
      // of building an interpreter frame, tail-jump straight into the shared
      // cont_doYield stub.
 857 address TemplateInterpreterGenerator::generate_Continuation_doYield_entry(void) {
 858   address entry = __ pc();
      // The stub must already have been generated by the stub generator phase.
 859   assert(StubRoutines::cont_doYield() != NULL, "stub not yet generated");
 860 
      // Mark the thread as in the continuation fastpath before yielding —
      // NOTE(review): presumably consumed by the freeze logic in the stub;
      // confirm where it is cleared on the return path.
 861   __ push_cont_fastpath(rthread);
      // Tail call: control does not return here; the stub returns to our caller.
 862   __ far_jump(RuntimeAddress(CAST_FROM_FN_PTR(address, StubRoutines::cont_doYield())));
 863 
 864   return entry;
 865 }
 866 
 867 // Various method entries
 868 //------------------------------------------------------------------------------------------------------------------------
 869 //
 870 //
 871 
 872 // Method entry for java.lang.ref.Reference.get.
 873 address TemplateInterpreterGenerator::generate_Reference_get_entry(void) {
 874   // Code: _aload_0, _getfield, _areturn
 875   // parameter size = 1
 876   //
 877   // The code that gets generated by this routine is split into 2 parts:
 878   //    1. The "intrinsified" code for G1 (or any SATB based GC),
 879   //    2. The slow path - which is an expansion of the regular method entry.
 880   //
 881   // Notes:-
 882   // * In the G1 code we do not check whether we need to block for
 883   //   a safepoint. If G1 is enabled then we must execute the specialized
 884   //   code for Reference.get (except when the Reference object is null)
 885   //   so that we can log the value in the referent field with an SATB
 886   //   update buffer.

1455       // BasicObjectLock will be first in list, since this is a
1456       // synchronized method. However, need to check that the object
1457       // has not been unlocked by an explicit monitorexit bytecode.
1458 
1459       // monitor expect in c_rarg1 for slow unlock path
1460       __ lea (c_rarg1, Address(rfp,   // address of first monitor
1461                                (intptr_t)(frame::interpreter_frame_initial_sp_offset *
1462                                           wordSize - sizeof(BasicObjectLock))));
1463 
1464       __ ldr(t, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
1465       __ cbnz(t, unlock);
1466 
1467       // Entry already unlocked, need to throw exception
1468       __ MacroAssembler::call_VM(noreg,
1469                                  CAST_FROM_FN_PTR(address,
1470                    InterpreterRuntime::throw_illegal_monitor_state_exception));
1471       __ should_not_reach_here();
1472 
1473       __ bind(unlock);
1474       __ unlock_object(c_rarg1);
1475       __ dec_held_monitor_count(rthread);
1476     }
1477     __ bind(L);
1478   }
1479 
1480   // jvmti support
1481   // Note: This must happen _after_ handling/throwing any exceptions since
1482   //       the exception handler code notifies the runtime of method exits
1483   //       too. If this happens before, method entry/exit notifications are
1484   //       not properly paired (was bug - gri 11/22/99).
1485   __ notify_method_exit(vtos, InterpreterMacroAssembler::NotifyJVMTI);
1486 
1487   // restore potential result in r0:d0, call result handler to
1488   // restore potential result in ST0 & handle result
1489 
1490   __ pop(ltos);
1491   __ pop(dtos);
1492 
1493   __ blr(result_handler);
1494 
1495   // remove activation
< prev index next >