// Unlock any Java monitors from synchronized blocks.
// Apply stack watermark barrier.
// Notify JVMTI.
// Remove the activation from the stack.
//
// If there are locked Java monitors
//    If throw_monitor_exception
//       throws IllegalMonitorStateException
//    Else if install_monitor_exception
//       installs IllegalMonitorStateException
//    Else
//       no error processing
void InterpreterMacroAssembler::remove_activation(TosState state,
                                                  bool throw_monitor_exception,
                                                  bool install_monitor_exception,
                                                  bool notify_jvmdi) {
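  // notify_jvmdi keeps its historical (JVMDI-era) name; it controls whether the
  // JVMTI method-exit notification is posted when this activation is removed.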
  // Note: Register x13 may be in use for the result check if this is a
  // synchronized method.
  Label unlocked, unlock, no_unlock;

#ifdef ASSERT
  Label not_preempted;
  ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
  beqz(t0, not_preempted);
  stop("remove_activation: should not have alternate return address set");
  bind(not_preempted);
#endif /* ASSERT */
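  // The assert above guards against tearing down the activation while a pending
  // alternate return address from a preemptable VM call is still set; it should
  // have been consumed at the call site.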

  // get the value of _do_not_unlock_if_synchronized into x13
  const Address do_not_unlock_if_synchronized(xthread,
    in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
  lbu(x13, do_not_unlock_if_synchronized);
  sb(zr, do_not_unlock_if_synchronized); // reset the flag
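  // The flag is set while a synchronized method entry has not yet locked its
  // monitor, so an exception raised in that window must not attempt an unlock
  // here (see the method entries in the template interpreter generator).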

  // get method access flags
  ld(x11, Address(fp, frame::interpreter_frame_method_offset * wordSize));
  load_unsigned_short(x12, Address(x11, Method::access_flags_offset()));
  test_bit(t0, x12, exact_log2(JVM_ACC_SYNCHRONIZED));
  beqz(t0, unlocked);

  // Don't unlock anything if the _do_not_unlock_if_synchronized flag
  // is set.
  bnez(x13, no_unlock);

  // unlock monitor
  push(state); // save result

  // BasicObjectLock will be first in list, since this is a
  // synchronized method. However, need to check that the object has
  // not been unlocked by an explicit monitorexit bytecode.
  // Note: No need to save/restore xbcp and xlocals, since these are
  // callee-saved registers and no blocking/GC can happen in leaf calls.
#ifdef ASSERT
  {
    Label L;
    ld(t0, Address(fp, frame::interpreter_frame_last_sp_offset * wordSize));
    beqz(t0, L);
    stop("InterpreterMacroAssembler::call_VM_leaf_base:"
         " last_sp isn't null");
    bind(L);
  }
#endif /* ASSERT */
  // super call
  MacroAssembler::call_VM_leaf_base(entry_point, number_of_arguments);
}

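// Interpreter-specific wrapper around MacroAssembler::call_VM_base. xbcp is
// spilled to its frame slot before the call (save_bcp) and reloaded together
// with xlocals afterwards because, as noted below, the bcp may change across
// the runtime call (e.g. due to GC).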
void InterpreterMacroAssembler::call_VM_base(Register oop_result,
                                             Register java_thread,
                                             Register last_java_sp,
                                             Label* return_pc,
                                             address entry_point,
                                             int number_of_arguments,
                                             bool check_exceptions) {
  // interpreter specific
  //
  // Note: We could avoid restoring the locals pointer (callee-saved), but it
  // makes no real difference for these runtime calls, since they are slow
  // anyway. The bcp, however, must be saved/restored since it may change
  // due to GC.
  save_bcp();
#ifdef ASSERT
  {
    Label L;
    ld(t0, Address(fp, frame::interpreter_frame_last_sp_offset * wordSize));
    beqz(t0, L);
    stop("InterpreterMacroAssembler::call_VM_base:"
         " last_sp isn't null");
    bind(L);
  }
#endif /* ASSERT */
  // super call
  MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
                               return_pc, entry_point,
                               number_of_arguments, check_exceptions);
  // interpreter specific
  restore_bcp();
  restore_locals();
}

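// Makes a VM call during which a mounted virtual thread may be preempted
// (e.g. while blocking on a monitor). call_VM_base records the resume_pc label
// as last_Java_pc, so a frozen continuation later resumes at resume_pc, where
// restore_after_resume() re-establishes the interpreter state. If no preemption
// happened, preempt_alternate_return stays zero and we fall through to
// not_preempted below.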
void InterpreterMacroAssembler::call_VM_preemptable_helper(Register oop_result,
                                                           address entry_point,
                                                           int number_of_arguments,
                                                           bool check_exceptions) {
  assert(InterpreterRuntime::is_preemptable_call(entry_point),
         "VM call not preemptable, should use call_VM()");
  Label resume_pc, not_preempted;

#ifdef ASSERT
  {
    Label L1, L2;
    ld(t0, Address(xthread, JavaThread::preempt_alternate_return_offset()));
    beqz(t0, L1);
    stop("call_VM_preemptable_helper: Should not have alternate return address set");
    bind(L1);
    // We check this counter in patch_return_pc_with_preempt_stub() during freeze.
    incrementw(Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
    lw(t0, Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
    bgtz(t0, L2);
    stop("call_VM_preemptable_helper: should be > 0");
    bind(L2);
  }
#endif /* ASSERT */

  // Force freeze slow path.
  push_cont_fastpath();

  // Make the VM call. In case of preemption, last_Java_pc is set to the point
  // we want to resume at; call_VM_base uses the resume_pc label for this.
  call_VM_base(noreg, noreg, noreg, &resume_pc, entry_point, number_of_arguments, false /*check_exceptions*/);

  pop_cont_fastpath();

#ifdef ASSERT
  {
    Label L;
    decrementw(Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
    lw(t0, Address(xthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
    bgez(t0, L);
    stop("call_VM_preemptable_helper: should be >= 0");
    bind(L);
  }
#endif /* ASSERT */

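  // If we were preempted inside the VM call, the VM stored an alternate return
  // address in preempt_alternate_return when it froze the continuation. In that
  // case clear the field and jump there instead of returning to the interpreter;
  // execution continues at resume_pc below once the thread is resumed.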
  // Check if preempted.
  ld(t1, Address(xthread, JavaThread::preempt_alternate_return_offset()));
  beqz(t1, not_preempted);
  sd(zr, Address(xthread, JavaThread::preempt_alternate_return_offset()));
  jr(t1);

  // In case of preemption, this is where we will resume once we finally acquire the monitor.
  bind(resume_pc);
  restore_after_resume(false /* is_native */);

  bind(not_preempted);
  if (check_exceptions) {
    // check for pending exceptions
    ld(t0, Address(xthread, in_bytes(Thread::pending_exception_offset())));
    Label ok;
    beqz(t0, ok);
    la(t1, RuntimeAddress(StubRoutines::forward_exception_entry()));
    jr(t1);
    bind(ok);
  }

  // get oop result if there is one and reset the value in the thread
  if (oop_result->is_valid()) {
    get_vm_result_oop(oop_result, xthread);
  }
}

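// Shuffle arguments into the RISC-V C calling convention registers expected by
// the runtime entry point (no-ops when the argument is already in place).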
static void pass_arg1(MacroAssembler* masm, Register arg) {
  if (c_rarg1 != arg) {
    masm->mv(c_rarg1, arg);
  }
}

static void pass_arg2(MacroAssembler* masm, Register arg) {
  if (c_rarg2 != arg) {
    masm->mv(c_rarg2, arg);
  }
}

void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
                                                    address entry_point,
                                                    Register arg_1,
                                                    bool check_exceptions) {
  pass_arg1(this, arg_1);
  call_VM_preemptable_helper(oop_result, entry_point, 1, check_exceptions);
}

void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
                                                    address entry_point,
                                                    Register arg_1,
                                                    Register arg_2,
                                                    bool check_exceptions) {
  LP64_ONLY(assert_different_registers(arg_1, c_rarg2));
  pass_arg2(this, arg_2);
  pass_arg1(this, arg_1);
  call_VM_preemptable_helper(oop_result, entry_point, 2, check_exceptions);
}
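// Typical use (a sketch, not taken verbatim from a call site): the interpreter's
// monitorenter slow path could invoke a preemptable entry point as
//   call_VM_preemptable(noreg,
//                       CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
//                       c_rarg1 /* BasicObjectLock* */);
// so that a virtual thread blocking on the monitor can be preempted inside the VM.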
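// Common resume point after a preempted VM call: calls the interpreter's
// cont_resume_interpreter_adapter, which is expected to re-establish the
// interpreter frame state after a thaw. For native methods the TOS result
// slots (dtos/ltos) are pushed back as well (assumption based on the
// is_native handling below).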
void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
  la(t1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
  jalr(t1);
  if (is_native) {
    // On resume we need to set up stack as expected
    push(dtos);
    push(ltos);
  }
}

void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr, Register tmp) {
  assert_different_registers(obj, tmp, t0, mdo_addr.base());
  Label update, next, none;

  verify_oop(obj);

  bnez(obj, update);
  orptr(mdo_addr, TypeEntries::null_seen, t0, tmp);
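  // obj is null: record that a null was seen for this profile slot by setting
  // the TypeEntries::null_seen bit in the MDO cell (the non-null path above
  // continues at 'update').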