462 // Unlock any Java monitors from synchronized blocks.
463 // Apply stack watermark barrier.
464 // Notify JVMTI.
465 // Remove the activation from the stack.
466 //
467 // If there are locked Java monitors
468 //    If throw_monitor_exception
469 //       throws IllegalMonitorStateException
470 //    Else if install_monitor_exception
471 //       installs IllegalMonitorStateException
472 //    Else
473 //       no error processing
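// (Hedged reading of the two flags: throw_monitor_exception raises the
// IllegalMonitorStateException immediately while unwinding, whereas
// install_monitor_exception only records it as the pending exception so it
// surfaces in the caller.)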
474 void InterpreterMacroAssembler::remove_activation(TosState state,
475 bool throw_monitor_exception,
476 bool install_monitor_exception,
477 bool notify_jvmdi) {
478 // Note: The result registers r0 and v0 may be in use for the
479 // result check if this is a synchronized method
480 Label unlocked, unlock, no_unlock;
481
482 // get the value of _do_not_unlock_if_synchronized into r3
483 const Address do_not_unlock_if_synchronized(rthread,
484 in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
485 ldrb(r3, do_not_unlock_if_synchronized);
486 strb(zr, do_not_unlock_if_synchronized); // reset the flag
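// (The flag is set by the synchronized-method entry code before the monitor
// has actually been locked; if we get here in that window, for example because
// method entry raised an exception, unlocking would be wrong - hence the check
// below. Hedged summary of how the template interpreter uses this flag.)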
487
488 // get method access flags
489 ldr(r1, Address(rfp, frame::interpreter_frame_method_offset * wordSize));
490 ldrh(r2, Address(r1, Method::access_flags_offset()));
491 tbz(r2, exact_log2(JVM_ACC_SYNCHRONIZED), unlocked);
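// Method is not synchronized: there is no method monitor to unlock here.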
492
493 // Don't unlock anything if the _do_not_unlock_if_synchronized flag
494 // is set.
495 cbnz(r3, no_unlock);
496
497 // unlock monitor
498 push(state); // save result
499
500 // BasicObjectLock will be first in list, since this is a
501 // synchronized method. However, need to check that the object has
1354 // Note: No need to save/restore the rbcp & rlocals pointers since these
1355 // are callee-saved registers and no blocking/GC can happen
1356 // in leaf calls.
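// The assert below checks that interpreter_frame_last_sp is still clear, i.e.
// this frame is not in the middle of an outgoing call that extended the
// expression stack (hedged description of the invariant being verified).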
1357 #ifdef ASSERT
1358 {
1359 Label L;
1360 ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
1361 cbz(rscratch1, L);
1362 stop("InterpreterMacroAssembler::call_VM_leaf_base:"
1363 " last_sp != nullptr");
1364 bind(L);
1365 }
1366 #endif /* ASSERT */
1367 // super call
1368 MacroAssembler::call_VM_leaf_base(entry_point, number_of_arguments);
1369 }
1370
1371 void InterpreterMacroAssembler::call_VM_base(Register oop_result,
1372 Register java_thread,
1373 Register last_java_sp,
1374 address entry_point,
1375 int number_of_arguments,
1376 bool check_exceptions) {
1377 // interpreter specific
1378 //
1379 // Note: Restoring the locals pointer (callee-saved) could be avoided, but that
1380 // makes no real difference for these runtime calls, since they are
1381 // slow anyway. The bcp, however, must be saved/restored since it may change
1382 // due to GC.
1383 // assert(java_thread == noreg , "not expecting a precomputed java thread");
1384 save_bcp();
1385 #ifdef ASSERT
1386 {
1387 Label L;
1388 ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
1389 cbz(rscratch1, L);
1390 stop("InterpreterMacroAssembler::call_VM_base:"
1391 " last_sp != nullptr");
1392 bind(L);
1393 }
1394 #endif /* ASSERT */
1395 // super call
1396 MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
1397 entry_point, number_of_arguments,
1398 check_exceptions);
1399 // interpreter specific
1400 restore_bcp();
1401 restore_locals();
1402 }
1403
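// Make a VM call from which the current (virtual) thread may be preempted.
// The call is bracketed by push/pop_cont_fastpath() to force the freeze slow
// path, last_Java_pc is pointed at resume_pc so a preempted frame continues
// there once it is thawed, and preempt_alternate_return carries the address to
// jump to when preemption did happen (summary of the code below).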
1404 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
1405 address entry_point,
1406 Register arg_1) {
1407 assert(arg_1 == c_rarg1, "");
1408 Label resume_pc, not_preempted;
1409
1410 #ifdef ASSERT
1411 {
1412 Label L;
1413 ldr(rscratch1, Address(rthread, JavaThread::preempt_alternate_return_offset()));
1414 cbz(rscratch1, L);
1415 stop("Should not have alternate return address set");
1416 bind(L);
1417 }
1418 #endif /* ASSERT */
1419
1420 // Force freeze slow path.
1421 push_cont_fastpath();
1422
1423 // Make the VM call. In case of preemption, set last_Java_pc to the address we want to resume at.
1424 adr(rscratch1, resume_pc);
1425 str(rscratch1, Address(rthread, JavaThread::last_Java_pc_offset()));
1426 call_VM_base(oop_result, noreg, noreg, entry_point, 1, false /*check_exceptions*/);
1427
1428 pop_cont_fastpath();
1429
1430 // Check if preempted.
1431 ldr(rscratch1, Address(rthread, JavaThread::preempt_alternate_return_offset()));
1432 cbz(rscratch1, not_preempted);
1433 str(zr, Address(rthread, JavaThread::preempt_alternate_return_offset()));
1434 br(rscratch1);
1435
1436 // In case of preemption, this is where we will resume once we finally acquire the monitor.
1437 bind(resume_pc);
1438 restore_after_resume(false /* is_native */);
1439
1440 bind(not_preempted);
1441 }
1442
1443 void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
1444 lea(rscratch1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
1445 blr(rscratch1);
1446 if (is_native) {
1447 // On resume we need to set up the stack as expected
1448 push(dtos);
1449 push(ltos);
1450 }
1451 }
1452
1453 void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr) {
1454 assert_different_registers(obj, rscratch1, mdo_addr.base(), mdo_addr.index());
1455 Label update, next, none;
1456
1457 verify_oop(obj);
1458
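// If obj is null, only record the null_seen bit in the MDO cell and skip the
// type update.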
1459 cbnz(obj, update);
1460 orptr(mdo_addr, TypeEntries::null_seen);
|
462 // Unlock any Java monitors from synchronized blocks.
463 // Apply stack watermark barrier.
464 // Notify JVMTI.
465 // Remove the activation from the stack.
466 //
467 // If there are locked Java monitors
468 //    If throw_monitor_exception
469 //       throws IllegalMonitorStateException
470 //    Else if install_monitor_exception
471 //       installs IllegalMonitorStateException
472 //    Else
473 //       no error processing
474 void InterpreterMacroAssembler::remove_activation(TosState state,
475 bool throw_monitor_exception,
476 bool install_monitor_exception,
477 bool notify_jvmdi) {
478 // Note: The result registers r0 and v0 may be in use for the
479 // result check if this is a synchronized method
480 Label unlocked, unlock, no_unlock;
481
482 #ifdef ASSERT
483 Label not_preempted;
484 ldr(rscratch1, Address(rthread, JavaThread::preempt_alternate_return_offset()));
485 cbz(rscratch1, not_preempted);
486 stop("remove_activation: should not have alternate return address set");
487 bind(not_preempted);
488 #endif /* ASSERT */
489
490 // get the value of _do_not_unlock_if_synchronized into r3
491 const Address do_not_unlock_if_synchronized(rthread,
492 in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
493 ldrb(r3, do_not_unlock_if_synchronized);
494 strb(zr, do_not_unlock_if_synchronized); // reset the flag
495
496 // get method access flags
497 ldr(r1, Address(rfp, frame::interpreter_frame_method_offset * wordSize));
498 ldrh(r2, Address(r1, Method::access_flags_offset()));
499 tbz(r2, exact_log2(JVM_ACC_SYNCHRONIZED), unlocked);
500
501 // Don't unlock anything if the _do_not_unlock_if_synchronized flag
502 // is set.
503 cbnz(r3, no_unlock);
504
505 // unlock monitor
506 push(state); // save result
507
508 // BasicObjectLock will be first in list, since this is a
509 // synchronized method. However, need to check that the object has
1362 // Note: No need to save/restore the rbcp & rlocals pointers since these
1363 // are callee-saved registers and no blocking/GC can happen
1364 // in leaf calls.
1365 #ifdef ASSERT
1366 {
1367 Label L;
1368 ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
1369 cbz(rscratch1, L);
1370 stop("InterpreterMacroAssembler::call_VM_leaf_base:"
1371 " last_sp != nullptr");
1372 bind(L);
1373 }
1374 #endif /* ASSERT */
1375 // super call
1376 MacroAssembler::call_VM_leaf_base(entry_point, number_of_arguments);
1377 }
1378
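// Interpreter-specific call_VM_base. The return_pc label, when provided, is
// forwarded to MacroAssembler::call_VM_base and used to set last_Java_pc for
// the call (see the note in call_VM_preemptable_helper() below), so that a
// preempted frame can later resume at that label.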
1379 void InterpreterMacroAssembler::call_VM_base(Register oop_result,
1380 Register java_thread,
1381 Register last_java_sp,
1382 Label* return_pc,
1383 address entry_point,
1384 int number_of_arguments,
1385 bool check_exceptions) {
1386 // interpreter specific
1387 //
1388 // Note: Restoring the locals pointer (callee-saved) could be avoided, but that
1389 // makes no real difference for these runtime calls, since they are
1390 // slow anyway. The bcp, however, must be saved/restored since it may change
1391 // due to GC.
1392 // assert(java_thread == noreg , "not expecting a precomputed java thread");
1393 save_bcp();
1394 #ifdef ASSERT
1395 {
1396 Label L;
1397 ldr(rscratch1, Address(rfp, frame::interpreter_frame_last_sp_offset * wordSize));
1398 cbz(rscratch1, L);
1399 stop("InterpreterMacroAssembler::call_VM_base:"
1400 " last_sp != nullptr");
1401 bind(L);
1402 }
1403 #endif /* ASSERT */
1404 // super call
1405 MacroAssembler::call_VM_base(oop_result, noreg, last_java_sp,
1406 return_pc, entry_point,
1407 number_of_arguments, check_exceptions);
1408 // interpreter specific
1409 restore_bcp();
1410 restore_locals();
1411 }
1412
1413 void InterpreterMacroAssembler::call_VM_preemptable_helper(Register oop_result,
1414 address entry_point,
1415 int number_of_arguments,
1416 bool check_exceptions) {
1417 assert(InterpreterRuntime::is_preemptable_call(entry_point), "VM call not preemptable, should use call_VM()");
1418 Label resume_pc, not_preempted;
1419
1420 #ifdef ASSERT
1421 {
1422 Label L1, L2;
1423 ldr(rscratch1, Address(rthread, JavaThread::preempt_alternate_return_offset()));
1424 cbz(rscratch1, L1);
1425 stop("call_VM_preemptable_helper: Should not have alternate return address set");
1426 bind(L1);
1427 // We check this counter in patch_return_pc_with_preempt_stub() during freeze.
1428 incrementw(Address(rthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
1429 ldrw(rscratch1, Address(rthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
1430 cmpw(rscratch1, 0);
1431 br(Assembler::GT, L2);
1432 stop("call_VM_preemptable_helper: should be > 0");
1433 bind(L2);
1434 }
1435 #endif /* ASSERT */
1436
1437 // Force freeze slow path.
1438 push_cont_fastpath();
1439
1440 // Make the VM call. In case of preemption, last_Java_pc must hold the address we want to resume at.
1441 // Note: call_VM_base will use the resume_pc label to set last_Java_pc.
1442 call_VM_base(noreg, noreg, noreg, &resume_pc, entry_point, number_of_arguments, false /*check_exceptions*/);
1443
1444 pop_cont_fastpath();
1445
1446 #ifdef ASSERT
1447 {
1448 Label L;
1449 decrementw(Address(rthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
1450 ldrw(rscratch1, Address(rthread, JavaThread::interp_at_preemptable_vmcall_cnt_offset()));
1451 cmpw(rscratch1, 0);
1452 br(Assembler::GE, L);
1453 stop("call_VM_preemptable_helper: should be >= 0");
1454 bind(L);
1455 }
1456 #endif /* ASSERT */
1457
1458 // Check if preempted.
1459 ldr(rscratch1, Address(rthread, JavaThread::preempt_alternate_return_offset()));
1460 cbz(rscratch1, not_preempted);
1461 str(zr, Address(rthread, JavaThread::preempt_alternate_return_offset()));
1462 br(rscratch1);
1463
1464 // In case of preemption, this is where we will resume once the blocking operation (e.g. acquiring a monitor) finally completes.
1465 bind(resume_pc);
1466 restore_after_resume(false /* is_native */);
1467
1468 bind(not_preempted);
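// Exceptions are only checked here, after the preemption bookkeeping above:
// call_VM_base was invoked with check_exceptions=false so that a pending
// exception is forwarded on the path where this frame actually continues
// (hedged reading of why the check is done manually).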
1469 if (check_exceptions) {
1470 // check for pending exceptions
1471 ldr(rscratch1, Address(rthread, in_bytes(Thread::pending_exception_offset())));
1472 Label ok;
1473 cbz(rscratch1, ok);
1474 lea(rscratch1, RuntimeAddress(StubRoutines::forward_exception_entry()));
1475 br(rscratch1);
1476 bind(ok);
1477 }
1478
1479 // get oop result if there is one and reset the value in the thread
1480 if (oop_result->is_valid()) {
1481 get_vm_result_oop(oop_result, rthread);
1482 }
1483 }
1484
1485 static void pass_arg1(MacroAssembler* masm, Register arg) {
1486 if (c_rarg1 != arg) {
1487 masm->mov(c_rarg1, arg);
1488 }
1489 }
1490
1491 static void pass_arg2(MacroAssembler* masm, Register arg) {
1492 if (c_rarg2 != arg) {
1493 masm->mov(c_rarg2, arg);
1494 }
1495 }
1496
1497 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
1498 address entry_point,
1499 Register arg_1,
1500 bool check_exceptions) {
1501 pass_arg1(this, arg_1);
1502 call_VM_preemptable_helper(oop_result, entry_point, 1, check_exceptions);
1503 }
1504
1505 void InterpreterMacroAssembler::call_VM_preemptable(Register oop_result,
1506 address entry_point,
1507 Register arg_1,
1508 Register arg_2,
1509 bool check_exceptions) {
1510 LP64_ONLY(assert_different_registers(arg_1, c_rarg2));
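// arg_2 is moved into place first; the assert above guarantees that writing
// c_rarg2 cannot clobber arg_1, and moving arg_2 before arg_1 also covers the
// case where arg_2 currently lives in c_rarg1.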
1511 pass_arg2(this, arg_2);
1512 pass_arg1(this, arg_1);
1513 call_VM_preemptable_helper(oop_result, entry_point, 2, check_exceptions);
1514 }
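// Illustrative use (hypothetical sketch, not part of this file): the
// interpreter's monitorenter path is expected to go through the preemptable
// variant so a blocked virtual thread can unmount while waiting, e.g.
//   call_VM_preemptable(noreg,
//                       CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
//                       c_rarg1 /* BasicObjectLock* for the monitor */);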
1515
1516 void InterpreterMacroAssembler::restore_after_resume(bool is_native) {
1517 lea(rscratch1, ExternalAddress(Interpreter::cont_resume_interpreter_adapter()));
1518 blr(rscratch1);
1519 if (is_native) {
1520 // On resume we need to set up the stack as expected
1521 push(dtos);
1522 push(ltos);
1523 }
1524 }
1525
1526 void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr) {
1527 assert_different_registers(obj, rscratch1, mdo_addr.base(), mdo_addr.index());
1528 Label update, next, none;
1529
1530 verify_oop(obj);
1531
1532 cbnz(obj, update);
1533 orptr(mdo_addr, TypeEntries::null_seen);