
  return start;
}

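// Generates the stub that thaws a continuation, i.e. copies frames that were
// previously frozen to the heap back onto the stack so execution can resume.
// thaw_top is used when a continuation is (re)mounted; the return-barrier
// kinds are entered when a method returns into a frozen continuation and must
// therefore preserve that method's return value (R3_RET / F1_RET below).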
address generate_cont_thaw(const char* label, Continuation::thaw_kind kind) {
  if (!Continuations::enabled()) return nullptr;

  bool return_barrier = Continuation::is_thaw_return_barrier(kind);
  bool return_barrier_exception = Continuation::is_thaw_return_barrier_exception(kind);

  StubCodeMark mark(this, "StubRoutines", label);

  Register tmp1 = R10_ARG8;
  Register tmp2 = R9_ARG7;
  Register tmp3 = R8_ARG6;
  Register nvtmp = R15_esp;   // nonvolatile tmp register
  FloatRegister nvftmp = F20; // nonvolatile fp tmp register

  address start = __ pc();

  if (kind == Continuation::thaw_top) {
    __ clobber_nonvolatile_registers(); // Except R16_thread and R29_TOC
  }

  if (return_barrier) {
    __ mr(nvtmp, R3_RET); __ fmr(nvftmp, F1_RET); // preserve possible return value from a method returning to the return barrier
    DEBUG_ONLY(__ ld_ptr(tmp1, _abi0(callers_sp), R1_SP);)
    __ ld_ptr(R1_SP, JavaThread::cont_entry_offset(), R16_thread);
#ifdef ASSERT
    __ ld_ptr(tmp2, _abi0(callers_sp), R1_SP);
    __ cmpd(CCR0, tmp1, tmp2);
    __ asm_assert_eq(FILE_AND_LINE ": callers sp is corrupt");
#endif
  }
#ifdef ASSERT
  __ ld_ptr(tmp1, JavaThread::cont_entry_offset(), R16_thread);
  __ cmpd(CCR0, R1_SP, tmp1);
  __ asm_assert_eq(FILE_AND_LINE ": incorrect R1_SP");
#endif

  __ li(R4_ARG2, return_barrier ? 1 : 0);
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, Continuation::prepare_thaw), R16_thread, R4_ARG2);
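  // The result of prepare_thaw (R3_RET) is the stack space required for the
  // frames being thawed; the code elided below uses it when carrying out the
  // actual thaw.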

#ifdef ASSERT
  // ... (code elided here) ...
  }
  __ blr();

  return start;
}

address generate_cont_thaw() {
  return generate_cont_thaw("Cont thaw", Continuation::thaw_top);
}

// TODO: will probably need multiple return barriers depending on return type

address generate_cont_returnBarrier() {
  return generate_cont_thaw("Cont thaw return barrier", Continuation::thaw_return_barrier);
}

address generate_cont_returnBarrier_exception() {
  return generate_cont_thaw("Cont thaw return barrier exception", Continuation::thaw_return_barrier_exception);
}

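// Generates the stub a continuation runs after it has been preempted, i.e.
// after its frames have been frozen to the heap while blocked (e.g. on a
// monitor). It drops the frames that were copied and either returns to
// Continuation.run() to unmount, or, if the preemption was cancelled, jumps
// to the thaw call to resume execution right away.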
address generate_cont_preempt_stub() {
  if (!Continuations::enabled()) return nullptr;
  StubCodeMark mark(this, "StubRoutines", "Continuation preempt stub");
  address start = __ pc();

  __ clobber_nonvolatile_registers(); // Except R16_thread and R29_TOC

  __ reset_last_Java_frame(false /*check_last_java_sp*/);

  // Set sp to enterSpecial frame, i.e. remove all frames copied into the heap.
  __ ld_ptr(R1_SP, JavaThread::cont_entry_offset(), R16_thread);

  Label preemption_cancelled;
  __ lbz(R11_scratch1, in_bytes(JavaThread::preemption_cancelled_offset()), R16_thread);
  __ cmpwi(CCR0, R11_scratch1, 0);
  __ bne(CCR0, preemption_cancelled);

  // Remove enterSpecial frame from the stack and return to Continuation.run() to unmount.
  SharedRuntime::continuation_enter_cleanup(_masm);
  __ pop_frame();
  __ restore_LR(R11_scratch1);
  __ blr();

  // We acquired the monitor after freezing the frames so call thaw to continue execution.
  __ bind(preemption_cancelled);
  __ li(R11_scratch1, 0); // false
  __ stb(R11_scratch1, in_bytes(JavaThread::preemption_cancelled_offset()), R16_thread);
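  // Jump back to the recorded thaw call pc so the frames just frozen are
  // thawed again and execution continues.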
  int simm16_offs = __ load_const_optimized(R11_scratch1, ContinuationEntry::thaw_call_pc_address(), R0, true);
  __ ld(R11_scratch1, simm16_offs, R11_scratch1);
  __ mtctr(R11_scratch1);
  __ bctr();

  return start;
}

// exception handler for upcall stubs
address generate_upcall_stub_exception_handler() {
  StubCodeMark mark(this, "StubRoutines", "upcall stub exception handler");
  address start = __ pc();

  // Native caller has no idea how to handle exceptions,
  // so we just crash here. Up to callee to catch exceptions.
  __ verify_oop(R3_ARG1);
  __ load_const_optimized(R12_scratch2, CAST_FROM_FN_PTR(uint64_t, UpcallLinker::handle_uncaught_exception), R0);
  __ call_c(R12_scratch2);
  __ should_not_reach_here();

  return start;
}

// load Method* target of MethodHandle
// R3_ARG1 = jobject receiver
// R19_method = result Method*
address generate_upcall_stub_load_target() {

  // ... (code elided; the CRC32 stub setup below belongs to a different stub-generation method) ...
  }

  // CRC32 Intrinsics.
  if (UseCRC32Intrinsics) {
    StubRoutines::_crc_table_adr = StubRoutines::ppc::generate_crc_constants(REVERSE_CRC32_POLY);
    StubRoutines::_updateBytesCRC32 = generate_CRC32_updateBytes(false);
  }

  // CRC32C Intrinsics.
  if (UseCRC32CIntrinsics) {
    StubRoutines::_crc32c_table_addr = StubRoutines::ppc::generate_crc_constants(REVERSE_CRC32C_POLY);
    StubRoutines::_updateBytesCRC32C = generate_CRC32_updateBytes(true);
  }
}

void generate_continuation_stubs() {
  // Continuation stubs:
  StubRoutines::_cont_thaw = generate_cont_thaw();
  StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
  StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
  StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
}

void generate_final_stubs() {
  // Generates all stubs and initializes the entry points

  // support for verify_oop (must happen after universe_init)
  StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();

  // nmethod entry barriers for concurrent class unloading
  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs_nm != nullptr) {
    StubRoutines::_method_entry_barrier = generate_method_entry_barrier();
  }

  // arraycopy stubs used by compilers
  generate_arraycopy_stubs();

  if (UseSecondarySupersTable) {
    StubRoutines::_lookup_secondary_supers_table_slow_path_stub = generate_lookup_secondary_supers_table_slow_path_stub();
    if (!InlineSecondarySupersTest) {