573 #endif
574
575 xorl(tmpReg, tmpReg); // Set ZF == 1
576
577 bind(NO_COUNT);
578 }
579
580 void C2_MacroAssembler::fast_lock_lightweight(Register obj, Register box, Register rax_reg,
581 Register t, Register thread) {
582 assert(LockingMode == LM_LIGHTWEIGHT, "must be");
583 assert(rax_reg == rax, "Used for CAS");
584 assert_different_registers(obj, box, rax_reg, t, thread);
585
586 // Handle inflated monitor.
587 Label inflated;
588 // Finish fast lock successfully. ZF value is irrelevant.
589 Label locked;
590 // Finish fast lock unsuccessfully. MUST jump with ZF == 0
591 Label slow_path;
592
593 if (DiagnoseSyncOnValueBasedClasses != 0) {
594 load_klass(rax_reg, obj, t);
595 movl(rax_reg, Address(rax_reg, Klass::access_flags_offset()));
596 testl(rax_reg, JVM_ACC_IS_VALUE_BASED_CLASS);
597 jcc(Assembler::notZero, slow_path);
598 }
599
600 const Register mark = t;
601
602 { // Lightweight Lock
603
604 Label push;
605
606 const Register top = box;
607
608 // Load the mark.
609 movptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
610
611 // Prefetch top.
612 movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
613
614 // Check for monitor (0b10).
615 testptr(mark, markWord::monitor_value);
616 jcc(Assembler::notZero, inflated);
617
618 // Check if lock-stack is full.
619 cmpl(top, LockStack::end_offset() - 1);
620 jcc(Assembler::greater, slow_path);
621
622 // Check if recursive.
623 cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
624 jccb(Assembler::equal, push);
625
626 // Try to lock. Transition lock bits 0b01 => 0b00
627 movptr(rax_reg, mark);
628 orptr(rax_reg, markWord::unlocked_value);
629 andptr(mark, ~(int32_t)markWord::unlocked_value);
630 lock(); cmpxchgptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
631 jcc(Assembler::notEqual, slow_path);
632
633 bind(push);
634 // After successful lock, push object on lock-stack.
635 movptr(Address(thread, top), obj);
636 addl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
637 jmpb(locked);
638 }
639
640 { // Handle inflated monitor.
641 bind(inflated);
642
643 const Register tagged_monitor = mark;
644
645 // CAS owner (null => current thread).
646 xorptr(rax_reg, rax_reg);
647 lock(); cmpxchgptr(thread, Address(tagged_monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)));
648 jccb(Assembler::equal, locked);
649
650 // Check if recursive.
651 cmpptr(thread, rax_reg);
652 jccb(Assembler::notEqual, slow_path);
653
654 // Recursive.
655 increment(Address(tagged_monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions)));
656 }
657
658 bind(locked);
659 increment(Address(thread, JavaThread::held_monitor_count_offset()));
660 // Set ZF = 1
661 xorl(rax_reg, rax_reg);
662
663 #ifdef ASSERT
664 // Check that locked label is reached with ZF set.
665 Label zf_correct;
666 Label zf_bad_zero;
667 jcc(Assembler::zero, zf_correct);
668 jmp(zf_bad_zero);
669 #endif
670
671 bind(slow_path);
672 #ifdef ASSERT
673 // Check that slow_path label is reached with ZF not set.
674 jcc(Assembler::notZero, zf_correct);
675 stop("Fast Lock ZF != 0");
677 stop("Fast Lock ZF != 1");
678 bind(zf_correct);
679 #endif
680 // C2 uses the value of ZF to determine the continuation.
681 }
682
// Emit the C2 fast path for lightweight (LM_LIGHTWEIGHT) monitor unlock.
// On exit C2 branches on ZF: ZF == 1 => unlocked, ZF == 0 => slow path.
// NOTE(review): this span appears truncated by extraction — the function's
// tail is not visible here; code left byte-identical.
void C2_MacroAssembler::fast_unlock_lightweight(Register obj, Register reg_rax, Register t, Register thread) {
  assert(LockingMode == LM_LIGHTWEIGHT, "must be");
  assert(reg_rax == rax, "Used for CAS");
  assert_different_registers(obj, reg_rax, t);

  // Handle inflated monitor.
  Label inflated, inflated_check_lock_stack;
  // Finish fast unlock successfully. MUST jump with ZF == 1
  Label unlocked;

  // Assume success.
  decrement(Address(thread, JavaThread::held_monitor_count_offset()));

  const Register mark = t;
  const Register top = reg_rax;

  Label dummy;
  C2FastUnlockLightweightStub* stub = nullptr;

  // During scratch-size emission no stub may be allocated/registered;
  // the dummy label stands in for the stub labels in that case.
  if (!Compile::current()->output()->in_scratch_emit_size()) {
    stub = new (Compile::current()->comp_arena()) C2FastUnlockLightweightStub(obj, mark, reg_rax, thread);
    Compile::current()->output()->add_stub(stub);
  }

  Label& push_and_slow_path = stub == nullptr ? dummy : stub->push_and_slow_path();
  Label& check_successor = stub == nullptr ? dummy : stub->check_successor();

  { // Lightweight Unlock

    // Load top.
    movl(top, Address(thread, JavaThread::lock_stack_top_offset()));

    // Prefetch mark.
    movptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));

    // Check if obj is top of lock-stack.
    cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
    // Top of lock stack was not obj. Must be monitor.
    jcc(Assembler::notEqual, inflated_check_lock_stack);

    // Pop lock-stack.
    DEBUG_ONLY(movptr(Address(thread, top, Address::times_1, -oopSize), 0);)
    subl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);

    // Check if recursive: obj occurs again one slot further down.
    cmpptr(obj, Address(thread, top, Address::times_1, -2 * oopSize));
    jcc(Assembler::equal, unlocked);

    // We elide the monitor check, let the CAS fail instead.

    // Try to unlock. Transition lock bits 0b00 => 0b01
    movptr(reg_rax, mark);
    andptr(reg_rax, ~(int32_t)markWord::lock_mask);
    orptr(mark, markWord::unlocked_value);
    lock(); cmpxchgptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
    jcc(Assembler::notEqual, push_and_slow_path);
    jmp(unlocked);
  }


  { // Handle inflated monitor.
    bind(inflated_check_lock_stack);
#ifdef ASSERT
    Label check_done;
    // Walk the lock-stack downward and verify obj is not on it anywhere.
    subl(top, oopSize);
    cmpl(top, in_bytes(JavaThread::lock_stack_base_offset()));
    jcc(Assembler::below, check_done);
    cmpptr(obj, Address(thread, top));
    jccb(Assembler::notEqual, inflated_check_lock_stack);
    stop("Fast Unlock lock on stack");
    bind(check_done);
    testptr(mark, markWord::monitor_value);
    jccb(Assembler::notZero, inflated);
    stop("Fast Unlock not monitor");
#endif

    bind(inflated);

    // mark contains the tagged ObjectMonitor*.
    const Register monitor = mark;

#ifndef _LP64
    // Check if recursive.
    xorptr(reg_rax, reg_rax);
    orptr(reg_rax, Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions)));
    jcc(Assembler::notZero, check_successor);

    // Check if the entry lists are empty.
    movptr(reg_rax, Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList)));
    orptr(reg_rax, Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq)));
    jcc(Assembler::notZero, check_successor);

    // Release lock.
    movptr(Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), NULL_WORD);
#else // _LP64
    Label recursive;

    // Check if recursive.
    cmpptr(Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions)), 0);
    jccb(Assembler::notEqual, recursive);

    // Check if the entry lists are empty.
    movptr(reg_rax, Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(cxq)));
    orptr(reg_rax, Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(EntryList)));
    jcc(Assembler::notZero, check_successor);

    // Release lock.
    movptr(Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(owner)), NULL_WORD);
    jmpb(unlocked);

    // Recursive unlock.
    bind(recursive);
    decrement(Address(monitor, OM_OFFSET_NO_MONITOR_VALUE_TAG(recursions)));
    // Sets ZF == 1 so the unlocked continuation sees success.
    xorl(t, t);
#endif
  }

  bind(unlocked);
  if (stub != nullptr) {
    bind(stub->unlocked_continuation());
  }

#ifdef ASSERT
  // Check that unlocked label is reached with ZF set.
  Label zf_correct;
  jcc(Assembler::zero, zf_correct);
  stop("Fast Unlock ZF != 1");
#endif

  if (stub != nullptr) {
    bind(stub->slow_path_continuation());
  }
#ifdef ASSERT
  // Check that stub->continuation() label is reached with ZF not set.
  jccb(Assembler::notZero, zf_correct);
6306 // Perform above steps with lane comparison expression as INDEX >= 48 && INDEX < 64
6307 // and broadcasting third 128 bit lane.
6308 evpcmpb(ktmp, k0, shuffle, xtmp1, Assembler::nlt, true, vlen_enc);
6309 vpsllq(xtmp2, xtmp2, 0x1, vlen_enc);
6310 evpcmpb(ktmp, ktmp, shuffle, xtmp2, Assembler::lt, true, vlen_enc);
6311 evshufi64x2(xtmp3, src, src, 0xFF, vlen_enc);
6312 evpshufb(dst, ktmp, xtmp3, shuffle, true, vlen_enc);
6313 }
6314
6315 void C2_MacroAssembler::vector_rearrange_int_float(BasicType bt, XMMRegister dst,
6316 XMMRegister shuffle, XMMRegister src, int vlen_enc) {
6317 if (vlen_enc == AVX_128bit) {
6318 vpermilps(dst, src, shuffle, vlen_enc);
6319 } else if (bt == T_INT) {
6320 vpermd(dst, shuffle, src, vlen_enc);
6321 } else {
6322 assert(bt == T_FLOAT, "");
6323 vpermps(dst, shuffle, src, vlen_enc);
6324 }
6325 }
|
573 #endif
574
575 xorl(tmpReg, tmpReg); // Set ZF == 1
576
577 bind(NO_COUNT);
578 }
579
580 void C2_MacroAssembler::fast_lock_lightweight(Register obj, Register box, Register rax_reg,
581 Register t, Register thread) {
582 assert(LockingMode == LM_LIGHTWEIGHT, "must be");
583 assert(rax_reg == rax, "Used for CAS");
584 assert_different_registers(obj, box, rax_reg, t, thread);
585
586 // Handle inflated monitor.
587 Label inflated;
588 // Finish fast lock successfully. ZF value is irrelevant.
589 Label locked;
590 // Finish fast lock unsuccessfully. MUST jump with ZF == 0
591 Label slow_path;
592
593 if (UseObjectMonitorTable) {
594 // Clear cache in case fast locking succeeds.
595 movptr(Address(box, BasicLock::object_monitor_cache_offset_in_bytes()), 0);
596 }
597
598 if (DiagnoseSyncOnValueBasedClasses != 0) {
599 load_klass(rax_reg, obj, t);
600 movl(rax_reg, Address(rax_reg, Klass::access_flags_offset()));
601 testl(rax_reg, JVM_ACC_IS_VALUE_BASED_CLASS);
602 jcc(Assembler::notZero, slow_path);
603 }
604
605 const Register mark = t;
606
607 { // Lightweight Lock
608
609 Label push;
610
611 const Register top = UseObjectMonitorTable ? rax_reg : box;
612
613 // Load the mark.
614 movptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
615
616 // Prefetch top.
617 movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
618
619 // Check for monitor (0b10).
620 testptr(mark, markWord::monitor_value);
621 jcc(Assembler::notZero, inflated);
622
623 // Check if lock-stack is full.
624 cmpl(top, LockStack::end_offset() - 1);
625 jcc(Assembler::greater, slow_path);
626
627 // Check if recursive.
628 cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
629 jccb(Assembler::equal, push);
630
631 // Try to lock. Transition lock bits 0b01 => 0b00
632 movptr(rax_reg, mark);
633 orptr(rax_reg, markWord::unlocked_value);
634 andptr(mark, ~(int32_t)markWord::unlocked_value);
635 lock(); cmpxchgptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
636 jcc(Assembler::notEqual, slow_path);
637
638 if (UseObjectMonitorTable) {
639 // Need to reload top, clobbered by CAS.
640 movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
641 }
642 bind(push);
643 // After successful lock, push object on lock-stack.
644 movptr(Address(thread, top), obj);
645 addl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
646 jmpb(locked);
647 }
648
649 { // Handle inflated monitor.
650 bind(inflated);
651
652 const Register monitor = t;
653
654 if (!UseObjectMonitorTable) {
655 assert(mark == monitor, "should be the same here");
656 } else {
657 // Uses ObjectMonitorTable. Look for the monitor in the om_cache.
658 // Fetch ObjectMonitor* from the cache or take the slow-path.
659 Label monitor_found;
660
661 // Load cache address
662 lea(t, Address(thread, JavaThread::om_cache_oops_offset()));
663
664 const int num_unrolled = 2;
665 for (int i = 0; i < num_unrolled; i++) {
666 cmpptr(obj, Address(t));
667 jccb(Assembler::equal, monitor_found);
668 if (i + 1 != num_unrolled) {
669 increment(t, in_bytes(OMCache::oop_to_oop_difference()));
670 }
671 }
672
673 // Loop after unrolling, advance iterator.
674 increment(t, in_bytes(OMCache::oop_to_oop_difference()));
675
676 Label loop;
677
678 // Search for obj in cache.
679 bind(loop);
680
681 // Check for match.
682 cmpptr(obj, Address(t));
683 jccb(Assembler::equal, monitor_found);
684
685 // Search until null encountered, guaranteed _null_sentinel at end.
686 cmpptr(Address(t), 1);
687 jcc(Assembler::below, slow_path); // 0 check, but with ZF=0 when *t == 0
688 increment(t, in_bytes(OMCache::oop_to_oop_difference()));
689 jmpb(loop);
690
691 // Cache hit.
692 bind(monitor_found);
693 movptr(monitor, Address(t, OMCache::oop_to_monitor_difference()));
694 }
695 const ByteSize monitor_tag = in_ByteSize(UseObjectMonitorTable ? 0 : checked_cast<int>(markWord::monitor_value));
696 const Address recursions_address{monitor, ObjectMonitor::recursions_offset() - monitor_tag};
697 const Address owner_address{monitor, ObjectMonitor::owner_offset() - monitor_tag};
698
699 Label monitor_locked;
700 // Lock the monitor.
701
702 // CAS owner (null => current thread).
703 xorptr(rax_reg, rax_reg);
704 lock(); cmpxchgptr(thread, owner_address);
705 jccb(Assembler::equal, monitor_locked);
706
707 // Check if recursive.
708 cmpptr(thread, rax_reg);
709 jccb(Assembler::notEqual, slow_path);
710
711 // Recursive.
712 increment(recursions_address);
713
714 bind(monitor_locked);
715 if (UseObjectMonitorTable) {
716 // Cache the monitor for unlock
717 movptr(Address(box, BasicLock::object_monitor_cache_offset_in_bytes()), monitor);
718 }
719 }
720
721 bind(locked);
722 increment(Address(thread, JavaThread::held_monitor_count_offset()));
723 // Set ZF = 1
724 xorl(rax_reg, rax_reg);
725
726 #ifdef ASSERT
727 // Check that locked label is reached with ZF set.
728 Label zf_correct;
729 Label zf_bad_zero;
730 jcc(Assembler::zero, zf_correct);
731 jmp(zf_bad_zero);
732 #endif
733
734 bind(slow_path);
735 #ifdef ASSERT
736 // Check that slow_path label is reached with ZF not set.
737 jcc(Assembler::notZero, zf_correct);
738 stop("Fast Lock ZF != 0");
740 stop("Fast Lock ZF != 1");
741 bind(zf_correct);
742 #endif
743 // C2 uses the value of ZF to determine the continuation.
744 }
745
// Emit the C2 fast path for lightweight (LM_LIGHTWEIGHT) monitor unlock.
// On exit C2 branches on ZF: ZF == 1 => unlocked, ZF == 0 => slow path.
// With UseObjectMonitorTable the ObjectMonitor* is read back from the
// BasicLock cache written by fast_lock_lightweight.
// NOTE(review): this span appears truncated by extraction — the function's
// tail is not visible here; code left byte-identical.
void C2_MacroAssembler::fast_unlock_lightweight(Register obj, Register reg_rax, Register t, Register thread) {
  assert(LockingMode == LM_LIGHTWEIGHT, "must be");
  assert(reg_rax == rax, "Used for CAS");
  assert_different_registers(obj, reg_rax, t);

  // Handle inflated monitor.
  Label inflated, inflated_check_lock_stack;
  // Finish fast unlock successfully. MUST jump with ZF == 1
  Label unlocked;

  // Assume success.
  decrement(Address(thread, JavaThread::held_monitor_count_offset()));

  const Register mark = t;
  const Register monitor = t;
  const Register top = UseObjectMonitorTable ? t : reg_rax;
  const Register box = reg_rax;

  Label dummy;
  C2FastUnlockLightweightStub* stub = nullptr;

  // During scratch-size emission no stub may be allocated/registered;
  // the dummy label stands in for the stub labels in that case.
  if (!Compile::current()->output()->in_scratch_emit_size()) {
    stub = new (Compile::current()->comp_arena()) C2FastUnlockLightweightStub(obj, mark, reg_rax, thread);
    Compile::current()->output()->add_stub(stub);
  }

  Label& push_and_slow_path = stub == nullptr ? dummy : stub->push_and_slow_path();
  Label& check_successor = stub == nullptr ? dummy : stub->check_successor();
  Label& slow_path = stub == nullptr ? dummy : stub->slow_path();

  { // Lightweight Unlock

    // Load top.
    movl(top, Address(thread, JavaThread::lock_stack_top_offset()));

    if (!UseObjectMonitorTable) {
      // Prefetch mark.
      movptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
    }

    // Check if obj is top of lock-stack.
    cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
    // Top of lock stack was not obj. Must be monitor.
    jcc(Assembler::notEqual, inflated_check_lock_stack);

    // Pop lock-stack.
    DEBUG_ONLY(movptr(Address(thread, top, Address::times_1, -oopSize), 0);)
    subl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);

    // Check if recursive: obj occurs again one slot further down.
    cmpptr(obj, Address(thread, top, Address::times_1, -2 * oopSize));
    jcc(Assembler::equal, unlocked);

    // We elide the monitor check, let the CAS fail instead.

    if (UseObjectMonitorTable) {
      // Load mark.
      movptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
    }

    // Try to unlock. Transition lock bits 0b00 => 0b01
    movptr(reg_rax, mark);
    andptr(reg_rax, ~(int32_t)markWord::lock_mask);
    orptr(mark, markWord::unlocked_value);
    lock(); cmpxchgptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
    jcc(Assembler::notEqual, push_and_slow_path);
    jmp(unlocked);
  }


  { // Handle inflated monitor.
    bind(inflated_check_lock_stack);
#ifdef ASSERT
    Label check_done;
    // Walk the lock-stack downward and verify obj is not on it anywhere.
    subl(top, oopSize);
    cmpl(top, in_bytes(JavaThread::lock_stack_base_offset()));
    jcc(Assembler::below, check_done);
    cmpptr(obj, Address(thread, top));
    jccb(Assembler::notEqual, inflated_check_lock_stack);
    stop("Fast Unlock lock on stack");
    bind(check_done);
    if (UseObjectMonitorTable) {
      movptr(mark, Address(obj, oopDesc::mark_offset_in_bytes()));
    }
    testptr(mark, markWord::monitor_value);
    jccb(Assembler::notZero, inflated);
    stop("Fast Unlock not monitor");
#endif

    bind(inflated);

    if (!UseObjectMonitorTable) {
      assert(mark == monitor, "should be the same here");
    } else {
      // Uses ObjectMonitorTable. Look for the monitor in our BasicLock on the stack.
      movptr(monitor, Address(box, BasicLock::object_monitor_cache_offset_in_bytes()));
      // null check with ZF == 0, no valid pointer below alignof(ObjectMonitor*)
      cmpptr(monitor, alignof(ObjectMonitor*));
      jcc(Assembler::below, slow_path);
    }
    // Without the table, monitor still carries the markWord tag bits.
    const ByteSize monitor_tag = in_ByteSize(UseObjectMonitorTable ? 0 : checked_cast<int>(markWord::monitor_value));
    const Address recursions_address{monitor, ObjectMonitor::recursions_offset() - monitor_tag};
    const Address cxq_address{monitor, ObjectMonitor::cxq_offset() - monitor_tag};
    const Address EntryList_address{monitor, ObjectMonitor::EntryList_offset() - monitor_tag};
    const Address owner_address{monitor, ObjectMonitor::owner_offset() - monitor_tag};

    Label recursive;

    // Check if recursive.
    cmpptr(recursions_address, 0);
    jccb(Assembler::notEqual, recursive);

    // Check if the entry lists are empty.
    movptr(reg_rax, cxq_address);
    orptr(reg_rax, EntryList_address);
    jcc(Assembler::notZero, check_successor);

    // Release lock.
    movptr(owner_address, NULL_WORD);
    jmpb(unlocked);

    // Recursive unlock.
    bind(recursive);
    decrement(recursions_address);
    // Sets ZF == 1 so the unlocked continuation sees success.
    xorl(t, t);
  }

  bind(unlocked);
  if (stub != nullptr) {
    bind(stub->unlocked_continuation());
  }

#ifdef ASSERT
  // Check that unlocked label is reached with ZF set.
  Label zf_correct;
  jcc(Assembler::zero, zf_correct);
  stop("Fast Unlock ZF != 1");
#endif

  if (stub != nullptr) {
    bind(stub->slow_path_continuation());
  }
#ifdef ASSERT
  // Check that stub->continuation() label is reached with ZF not set.
  jccb(Assembler::notZero, zf_correct);
6379 // Perform above steps with lane comparison expression as INDEX >= 48 && INDEX < 64
6380 // and broadcasting third 128 bit lane.
6381 evpcmpb(ktmp, k0, shuffle, xtmp1, Assembler::nlt, true, vlen_enc);
6382 vpsllq(xtmp2, xtmp2, 0x1, vlen_enc);
6383 evpcmpb(ktmp, ktmp, shuffle, xtmp2, Assembler::lt, true, vlen_enc);
6384 evshufi64x2(xtmp3, src, src, 0xFF, vlen_enc);
6385 evpshufb(dst, ktmp, xtmp3, shuffle, true, vlen_enc);
6386 }
6387
6388 void C2_MacroAssembler::vector_rearrange_int_float(BasicType bt, XMMRegister dst,
6389 XMMRegister shuffle, XMMRegister src, int vlen_enc) {
6390 if (vlen_enc == AVX_128bit) {
6391 vpermilps(dst, src, shuffle, vlen_enc);
6392 } else if (bt == T_INT) {
6393 vpermd(dst, shuffle, src, vlen_enc);
6394 } else {
6395 assert(bt == T_FLOAT, "");
6396 vpermps(dst, shuffle, src, vlen_enc);
6397 }
6398 }
6399
6400 #ifdef _LP64
6401 void C2_MacroAssembler::load_nklass_compact_c2(Register dst, Register obj, Register index, Address::ScaleFactor scale, int disp) {
6402 // Note: Don't clobber obj anywhere in that method!
6403
6404 // The incoming address is pointing into obj-start + klass_offset_in_bytes. We need to extract
6405 // obj-start, so that we can load from the object's mark-word instead. Usually the address
6406 // comes as obj-start in obj and klass_offset_in_bytes in disp. However, sometimes C2
6407 // emits code that pre-computes obj-start + klass_offset_in_bytes into a register, and
6408 // then passes that register as obj and 0 in disp. The following code extracts the base
6409 // and offset to load the mark-word.
6410 int offset = oopDesc::mark_offset_in_bytes() + disp - oopDesc::klass_offset_in_bytes();
6411 movq(dst, Address(obj, index, scale, offset));
6412 shrq(dst, markWord::klass_shift);
6413 }
6414 #endif
|