750 Compile* C = Compile::current();
751 assert(C->get_alias_index(cross_check) == C->get_alias_index(tp),
752 "must stay in the original alias category");
753 // The type of the address must be contained in the adr_type,
754 // disregarding "null"-ness.
755 // (We make an exception for TypeRawPtr::BOTTOM, which is a bit bucket.)
756 const TypePtr* tp_notnull = tp->join(TypePtr::NOTNULL)->is_ptr();
757 assert(cross_check->meet(tp_notnull) == cross_check->remove_speculative(),
758 "real address must not escape from expected memory type");
759 }
760 #endif
761 return tp;
762 }
763 }
764
765 uint8_t MemNode::barrier_data(const Node* n) {
766 if (n->is_LoadStore()) {
767 return n->as_LoadStore()->barrier_data();
768 } else if (n->is_Mem()) {
769 return n->as_Mem()->barrier_data();
770 }
771 return 0;
772 }
773
// Gather the facts needed to reason about the memory location accessed by
// 'n': base pointer, constant offset, the allocation that produced the base
// (if visible), the address type, and the compile-time alias index.
// NOTE(review): _alias_idx stays -1 when the address type is missing or
// AnyPtr, so callers must treat -1 as "alias category unknown" — confirm.
AccessAnalyzer::AccessAnalyzer(PhaseValues* phase, MemNode* n)
  : _phase(phase), _n(n), _memory_size(n->memory_size()), _alias_idx(-1) {
  Node* adr = _n->in(MemNode::Address);
  // Ideal_base_and_offset updates _offset in place, so it must be
  // initialized before the call.
  _offset = 0;
  _base = AddPNode::Ideal_base_and_offset(adr, _phase, _offset);
  _maybe_raw = MemNode::check_if_adr_maybe_raw(adr);
  _alloc = AllocateNode::Ideal_allocation(_base);
  _adr_type = _n->adr_type();

  if (_adr_type != nullptr && _adr_type->base() != TypePtr::AnyPtr) {
    // Avoid the cases that will upset Compile::get_alias_index
    _alias_idx = _phase->C->get_alias_index(_adr_type);
    assert(_alias_idx != Compile::AliasIdxTop, "must not be a dead node");
    assert(_alias_idx != Compile::AliasIdxBot || !phase->C->do_aliasing(), "must not be a very wide access");
  }
}
790
791 // Decide whether the memory accessed by '_n' and 'other' may overlap. This function may be used
792 // when we want to walk the memory graph to fold a load, or when we want to hoist a load above a
793 // loop when there are no stores that may overlap with the load inside the loop.
|
750 Compile* C = Compile::current();
751 assert(C->get_alias_index(cross_check) == C->get_alias_index(tp),
752 "must stay in the original alias category");
753 // The type of the address must be contained in the adr_type,
754 // disregarding "null"-ness.
755 // (We make an exception for TypeRawPtr::BOTTOM, which is a bit bucket.)
756 const TypePtr* tp_notnull = tp->join(TypePtr::NOTNULL)->is_ptr();
757 assert(cross_check->meet(tp_notnull) == cross_check->remove_speculative(),
758 "real address must not escape from expected memory type");
759 }
760 #endif
761 return tp;
762 }
763 }
764
765 uint8_t MemNode::barrier_data(const Node* n) {
766 if (n->is_LoadStore()) {
767 return n->as_LoadStore()->barrier_data();
768 } else if (n->is_Mem()) {
769 return n->as_Mem()->barrier_data();
770 } else if (n->is_DecodeN() && n->in(1)->is_Load()) {
771 return n->in(1)->as_Load()->barrier_data();
772 }
773 return 0;
774 }
775
776 MemNode::MemOrd MemNode::memory_order(const Node* n) {
777 if (n->is_Load()) {
778 return n->as_Load()->memory_order();
779 } else if (n->is_Store()) {
780 return n->as_Store()->memory_order();;
781 }
782 return MemNode::MemOrd::seqcst;
783 }
784
// Gather the facts needed to reason about the memory location accessed by
// 'n': base pointer, constant offset, the allocation that produced the base
// (if visible), the address type, and the compile-time alias index.
// NOTE(review): _alias_idx stays -1 when the address type is missing or
// AnyPtr, so callers must treat -1 as "alias category unknown" — confirm.
AccessAnalyzer::AccessAnalyzer(PhaseValues* phase, MemNode* n)
  : _phase(phase), _n(n), _memory_size(n->memory_size()), _alias_idx(-1) {
  Node* adr = _n->in(MemNode::Address);
  // Ideal_base_and_offset updates _offset in place, so it must be
  // initialized before the call.
  _offset = 0;
  _base = AddPNode::Ideal_base_and_offset(adr, _phase, _offset);
  _maybe_raw = MemNode::check_if_adr_maybe_raw(adr);
  _alloc = AllocateNode::Ideal_allocation(_base);
  _adr_type = _n->adr_type();

  if (_adr_type != nullptr && _adr_type->base() != TypePtr::AnyPtr) {
    // Avoid the cases that will upset Compile::get_alias_index
    _alias_idx = _phase->C->get_alias_index(_adr_type);
    assert(_alias_idx != Compile::AliasIdxTop, "must not be a dead node");
    assert(_alias_idx != Compile::AliasIdxBot || !phase->C->do_aliasing(), "must not be a very wide access");
  }
}
801
802 // Decide whether the memory accessed by '_n' and 'other' may overlap. This function may be used
803 // when we want to walk the memory graph to fold a load, or when we want to hoist a load above a
804 // loop when there are no stores that may overlap with the load inside the loop.
|