112 // no_ctrl, but that doesn't buy much latitude.
113 Node* card_val = __ load( __ ctrl(), card_adr, TypeInt::BYTE, T_BYTE, adr_type);
114 __ if_then(card_val, BoolTest::ne, dirty);
115 }
116
117 // Smash dirty value into card
118 __ store(__ ctrl(), card_adr, dirty, T_BYTE, adr_type, MemNode::unordered);
119
120 if (UseCondCardMark) {
121 __ end_if();
122 }
123
124 // Final sync IdealKit and GraphKit.
125 kit->final_sync(ideal);
126 }
127
// Accessor for the ReduceInitialCardMarks global flag: when true, card marks
// for stores into newly-allocated objects may be elided/deferred by C2.
// NOTE(review): flag semantics inferred from its use elsewhere — confirm
// against the flag's declaration.
128 bool CardTableBarrierSetC2::use_ReduceInitialCardMarks() const {
129 return ReduceInitialCardMarks;
130 }
131
// Removes a dead card-mark barrier subgraph hanging off the given CastP2X
// node (the oop-address-to-word cast feeding the card address computation).
// Expected shape: CastP2X -> shift (card index) -> AddP (card address) ->
// card memory ops.  unique_out() asserts the cast and the shift each have
// exactly one user, so this version assumes a single card address.
132 void CardTableBarrierSetC2::eliminate_gc_barrier(PhaseMacroExpand* macro, Node* node) const {
133 assert(node->Opcode() == Op_CastP2X, "ConvP2XNode required");
134 Node *shift = node->unique_out();
135 Node *addp = shift->unique_out();
// Iterate the use edges backwards so replace_node() can delete out-edges
// without invalidating the remaining iteration positions.
136 for (DUIterator_Last jmin, j = addp->last_outs(jmin); j >= jmin; --j) {
137 Node *mem = addp->last_out(j);
// Under UseCondCardMark the barrier also emitted a LoadB of the card byte
// (the "is it already dirty?" check) — see the conditional load in the
// barrier-emission code earlier in this file.
138 if (UseCondCardMark && mem->is_Load()) {
139 assert(mem->Opcode() == Op_LoadB, "unexpected code shape");
140 // The load is checking if the card has been written so
141 // replace it with zero to fold the test.
142 macro->replace_node(mem, macro->intcon(0));
143 continue;
144 }
// Any other user must be the card store; splice it out by rerouting its
// users to its incoming memory state.
145 assert(mem->is_Store(), "store required");
146 macro->replace_node(mem, mem->in(MemNode::Memory));
147 }
148 }
149
150 bool CardTableBarrierSetC2::array_copy_requires_gc_barriers(bool tightly_coupled_alloc, BasicType type, bool is_clone, bool is_clone_instance, ArrayCopyPhase phase) const {
151 bool is_oop = is_reference_type(type);
152 return is_oop && (!tightly_coupled_alloc || !use_ReduceInitialCardMarks());
153 }
|
112 // no_ctrl, but that doesn't buy much latitude.
113 Node* card_val = __ load( __ ctrl(), card_adr, TypeInt::BYTE, T_BYTE, adr_type);
114 __ if_then(card_val, BoolTest::ne, dirty);
115 }
116
117 // Smash dirty value into card
118 __ store(__ ctrl(), card_adr, dirty, T_BYTE, adr_type, MemNode::unordered);
119
120 if (UseCondCardMark) {
121 __ end_if();
122 }
123
124 // Final sync IdealKit and GraphKit.
125 kit->final_sync(ideal);
126 }
127
// Accessor for the ReduceInitialCardMarks global flag: when true, card marks
// for stores into newly-allocated objects may be elided/deferred by C2.
// NOTE(review): flag semantics inferred from its use elsewhere — confirm
// against the flag's declaration.
128 bool CardTableBarrierSetC2::use_ReduceInitialCardMarks() const {
129 return ReduceInitialCardMarks;
130 }
131
// Removes a dead card-mark barrier subgraph hanging off the given CastP2X
// node (the oop-address-to-word cast feeding the card address computation).
// Unlike the unique_out()-based variant, this walks ALL users at each level
// (CastP2X -> shifts -> AddPs -> card memory ops), so it tolerates multiple
// card-address computations sharing one cast.
132 void CardTableBarrierSetC2::eliminate_gc_barrier(PhaseIterGVN* igvn, Node* node) const {
133 assert(node->Opcode() == Op_CastP2X, "ConvP2XNode required");
// All three loops iterate use edges backwards so replace_node() can delete
// out-edges without invalidating the remaining iteration positions.
134 for (DUIterator_Last imin, i = node->last_outs(imin); i >= imin; --i) {
135 Node* shift = node->last_out(i);
136 for (DUIterator_Last jmin, j = shift->last_outs(jmin); j >= jmin; --j) {
137 Node* addp = shift->last_out(j);
138 for (DUIterator_Last kmin, k = addp->last_outs(kmin); k >= kmin; --k) {
139 Node* mem = addp->last_out(k);
// Under UseCondCardMark the barrier also emitted a LoadB of the card byte
// (the "is it already dirty?" check).
140 if (UseCondCardMark && mem->is_Load()) {
141 assert(mem->Opcode() == Op_LoadB, "unexpected code shape");
142 // The load is checking if the card has been written so
143 // replace it with zero to fold the test.
144 igvn->replace_node(mem, igvn->intcon(0));
145 continue;
146 }
// Any other user must be the card store; splice it out by rerouting its
// users to its incoming memory state.
147 assert(mem->is_Store(), "store required");
148 igvn->replace_node(mem, mem->in(MemNode::Memory));
149 }
150 }
151 }
152 }
153
154 bool CardTableBarrierSetC2::array_copy_requires_gc_barriers(bool tightly_coupled_alloc, BasicType type, bool is_clone, bool is_clone_instance, ArrayCopyPhase phase) const {
155 bool is_oop = type == T_OBJECT || type == T_ARRAY;
156 return is_oop && (!tightly_coupled_alloc || !use_ReduceInitialCardMarks());
157 }
|