src/hotspot/share/opto/escape.cpp

Old version:

3406   (void)add_edge(ptadr, src);
3407   src->set_arraycopy_src();
3408   // Add edge from destination object to arraycopy node.
3409   (void)add_edge(dst, ptadr);
3410   dst->set_arraycopy_dst();
3411 }
3412 
3413 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
3414   const Type* adr_type = n->as_AddP()->bottom_type();
3415   BasicType bt = T_INT;
3416   if (offset == Type::OffsetBot) {
3417     // Check only oop fields.
3418     if (!adr_type->isa_aryptr() ||
3419         adr_type->isa_aryptr()->elem() == Type::BOTTOM ||
3420         adr_type->isa_aryptr()->elem()->make_oopptr() != nullptr) {
3421       // OffsetBot is used to reference array's element. Ignore first AddP.
3422       if (find_second_addp(n, n->in(AddPNode::Base)) == nullptr) {
3423         bt = T_OBJECT;
3424       }
3425     }
3426   } else if (offset != oopDesc::klass_offset_in_bytes()) {
3427     if (adr_type->isa_instptr()) {
3428       ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
3429       if (field != nullptr) {
3430         bt = field->layout_type();
3431       } else {
3432         // Check for unsafe oop field access
3433         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
3434             n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
3435             n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN) ||
3436             BarrierSet::barrier_set()->barrier_set_c2()->escape_has_out_with_unsafe_object(n)) {
3437           bt = T_OBJECT;
3438           (*unsafe) = true;
3439         }
3440       }
3441     } else if (adr_type->isa_aryptr()) {
3442       if (offset == arrayOopDesc::length_offset_in_bytes()) {
3443         // Ignore array length load.
3444       } else if (find_second_addp(n, n->in(AddPNode::Base)) != nullptr) {
3445         // Ignore first AddP.
3446       } else {

4405         alloc->as_Allocate()->_is_scalar_replaceable = true;
4406       }
4407       set_escape_state(ptnode_adr(n->_idx), es NOT_PRODUCT(COMMA trace_propagate_message(ptn))); // CheckCastPP escape state
4408       // in order for an object to be scalar-replaceable, it must be:
4409       //   - a direct allocation (not a call returning an object)
4410       //   - non-escaping
4411       //   - eligible to be a unique type
4412       //   - not determined to be ineligible by escape analysis
4413       set_map(alloc, n);
4414       set_map(n, alloc);
4415       const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
4416       igvn->hash_delete(n);
4417       igvn->set_type(n,  tinst);
4418       n->raise_bottom_type(tinst);
4419       igvn->hash_insert(n);
4420       record_for_optimizer(n);
4421       // Allocate an alias index for the header fields. Accesses to
4422       // the header emitted during macro expansion wouldn't have
4423       // correct memory state otherwise.
4424       _compile->get_alias_index(tinst->add_offset(oopDesc::mark_offset_in_bytes()));
4425       _compile->get_alias_index(tinst->add_offset(oopDesc::klass_offset_in_bytes()));
4426       if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
4427 
4428         // First, put on the worklist all Field edges from Connection Graph
4429         // which is more accurate than putting immediate users from Ideal Graph.
4430         for (EdgeIterator e(ptn); e.has_next(); e.next()) {
4431           PointsToNode* tgt = e.get();
4432           if (tgt->is_Arraycopy()) {
4433             continue;
4434           }
4435           Node* use = tgt->ideal_node();
4436           assert(tgt->is_Field() && use->is_AddP(),
4437                  "only AddP nodes are Field edges in CG");
4438           if (use->outcnt() > 0) { // Don't process dead nodes
4439             Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
4440             if (addp2 != nullptr) {
4441               assert(alloc->is_AllocateArray(),"array allocation was expected");
4442               alloc_worklist.append_if_missing(addp2);
4443             }
4444             alloc_worklist.append_if_missing(use);
4445           }

New version:

3406   (void)add_edge(ptadr, src);
3407   src->set_arraycopy_src();
3408   // Add edge from destination object to arraycopy node.
3409   (void)add_edge(dst, ptadr);
3410   dst->set_arraycopy_dst();
3411 }
3412 
3413 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
3414   const Type* adr_type = n->as_AddP()->bottom_type();
3415   BasicType bt = T_INT;
3416   if (offset == Type::OffsetBot) {
3417     // Check only oop fields.
3418     if (!adr_type->isa_aryptr() ||
3419         adr_type->isa_aryptr()->elem() == Type::BOTTOM ||
3420         adr_type->isa_aryptr()->elem()->make_oopptr() != nullptr) {
3421       // OffsetBot is used to reference array's element. Ignore first AddP.
3422       if (find_second_addp(n, n->in(AddPNode::Base)) == nullptr) {
3423         bt = T_OBJECT;
3424       }
3425     }
3426   } else if (offset != Type::klass_offset()) {
3427     if (adr_type->isa_instptr()) {
3428       ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
3429       if (field != nullptr) {
3430         bt = field->layout_type();
3431       } else {
3432         // Check for unsafe oop field access
3433         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
3434             n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
3435             n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN) ||
3436             BarrierSet::barrier_set()->barrier_set_c2()->escape_has_out_with_unsafe_object(n)) {
3437           bt = T_OBJECT;
3438           (*unsafe) = true;
3439         }
3440       }
3441     } else if (adr_type->isa_aryptr()) {
3442       if (offset == arrayOopDesc::length_offset_in_bytes()) {
3443         // Ignore array length load.
3444       } else if (find_second_addp(n, n->in(AddPNode::Base)) != nullptr) {
3445         // Ignore first AddP.
3446       } else {
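
The hunk above is the core of ConnectionGraph::is_oop_field(): it decides whether the slot addressed by an AddP node can hold an oop. An offset of Type::OffsetBot refers to an array element and is treated as an oop slot unless the address is known to point into an array of non-oop elements; the klass header slot is excluded; a declared instance field answers via its layout type; and when no declared field is known, the node's users are scanned for oop-sized loads, stores and atomics, in which case the access is also flagged as unsafe. The only difference between the old and new copies of this hunk is that the klass slot offset is now read via Type::klass_offset() instead of oopDesc::klass_offset_in_bytes(). Below is a minimal standalone sketch of that decision order; every name in it (SlotQuery, ContainerKind, kKlassOffset, is_oop_slot) is hypothetical, and details such as the second-AddP check are deliberately dropped, so it is an illustration rather than the HotSpot code.

// Illustrative sketch only (not HotSpot code): hypothetical stand-ins for the
// facts the real is_oop_field() derives from the AddP node's address type,
// offset and users.
#include <cstdio>

enum class ContainerKind { Instance, ObjectArray, PrimitiveArray, Unknown };

struct SlotQuery {
  ContainerKind container;   // what kind of object the address points into
  long offset;               // byte offset, or kOffsetBot for an array element
  bool declared_oop_field;   // a declared instance field of an object type
  bool has_oop_sized_users;  // users include oop loads/stores/atomics
};

constexpr long kOffsetBot   = -1;  // stand-in for Type::OffsetBot
constexpr long kKlassOffset = 8;   // stand-in for Type::klass_offset()

// Returns true if the addressed slot may hold an oop; *unsafe is set when the
// only evidence is an oop-sized access without a declared field.
bool is_oop_slot(const SlotQuery& q, bool* unsafe) {
  *unsafe = false;
  if (q.offset == kOffsetBot) {
    // Unknown element offset: treat as an oop slot unless the container is
    // known to be an array of primitive (non-oop) elements.
    return q.container != ContainerKind::PrimitiveArray;
  }
  if (q.offset == kKlassOffset) {
    return false;              // the klass header slot is never an oop field
  }
  if (q.declared_oop_field) {
    return true;               // declared field whose layout type is an object
  }
  if (q.has_oop_sized_users) {
    *unsafe = true;            // no declared field, but oop-sized accesses seen
    return true;
  }
  return false;
}

int main() {
  bool unsafe = false;
  SlotQuery q{ContainerKind::Instance, 24, false, true};  // unsafe oop access
  printf("oop=%d unsafe=%d\n", is_oop_slot(q, &unsafe), unsafe);
  return 0;
}

Folding the ciField lookup into the declared_oop_field flag keeps the sketch short while preserving the order in which the real code rules a slot in or out.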

4405         alloc->as_Allocate()->_is_scalar_replaceable = true;
4406       }
4407       set_escape_state(ptnode_adr(n->_idx), es NOT_PRODUCT(COMMA trace_propagate_message(ptn))); // CheckCastPP escape state
4408       // in order for an object to be scalar-replaceable, it must be:
4409       //   - a direct allocation (not a call returning an object)
4410       //   - non-escaping
4411       //   - eligible to be a unique type
4412       //   - not determined to be ineligible by escape analysis
4413       set_map(alloc, n);
4414       set_map(n, alloc);
4415       const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
4416       igvn->hash_delete(n);
4417       igvn->set_type(n,  tinst);
4418       n->raise_bottom_type(tinst);
4419       igvn->hash_insert(n);
4420       record_for_optimizer(n);
4421       // Allocate an alias index for the header fields. Accesses to
4422       // the header emitted during macro expansion wouldn't have
4423       // correct memory state otherwise.
4424       _compile->get_alias_index(tinst->add_offset(oopDesc::mark_offset_in_bytes()));
4425       _compile->get_alias_index(tinst->add_offset(Type::klass_offset()));
4426       if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
4427 
4428         // First, put on the worklist all Field edges from Connection Graph
4429         // which is more accurate than putting immediate users from Ideal Graph.
4430         for (EdgeIterator e(ptn); e.has_next(); e.next()) {
4431           PointsToNode* tgt = e.get();
4432           if (tgt->is_Arraycopy()) {
4433             continue;
4434           }
4435           Node* use = tgt->ideal_node();
4436           assert(tgt->is_Field() && use->is_AddP(),
4437                  "only AddP nodes are Field edges in CG");
4438           if (use->outcnt() > 0) { // Don't process dead nodes
4439             Node* addp2 = find_second_addp(use, use->in(AddPNode::Base));
4440             if (addp2 != nullptr) {
4441               assert(alloc->is_AllocateArray(),"array allocation was expected");
4442               alloc_worklist.append_if_missing(addp2);
4443             }
4444             alloc_worklist.append_if_missing(use);
4445           }
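
This second hunk belongs to the phase that gives a non-escaping allocation its own instance type: the escape state is recorded on the CheckCastPP, the allocation and the CheckCastPP are mapped to each other, the CheckCastPP's type is narrowed with cast_to_instance_id(), and alias indexes are reserved for the mark word and the klass slot so that header accesses emitted later during macro expansion see correct memory state (here, too, the only change is Type::klass_offset() replacing oopDesc::klass_offset_in_bytes()). The loop at the end then seeds alloc_worklist from the allocation's Field edges in the connection graph, skipping Arraycopy targets and dead nodes, because those edges are more precise than the raw users in the ideal graph. The sketch below shows only that worklist-seeding pattern with hypothetical types (EdgeTarget, append_if_missing, seed_worklist); it is not the HotSpot code.

// Illustrative sketch only (not HotSpot code): hypothetical types showing the
// worklist-seeding pattern used above, where connection-graph Field edges are
// preferred over the allocation's raw ideal-graph users.
#include <algorithm>
#include <cstdio>
#include <vector>

struct EdgeTarget {
  int  ideal_node_id;   // id of the AddP node this Field edge corresponds to
  bool is_arraycopy;    // edge to an Arraycopy node, skipped when seeding
  bool is_dead;         // the ideal node has no remaining users (outcnt() == 0)
};

// Mirrors alloc_worklist.append_if_missing(): keep the worklist duplicate-free.
void append_if_missing(std::vector<int>& worklist, int id) {
  if (std::find(worklist.begin(), worklist.end(), id) == worklist.end()) {
    worklist.push_back(id);
  }
}

void seed_worklist(const std::vector<EdgeTarget>& edges, std::vector<int>& worklist) {
  for (const EdgeTarget& tgt : edges) {
    if (tgt.is_arraycopy || tgt.is_dead) {
      continue;               // ignore arraycopy edges and dead nodes
    }
    append_if_missing(worklist, tgt.ideal_node_id);
  }
}

int main() {
  std::vector<EdgeTarget> edges = {
    {10, false, false},  // live field access
    {11, true,  false},  // arraycopy edge, skipped
    {10, false, false},  // duplicate, filtered by append_if_missing
  };
  std::vector<int> worklist;
  seed_worklist(edges, worklist);
  printf("worklist size = %zu\n", worklist.size());  // prints 1
  return 0;
}

Appending only missing entries keeps the worklist duplicate-free even when the same target is reached more than once, which mirrors the append_if_missing() calls in the hunk above.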