src/hotspot/share/opto/escape.cpp

The two hunks below appear twice: first the old version, then the new one. The change replaces oopDesc::klass_offset_in_bytes() with Type::klass_offset() at file lines 3476 and 4480.

Old version:

3456   (void)add_edge(ptadr, src);
3457   src->set_arraycopy_src();
3458   // Add edge from destination object to arraycopy node.
3459   (void)add_edge(dst, ptadr);
3460   dst->set_arraycopy_dst();
3461 }
3462 
3463 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
3464   const Type* adr_type = n->as_AddP()->bottom_type();
3465   BasicType bt = T_INT;
3466   if (offset == Type::OffsetBot) {
3467     // Check only oop fields.
3468     if (!adr_type->isa_aryptr() ||
3469         adr_type->isa_aryptr()->elem() == Type::BOTTOM ||
3470         adr_type->isa_aryptr()->elem()->make_oopptr() != nullptr) {
3471       // OffsetBot is used to reference array's element. Ignore first AddP.
3472       if (find_second_addp(n, n->in(AddPNode::Base)) == nullptr) {
3473         bt = T_OBJECT;
3474       }
3475     }
3476   } else if (offset != oopDesc::klass_offset_in_bytes()) {
3477     if (adr_type->isa_instptr()) {
3478       ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
3479       if (field != nullptr) {
3480         bt = field->layout_type();
3481       } else {
3482         // Check for unsafe oop field access
3483         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
3484             n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
3485             n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN) ||
3486             BarrierSet::barrier_set()->barrier_set_c2()->escape_has_out_with_unsafe_object(n)) {
3487           bt = T_OBJECT;
3488           (*unsafe) = true;
3489         }
3490       }
3491     } else if (adr_type->isa_aryptr()) {
3492       if (offset == arrayOopDesc::length_offset_in_bytes()) {
3493         // Ignore array length load.
3494       } else if (find_second_addp(n, n->in(AddPNode::Base)) != nullptr) {
3495         // Ignore first AddP.
3496       } else {

4460         alloc->as_Allocate()->_is_scalar_replaceable = true;
4461       }
4462       set_escape_state(ptnode_adr(n->_idx), es NOT_PRODUCT(COMMA trace_propagate_message(ptn))); // CheckCastPP escape state
4463       // in order for an object to be scalar-replaceable, it must be:
4464       //   - a direct allocation (not a call returning an object)
4465       //   - non-escaping
4466       //   - eligible to be a unique type
4467       //   - not determined to be ineligible by escape analysis
4468       set_map(alloc, n);
4469       set_map(n, alloc);
4470       const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
4471       igvn->hash_delete(n);
4472       igvn->set_type(n,  tinst);
4473       n->raise_bottom_type(tinst);
4474       igvn->hash_insert(n);
4475       record_for_optimizer(n);
4476       // Allocate an alias index for the header fields. Accesses to
4477       // the header emitted during macro expansion wouldn't have
4478       // correct memory state otherwise.
4479       _compile->get_alias_index(tinst->add_offset(oopDesc::mark_offset_in_bytes()));
4480       _compile->get_alias_index(tinst->add_offset(oopDesc::klass_offset_in_bytes()));
4481       if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
4482         // Add a new NarrowMem projection for each existing NarrowMem projection with new adr type
4483         InitializeNode* init = alloc->as_Allocate()->initialization();
4484         assert(init != nullptr, "can't find Initialization node for this Allocate node");
4485         auto process_narrow_proj = [&](NarrowMemProjNode* proj) {
4486           const TypePtr* adr_type = proj->adr_type();
4487           const TypePtr* new_adr_type = tinst->add_offset(adr_type->offset());
4488           if (adr_type != new_adr_type && !init->already_has_narrow_mem_proj_with_adr_type(new_adr_type)) {
4489             DEBUG_ONLY( uint alias_idx = _compile->get_alias_index(new_adr_type); )
4490             assert(_compile->get_general_index(alias_idx) == _compile->get_alias_index(adr_type), "new adr type should be narrowed down from existing adr type");
4491             NarrowMemProjNode* new_proj = new NarrowMemProjNode(init, new_adr_type);
4492             igvn->set_type(new_proj, new_proj->bottom_type());
4493             record_for_optimizer(new_proj);
4494             set_map(proj, new_proj); // record it so ConnectionGraph::find_inst_mem() can find it
4495           }
4496         };
4497         init->for_each_narrow_mem_proj_with_new_uses(process_narrow_proj);
4498 
4499         // First, put on the worklist all Field edges from Connection Graph
4500         // which is more accurate than putting immediate users from Ideal Graph.

New version:

3456   (void)add_edge(ptadr, src);
3457   src->set_arraycopy_src();
3458   // Add edge from destination object to arraycopy node.
3459   (void)add_edge(dst, ptadr);
3460   dst->set_arraycopy_dst();
3461 }
3462 
3463 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
3464   const Type* adr_type = n->as_AddP()->bottom_type();
3465   BasicType bt = T_INT;
3466   if (offset == Type::OffsetBot) {
3467     // Check only oop fields.
3468     if (!adr_type->isa_aryptr() ||
3469         adr_type->isa_aryptr()->elem() == Type::BOTTOM ||
3470         adr_type->isa_aryptr()->elem()->make_oopptr() != nullptr) {
3471       // OffsetBot is used to reference array's element. Ignore first AddP.
3472       if (find_second_addp(n, n->in(AddPNode::Base)) == nullptr) {
3473         bt = T_OBJECT;
3474       }
3475     }
3476   } else if (offset != Type::klass_offset()) {
3477     if (adr_type->isa_instptr()) {
3478       ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
3479       if (field != nullptr) {
3480         bt = field->layout_type();
3481       } else {
3482         // Check for unsafe oop field access
3483         if (n->has_out_with(Op_StoreP, Op_LoadP, Op_StoreN, Op_LoadN) ||
3484             n->has_out_with(Op_GetAndSetP, Op_GetAndSetN, Op_CompareAndExchangeP, Op_CompareAndExchangeN) ||
3485             n->has_out_with(Op_CompareAndSwapP, Op_CompareAndSwapN, Op_WeakCompareAndSwapP, Op_WeakCompareAndSwapN) ||
3486             BarrierSet::barrier_set()->barrier_set_c2()->escape_has_out_with_unsafe_object(n)) {
3487           bt = T_OBJECT;
3488           (*unsafe) = true;
3489         }
3490       }
3491     } else if (adr_type->isa_aryptr()) {
3492       if (offset == arrayOopDesc::length_offset_in_bytes()) {
3493         // Ignore array length load.
3494       } else if (find_second_addp(n, n->in(AddPNode::Base)) != nullptr) {
3495         // Ignore first AddP.
3496       } else {
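
Aside: the only change in this hunk is at file line 3476, where the test for the header's klass field switches from oopDesc::klass_offset_in_bytes() to Type::klass_offset(). A minimal sketch of the migration pattern follows; targets_klass_field() is a hypothetical helper used only for illustration, not part of the patch:

  // Hypothetical helper (illustration only): callers that ask "does this
  // offset address the klass field of the object header?" migrate like so.
  static bool targets_klass_field(int offset) {
    // old form: return offset == oopDesc::klass_offset_in_bytes();
    return offset == Type::klass_offset();  // new form used by this patch
  }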

4460         alloc->as_Allocate()->_is_scalar_replaceable = true;
4461       }
4462       set_escape_state(ptnode_adr(n->_idx), es NOT_PRODUCT(COMMA trace_propagate_message(ptn))); // CheckCastPP escape state
4463       // in order for an object to be scalar-replaceable, it must be:
4464       //   - a direct allocation (not a call returning an object)
4465       //   - non-escaping
4466       //   - eligible to be a unique type
4467       //   - not determined to be ineligible by escape analysis
4468       set_map(alloc, n);
4469       set_map(n, alloc);
4470       const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
4471       igvn->hash_delete(n);
4472       igvn->set_type(n,  tinst);
4473       n->raise_bottom_type(tinst);
4474       igvn->hash_insert(n);
4475       record_for_optimizer(n);
4476       // Allocate an alias index for the header fields. Accesses to
4477       // the header emitted during macro expansion wouldn't have
4478       // correct memory state otherwise.
4479       _compile->get_alias_index(tinst->add_offset(oopDesc::mark_offset_in_bytes()));
4480       _compile->get_alias_index(tinst->add_offset(Type::klass_offset()));
4481       if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
4482         // Add a new NarrowMem projection for each existing NarrowMem projection with new adr type
4483         InitializeNode* init = alloc->as_Allocate()->initialization();
4484         assert(init != nullptr, "can't find Initialization node for this Allocate node");
4485         auto process_narrow_proj = [&](NarrowMemProjNode* proj) {
4486           const TypePtr* adr_type = proj->adr_type();
4487           const TypePtr* new_adr_type = tinst->add_offset(adr_type->offset());
4488           if (adr_type != new_adr_type && !init->already_has_narrow_mem_proj_with_adr_type(new_adr_type)) {
4489             DEBUG_ONLY( uint alias_idx = _compile->get_alias_index(new_adr_type); )
4490             assert(_compile->get_general_index(alias_idx) == _compile->get_alias_index(adr_type), "new adr type should be narrowed down from existing adr type");
4491             NarrowMemProjNode* new_proj = new NarrowMemProjNode(init, new_adr_type);
4492             igvn->set_type(new_proj, new_proj->bottom_type());
4493             record_for_optimizer(new_proj);
4494             set_map(proj, new_proj); // record it so ConnectionGraph::find_inst_mem() can find it
4495           }
4496         };
4497         init->for_each_narrow_mem_proj_with_new_uses(process_narrow_proj);
4498 
4499         // First, put on the worklist all Field edges from Connection Graph
4500         // which is more accurate than putting immediate users from Ideal Graph.
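
Aside: as a reading aid for the retyping sequence at file lines 4470-4475 above, here is the same idiom condensed with explanatory comments. This is an illustrative restatement, not new code; n, t, ni, and igvn are as in the hunk:

  // Unique-type splitting: give the CheckCastPP of a non-escaping
  // allocation a type with a fresh instance id, so its memory accesses
  // get private alias classes.
  const TypeOopPtr* tinst = t->cast_to_instance_id(ni); // type private to this allocation
  igvn->hash_delete(n);         // the node's hash depends on its type; unhash first
  igvn->set_type(n, tinst);     // record the instance-specific type in the type table
  n->raise_bottom_type(tinst);  // keep the node's own bottom type consistent
  igvn->hash_insert(n);         // rehash the node under its new type
  record_for_optimizer(n);      // enqueue for the next IGVN round

The two get_alias_index() calls that follow (file lines 4479-4480) then eagerly create alias classes for the mark and klass header fields of the new instance type, so that header accesses emitted later by macro expansion land on a valid memory slice, as the comment at lines 4476-4478 explains.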