// NOTE(review): fragment — the enclosing function's signature/opening is not
// visible in this chunk. The visible tail records, in the connection graph,
// that 'ptadr' (presumably the PointsTo node of an ArrayCopy — confirm
// against the full function) reads from 'src' and flows into 'dst', and tags
// both object nodes so later analysis knows they participate in an arraycopy.
3487 (void)add_edge(ptadr, src);
3488 src->set_arraycopy_src();
3489 // Add edge from destination object to arraycopy node.
3490 (void)add_edge(dst, ptadr);
3491 dst->set_arraycopy_dst();
3492 }
3493
// Decide whether the memory access addressed by AddP node 'n' at 'offset'
// can hold an oop (object reference). 'bt' starts pessimistically at T_INT
// and is upgraded to T_OBJECT (or to the field's/element's layout type) when
// the address type proves the slot is a reference; '*unsafe' is set when an
// Unsafe-style access with no resolvable field looks like it loads oops.
// NOTE(review): fragment — the listing jumps from line 3527 to 4495, so the
// function's tail (closing braces and return, presumably is_reference_type(bt)
// or similar) is not visible here; confirm against the full file.
3494 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
3495 const Type* adr_type = n->as_AddP()->bottom_type();
3496 BasicType bt = T_INT;
3497 if (offset == Type::OffsetBot) {
// OffsetBot means a statically unknown offset: only treat it as an oop slot
// when the address is not a known-primitive array element.
3498 // Check only oop fields.
3499 if (!adr_type->isa_aryptr() ||
3500 adr_type->isa_aryptr()->elem() == Type::BOTTOM ||
3501 adr_type->isa_aryptr()->elem()->make_oopptr() != nullptr) {
3502 // OffsetBot is used to reference array's element. Ignore first AddP.
3503 if (find_second_addp(n, n->in(AddPNode::Base)) == nullptr) {
3504 bt = T_OBJECT;
3505 }
3506 }
// Skip the klass word of the object header; it is metadata, not an oop field.
3507 } else if (offset != oopDesc::klass_offset_in_bytes()) {
3508 if (adr_type->isa_instptr()) {
// Instance access: ask the alias analysis for the ciField at this slice and
// use its declared layout type when available.
3509 ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
3510 if (field != nullptr) {
3511 bt = field->layout_type();
3512 } else {
3513 // Check for unsafe oop field access
// No resolvable field: if any user of this address consumes it as an oop,
// conservatively classify the access as an (unsafe) oop access.
3514 if (has_oop_node_outs(n)) {
3515 bt = T_OBJECT;
3516 (*unsafe) = true;
3517 }
3518 }
3519 } else if (adr_type->isa_aryptr()) {
3520 if (offset == arrayOopDesc::length_offset_in_bytes()) {
3521 // Ignore array length load.
3522 } else if (find_second_addp(n, n->in(AddPNode::Base)) != nullptr) {
3523 // Ignore first AddP.
3524 } else {
// Real array element access: classify by the element's basic type.
3525 const Type* elemtype = adr_type->isa_aryptr()->elem();
3526 bt = elemtype->array_element_basic_type();
3527 }
// NOTE(review): fragment cut at both ends — this appears to be the interior
// of ConnectionGraph::split_unique_types (confirm against the full file).
// Visible work: mark the allocation scalar-replaceable, give the CheckCastPP
// 'n' a unique instance type 'tinst', re-register it with IGVN, pre-allocate
// alias indexes for the object-header fields, and clone the Initialize node's
// NarrowMem projections to the narrowed (instance-specific) address types.
4495 alloc->as_Allocate()->_is_scalar_replaceable = true;
4496 }
4497 set_escape_state(ptnode_adr(n->_idx), es NOT_PRODUCT(COMMA trace_propagate_message(ptn))); // CheckCastPP escape state
4498 // in order for an object to be scalar-replaceable, it must be:
4499 //   - a direct allocation (not a call returning an object)
4500 //   - non-escaping
4501 //   - eligible to be a unique type
4502 //   - not determined to be ineligible by escape analysis
// Record the bidirectional alloc <-> CheckCastPP mapping for later rewriting.
4503 set_map(alloc, n);
4504 set_map(n, alloc);
// Narrow the oop type to this allocation's unique instance id and push the
// new type through IGVN (hash_delete/insert keeps the value-numbering table
// consistent while the node's type changes).
4505 const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
4506 igvn->hash_delete(n);
4507 igvn->set_type(n,  tinst);
4508 n->raise_bottom_type(tinst);
4509 igvn->hash_insert(n);
4510 record_for_optimizer(n);
4511 // Allocate an alias index for the header fields. Accesses to
4512 // the header emitted during macro expansion wouldn't have
4513 // correct memory state otherwise.
4514 _compile->get_alias_index(tinst->add_offset(oopDesc::mark_offset_in_bytes()));
4515 _compile->get_alias_index(tinst->add_offset(oopDesc::klass_offset_in_bytes()));
4516 if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
4517 // Add a new NarrowMem projection for each existing NarrowMem projection with new adr type
4518 InitializeNode* init = alloc->as_Allocate()->initialization();
4519 assert(init != nullptr, "can't find Initialization node for this Allocate node");
4520 auto process_narrow_proj = [&](NarrowMemProjNode* proj) {
4521 const TypePtr* adr_type = proj->adr_type();
// Re-base the projection's slice onto the unique instance type; only clone
// when that actually narrows the type and no equivalent projection exists.
4522 const TypePtr* new_adr_type = tinst->add_offset(adr_type->offset());
4523 if (adr_type != new_adr_type && !init->already_has_narrow_mem_proj_with_adr_type(new_adr_type)) {
4524 DEBUG_ONLY( uint alias_idx = _compile->get_alias_index(new_adr_type); )
4525 assert(_compile->get_general_index(alias_idx) == _compile->get_alias_index(adr_type), "new adr type should be narrowed down from existing adr type");
4526 NarrowMemProjNode* new_proj = new NarrowMemProjNode(init, new_adr_type);
4527 igvn->set_type(new_proj, new_proj->bottom_type());
4528 record_for_optimizer(new_proj);
4529 set_map(proj, new_proj); // record it so ConnectionGraph::find_inst_mem() can find it
4530 }
4531 };
4532 init->for_each_narrow_mem_proj_with_new_uses(process_narrow_proj);
4533
4534 // First, put on the worklist all Field edges from Connection Graph
4535 // which is more accurate than putting immediate users from Ideal Graph.
|
// NOTE(review): fragment — second copy of the same tail (this listing holds
// two versions of the file). The enclosing function's opening is not visible.
// It records that 'ptadr' (presumably the ArrayCopy's PointsTo node — confirm
// against the full function) reads from 'src' and flows into 'dst', and tags
// both object nodes as arraycopy participants.
3487 (void)add_edge(ptadr, src);
3488 src->set_arraycopy_src();
3489 // Add edge from destination object to arraycopy node.
3490 (void)add_edge(dst, ptadr);
3491 dst->set_arraycopy_dst();
3492 }
3493
// Decide whether the memory access addressed by AddP node 'n' at 'offset'
// can hold an oop. 'bt' starts at T_INT and is upgraded when the address
// type proves the slot is a reference; '*unsafe' is set for Unsafe-style
// accesses with no resolvable field that look like oop loads.
// NOTE(review): this second copy differs from the first only in using
// Type::klass_offset() instead of oopDesc::klass_offset_in_bytes() — a
// newer-API version of the same function. The tail (return) is again not
// visible (listing jumps from 3527 to 4495).
3494 bool ConnectionGraph::is_oop_field(Node* n, int offset, bool* unsafe) {
3495 const Type* adr_type = n->as_AddP()->bottom_type();
3496 BasicType bt = T_INT;
3497 if (offset == Type::OffsetBot) {
// Statically unknown offset: only an oop slot if not a known-primitive array.
3498 // Check only oop fields.
3499 if (!adr_type->isa_aryptr() ||
3500 adr_type->isa_aryptr()->elem() == Type::BOTTOM ||
3501 adr_type->isa_aryptr()->elem()->make_oopptr() != nullptr) {
3502 // OffsetBot is used to reference array's element. Ignore first AddP.
3503 if (find_second_addp(n, n->in(AddPNode::Base)) == nullptr) {
3504 bt = T_OBJECT;
3505 }
3506 }
// Skip the header klass word; metadata, not an oop field.
3507 } else if (offset != Type::klass_offset()) {
3508 if (adr_type->isa_instptr()) {
// Instance access: use the resolved ciField's declared layout type.
3509 ciField* field = _compile->alias_type(adr_type->isa_instptr())->field();
3510 if (field != nullptr) {
3511 bt = field->layout_type();
3512 } else {
3513 // Check for unsafe oop field access
// Unresolvable field: if any user consumes this address as an oop,
// conservatively classify it as an (unsafe) oop access.
3514 if (has_oop_node_outs(n)) {
3515 bt = T_OBJECT;
3516 (*unsafe) = true;
3517 }
3518 }
3519 } else if (adr_type->isa_aryptr()) {
3520 if (offset == arrayOopDesc::length_offset_in_bytes()) {
3521 // Ignore array length load.
3522 } else if (find_second_addp(n, n->in(AddPNode::Base)) != nullptr) {
3523 // Ignore first AddP.
3524 } else {
// Real element access: classify by the array element's basic type.
3525 const Type* elemtype = adr_type->isa_aryptr()->elem();
3526 bt = elemtype->array_element_basic_type();
3527 }
// NOTE(review): fragment cut at both ends — second copy of the interior of
// what appears to be ConnectionGraph::split_unique_types (confirm against the
// full file). Differs from the first copy only in Type::klass_offset() vs
// oopDesc::klass_offset_in_bytes() on the header alias-index line.
4495 alloc->as_Allocate()->_is_scalar_replaceable = true;
4496 }
4497 set_escape_state(ptnode_adr(n->_idx), es NOT_PRODUCT(COMMA trace_propagate_message(ptn))); // CheckCastPP escape state
4498 // in order for an object to be scalar-replaceable, it must be:
4499 //   - a direct allocation (not a call returning an object)
4500 //   - non-escaping
4501 //   - eligible to be a unique type
4502 //   - not determined to be ineligible by escape analysis
// Record the bidirectional alloc <-> CheckCastPP mapping for later rewriting.
4503 set_map(alloc, n);
4504 set_map(n, alloc);
// Narrow the oop type to this allocation's unique instance id and push the
// new type through IGVN; hash_delete/insert keeps value numbering consistent.
4505 const TypeOopPtr* tinst = t->cast_to_instance_id(ni);
4506 igvn->hash_delete(n);
4507 igvn->set_type(n,  tinst);
4508 n->raise_bottom_type(tinst);
4509 igvn->hash_insert(n);
4510 record_for_optimizer(n);
4511 // Allocate an alias index for the header fields. Accesses to
4512 // the header emitted during macro expansion wouldn't have
4513 // correct memory state otherwise.
4514 _compile->get_alias_index(tinst->add_offset(oopDesc::mark_offset_in_bytes()));
4515 _compile->get_alias_index(tinst->add_offset(Type::klass_offset()));
4516 if (alloc->is_Allocate() && (t->isa_instptr() || t->isa_aryptr())) {
4517 // Add a new NarrowMem projection for each existing NarrowMem projection with new adr type
4518 InitializeNode* init = alloc->as_Allocate()->initialization();
4519 assert(init != nullptr, "can't find Initialization node for this Allocate node");
4520 auto process_narrow_proj = [&](NarrowMemProjNode* proj) {
4521 const TypePtr* adr_type = proj->adr_type();
// Re-base the projection's slice onto the unique instance type; clone only
// when the type actually narrows and no equivalent projection exists yet.
4522 const TypePtr* new_adr_type = tinst->add_offset(adr_type->offset());
4523 if (adr_type != new_adr_type && !init->already_has_narrow_mem_proj_with_adr_type(new_adr_type)) {
4524 DEBUG_ONLY( uint alias_idx = _compile->get_alias_index(new_adr_type); )
4525 assert(_compile->get_general_index(alias_idx) == _compile->get_alias_index(adr_type), "new adr type should be narrowed down from existing adr type");
4526 NarrowMemProjNode* new_proj = new NarrowMemProjNode(init, new_adr_type);
4527 igvn->set_type(new_proj, new_proj->bottom_type());
4528 record_for_optimizer(new_proj);
4529 set_map(proj, new_proj); // record it so ConnectionGraph::find_inst_mem() can find it
4530 }
4531 };
4532 init->for_each_narrow_mem_proj_with_new_uses(process_narrow_proj);
4533
4534 // First, put on the worklist all Field edges from Connection Graph
4535 // which is more accurate than putting immediate users from Ideal Graph.
|