< prev index next >

src/hotspot/share/opto/memnode.cpp

Print this page

3319 
3320   bool progress = false;
3321   // Eliminate volatile MemBars for scalar replaced objects.
3322   if (can_reshape && req() == (Precedent+1)) {
3323     bool eliminate = false;
3324     int opc = Opcode();
3325     if ((opc == Op_MemBarAcquire || opc == Op_MemBarVolatile)) {
3326       // Volatile field loads and stores.
3327       Node* my_mem = in(MemBarNode::Precedent);
3328       // The MemBarAcquire may keep an unused LoadNode alive through the Precedent edge
3329       if ((my_mem != nullptr) && (opc == Op_MemBarAcquire) && (my_mem->outcnt() == 1)) {
3330         // if the Precedent is a decodeN and its input (a Load) is used at more than one place,
3331         // replace this Precedent (decodeN) with the Load instead.
3332         if ((my_mem->Opcode() == Op_DecodeN) && (my_mem->in(1)->outcnt() > 1))  {
3333           Node* load_node = my_mem->in(1);
3334           set_req(MemBarNode::Precedent, load_node);
3335           phase->is_IterGVN()->_worklist.push(my_mem);
3336           my_mem = load_node;
3337         } else {
3338           assert(my_mem->unique_out() == this, "sanity");

3339           del_req(Precedent);
3340           phase->is_IterGVN()->_worklist.push(my_mem); // remove dead node later
3341           my_mem = nullptr;
3342         }
3343         progress = true;
3344       }
3345       if (my_mem != nullptr && my_mem->is_Mem()) {
3346         const TypeOopPtr* t_oop = my_mem->in(MemNode::Address)->bottom_type()->isa_oopptr();
3347         // Check for scalar replaced object reference.
3348         if( t_oop != nullptr && t_oop->is_known_instance_field() &&
3349             t_oop->offset() != Type::OffsetBot &&
3350             t_oop->offset() != Type::OffsetTop) {
3351           eliminate = true;
3352         }
3353       }
3354     } else if (opc == Op_MemBarRelease) {
3355       // Final field stores.
3356       Node* alloc = AllocateNode::Ideal_allocation(in(MemBarNode::Precedent), phase);
3357       if ((alloc != nullptr) && alloc->is_Allocate() &&
3358           alloc->as_Allocate()->does_not_escape_thread()) {

3319 
3320   bool progress = false;
3321   // Eliminate volatile MemBars for scalar replaced objects.
3322   if (can_reshape && req() == (Precedent+1)) {
3323     bool eliminate = false;
3324     int opc = Opcode();
3325     if ((opc == Op_MemBarAcquire || opc == Op_MemBarVolatile)) {
3326       // Volatile field loads and stores.
3327       Node* my_mem = in(MemBarNode::Precedent);
3328       // The MemBarAcquire may keep an unused LoadNode alive through the Precedent edge
3329       if ((my_mem != nullptr) && (opc == Op_MemBarAcquire) && (my_mem->outcnt() == 1)) {
3330         // if the Precedent is a decodeN and its input (a Load) is used at more than one place,
3331         // replace this Precedent (decodeN) with the Load instead.
3332         if ((my_mem->Opcode() == Op_DecodeN) && (my_mem->in(1)->outcnt() > 1))  {
3333           Node* load_node = my_mem->in(1);
3334           set_req(MemBarNode::Precedent, load_node);
3335           phase->is_IterGVN()->_worklist.push(my_mem);
3336           my_mem = load_node;
3337         } else {
3338           assert(my_mem->unique_out() == this, "sanity");
3339           assert(!trailing_load_store(), "load store node can't be eliminated");
3340           del_req(Precedent);
3341           phase->is_IterGVN()->_worklist.push(my_mem); // remove dead node later
3342           my_mem = nullptr;
3343         }
3344         progress = true;
3345       }
3346       if (my_mem != nullptr && my_mem->is_Mem()) {
3347         const TypeOopPtr* t_oop = my_mem->in(MemNode::Address)->bottom_type()->isa_oopptr();
3348         // Check for scalar replaced object reference.
3349         if( t_oop != nullptr && t_oop->is_known_instance_field() &&
3350             t_oop->offset() != Type::OffsetBot &&
3351             t_oop->offset() != Type::OffsetTop) {
3352           eliminate = true;
3353         }
3354       }
3355     } else if (opc == Op_MemBarRelease) {
3356       // Final field stores.
3357       Node* alloc = AllocateNode::Ideal_allocation(in(MemBarNode::Precedent), phase);
3358       if ((alloc != nullptr) && alloc->is_Allocate() &&
3359           alloc->as_Allocate()->does_not_escape_thread()) {
< prev index next >