< prev index next >

src/share/vm/opto/macro.cpp

Print this page




  24 
  25 #include "precompiled.hpp"
  26 #include "compiler/compileLog.hpp"
  27 #include "libadt/vectset.hpp"
  28 #include "opto/addnode.hpp"
  29 #include "opto/callnode.hpp"
  30 #include "opto/cfgnode.hpp"
  31 #include "opto/compile.hpp"
  32 #include "opto/connode.hpp"
  33 #include "opto/locknode.hpp"
  34 #include "opto/loopnode.hpp"
  35 #include "opto/macro.hpp"
  36 #include "opto/memnode.hpp"
  37 #include "opto/node.hpp"
  38 #include "opto/phaseX.hpp"
  39 #include "opto/rootnode.hpp"
  40 #include "opto/runtime.hpp"
  41 #include "opto/subnode.hpp"
  42 #include "opto/type.hpp"
  43 #include "runtime/sharedRuntime.hpp"





  44 
  45 
  46 //
  47 // Replace any references to "oldref" in inputs to "use" with "newref".
  48 // Returns the number of replacements made.
  49 //
  50 int PhaseMacroExpand::replace_input(Node *use, Node *oldref, Node *newref) {
  51   int nreplacements = 0;
  52   uint req = use->req();
  53   for (uint j = 0; j < use->len(); j++) {
  54     Node *uin = use->in(j);
  55     if (uin == oldref) {
  56       if (j < req)
  57         use->set_req(j, newref);
  58       else
  59         use->set_prec(j, newref);
  60       nreplacements++;
  61     } else if (j >= req && uin == NULL) {
  62       break;
  63     }


 427   for (uint j = 1; j < length; j++) {
 428     Node *in = mem->in(j);
 429     if (in == NULL || in->is_top()) {
 430       values.at_put(j, in);
 431     } else  {
 432       Node *val = scan_mem_chain(in, alias_idx, offset, start_mem, alloc, &_igvn);
 433       if (val == start_mem || val == alloc_mem) {
 434         // hit a sentinel, return appropriate 0 value
 435         values.at_put(j, _igvn.zerocon(ft));
 436         continue;
 437       }
 438       if (val->is_Initialize()) {
 439         val = val->as_Initialize()->find_captured_store(offset, type2aelembytes(ft), &_igvn);
 440       }
 441       if (val == NULL) {
 442         return NULL;  // can't find a value on this path
 443       }
 444       if (val == mem) {
 445         values.at_put(j, mem);
 446       } else if (val->is_Store()) {
 447         values.at_put(j, val->in(MemNode::ValueIn));






 448       } else if(val->is_Proj() && val->in(0) == alloc) {
 449         values.at_put(j, _igvn.zerocon(ft));
 450       } else if (val->is_Phi()) {
 451         val = value_from_mem_phi(val, ft, phi_type, adr_t, alloc, value_phis, level-1);
 452         if (val == NULL) {
 453           return NULL;
 454         }
 455         values.at_put(j, val);
 456       } else if (val->Opcode() == Op_SCMemProj) {
 457         assert(val->in(0)->is_LoadStore() || val->in(0)->Opcode() == Op_EncodeISOArray, "sanity");
 458         assert(false, "Object is not scalar replaceable if a LoadStore node access its field");
 459         return NULL;
 460       } else {
 461 #ifdef ASSERT
 462         val->dump();
 463         assert(false, "unknown node on this path");
 464 #endif
 465         return NULL;  // unknown node on this path
 466       }
 467     }


 529           unique_input = n;
 530         } else if (unique_input != n) {
 531           unique_input = top;
 532           break;
 533         }
 534       }
 535       if (unique_input != NULL && unique_input != top) {
 536         mem = unique_input;
 537       } else {
 538         done = true;
 539       }
 540     } else {
 541       assert(false, "unexpected node");
 542     }
 543   }
 544   if (mem != NULL) {
 545     if (mem == start_mem || mem == alloc_mem) {
 546       // hit a sentinel, return appropriate 0 value
 547       return _igvn.zerocon(ft);
 548     } else if (mem->is_Store()) {
 549       return mem->in(MemNode::ValueIn);






 550     } else if (mem->is_Phi()) {
 551       // attempt to produce a Phi reflecting the values on the input paths of the Phi
 552       Node_Stack value_phis(a, 8);
 553       Node * phi = value_from_mem_phi(mem, ft, ftype, adr_t, alloc, &value_phis, ValueSearchLimit);
 554       if (phi != NULL) {
 555         return phi;
 556       } else {
 557         // Kill all new Phis
 558         while(value_phis.is_nonempty()) {
 559           Node* n = value_phis.node();
 560           _igvn.replace_node(n, C->top());
 561           value_phis.pop();
 562         }
 563       }
 564     }
 565   }
 566   // Something went wrong.
 567   return NULL;
 568 }
 569 


 596     }
 597   }
 598 
 599   if (can_eliminate && res != NULL) {
 600     for (DUIterator_Fast jmax, j = res->fast_outs(jmax);
 601                                j < jmax && can_eliminate; j++) {
 602       Node* use = res->fast_out(j);
 603 
 604       if (use->is_AddP()) {
 605         const TypePtr* addp_type = _igvn.type(use)->is_ptr();
 606         int offset = addp_type->offset();
 607 
 608         if (offset == Type::OffsetTop || offset == Type::OffsetBot) {
 609           NOT_PRODUCT(fail_eliminate = "Undefined field referrence";)
 610           can_eliminate = false;
 611           break;
 612         }
 613         for (DUIterator_Fast kmax, k = use->fast_outs(kmax);
 614                                    k < kmax && can_eliminate; k++) {
 615           Node* n = use->fast_out(k);
 616           if (!n->is_Store() && n->Opcode() != Op_CastP2X) {

 617             DEBUG_ONLY(disq_node = n;)
 618             if (n->is_Load() || n->is_LoadStore()) {
 619               NOT_PRODUCT(fail_eliminate = "Field load";)
 620             } else {
 621               NOT_PRODUCT(fail_eliminate = "Not store field referrence";)
 622             }
 623             can_eliminate = false;
 624           }
 625         }
 626       } else if (use->is_SafePoint()) {
 627         SafePointNode* sfpt = use->as_SafePoint();
 628         if (sfpt->is_Call() && sfpt->as_Call()->has_non_debug_use(res)) {
 629           // Object is passed as argument.
 630           DEBUG_ONLY(disq_node = use;)
 631           NOT_PRODUCT(fail_eliminate = "Object is passed as argument";)
 632           can_eliminate = false;
 633         }
 634         Node* sfptMem = sfpt->memory();
 635         if (sfptMem == NULL || sfptMem->is_top()) {
 636           DEBUG_ONLY(disq_node = use;)


 867       uint oc1 = res->outcnt();
 868 
 869       if (use->is_AddP()) {
 870         for (DUIterator_Last kmin, k = use->last_outs(kmin); k >= kmin; ) {
 871           Node *n = use->last_out(k);
 872           uint oc2 = use->outcnt();
 873           if (n->is_Store()) {
 874 #ifdef ASSERT
 875             // Verify that there is no dependent MemBarVolatile nodes,
 876             // they should be removed during IGVN, see MemBarNode::Ideal().
 877             for (DUIterator_Fast pmax, p = n->fast_outs(pmax);
 878                                        p < pmax; p++) {
 879               Node* mb = n->fast_out(p);
 880               assert(mb->is_Initialize() || !mb->is_MemBar() ||
 881                      mb->req() <= MemBarNode::Precedent ||
 882                      mb->in(MemBarNode::Precedent) != n,
 883                      "MemBarVolatile should be eliminated for non-escaping object");
 884             }
 885 #endif
 886             _igvn.replace_node(n, n->in(MemNode::Memory));


 887           } else {
 888             eliminate_card_mark(n);
 889           }
 890           k -= (oc2 - use->outcnt());
 891         }

 892       } else {
 893         eliminate_card_mark(use);
 894       }
 895       j -= (oc1 - res->outcnt());
 896     }
 897     assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
 898     _igvn.remove_dead_node(res);
 899   }
 900 
 901   //
 902   // Process other users of allocation's projections
 903   //
 904   if (_resproj != NULL && _resproj->outcnt() != 0) {
 905     // First disconnect stores captured by Initialize node.
 906     // If Initialize node is eliminated first in the following code,
 907     // it will kill such stores and DUIterator_Last will assert.
 908     for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax);  j < jmax; j++) {
 909       Node *use = _resproj->fast_out(j);
 910       if (use->is_AddP()) {
 911         // raw memory addresses used only by the initialization


1368       Node* alloc_size = new (C) ConvI2LNode(size_in_bytes);
1369       transform_later(alloc_size);
1370 #endif
1371       Node* new_alloc_bytes = new (C) AddLNode(alloc_bytes, alloc_size);
1372       transform_later(new_alloc_bytes);
1373       fast_oop_rawmem = make_store(fast_oop_ctrl, store_eden_top, alloc_bytes_adr,
1374                                    0, new_alloc_bytes, T_LONG);
1375     }
1376 
1377     InitializeNode* init = alloc->initialization();
1378     fast_oop_rawmem = initialize_object(alloc,
1379                                         fast_oop_ctrl, fast_oop_rawmem, fast_oop,
1380                                         klass_node, length, size_in_bytes);
1381 
1382     // If initialization is performed by an array copy, any required
1383     // MemBarStoreStore was already added. If the object does not
1384     // escape no need for a MemBarStoreStore. Otherwise we need a
1385     // MemBarStoreStore so that stores that initialize this object
1386     // can't be reordered with a subsequent store that makes this
1387     // object accessible by other threads.
1388     if (init == NULL || (!init->is_complete_with_arraycopy() && !init->does_not_escape())) {






1389       if (init == NULL || init->req() < InitializeNode::RawStores) {
1390         // No InitializeNode or no stores captured by zeroing
1391         // elimination. Simply add the MemBarStoreStore after object
1392         // initialization.
1393         MemBarNode* mb = MemBarNode::make(C, Op_MemBarStoreStore, Compile::AliasIdxBot);
1394         transform_later(mb);
1395 
1396         mb->init_req(TypeFunc::Memory, fast_oop_rawmem);
1397         mb->init_req(TypeFunc::Control, fast_oop_ctrl);
1398         fast_oop_ctrl = new (C) ProjNode(mb,TypeFunc::Control);
1399         transform_later(fast_oop_ctrl);
1400         fast_oop_rawmem = new (C) ProjNode(mb,TypeFunc::Memory);
1401         transform_later(fast_oop_rawmem);
1402       } else {
1403         // Add the MemBarStoreStore after the InitializeNode so that
1404         // all stores performing the initialization that were moved
1405         // before the InitializeNode happen before the storestore
1406         // barrier.
1407 
1408         Node* init_ctrl = init->proj_out(TypeFunc::Control);




  24 
  25 #include "precompiled.hpp"
  26 #include "compiler/compileLog.hpp"
  27 #include "libadt/vectset.hpp"
  28 #include "opto/addnode.hpp"
  29 #include "opto/callnode.hpp"
  30 #include "opto/cfgnode.hpp"
  31 #include "opto/compile.hpp"
  32 #include "opto/connode.hpp"
  33 #include "opto/locknode.hpp"
  34 #include "opto/loopnode.hpp"
  35 #include "opto/macro.hpp"
  36 #include "opto/memnode.hpp"
  37 #include "opto/node.hpp"
  38 #include "opto/phaseX.hpp"
  39 #include "opto/rootnode.hpp"
  40 #include "opto/runtime.hpp"
  41 #include "opto/subnode.hpp"
  42 #include "opto/type.hpp"
  43 #include "runtime/sharedRuntime.hpp"
  44 #if INCLUDE_ALL_GCS
  45 #include "gc_implementation/shenandoah/shenandoahForwarding.hpp"
  46 #include "gc_implementation/shenandoah/c2/shenandoahBarrierSetC2.hpp"
  47 #include "gc_implementation/shenandoah/c2/shenandoahSupport.hpp"
  48 #endif
  49 
  50 
  51 //
  52 // Replace any references to "oldref" in inputs to "use" with "newref".
  53 // Returns the number of replacements made.
  54 //
  55 int PhaseMacroExpand::replace_input(Node *use, Node *oldref, Node *newref) {
  56   int nreplacements = 0;
  57   uint req = use->req();
  58   for (uint j = 0; j < use->len(); j++) {
  59     Node *uin = use->in(j);
  60     if (uin == oldref) {
  61       if (j < req)
  62         use->set_req(j, newref);
  63       else
  64         use->set_prec(j, newref);
  65       nreplacements++;
  66     } else if (j >= req && uin == NULL) {
  67       break;
  68     }


 432   for (uint j = 1; j < length; j++) {
 433     Node *in = mem->in(j);
 434     if (in == NULL || in->is_top()) {
 435       values.at_put(j, in);
 436     } else  {
 437       Node *val = scan_mem_chain(in, alias_idx, offset, start_mem, alloc, &_igvn);
 438       if (val == start_mem || val == alloc_mem) {
 439         // hit a sentinel, return appropriate 0 value
 440         values.at_put(j, _igvn.zerocon(ft));
 441         continue;
 442       }
 443       if (val->is_Initialize()) {
 444         val = val->as_Initialize()->find_captured_store(offset, type2aelembytes(ft), &_igvn);
 445       }
 446       if (val == NULL) {
 447         return NULL;  // can't find a value on this path
 448       }
 449       if (val == mem) {
 450         values.at_put(j, mem);
 451       } else if (val->is_Store()) {
 452         Node* n = val->in(MemNode::ValueIn);
 453 #if INCLUDE_ALL_GCS
 454         if (UseShenandoahGC) {
 455           n = ShenandoahBarrierSetC2::bsc2()->step_over_gc_barrier(n);
 456         }
 457 #endif
 458         values.at_put(j, n);
 459       } else if(val->is_Proj() && val->in(0) == alloc) {
 460         values.at_put(j, _igvn.zerocon(ft));
 461       } else if (val->is_Phi()) {
 462         val = value_from_mem_phi(val, ft, phi_type, adr_t, alloc, value_phis, level-1);
 463         if (val == NULL) {
 464           return NULL;
 465         }
 466         values.at_put(j, val);
 467       } else if (val->Opcode() == Op_SCMemProj) {
 468         assert(val->in(0)->is_LoadStore() || val->in(0)->Opcode() == Op_EncodeISOArray, "sanity");
 469         assert(false, "Object is not scalar replaceable if a LoadStore node access its field");
 470         return NULL;
 471       } else {
 472 #ifdef ASSERT
 473         val->dump();
 474         assert(false, "unknown node on this path");
 475 #endif
 476         return NULL;  // unknown node on this path
 477       }
 478     }


 540           unique_input = n;
 541         } else if (unique_input != n) {
 542           unique_input = top;
 543           break;
 544         }
 545       }
 546       if (unique_input != NULL && unique_input != top) {
 547         mem = unique_input;
 548       } else {
 549         done = true;
 550       }
 551     } else {
 552       assert(false, "unexpected node");
 553     }
 554   }
 555   if (mem != NULL) {
 556     if (mem == start_mem || mem == alloc_mem) {
 557       // hit a sentinel, return appropriate 0 value
 558       return _igvn.zerocon(ft);
 559     } else if (mem->is_Store()) {
 560       Node* n = mem->in(MemNode::ValueIn);
 561 #if INCLUDE_ALL_GCS
 562       if (UseShenandoahGC) {
 563         n = ShenandoahBarrierSetC2::bsc2()->step_over_gc_barrier(n);
 564       }
 565 #endif
 566       return n;
 567     } else if (mem->is_Phi()) {
 568       // attempt to produce a Phi reflecting the values on the input paths of the Phi
 569       Node_Stack value_phis(a, 8);
 570       Node * phi = value_from_mem_phi(mem, ft, ftype, adr_t, alloc, &value_phis, ValueSearchLimit);
 571       if (phi != NULL) {
 572         return phi;
 573       } else {
 574         // Kill all new Phis
 575         while(value_phis.is_nonempty()) {
 576           Node* n = value_phis.node();
 577           _igvn.replace_node(n, C->top());
 578           value_phis.pop();
 579         }
 580       }
 581     }
 582   }
 583   // Something went wrong.
 584   return NULL;
 585 }
 586 


 613     }
 614   }
 615 
 616   if (can_eliminate && res != NULL) {
 617     for (DUIterator_Fast jmax, j = res->fast_outs(jmax);
 618                                j < jmax && can_eliminate; j++) {
 619       Node* use = res->fast_out(j);
 620 
 621       if (use->is_AddP()) {
 622         const TypePtr* addp_type = _igvn.type(use)->is_ptr();
 623         int offset = addp_type->offset();
 624 
 625         if (offset == Type::OffsetTop || offset == Type::OffsetBot) {
 626           NOT_PRODUCT(fail_eliminate = "Undefined field referrence";)
 627           can_eliminate = false;
 628           break;
 629         }
 630         for (DUIterator_Fast kmax, k = use->fast_outs(kmax);
 631                                    k < kmax && can_eliminate; k++) {
 632           Node* n = use->fast_out(k);
 633           if (!n->is_Store() && n->Opcode() != Op_CastP2X &&
 634               (!UseShenandoahGC || !n->is_g1_wb_pre_call())) {
 635             DEBUG_ONLY(disq_node = n;)
 636             if (n->is_Load() || n->is_LoadStore()) {
 637               NOT_PRODUCT(fail_eliminate = "Field load";)
 638             } else {
 639               NOT_PRODUCT(fail_eliminate = "Not store field referrence";)
 640             }
 641             can_eliminate = false;
 642           }
 643         }
 644       } else if (use->is_SafePoint()) {
 645         SafePointNode* sfpt = use->as_SafePoint();
 646         if (sfpt->is_Call() && sfpt->as_Call()->has_non_debug_use(res)) {
 647           // Object is passed as argument.
 648           DEBUG_ONLY(disq_node = use;)
 649           NOT_PRODUCT(fail_eliminate = "Object is passed as argument";)
 650           can_eliminate = false;
 651         }
 652         Node* sfptMem = sfpt->memory();
 653         if (sfptMem == NULL || sfptMem->is_top()) {
 654           DEBUG_ONLY(disq_node = use;)


 885       uint oc1 = res->outcnt();
 886 
 887       if (use->is_AddP()) {
 888         for (DUIterator_Last kmin, k = use->last_outs(kmin); k >= kmin; ) {
 889           Node *n = use->last_out(k);
 890           uint oc2 = use->outcnt();
 891           if (n->is_Store()) {
 892 #ifdef ASSERT
 893             // Verify that there is no dependent MemBarVolatile nodes,
 894             // they should be removed during IGVN, see MemBarNode::Ideal().
 895             for (DUIterator_Fast pmax, p = n->fast_outs(pmax);
 896                                        p < pmax; p++) {
 897               Node* mb = n->fast_out(p);
 898               assert(mb->is_Initialize() || !mb->is_MemBar() ||
 899                      mb->req() <= MemBarNode::Precedent ||
 900                      mb->in(MemBarNode::Precedent) != n,
 901                      "MemBarVolatile should be eliminated for non-escaping object");
 902             }
 903 #endif
 904             _igvn.replace_node(n, n->in(MemNode::Memory));
 905           } else if (UseShenandoahGC && n->is_g1_wb_pre_call()) {
 906             C->shenandoah_eliminate_g1_wb_pre(n, &_igvn);
 907           } else {
 908             eliminate_card_mark(n);
 909           }
 910           k -= (oc2 - use->outcnt());
 911         }
 912         _igvn.remove_dead_node(use);
 913       } else {
 914         eliminate_card_mark(use);
 915       }
 916       j -= (oc1 - res->outcnt());
 917     }
 918     assert(res->outcnt() == 0, "all uses of allocated objects must be deleted");
 919     _igvn.remove_dead_node(res);
 920   }
 921 
 922   //
 923   // Process other users of allocation's projections
 924   //
 925   if (_resproj != NULL && _resproj->outcnt() != 0) {
 926     // First disconnect stores captured by Initialize node.
 927     // If Initialize node is eliminated first in the following code,
 928     // it will kill such stores and DUIterator_Last will assert.
 929     for (DUIterator_Fast jmax, j = _resproj->fast_outs(jmax);  j < jmax; j++) {
 930       Node *use = _resproj->fast_out(j);
 931       if (use->is_AddP()) {
 932         // raw memory addresses used only by the initialization


1389       Node* alloc_size = new (C) ConvI2LNode(size_in_bytes);
1390       transform_later(alloc_size);
1391 #endif
1392       Node* new_alloc_bytes = new (C) AddLNode(alloc_bytes, alloc_size);
1393       transform_later(new_alloc_bytes);
1394       fast_oop_rawmem = make_store(fast_oop_ctrl, store_eden_top, alloc_bytes_adr,
1395                                    0, new_alloc_bytes, T_LONG);
1396     }
1397 
1398     InitializeNode* init = alloc->initialization();
1399     fast_oop_rawmem = initialize_object(alloc,
1400                                         fast_oop_ctrl, fast_oop_rawmem, fast_oop,
1401                                         klass_node, length, size_in_bytes);
1402 
1403     // If initialization is performed by an array copy, any required
1404     // MemBarStoreStore was already added. If the object does not
1405     // escape no need for a MemBarStoreStore. Otherwise we need a
1406     // MemBarStoreStore so that stores that initialize this object
1407     // can't be reordered with a subsequent store that makes this
1408     // object accessible by other threads.
1409     if ( AARCH64_ONLY ( !alloc->does_not_escape_thread() &&
1410                         (init == NULL ||
1411                          !init->is_complete_with_arraycopy()) )
1412          NOT_AARCH64  ( init == NULL ||
1413                         (!init->is_complete_with_arraycopy() &&
1414                          !init->does_not_escape()) )
1415        ) {
1416       if (init == NULL || init->req() < InitializeNode::RawStores) {
1417         // No InitializeNode or no stores captured by zeroing
1418         // elimination. Simply add the MemBarStoreStore after object
1419         // initialization.
1420         MemBarNode* mb = MemBarNode::make(C, Op_MemBarStoreStore, Compile::AliasIdxBot);
1421         transform_later(mb);
1422 
1423         mb->init_req(TypeFunc::Memory, fast_oop_rawmem);
1424         mb->init_req(TypeFunc::Control, fast_oop_ctrl);
1425         fast_oop_ctrl = new (C) ProjNode(mb,TypeFunc::Control);
1426         transform_later(fast_oop_ctrl);
1427         fast_oop_rawmem = new (C) ProjNode(mb,TypeFunc::Memory);
1428         transform_later(fast_oop_rawmem);
1429       } else {
1430         // Add the MemBarStoreStore after the InitializeNode so that
1431         // all stores performing the initialization that were moved
1432         // before the InitializeNode happen before the storestore
1433         // barrier.
1434 
1435         Node* init_ctrl = init->proj_out(TypeFunc::Control);


< prev index next >