src/hotspot/share/opto/chaitin.cpp

1825   // See if already computed; if so return it
1826   if (derived_base_map[derived->_idx]) {
1827     return derived_base_map[derived->_idx];
1828   }
1829 
1830 #ifdef ASSERT
1831   if (derived->is_Mach() && derived->as_Mach()->ideal_Opcode() == Op_VerifyVectorAlignment) {
1832     // Bypass the verification node
1833     Node* base = find_base_for_derived(derived_base_map, derived->in(1), maxlrg);
1834     derived_base_map[derived->_idx] = base;
1835     return base;
1836   }
1837 #endif
1838 
1839   // See if this happens to be a base.
1840   // NOTE: we use TypePtr instead of TypeOopPtr because we can have
1841   // pointers derived from null!  These are always along paths that
1842   // can't happen at run-time but the optimizer cannot deduce it so
1843   // we have to handle it gracefully.
1844   assert(!derived->bottom_type()->isa_narrowoop() ||
1845           derived->bottom_type()->make_ptr()->is_ptr()->_offset == 0, "sanity");
1846   const TypePtr *tj = derived->bottom_type()->isa_ptr();
1847   // If it's an OOP with a non-zero offset, then it is derived.
1848   if( tj == nullptr || tj->_offset == 0 ) {
1849     derived_base_map[derived->_idx] = derived;
1850     return derived;
1851   }
1852   // Derived is null+offset?  Base is null!
1853   if( derived->is_Con() ) {
1854     Node *base = _matcher.mach_null();
1855     assert(base != nullptr, "sanity");
1856     if (base->in(0) == nullptr) {
1857       // Initialize it once and make it shared:
1858       // set control to _root and place it into Start block
1859       // (where top() node is placed).
1860       base->init_req(0, _cfg.get_root_node());
1861       Block *startb = _cfg.get_block_for_node(C->top());
1862       uint node_pos = startb->find_node(C->top());
1863       startb->insert_node(base, node_pos);
1864       _cfg.map_node_to_block(base, startb);
1865       assert(_lrg_map.live_range_id(base) == 0, "should not have LRG yet");
1866 
1867       // The loadConP0 might have projection nodes depending on architecture
1868       // Add the projection nodes to the CFG
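
For orientation, the lookup-then-memoize shape of find_base_for_derived() can be sketched standalone: a pointer with zero offset is its own base, a constant null+offset gets the shared null base, and every answer is cached by node index so repeated queries are cheap. The ToyNode/find_base names below are illustrative stand-ins, not HotSpot API, and the recursive fallback is a simplification of the Phi/AddP handling in the real code.

#include <vector>

// Illustrative stand-ins; these are not HotSpot types.
struct ToyNode {
  unsigned idx;       // unique node index, like Node::_idx
  long     offset;    // pointer offset, 0 means "this is a base"
  bool     is_con;    // constant (e.g. null + offset)
  ToyNode* input;     // the pointer this one was derived from
};

static ToyNode NULL_BASE = {0, 0, true, nullptr};  // shared null base

// Memoized base lookup: same overall shape as find_base_for_derived(),
// minus the CFG surgery done for newly created base nodes.
ToyNode* find_base(std::vector<ToyNode*>& base_map, ToyNode* derived) {
  if (base_map[derived->idx] != nullptr) {
    return base_map[derived->idx];               // already computed
  }
  ToyNode* base;
  if (derived->offset == 0) {
    base = derived;                              // zero offset: it is a base
  } else if (derived->is_con) {
    base = &NULL_BASE;                           // null+offset: base is null
  } else {
    base = find_base(base_map, derived->input);  // walk toward the def
  }
  base_map[derived->idx] = base;                 // memoize by node index
  return base;
}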

1994         // Copies do not define a new value and so do not interfere.
1995         // Remove the copy's source from the liveout set before interfering.
1996         uint idx = n->is_Copy();
1997         if (idx) {
1998           liveout.remove(_lrg_map.live_range_id(n->in(idx)));
1999         }
2000       }
2001 
2002       // Found a safepoint?
2003       JVMState *jvms = n->jvms();
2004       if (jvms && !liveout.is_empty()) {
2005         // Now scan for a live derived pointer
2006         IndexSetIterator elements(&liveout);
2007         uint neighbor;
2008         while ((neighbor = elements.next()) != 0) {
2009           // Find reaching DEF for base and derived values
2010           // This works because we are still in SSA during this call.
2011           Node *derived = lrgs(neighbor)._def;
2012           const TypePtr *tj = derived->bottom_type()->isa_ptr();
2013           assert(!derived->bottom_type()->isa_narrowoop() ||
2014                   derived->bottom_type()->make_ptr()->is_ptr()->_offset == 0, "sanity");
2015           // If it's an OOP with a non-zero offset, then it is derived.
2016           if( tj && tj->_offset != 0 && tj->isa_oop_ptr() ) {
2017             Node *base = find_base_for_derived(derived_base_map, derived, maxlrg);
2018             assert(base->_idx < _lrg_map.size(), "");
2019             // Add reaching DEFs of derived pointer and base pointer as a
2020             // pair of inputs
2021             n->add_req(derived);
2022             n->add_req(base);
2023 
2024             // See if the base pointer is already live to this point.
2025             // Since I'm working on the SSA form, liveness amounts to
2026             // reaching defs.  So if I find the base's live range then
2027             // I know the base's def reaches here.
2028             if ((_lrg_map.live_range_id(base) >= _lrg_map.max_lrg_id() || // (Brand new base (hence not live) or
2029                  !liveout.member(_lrg_map.live_range_id(base))) && // not live) AND
2030                  (_lrg_map.live_range_id(base) > 0) && // not a constant
2031                  _cfg.get_block_for_node(base) != block) { // base not def'd in blk)
2032               // Base pointer is not currently live.  Since I stretched
2033               // the base pointer to here and it crosses basic-block
2034               // boundaries, the global live info is now incorrect.
2035               // Recompute live.
2036               must_recompute_live = true;
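
The safepoint handling above pairs every live derived pointer with its base in the node's extra inputs and flags a liveness recompute when a base gets stretched across a block boundary. A minimal standalone sketch of that bookkeeping, with ToyDef/ToySafepoint as invented stand-ins for the real LRG and safepoint structures (the toy appends live-range ids rather than nodes):

#include <set>
#include <vector>

// Simplified stand-ins, not HotSpot types.
struct ToyDef {
  int  block_id;    // block where the value is defined
  bool is_derived;  // oop with a non-zero offset
  int  base_lrg;    // live range id of its base (0 for constants)
};

struct ToySafepoint {
  std::vector<int> inputs;  // debug-info edges, appended in pairs
};

// For every derived pointer live across the safepoint, record the
// (derived, base) pair and note when global liveness must be redone.
bool add_pairs(const std::set<int>& liveout,
               const std::vector<ToyDef>& defs,
               ToySafepoint& sfpt, int current_block) {
  bool must_recompute_live = false;
  for (int lrg : liveout) {
    const ToyDef& d = defs[lrg];
    if (!d.is_derived) continue;
    sfpt.inputs.push_back(lrg);         // derived value
    sfpt.inputs.push_back(d.base_lrg);  // its base, as a pair
    // Base not already live here, not a constant, and defined in a
    // different block: the precomputed live sets are now stale.
    bool base_live_here = liveout.count(d.base_lrg) != 0;
    if (!base_live_here && d.base_lrg > 0 &&
        defs[d.base_lrg].block_id != current_block) {
      must_recompute_live = true;
    }
  }
  return must_recompute_live;
}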

2286   return buf+strlen(buf);
2287 }
2288 
2289 void PhaseChaitin::dump_for_spill_split_recycle() const {
2290   if( WizardMode && (PrintCompilation || PrintOpto) ) {
2291     // Display which live ranges need to be split and the allocator's state
2292     tty->print_cr("Graph-Coloring Iteration %d will split the following live ranges", _trip_cnt);
2293     for (uint bidx = 1; bidx < _lrg_map.max_lrg_id(); bidx++) {
2294       if( lrgs(bidx).alive() && lrgs(bidx).reg() >= LRG::SPILL_REG ) {
2295         tty->print("L%d: ", bidx);
2296         lrgs(bidx).dump();
2297       }
2298     }
2299     tty->cr();
2300     dump();
2301   }
2302 }
2303 
2304 void PhaseChaitin::dump_frame() const {
2305   const char *fp = OptoReg::regname(OptoReg::c_frame_pointer);
2306   const TypeTuple *domain = C->tf()->domain();
2307   const int        argcnt = domain->cnt() - TypeFunc::Parms;
2308 
2309   // Dump incoming arguments that are passed in registers
2310   for( int k = 0; k < argcnt; k++ ) {
2311     OptoReg::Name parmreg = _matcher._parm_regs[k].first();
2312     if( OptoReg::is_reg(parmreg))  {
2313       const char *reg_name = OptoReg::regname(parmreg);
2314       tty->print("#r%3.3d %s", parmreg, reg_name);
2315       parmreg = _matcher._parm_regs[k].second();
2316       if( OptoReg::is_reg(parmreg))  {
2317         tty->print(":%s", OptoReg::regname(parmreg));
2318       }
2319       tty->print("   : parm %d: ", k);
2320       domain->field_at(k + TypeFunc::Parms)->dump();
2321       tty->cr();
2322     }
2323   }
2324 
2325   // Check for un-owned padding above incoming args
2326   OptoReg::Name reg = _matcher._new_SP;
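
The "#r%3.3d %s" format above zero-pads the register number to three digits before the register name. A tiny standalone check of what one such parm line looks like, using made-up register values and omitting the optional second-half register:

#include <cstdio>

int main() {
  // Mimics the "#r%3.3d %s   : parm %d: " prefix printed above;
  // the register number and name are invented, not real OptoReg data.
  std::printf("#r%3.3d %s   : parm %d: \n", 9, "rsi", 0);
  // Prints: "#r009 rsi   : parm 0: "
  return 0;
}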

2495             // Check each derived/base pair
2496             for (uint idx = jvms->oopoff(); idx < sfpt->req(); idx++) {
2497               Node* check = sfpt->in(idx);
2498               bool is_derived = ((idx - jvms->oopoff()) & 1) == 0;
2499               // search upwards through spills and spill phis for AddP
2500               worklist.clear();
2501               worklist.push(check);
2502               uint k = 0;
2503               while (k < worklist.size()) {
2504                 check = worklist.at(k);
2505                 assert(check, "Bad base or derived pointer");
2506                 // See PhaseChaitin::find_base_for_derived() for all cases.
2507                 int isc = check->is_Copy();
2508                 if (isc) {
2509                   worklist.push(check->in(isc));
2510                 } else if (check->is_Phi()) {
2511                   for (uint m = 1; m < check->req(); m++) {
2512                     worklist.push(check->in(m));
2513                   }
2514                 } else if (check->is_Con()) {
2515                   if (is_derived && check->bottom_type()->is_ptr()->_offset != 0) {
2516                     // Derived is null+non-zero offset, base must be null.
2517                     assert(check->bottom_type()->is_ptr()->ptr() == TypePtr::Null, "Bad derived pointer");
2518                   } else {
2519                     assert(check->bottom_type()->is_ptr()->_offset == 0, "Bad base pointer");
2520                     // Base either ConP(nullptr) or loadConP
2521                     if (check->is_Mach()) {
2522                       assert(check->as_Mach()->ideal_Opcode() == Op_ConP, "Bad base pointer");
2523                     } else {
2524                       assert(check->Opcode() == Op_ConP &&
2525                              check->bottom_type()->is_ptr()->ptr() == TypePtr::Null, "Bad base pointer");
2526                     }
2527                   }
2528                 } else if (check->bottom_type()->is_ptr()->_offset == 0) {
2529                   if (check->is_Proj() || (check->is_Mach() &&
2530                      (check->as_Mach()->ideal_Opcode() == Op_CreateEx ||
2531                       check->as_Mach()->ideal_Opcode() == Op_ThreadLocal ||
2532                       check->as_Mach()->ideal_Opcode() == Op_CMoveP ||
2533                       check->as_Mach()->ideal_Opcode() == Op_CheckCastPP ||
2534 #ifdef _LP64
2535                       (UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_CastPP) ||
2536                       (UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_DecodeN) ||
2537                       (UseCompressedClassPointers && check->as_Mach()->ideal_Opcode() == Op_DecodeNKlass) ||
2538 #endif // _LP64
2539                       check->as_Mach()->ideal_Opcode() == Op_LoadP ||
2540                       check->as_Mach()->ideal_Opcode() == Op_LoadKlass))) {
2541                     // Valid nodes
2542                   } else {
2543                     check->dump();
2544                     assert(false, "Bad base or derived pointer");
2545                   }
2546                 } else {
2547                   assert(is_derived, "Bad base pointer");
2548                   assert(check->is_Mach() && check->as_Mach()->ideal_Opcode() == Op_AddP, "Bad derived pointer");
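
The verification loop above walks upwards from each safepoint slot through copies and phis and asserts that every reaching def is one of the accepted node kinds. A compact standalone sketch of that worklist walk, with Kind/ToyNode as invented stand-ins (the real phi loop also skips the control input, which this toy model has no notion of):

#include <cassert>
#include <vector>

// Toy node model for the verification walk; these are not HotSpot classes.
enum class Kind { Copy, Phi, Con, AddP, Other };

struct ToyNode {
  Kind kind;
  long offset;                   // pointer offset of the node's type
  std::vector<ToyNode*> inputs;  // defs feeding this node
};

// Walk upwards through copies and phis, as the loop above does, and
// check that every terminal def is an acceptable kind for the slot.
void verify_derived_slot(ToyNode* start) {
  std::vector<ToyNode*> worklist{start};
  for (size_t k = 0; k < worklist.size(); k++) {
    ToyNode* check = worklist[k];
    assert(check != nullptr && "Bad base or derived pointer");
    switch (check->kind) {
      case Kind::Copy:                        // a copy forwards one input
        worklist.push_back(check->inputs[0]);
        break;
      case Kind::Phi:                         // a phi merges several defs
        for (ToyNode* in : check->inputs) {
          worklist.push_back(in);
        }
        break;
      case Kind::Con:                         // constants are accepted here;
        break;                                // the real code checks them further
      case Kind::AddP:                        // the expected derived def
        assert(check->offset != 0 && "derived pointer should carry an offset");
        break;
      default:
        assert(false && "Bad base or derived pointer");
    }
  }
}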

1825   // See if already computed; if so return it
1826   if (derived_base_map[derived->_idx]) {
1827     return derived_base_map[derived->_idx];
1828   }
1829 
1830 #ifdef ASSERT
1831   if (derived->is_Mach() && derived->as_Mach()->ideal_Opcode() == Op_VerifyVectorAlignment) {
1832     // Bypass the verification node
1833     Node* base = find_base_for_derived(derived_base_map, derived->in(1), maxlrg);
1834     derived_base_map[derived->_idx] = base;
1835     return base;
1836   }
1837 #endif
1838 
1839   // See if this happens to be a base.
1840   // NOTE: we use TypePtr instead of TypeOopPtr because we can have
1841   // pointers derived from null!  These are always along paths that
1842   // can't happen at run-time but the optimizer cannot deduce it so
1843   // we have to handle it gracefully.
1844   assert(!derived->bottom_type()->isa_narrowoop() ||
1845          derived->bottom_type()->make_ptr()->is_ptr()->offset() == 0, "sanity");
1846   const TypePtr *tj = derived->bottom_type()->isa_ptr();
1847   // If it's an OOP with a non-zero offset, then it is derived.
1848   if (tj == nullptr || tj->offset() == 0) {
1849     derived_base_map[derived->_idx] = derived;
1850     return derived;
1851   }
1852   // Derived is null+offset?  Base is null!
1853   if( derived->is_Con() ) {
1854     Node *base = _matcher.mach_null();
1855     assert(base != nullptr, "sanity");
1856     if (base->in(0) == nullptr) {
1857       // Initialize it once and make it shared:
1858       // set control to _root and place it into Start block
1859       // (where top() node is placed).
1860       base->init_req(0, _cfg.get_root_node());
1861       Block *startb = _cfg.get_block_for_node(C->top());
1862       uint node_pos = startb->find_node(C->top());
1863       startb->insert_node(base, node_pos);
1864       _cfg.map_node_to_block(base, startb);
1865       assert(_lrg_map.live_range_id(base) == 0, "should not have LRG yet");
1866 
1867       // The loadConP0 might have projection nodes depending on architecture
1868       // Add the projection nodes to the CFG
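
The visible change in this hunk is that the pointer offset is now read through an accessor instead of the _offset field. A generic sketch of that field-to-accessor shape, with ToyPtrType as an invented stand-in rather than the real TypePtr:

// Illustrative only: a direct field read replaced by an accessor.
class ToyPtrType {
 public:
  explicit ToyPtrType(int offset) : _offset(offset) {}
  int offset() const { return _offset; }  // callers now go through this
 private:
  int _offset;                            // representation stays private
};

// Before the change: tj->_offset == 0;  after: tj->offset() == 0.
bool looks_like_base(const ToyPtrType* tj) {
  return tj == nullptr || tj->offset() == 0;
}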

1994         // Copies do not define a new value and so do not interfere.
1995         // Remove the copy's source from the liveout set before interfering.
1996         uint idx = n->is_Copy();
1997         if (idx) {
1998           liveout.remove(_lrg_map.live_range_id(n->in(idx)));
1999         }
2000       }
2001 
2002       // Found a safepoint?
2003       JVMState *jvms = n->jvms();
2004       if (jvms && !liveout.is_empty()) {
2005         // Now scan for a live derived pointer
2006         IndexSetIterator elements(&liveout);
2007         uint neighbor;
2008         while ((neighbor = elements.next()) != 0) {
2009           // Find reaching DEF for base and derived values
2010           // This works because we are still in SSA during this call.
2011           Node *derived = lrgs(neighbor)._def;
2012           const TypePtr *tj = derived->bottom_type()->isa_ptr();
2013           assert(!derived->bottom_type()->isa_narrowoop() ||
2014                  derived->bottom_type()->make_ptr()->is_ptr()->offset() == 0, "sanity");
2015           // If it's an OOP with a non-zero offset, then it is derived.
2016           if (tj && tj->offset() != 0 && tj->isa_oop_ptr()) {
2017             Node *base = find_base_for_derived(derived_base_map, derived, maxlrg);
2018             assert(base->_idx < _lrg_map.size(), "");
2019             // Add reaching DEFs of derived pointer and base pointer as a
2020             // pair of inputs
2021             n->add_req(derived);
2022             n->add_req(base);
2023 
2024             // See if the base pointer is already live to this point.
2025             // Since I'm working on the SSA form, liveness amounts to
2026             // reaching defs.  So if I find the base's live range then
2027             // I know the base's def reaches here.
2028             if ((_lrg_map.live_range_id(base) >= _lrg_map.max_lrg_id() || // (Brand new base (hence not live) or
2029                  !liveout.member(_lrg_map.live_range_id(base))) && // not live) AND
2030                  (_lrg_map.live_range_id(base) > 0) && // not a constant
2031                  _cfg.get_block_for_node(base) != block) { // base not def'd in blk)
2032               // Base pointer is not currently live.  Since I stretched
2033               // the base pointer to here and it crosses basic-block
2034               // boundaries, the global live info is now incorrect.
2035               // Recompute live.
2036               must_recompute_live = true;

2286   return buf+strlen(buf);
2287 }
2288 
2289 void PhaseChaitin::dump_for_spill_split_recycle() const {
2290   if( WizardMode && (PrintCompilation || PrintOpto) ) {
2291     // Display which live ranges need to be split and the allocator's state
2292     tty->print_cr("Graph-Coloring Iteration %d will split the following live ranges", _trip_cnt);
2293     for (uint bidx = 1; bidx < _lrg_map.max_lrg_id(); bidx++) {
2294       if( lrgs(bidx).alive() && lrgs(bidx).reg() >= LRG::SPILL_REG ) {
2295         tty->print("L%d: ", bidx);
2296         lrgs(bidx).dump();
2297       }
2298     }
2299     tty->cr();
2300     dump();
2301   }
2302 }
2303 
2304 void PhaseChaitin::dump_frame() const {
2305   const char *fp = OptoReg::regname(OptoReg::c_frame_pointer);
2306   const TypeTuple *domain = C->tf()->domain_cc();
2307   const int        argcnt = domain->cnt() - TypeFunc::Parms;
2308 
2309   // Dump incoming arguments that are passed in registers
2310   for( int k = 0; k < argcnt; k++ ) {
2311     OptoReg::Name parmreg = _matcher._parm_regs[k].first();
2312     if( OptoReg::is_reg(parmreg))  {
2313       const char *reg_name = OptoReg::regname(parmreg);
2314       tty->print("#r%3.3d %s", parmreg, reg_name);
2315       parmreg = _matcher._parm_regs[k].second();
2316       if( OptoReg::is_reg(parmreg))  {
2317         tty->print(":%s", OptoReg::regname(parmreg));
2318       }
2319       tty->print("   : parm %d: ", k);
2320       domain->field_at(k + TypeFunc::Parms)->dump();
2321       tty->cr();
2322     }
2323   }
2324 
2325   // Check for un-owned padding above incoming args
2326   OptoReg::Name reg = _matcher._new_SP;

2495             // Check each derived/base pair
2496             for (uint idx = jvms->oopoff(); idx < sfpt->req(); idx++) {
2497               Node* check = sfpt->in(idx);
2498               bool is_derived = ((idx - jvms->oopoff()) & 1) == 0;
2499               // search upwards through spills and spill phis for AddP
2500               worklist.clear();
2501               worklist.push(check);
2502               uint k = 0;
2503               while (k < worklist.size()) {
2504                 check = worklist.at(k);
2505                 assert(check, "Bad base or derived pointer");
2506                 // See PhaseChaitin::find_base_for_derived() for all cases.
2507                 int isc = check->is_Copy();
2508                 if (isc) {
2509                   worklist.push(check->in(isc));
2510                 } else if (check->is_Phi()) {
2511                   for (uint m = 1; m < check->req(); m++) {
2512                     worklist.push(check->in(m));
2513                   }
2514                 } else if (check->is_Con()) {
2515                   if (is_derived && check->bottom_type()->is_ptr()->offset() != 0) {
2516                     // Derived is null+non-zero offset, base must be null.
2517                     assert(check->bottom_type()->is_ptr()->ptr() == TypePtr::Null, "Bad derived pointer");
2518                   } else {
2519                     assert(check->bottom_type()->is_ptr()->offset() == 0, "Bad base pointer");
2520                     // Base either ConP(nullptr) or loadConP
2521                     if (check->is_Mach()) {
2522                       assert(check->as_Mach()->ideal_Opcode() == Op_ConP, "Bad base pointer");
2523                     } else {
2524                       assert(check->Opcode() == Op_ConP &&
2525                              check->bottom_type()->is_ptr()->ptr() == TypePtr::Null, "Bad base pointer");
2526                     }
2527                   }
2528                 } else if (check->bottom_type()->is_ptr()->offset() == 0) {
2529                   if (check->is_Proj() || (check->is_Mach() &&
2530                      (check->as_Mach()->ideal_Opcode() == Op_CreateEx ||
2531                       check->as_Mach()->ideal_Opcode() == Op_ThreadLocal ||
2532                       check->as_Mach()->ideal_Opcode() == Op_CMoveP ||
2533                       check->as_Mach()->ideal_Opcode() == Op_CheckCastPP ||
2534 #ifdef _LP64
2535                       (UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_CastPP) ||
2536                       (UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_DecodeN) ||
2537                       (UseCompressedClassPointers && check->as_Mach()->ideal_Opcode() == Op_DecodeNKlass) ||
2538 #endif // _LP64
2539                       check->as_Mach()->ideal_Opcode() == Op_LoadP ||
2540                       check->as_Mach()->ideal_Opcode() == Op_LoadKlass))) {
2541                     // Valid nodes
2542                   } else {
2543                     check->dump();
2544                     assert(false, "Bad base or derived pointer");
2545                   }
2546                 } else {
2547                   assert(is_derived, "Bad base pointer");
2548                   assert(check->is_Mach() && check->as_Mach()->ideal_Opcode() == Op_AddP, "Bad derived pointer");