  // See if already computed; if so return it
  if (derived_base_map[derived->_idx]) {
    return derived_base_map[derived->_idx];
  }

#ifdef ASSERT
  if (derived->is_Mach() && derived->as_Mach()->ideal_Opcode() == Op_VerifyVectorAlignment) {
    // Bypass the verification node
    Node* base = find_base_for_derived(derived_base_map, derived->in(1), maxlrg);
    derived_base_map[derived->_idx] = base;
    return base;
  }
#endif

  // See if this happens to be a base.
  // NOTE: we use TypePtr instead of TypeOopPtr because we can have
  // pointers derived from null! These are always along paths that
  // can't happen at run-time but the optimizer cannot deduce it so
  // we have to handle it gracefully.
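  // (For example, a constant null feeding an AddP along a path the optimizer
  //  could not prove dead yields a "null + offset" derived pointer; the
  //  is_Con() case below handles exactly that shape.)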
  assert(!derived->bottom_type()->isa_narrowoop() ||
         derived->bottom_type()->make_ptr()->is_ptr()->_offset == 0, "sanity");
  const TypePtr *tj = derived->bottom_type()->isa_ptr();
  // If it's an OOP with a non-zero offset, then it is derived.
  if( tj == nullptr || tj->_offset == 0 ) {
    derived_base_map[derived->_idx] = derived;
    return derived;
  }
  // Derived is null+offset? Base is null!
  if( derived->is_Con() ) {
    Node *base = _matcher.mach_null();
    assert(base != nullptr, "sanity");
    if (base->in(0) == nullptr) {
      // Initialize it once and make it shared:
      // set control to _root and place it into Start block
      // (where top() node is placed).
      base->init_req(0, _cfg.get_root_node());
      Block *startb = _cfg.get_block_for_node(C->top());
      uint node_pos = startb->find_node(C->top());
      startb->insert_node(base, node_pos);
      _cfg.map_node_to_block(base, startb);
      assert(_lrg_map.live_range_id(base) == 0, "should not have LRG yet");

      // The loadConP0 might have projection nodes depending on architecture
      // Add the projection nodes to the CFG

// ...

        // Copies do not define a new value and so do not interfere.
        // Remove the copy's source from the liveout set before interfering.
        uint idx = n->is_Copy();
        if (idx) {
          liveout.remove(_lrg_map.live_range_id(n->in(idx)));
        }
      }

      // Found a safepoint?
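      // At a safepoint, every live derived oop is recorded together with the
      // base oop it was computed from, as an extra (derived, base) input pair
      // appended to the safepoint node below; this lets the oopmap rebase the
      // derived value if GC moves the underlying object.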
      JVMState *jvms = n->jvms();
      if (jvms && !liveout.is_empty()) {
        // Now scan for a live derived pointer
        IndexSetIterator elements(&liveout);
        uint neighbor;
        while ((neighbor = elements.next()) != 0) {
          // Find reaching DEF for base and derived values
          // This works because we are still in SSA during this call.
          Node *derived = lrgs(neighbor)._def;
          const TypePtr *tj = derived->bottom_type()->isa_ptr();
          assert(!derived->bottom_type()->isa_narrowoop() ||
                 derived->bottom_type()->make_ptr()->is_ptr()->_offset == 0, "sanity");
          // If it's an OOP with a non-zero offset, then it is derived.
          if( tj && tj->_offset != 0 && tj->isa_oop_ptr() ) {
            Node *base = find_base_for_derived(derived_base_map, derived, maxlrg);
            assert(base->_idx < _lrg_map.size(), "");
            // Add reaching DEFs of derived pointer and base pointer as a
            // pair of inputs
            n->add_req(derived);
            n->add_req(base);
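            // For example (schematically):
            //   base    = LoadP ...             // an oop
            //   derived = AddP base, base, #16  // interior pointer into it
            // Both halves of the (derived, base) pair just added are now kept
            // live across this safepoint together.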

            // See if the base pointer is already live to this point.
            // Since I'm working on the SSA form, live-ness amounts to
            // reaching def's. So if I find the base's live range then
            // I know the base's def reaches here.
            if ((_lrg_map.live_range_id(base) >= _lrg_map.max_lrg_id() || // (Brand new base (hence not live) or
                 !liveout.member(_lrg_map.live_range_id(base))) && // not live) AND
                (_lrg_map.live_range_id(base) > 0) && // not a constant
                _cfg.get_block_for_node(base) != block) { // base not def'd in blk)
              // Base pointer is not currently live. Since I stretched
              // the base pointer to here and it crosses basic-block
              // boundaries, the global live info is now incorrect.
              // Recompute live.
              must_recompute_live = true;

// ...

  return buf+strlen(buf);
}

void PhaseChaitin::dump_for_spill_split_recycle() const {
  if( WizardMode && (PrintCompilation || PrintOpto) ) {
    // Display which live ranges need to be split and the allocator's state
    tty->print_cr("Graph-Coloring Iteration %d will split the following live ranges", _trip_cnt);
    for (uint bidx = 1; bidx < _lrg_map.max_lrg_id(); bidx++) {
      if( lrgs(bidx).alive() && lrgs(bidx).reg() >= LRG::SPILL_REG ) {
        tty->print("L%d: ", bidx);
        lrgs(bidx).dump();
      }
    }
    tty->cr();
    dump();
  }
}

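// Debug helper: dump the frame layout as the allocator sees it, starting with
// the incoming parameter registers/stack slots and any padding above them.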
void PhaseChaitin::dump_frame() const {
  const char *fp = OptoReg::regname(OptoReg::c_frame_pointer);
  const TypeTuple *domain = C->tf()->domain();
  const int argcnt = domain->cnt() - TypeFunc::Parms;

  // Incoming arguments in registers dump
  for( int k = 0; k < argcnt; k++ ) {
    OptoReg::Name parmreg = _matcher._parm_regs[k].first();
    if( OptoReg::is_reg(parmreg)) {
      const char *reg_name = OptoReg::regname(parmreg);
      tty->print("#r%3.3d %s", parmreg, reg_name);
      parmreg = _matcher._parm_regs[k].second();
      if( OptoReg::is_reg(parmreg)) {
        tty->print(":%s", OptoReg::regname(parmreg));
      }
      tty->print(" : parm %d: ", k);
      domain->field_at(k + TypeFunc::Parms)->dump();
      tty->cr();
    }
  }

  // Check for un-owned padding above incoming args
  OptoReg::Name reg = _matcher._new_SP;

// ...

        // Check each derived/base pair
        for (uint idx = jvms->oopoff(); idx < sfpt->req(); idx++) {
          Node* check = sfpt->in(idx);
          bool is_derived = ((idx - jvms->oopoff()) & 1) == 0;
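          // (Safepoint inputs appended after oopoff() alternate derived, base,
          //  matching the order in which the pairs were added when base
          //  pointer live ranges were stretched; so even positions relative to
          //  oopoff() hold the derived pointers.)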
          // search upwards through spills and spill phis for AddP
          worklist.clear();
          worklist.push(check);
          uint k = 0;
          while (k < worklist.size()) {
            check = worklist.at(k);
            assert(check, "Bad base or derived pointer");
            // See PhaseChaitin::find_base_for_derived() for all cases.
            int isc = check->is_Copy();
            if (isc) {
              worklist.push(check->in(isc));
            } else if (check->is_Phi()) {
              for (uint m = 1; m < check->req(); m++) {
                worklist.push(check->in(m));
              }
            } else if (check->is_Con()) {
              if (is_derived && check->bottom_type()->is_ptr()->_offset != 0) {
                // Derived is null+non-zero offset, base must be null.
                assert(check->bottom_type()->is_ptr()->ptr() == TypePtr::Null, "Bad derived pointer");
              } else {
                assert(check->bottom_type()->is_ptr()->_offset == 0, "Bad base pointer");
                // Base either ConP(nullptr) or loadConP
                if (check->is_Mach()) {
                  assert(check->as_Mach()->ideal_Opcode() == Op_ConP, "Bad base pointer");
                } else {
                  assert(check->Opcode() == Op_ConP &&
                         check->bottom_type()->is_ptr()->ptr() == TypePtr::Null, "Bad base pointer");
                }
              }
            } else if (check->bottom_type()->is_ptr()->_offset == 0) {
              if (check->is_Proj() || (check->is_Mach() &&
                  (check->as_Mach()->ideal_Opcode() == Op_CreateEx ||
                   check->as_Mach()->ideal_Opcode() == Op_ThreadLocal ||
                   check->as_Mach()->ideal_Opcode() == Op_CMoveP ||
                   check->as_Mach()->ideal_Opcode() == Op_CheckCastPP ||
#ifdef _LP64
                   (UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_CastPP) ||
                   (UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_DecodeN) ||
                   (UseCompressedClassPointers && check->as_Mach()->ideal_Opcode() == Op_DecodeNKlass) ||
#endif // _LP64
                   check->as_Mach()->ideal_Opcode() == Op_LoadP ||
                   check->as_Mach()->ideal_Opcode() == Op_LoadKlass))) {
                // Valid nodes
              } else {
                check->dump();
                assert(false, "Bad base or derived pointer");
              }
            } else {
              assert(is_derived, "Bad base pointer");
              assert(check->is_Mach() && check->as_Mach()->ideal_Opcode() == Op_AddP, "Bad derived pointer");