src/hotspot/share/opto/superword.cpp

Old version:

3463   NOT_PRODUCT(_tracer.ctor_1(mem);)
3464 
3465   Node* adr = mem->in(MemNode::Address);
3466   if (!adr->is_AddP()) {
3467     assert(!valid(), "too complex");
3468     return;
3469   }
3470   // Match AddP(base, AddP(ptr, k*iv [+ invariant]), constant)
3471   Node* base = adr->in(AddPNode::Base);
3472   // The base address should be loop invariant
3473   if (!invariant(base)) {
3474     assert(!valid(), "base address is loop variant");
3475     return;
3476   }
3477   // unsafe references require misaligned vector access support
3478   if (base->is_top() && !Matcher::misaligned_vectors_ok()) {
3479     assert(!valid(), "unsafe access");
3480     return;
3481   }
3482 
3483   NOT_PRODUCT(if(_slp->is_trace_alignment()) _tracer.store_depth();)
3484   NOT_PRODUCT(_tracer.ctor_2(adr);)
3485 
3486   int i;
3487   for (i = 0; i < 3; i++) {
3488     NOT_PRODUCT(_tracer.ctor_3(adr, i);)
3489 
3490     if (!scaled_iv_plus_offset(adr->in(AddPNode::Offset))) {
3491       assert(!valid(), "too complex");
3492       return;
3493     }
3494     adr = adr->in(AddPNode::Address);
3495     NOT_PRODUCT(_tracer.ctor_4(adr, i);)
3496 
3497     if (base == adr || !adr->is_AddP()) {
3498       NOT_PRODUCT(_tracer.ctor_5(adr, base, i);)
3499       break; // stop looking at addp's
3500     }
3501   }
3502   if (!invariant(adr)) {

New version:

3463   NOT_PRODUCT(_tracer.ctor_1(mem);)
3464 
3465   Node* adr = mem->in(MemNode::Address);
3466   if (!adr->is_AddP()) {
3467     assert(!valid(), "too complex");
3468     return;
3469   }
3470   // Match AddP(base, AddP(ptr, k*iv [+ invariant]), constant)
3471   Node* base = adr->in(AddPNode::Base);
3472   // The base address should be loop invariant
3473   if (!invariant(base)) {
3474     assert(!valid(), "base address is loop variant");
3475     return;
3476   }
3477   // unsafe references require misaligned vector access support
3478   if (base->is_top() && !Matcher::misaligned_vectors_ok()) {
3479     assert(!valid(), "unsafe access");
3480     return;
3481   }
3482 
3483   // Detect a Shenandoah write barrier between the pre and main loop
3484   // (which could break loop alignment code)
3485   if (UseShenandoahGC) {
3486     CountedLoopNode *main_head = slp->lp()->as_CountedLoop();
3487     if (main_head->is_main_loop()) {
3488       Node* c = main_head->skip_predicates()->in(0)->in(0)->in(0);
3489       if (!c->is_CountedLoopEnd()) {
3490         // in case of a reserve copy
3491         c = main_head->skip_strip_mined()->in(LoopNode::EntryControl)->in(0)->in(0);
3492         c = CountedLoopNode::skip_predicates_from_entry(c);
3493         c = c->in(0)->in(0)->in(0);
3494         assert(c->is_CountedLoopEnd(), "where's the pre loop?");
3495       }
3496       CountedLoopEndNode* pre_end = c->as_CountedLoopEnd();
3497       CountedLoopNode* pre_loop = pre_end->loopnode();
3498       assert(pre_loop->is_pre_loop(), "where's the pre loop?");
3499 
3500       Node* base_c = phase()->get_ctrl(base);
3501       if (!phase()->is_dominator(base_c, pre_loop)) {
3502         return;
3503       }
3504     }
3505   }
3506 
3507   NOT_PRODUCT(if(_slp->is_trace_alignment()) _tracer.store_depth();)
3508   NOT_PRODUCT(_tracer.ctor_2(adr);)
3509 
3510   int i;
3511   for (i = 0; i < 3; i++) {
3512     NOT_PRODUCT(_tracer.ctor_3(adr, i);)
3513 
3514     if (!scaled_iv_plus_offset(adr->in(AddPNode::Offset))) {
3515       assert(!valid(), "too complex");
3516       return;
3517     }
3518     adr = adr->in(AddPNode::Address);
3519     NOT_PRODUCT(_tracer.ctor_4(adr, i);)
3520 
3521     if (base == adr || !adr->is_AddP()) {
3522       NOT_PRODUCT(_tracer.ctor_5(adr, base, i);)
3523       break; // stop looking at addp's
3524     }
3525   }
3526   if (!invariant(adr)) {
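
The block added in the new version bails out of the alignment analysis unless the control of the base address dominates the pre loop (phase()->is_dominator(base_c, pre_loop)); per the comment in the patch, a Shenandoah write barrier sitting between the pre and main loop could otherwise break the loop alignment code. As a rough standalone illustration of that dominance test (made-up Block type and straight-line chain, not HotSpot's PhaseIdealLoop implementation), a minimal immediate-dominator walk looks like this:

#include <cassert>

// Toy control-flow block: only tracks its immediate dominator.
struct Block { Block* idom; };   // nullptr at the root

// a dominates b iff a appears somewhere on b's immediate-dominator chain.
static bool is_dominator(const Block* a, const Block* b) {
    for (const Block* c = b; c != nullptr; c = c->idom) {
        if (c == a) return true;
    }
    return false;
}

int main() {
    // entry -> pre_loop -> main_loop, a straight-line control chain.
    Block entry    {nullptr};
    Block pre_loop {&entry};
    Block main_loop{&pre_loop};

    assert(is_dominator(&entry, &pre_loop));       // base defined before the pre loop: analysis may proceed
    assert(!is_dominator(&main_loop, &pre_loop));  // base only available later: the new code returns early
    return 0;
}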