
src/hotspot/share/opto/superword.cpp

3462   NOT_PRODUCT(_tracer.ctor_1(mem);)
3463 
3464   Node* adr = mem->in(MemNode::Address);
3465   if (!adr->is_AddP()) {
3466     assert(!valid(), "too complex");
3467     return;
3468   }
3469   // Match AddP(base, AddP(ptr, k*iv [+ invariant]), constant)
3470   Node* base = adr->in(AddPNode::Base);
3471   // The base address should be loop invariant
3472   if (!invariant(base)) {
3473     assert(!valid(), "base address is loop variant");
3474     return;
3475   }
3476   // unsafe references require misaligned vector access support
3477   if (base->is_top() && !Matcher::misaligned_vectors_ok()) {
3478     assert(!valid(), "unsafe access");
3479     return;
3480   }
3481 
3482   // Detect a Shenandoah write barrier between the pre and main loop
3483   // (which could break loop alignment code)
3484   if (UseShenandoahGC) {
3485     CountedLoopNode *main_head = slp->lp()->as_CountedLoop();
3486     if (main_head->is_main_loop()) {
3487       Node* c = main_head->skip_predicates()->in(0)->in(0)->in(0);
3488       if (!c->is_CountedLoopEnd()) {
3489         // in case of a reserve copy
3490         c = main_head->skip_strip_mined()->in(LoopNode::EntryControl)->in(0)->in(0);
3491         c = CountedLoopNode::skip_predicates_from_entry(c);
3492         c = c->in(0)->in(0)->in(0);
3493         assert(c->is_CountedLoopEnd(), "where's the pre loop?");
3494       }
3495       CountedLoopEndNode* pre_end = c->as_CountedLoopEnd();
3496       CountedLoopNode* pre_loop = pre_end->loopnode();
3497       assert(pre_loop->is_pre_loop(), "where's the pre loop?");
3498 
3499       Node* base_c = phase()->get_ctrl(base);
3500       if (!phase()->is_dominator(base_c, pre_loop)) {
3501         return;
3502       }
3503     }
3504   }
3505 
3506   NOT_PRODUCT(if(_slp->is_trace_alignment()) _tracer.store_depth();)
3507   NOT_PRODUCT(_tracer.ctor_2(adr);)
3508 
3509   int i;
3510   for (i = 0; i < 3; i++) {
3511     NOT_PRODUCT(_tracer.ctor_3(adr, i);)
3512 
3513     if (!scaled_iv_plus_offset(adr->in(AddPNode::Offset))) {
3514       assert(!valid(), "too complex");
3515       return;
3516     }
3517     adr = adr->in(AddPNode::Address);
3518     NOT_PRODUCT(_tracer.ctor_4(adr, i);)
3519 
3520     if (base == adr || !adr->is_AddP()) {
3521       NOT_PRODUCT(_tracer.ctor_5(adr, base, i);)
3522       break; // stop looking at addp's
3523     }
3524   }
3525   if (!invariant(adr)) {
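
For context on what the constructor above is matching: SWPointer tries to reduce a memory address to the shape base + invariant + scale*iv + constant, per the comment "Match AddP(base, AddP(ptr, k*iv [+ invariant]), constant)". The stand-alone sketch below is only a toy model of that decomposition, not HotSpot code: Expr, ScaledIvPlusOffset and match() are invented names, and the real scaled_iv_plus_offset() also handles loop invariants, left shifts and ConvI2L, among other cases.

#include <cstdio>

// Toy expression tree standing in for the ideal-graph nodes the real code
// walks (AddP / ConI / MulI / the loop's induction variable).
struct Expr {
  enum Kind { Iv, Con, Add, Mul } kind;
  int value;            // constant value, used when kind == Con
  const Expr* lhs;
  const Expr* rhs;
};

// Result of the match: address ~ scale*iv + offset, or "too complex".
struct ScaledIvPlusOffset {
  bool valid = false;
  int  scale = 0;
  int  offset = 0;
};

// Mirrors the spirit of scaled_iv_plus_offset(): accept iv, k*iv,
// k*iv + c, c + k*iv, and plain constants; reject anything else.
static ScaledIvPlusOffset match(const Expr* e) {
  ScaledIvPlusOffset r;
  switch (e->kind) {
    case Expr::Iv:  r.valid = true; r.scale = 1;        return r;
    case Expr::Con: r.valid = true; r.offset = e->value; return r;
    case Expr::Mul:
      if (e->lhs->kind == Expr::Iv && e->rhs->kind == Expr::Con) {
        r.valid = true; r.scale = e->rhs->value;
      }
      return r;
    case Expr::Add: {
      ScaledIvPlusOffset a = match(e->lhs);
      ScaledIvPlusOffset b = match(e->rhs);
      // Keep at most one scaled-iv term so the result stays linear in iv.
      if (a.valid && b.valid && (a.scale == 0 || b.scale == 0)) {
        r.valid = true;
        r.scale = a.scale + b.scale;
        r.offset = a.offset + b.offset;
      }
      return r;
    }
  }
  return r;
}

int main() {
  // iv*8 + 16, roughly the offset pattern of a long-array element
  // a[i + 2] relative to the array base (ignoring the object header).
  Expr iv{Expr::Iv, 0, nullptr, nullptr};
  Expr eight{Expr::Con, 8, nullptr, nullptr};
  Expr sixteen{Expr::Con, 16, nullptr, nullptr};
  Expr mul{Expr::Mul, 0, &iv, &eight};
  Expr addr{Expr::Add, 0, &mul, &sixteen};

  ScaledIvPlusOffset p = match(&addr);
  printf("valid=%d scale=%d offset=%d\n", p.valid, p.scale, p.offset);
  return 0;
}

Running it prints valid=1 scale=8 offset=16, which is the (scale, offset) pair the alignment analysis needs; anything the matcher cannot reduce to that form is rejected, just as the constructor asserts "too complex" and returns.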
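
The new Shenandoah hunk bails out when the control node producing base does not dominate the pre-loop, i.e. when the base may only be materialized (for example by a write barrier sitting between the pre and main loop), where it could invalidate the alignment computed against the pre-loop limit. The toy below only illustrates the dominance notion that the is_dominator(base_c, pre_loop) query relies on; Cfg and dominates() are invented names for this sketch, and PhaseIdealLoop computes dominators quite differently.

#include <cstdio>
#include <queue>
#include <vector>

// Tiny CFG: blocks 0..n-1, block 0 is the entry, succ holds successor lists.
struct Cfg {
  std::vector<std::vector<int>> succ;
};

// Returns true if every path from the entry to 'b' passes through 'a',
// i.e. 'a' dominates 'b'. Checked by refusing to walk through 'a' and
// testing whether 'b' is still reachable from the entry.
static bool dominates(const Cfg& g, int a, int b) {
  if (a == b) return true;
  std::vector<bool> seen(g.succ.size(), false);
  std::queue<int> work;
  seen[0] = true;
  work.push(0);
  while (!work.empty()) {
    int n = work.front(); work.pop();
    if (n == a) continue;          // paths through 'a' do not count
    if (n == b) return false;      // reached 'b' while avoiding 'a'
    for (int s : g.succ[n]) {
      if (!seen[s]) { seen[s] = true; work.push(s); }
    }
  }
  return true;
}

int main() {
  // 0:entry -> 1:base ctrl -> 2:pre-loop -> 3:between loops -> 4:main loop
  Cfg g{{ {1}, {2}, {3}, {4}, {} }};
  printf("base ctrl dominates pre-loop:        %d\n", dominates(g, 1, 2)); // 1
  printf("'between loops' dominates pre-loop:  %d\n", dominates(g, 3, 2)); // 0
  return 0;
}

In the second case the base would only come into existence after the pre-loop has run, which is exactly the situation the added check refuses to vectorize with alignment assumptions.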

