    // For each memory input of the Phi, find the value of the field on that path.
    Node* in = mem->in(j);
    if (in == nullptr || in->is_top()) {
      values.at_put(j, in);
    } else {
      Node* val = scan_mem_chain(in, alias_idx, offset, start_mem, alloc, &_igvn);
      if (val == start_mem || val == alloc_mem) {
        // Hit a sentinel: the field was never written on this path, so use the
        // appropriate zero constant.
        values.at_put(j, _igvn.zerocon(ft));
        continue;
      }
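      // The allocation's Initialize node may have captured the store for this
      // field; if so, use that captured store as the value.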
      if (val->is_Initialize()) {
        val = val->as_Initialize()->find_captured_store(offset, type2aelembytes(ft), &_igvn);
      }
      if (val == nullptr) {
        return nullptr; // can't find a value on this path
      }
      if (val == mem) {
        values.at_put(j, mem);
      } else if (val->is_Store()) {
        // The stored value may be hidden behind a GC barrier; step over it.
        Node* n = val->in(MemNode::ValueIn);
        BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
        n = bs->step_over_gc_barrier(n);
        if (is_subword_type(ft)) {
          // Truncate the stored value to the width of the sub-word field.
          n = Compile::narrow_value(ft, n, phi_type, &_igvn, true);
        }
        values.at_put(j, n);
      } else if (val->is_Proj() && val->in(0) == alloc) {
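        // The value comes straight from the allocation: the field still holds
        // its default (zero) value on this path.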
        values.at_put(j, _igvn.zerocon(ft));
      } else if (val->is_Phi()) {
        val = value_from_mem_phi(val, ft, phi_type, adr_t, alloc, value_phis, level-1);
        if (val == nullptr) {
          return nullptr;
        }
        values.at_put(j, val);
      } else if (val->Opcode() == Op_SCMemProj) {
        assert(val->in(0)->is_LoadStore() ||
               val->in(0)->Opcode() == Op_EncodeISOArray ||
               val->in(0)->Opcode() == Op_StrCompressedCopy, "sanity");
        assert(false, "Object is not scalar replaceable if a LoadStore node accesses its field");
        return nullptr;
      } else if (val->is_ArrayCopy()) {
        Node* res = make_arraycopy_load(val->as_ArrayCopy(), offset, val->in(0), val->in(TypeFunc::Memory), ft, phi_type, alloc);
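        // ... [lines elided] ...

  // The next fragment appears to be from PhaseMacroExpand::value_from_mem(),
  // which walks the memory chain back from a safepoint to recover the field's
  // value at that point: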
      } else {
        done = true;
      }
    } else if (mem->is_ArrayCopy()) {
      done = true;
    } else if (mem->is_top()) {
      // The slice is on a dead path. Returning nullptr would lead to an
      // elimination bailout, which we want to avoid. Forwarding top is also
      // legal: IGVN will clean up and remove whatever receives it.
      return mem;
    } else {
      DEBUG_ONLY( mem->dump(); )
      assert(false, "unexpected node");
    }
  }
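  // The scan settled on a single memory node (or nullptr); translate it into
  // the value of the field at the safepoint.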
  if (mem != nullptr) {
    if (mem == start_mem || mem == alloc_mem) {
      // Hit a sentinel: the field was never written, so return the
      // appropriate zero constant.
      return _igvn.zerocon(ft);
    } else if (mem->is_Store()) {
      // The stored value may be hidden behind a GC barrier; step over it.
      Node* n = mem->in(MemNode::ValueIn);
      BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
      n = bs->step_over_gc_barrier(n);
      return n;
    } else if (mem->is_Phi()) {
      // Attempt to produce a Phi reflecting the values on the input paths of
      // the memory Phi.
      Node_Stack value_phis(8);
      Node* phi = value_from_mem_phi(mem, ft, ftype, adr_t, alloc, &value_phis, ValueSearchLimit);
      if (phi != nullptr) {
        return phi;
      } else {
        // The attempt failed: kill all Phis created along the way.
        while (value_phis.is_nonempty()) {
          Node* n = value_phis.node();
          _igvn.replace_node(n, C->top());
          value_phis.pop();
        }
      }
    } else if (mem->is_ArrayCopy()) {
      Node* ctl = mem->in(0);
      Node* m = mem->in(TypeFunc::Memory);
      if (sfpt_ctl->is_Proj() && sfpt_ctl->as_Proj()->is_uncommon_trap_proj()) {
        // Pin the loads in the uncommon trap path.
        ctl = sfpt_ctl;
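        // ... [lines elided] ...

  // The next fragment appears to be from PhaseMacroExpand::can_eliminate_allocation(),
  // which inspects every use of the allocation result 'res' to decide whether
  // the object is scalar replaceable: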
  for (DUIterator_Fast jmax, j = res->fast_outs(jmax);
       j < jmax && can_eliminate; j++) {
    Node* use = res->fast_out(j);

    if (use->is_AddP()) {
      const TypePtr* addp_type = igvn->type(use)->is_ptr();
      int offset = addp_type->offset();

      if (offset == Type::OffsetTop || offset == Type::OffsetBot) {
        NOT_PRODUCT(fail_eliminate = "Undefined field reference";)
        can_eliminate = false;
        break;
      }
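      // Check every memory use of this field address: anything other than a
      // store (or a few known-safe patterns) disqualifies the allocation.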
      for (DUIterator_Fast kmax, k = use->fast_outs(kmax);
           k < kmax && can_eliminate; k++) {
        Node* n = use->fast_out(k);
        if (n->is_Mem() && n->as_Mem()->is_mismatched_access()) {
          DEBUG_ONLY(disq_node = n;)
          NOT_PRODUCT(fail_eliminate = "Mismatched access";)
          can_eliminate = false;
        }
        // 'bs' is the BarrierSetC2 instance (defined earlier, elided here).
        if (!n->is_Store() && n->Opcode() != Op_CastP2X && !bs->is_gc_pre_barrier_node(n) && !reduce_merge_precheck) {
          DEBUG_ONLY(disq_node = n;)
          if (n->is_Load() || n->is_LoadStore()) {
            NOT_PRODUCT(fail_eliminate = "Field load";)
          } else {
            NOT_PRODUCT(fail_eliminate = "Non-store field reference";)
          }
          can_eliminate = false;
        }
      }
    } else if (use->is_ArrayCopy() &&
               (use->as_ArrayCopy()->is_clonebasic() ||
                use->as_ArrayCopy()->is_arraycopy_validated() ||
                use->as_ArrayCopy()->is_copyof_validated() ||
                use->as_ArrayCopy()->is_copyofrange_validated()) &&
               use->in(ArrayCopyNode::Dest) == res) {
      // ok to eliminate
    } else if (use->is_SafePoint()) {
      SafePointNode* sfpt = use->as_SafePoint();
      if (sfpt->is_Call() && sfpt->as_Call()->has_non_debug_use(res)) {
        // Object is passed as argument.
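        // ... [lines elided] ...

  // The final fragment appears to be from PhaseMacroExpand::eliminate_macro_nodes(),
  // inside the switch that classifies each macro node after an elimination pass: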
        break;
      case Node::Class_Lock:
      case Node::Class_Unlock:
        assert(!n->as_AbstractLock()->is_eliminated(), "sanity");
        break;
      case Node::Class_ArrayCopy:
        break;
      case Node::Class_OuterStripMinedLoop:
        break;
      case Node::Class_SubTypeCheck:
        break;
      case Node::Class_Opaque1:
        break;
      default:
        assert(n->Opcode() == Op_LoopLimit ||
               n->Opcode() == Op_ModD ||
               n->Opcode() == Op_ModF ||
               n->is_OpaqueNotNull() ||
               n->is_OpaqueInitializedAssertionPredicate() ||
               n->Opcode() == Op_MaxL ||
               n->Opcode() == Op_MinL ||
               BarrierSet::barrier_set()->barrier_set_c2()->is_gc_barrier_node(n),
               "unknown node type in macro list");
      }
      assert(success == (C->macro_count() < old_macro_count), "elimination reduces macro count");
      progress = progress || success;
      if (success) {
        C->print_method(PHASE_AFTER_MACRO_ELIMINATION_STEP, 5, n);
      }
    }
  }
#ifndef PRODUCT
  if (PrintOptoStatistics) {
    int membar_after = count_MemBar(C);
    AtomicAccess::add(&PhaseMacroExpand::_memory_barriers_removed_counter, membar_before - membar_after);
  }
#endif
}

void PhaseMacroExpand::eliminate_opaque_looplimit_macro_nodes() {
  if (C->macro_count() == 0) {
    return;
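    // ... [remainder elided] ...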