/*
 * Copyright (c) 1999, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "c1/c1_IR.hpp"
#include "c1/c1_Instruction.hpp"
#include "c1/c1_InstructionPrinter.hpp"
#include "c1/c1_ValueStack.hpp"
#include "ci/ciFlatArrayKlass.hpp"
#include "ci/ciInlineKlass.hpp"
#include "ci/ciObjArrayKlass.hpp"
#include "ci/ciTypeArrayKlass.hpp"
#include "utilities/bitMap.inline.hpp"


// Implementation of Instruction


int Instruction::dominator_depth() {
  int result = -1;
  if (block()) {
    result = block()->dominator_depth();
  }
  assert(result != -1 || this->as_Local(), "Only locals have dominator depth -1");
  return result;
}

Instruction::Condition Instruction::mirror(Condition cond) {
  switch (cond) {
    case eql: return eql;
    case neq: return neq;
    case lss: return gtr;
    case leq: return geq;
    case gtr: return lss;
    case geq: return leq;
    case aeq: return beq;
    case beq: return aeq;
  }
  ShouldNotReachHere();
  return eql;
}


Instruction::Condition Instruction::negate(Condition cond) {
  switch (cond) {
    case eql: return neq;
    case neq: return eql;
    case lss: return geq;
    case leq: return gtr;
    case gtr: return leq;
    case geq: return lss;
    case aeq: assert(false, "Above equal cannot be negated");
    case beq: assert(false, "Below equal cannot be negated");
  }
  ShouldNotReachHere();
  return eql;
}

void Instruction::update_exception_state(ValueStack* state) {
  if (state != nullptr && (state->kind() == ValueStack::EmptyExceptionState || state->kind() == ValueStack::ExceptionState)) {
    assert(state->kind() == ValueStack::EmptyExceptionState || Compilation::current()->env()->should_retain_local_variables(), "unexpected state kind");
    _exception_state = state;
  } else {
    _exception_state = nullptr;
  }
}

// Prev without need to have BlockBegin
Instruction* Instruction::prev() {
  Instruction* p = nullptr;
  Instruction* q = block();
  while (q != this) {
    assert(q != nullptr, "this is not in the block's instruction list");
    p = q; q = q->next();
  }
  return p;
}


void Instruction::state_values_do(ValueVisitor* f) {
  if (state_before() != nullptr) {
    state_before()->values_do(f);
  }
  if (exception_state() != nullptr) {
    exception_state()->values_do(f);
  }
}

ciType* Instruction::exact_type() const {
  ciType* t = declared_type();
  if (t != nullptr && t->is_klass()) {
    return t->as_klass()->exact_klass();
  }
  return nullptr;
}

ciKlass* Instruction::as_loaded_klass_or_null() const {
  ciType* type = declared_type();
  if (type != nullptr && type->is_klass()) {
    ciKlass* klass = type->as_klass();
    if (klass->is_loaded()) {
      return klass;
    }
  }
  return nullptr;
}

bool Instruction::is_loaded_flat_array() const {
  if (UseFlatArray) {
    ciType* type = declared_type();
    return type != nullptr && type->is_flat_array_klass();
  }
  return false;
}

bool Instruction::maybe_flat_array() {
  if (UseFlatArray) {
    ciType* type = declared_type();
    if (type != nullptr) {
      if (type->is_obj_array_klass()) {
        // Due to array covariance, the runtime type might be a flat array.
        ciKlass* element_klass = type->as_obj_array_klass()->element_klass();
        if (element_klass->can_be_inline_klass() && (!element_klass->is_inlinetype() || element_klass->as_inline_klass()->flat_in_array())) {
          return true;
        }
      } else if (type->is_flat_array_klass()) {
        return true;
      } else if (type->is_klass() && type->as_klass()->is_java_lang_Object()) {
        // This can happen as a parameter to System.arraycopy()
        return true;
      }
    } else {
      // Type info gets lost during Phi merging (Phi, IfOp, etc), but we might be storing into a
      // flat array, so we should do a runtime check.
      return true;
    }
  }
  return false;
}

bool Instruction::maybe_null_free_array() {
  ciType* type = declared_type();
  if (type != nullptr) {
    if (type->is_obj_array_klass()) {
      // Due to array covariance, the runtime type might be a null-free array.
      if (type->as_obj_array_klass()->can_be_inline_array_klass()) {
        return true;
      }
    }
  } else {
    // Type info gets lost during Phi merging (Phi, IfOp, etc), but we might be storing into a
    // null-free array, so we should do a runtime check.
    return true;
  }
  return false;
}

#ifndef PRODUCT
void Instruction::check_state(ValueStack* state) {
  if (state != nullptr) {
    state->verify();
  }
}


void Instruction::print() {
  InstructionPrinter ip;
  print(ip);
}


void Instruction::print_line() {
  InstructionPrinter ip;
  ip.print_line(this);
}


void Instruction::print(InstructionPrinter& ip) {
  ip.print_head();
  ip.print_line(this);
  tty->cr();
}
#endif // PRODUCT


// perform constant and interval tests on index value
bool AccessIndexed::compute_needs_range_check() {
  if (length()) {
    Constant* clength = length()->as_Constant();
    Constant* cindex = index()->as_Constant();
    if (clength && cindex) {
      IntConstant* l = clength->type()->as_IntConstant();
      IntConstant* i = cindex->type()->as_IntConstant();
      if (l && i && i->value() < l->value() && i->value() >= 0) {
        return false;
      }
    }
  }

  if (!this->check_flag(NeedsRangeCheckFlag)) {
    return false;
  }

  return true;
}


ciType* Constant::exact_type() const {
  if (type()->is_object() && type()->as_ObjectType()->is_loaded()) {
    return type()->as_ObjectType()->exact_type();
  }
  return nullptr;
}

ciType* LoadIndexed::exact_type() const {
  ciType* array_type = array()->exact_type();
  if (delayed() == nullptr && array_type != nullptr) {
    assert(array_type->is_array_klass(), "what else?");
    ciArrayKlass* ak = (ciArrayKlass*)array_type;

    if (ak->element_type()->is_instance_klass()) {
      ciInstanceKlass* ik = (ciInstanceKlass*)ak->element_type();
      if (ik->is_loaded() && ik->is_final()) {
        return ik;
      }
    }
  }
  return Instruction::exact_type();
}

ciType* LoadIndexed::declared_type() const {
  if (delayed() != nullptr) {
    return delayed()->field()->type();
  }
  ciType* array_type = array()->declared_type();
  if (array_type == nullptr || !array_type->is_loaded()) {
    return nullptr;
  }
  assert(array_type->is_array_klass(), "what else?");
  ciArrayKlass* ak = (ciArrayKlass*)array_type;
  return ak->element_type();
}

bool StoreIndexed::is_exact_flat_array_store() const {
  if (array()->is_loaded_flat_array() && value()->as_Constant() == nullptr && value()->declared_type() != nullptr) {
    ciKlass* element_klass = array()->declared_type()->as_flat_array_klass()->element_klass();
    ciKlass* actual_klass = value()->declared_type()->as_klass();

    // The following check can fail with inlining:
    //   void test45_inline(Object[] oa, Object o, int index) { oa[index] = o; }
    //   void test45(MyValue1[] va, int index, MyValue2 v) { test45_inline(va, v, index); }
    if (element_klass == actual_klass) {
      return true;
    }
  }
  return false;
}

ciType* LoadField::declared_type() const {
  return field()->type();
}


ciType* NewTypeArray::exact_type() const {
  return ciTypeArrayKlass::make(elt_type());
}

ciType* NewObjectArray::exact_type() const {
  return ciArrayKlass::make(klass());
}

ciType* NewMultiArray::exact_type() const {
  return _klass;
}

ciType* NewArray::declared_type() const {
  return exact_type();
}

ciType* NewInstance::exact_type() const {
  return klass();
}

ciType* NewInstance::declared_type() const {
  return exact_type();
}

ciType* CheckCast::declared_type() const {
  return klass();
}

// Implementation of ArithmeticOp

bool ArithmeticOp::is_commutative() const {
  switch (op()) {
    case Bytecodes::_iadd: // fall through
    case Bytecodes::_ladd: // fall through
    case Bytecodes::_fadd: // fall through
    case Bytecodes::_dadd: // fall through
    case Bytecodes::_imul: // fall through
    case Bytecodes::_lmul: // fall through
    case Bytecodes::_fmul: // fall through
    case Bytecodes::_dmul: return true;
    default              : return false;
  }
}


bool ArithmeticOp::can_trap() const {
  switch (op()) {
    case Bytecodes::_idiv: // fall through
    case Bytecodes::_ldiv: // fall through
    case Bytecodes::_irem: // fall through
    case Bytecodes::_lrem: return true;
    default              : return false;
  }
}


// Implementation of LogicOp

bool LogicOp::is_commutative() const {
#ifdef ASSERT
  switch (op()) {
    case Bytecodes::_iand: // fall through
    case Bytecodes::_land: // fall through
    case Bytecodes::_ior : // fall through
    case Bytecodes::_lor : // fall through
    case Bytecodes::_ixor: // fall through
    case Bytecodes::_lxor: break;
    default              : ShouldNotReachHere(); break;
  }
#endif
  // all LogicOps are commutative
  return true;
}


// Implementation of IfOp

bool IfOp::is_commutative() const {
  return cond() == eql || cond() == neq;
}


// Implementation of StateSplit

void StateSplit::substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block) {
  NOT_PRODUCT(bool assigned = false;)
  for (int i = 0; i < list.length(); i++) {
    BlockBegin** b = list.adr_at(i);
    if (*b == old_block) {
      *b = new_block;
      NOT_PRODUCT(assigned = true;)
    }
  }
  assert(assigned == true, "should have assigned at least once");
}


IRScope* StateSplit::scope() const {
  return _state->scope();
}


void StateSplit::state_values_do(ValueVisitor* f) {
  Instruction::state_values_do(f);
  if (state() != nullptr) state()->values_do(f);
}


void BlockBegin::state_values_do(ValueVisitor* f) {
  StateSplit::state_values_do(f);

  if (is_set(BlockBegin::exception_entry_flag)) {
    for (int i = 0; i < number_of_exception_states(); i++) {
      exception_state_at(i)->values_do(f);
    }
  }
}


StoreField::StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
                       ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  , _enclosing_field(nullptr)
{
  set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif
  pin();
}

StoreIndexed::StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value,
                           ValueStack* state_before, bool check_boolean, bool mismatched)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _value(value), _check_boolean(check_boolean)
{
  set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
  set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif
  pin();
}


// Implementation of Invoke


Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
               ciMethod* target, ValueStack* state_before)
  : StateSplit(result_type, state_before)
  , _code(code)
  , _recv(recv)
  , _args(args)
  , _target(target)
{
  set_flag(TargetIsLoadedFlag, target->is_loaded());
  set_flag(TargetIsFinalFlag, target_is_loaded() && target->is_final_method());

  assert(args != nullptr, "args must exist");
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif

  // provide an initial guess of signature size.
  _signature = new BasicTypeList(number_of_arguments() + (has_receiver() ? 1 : 0));
  if (has_receiver()) {
    _signature->append(as_BasicType(receiver()->type()));
  }
  for (int i = 0; i < number_of_arguments(); i++) {
    Value v = argument_at(i);
    ValueType* t = v->type();
    BasicType bt = as_BasicType(t);
    _signature->append(bt);
  }
}


void Invoke::state_values_do(ValueVisitor* f) {
  StateSplit::state_values_do(f);
  if (state_before() != nullptr) state_before()->values_do(f);
  if (state() != nullptr) state()->values_do(f);
}

ciType* Invoke::declared_type() const {
  ciSignature* declared_signature = state()->scope()->method()->get_declared_signature_at_bci(state()->bci());
  ciType* t = declared_signature->return_type();
  assert(t->basic_type() != T_VOID, "need return value of void method?");
  return t;
}

// Implementation of Constant
intx Constant::hash() const {
  if (state_before() == nullptr) {
    switch (type()->tag()) {
      case intTag:
        return HASH2(name(), type()->as_IntConstant()->value());
      case addressTag:
        return HASH2(name(), type()->as_AddressConstant()->value());
      case longTag:
        {
          jlong temp = type()->as_LongConstant()->value();
          return HASH3(name(), high(temp), low(temp));
        }
      case floatTag:
        return HASH2(name(), jint_cast(type()->as_FloatConstant()->value()));
      case doubleTag:
        {
          jlong temp = jlong_cast(type()->as_DoubleConstant()->value());
          return HASH3(name(), high(temp), low(temp));
        }
      case objectTag:
        assert(type()->as_ObjectType()->is_loaded(), "can't handle unloaded values");
        return HASH2(name(), type()->as_ObjectType()->constant_value());
      case metaDataTag:
        assert(type()->as_MetadataType()->is_loaded(), "can't handle unloaded values");
        return HASH2(name(), type()->as_MetadataType()->constant_value());
      default:
        ShouldNotReachHere();
    }
  }
  return 0;
}

bool Constant::is_equal(Value v) const {
  if (v->as_Constant() == nullptr) return false;

  switch (type()->tag()) {
    case intTag:
      {
        IntConstant* t1 = type()->as_IntConstant();
        IntConstant* t2 = v->type()->as_IntConstant();
        return (t1 != nullptr && t2 != nullptr &&
                t1->value() == t2->value());
      }
    case longTag:
      {
        LongConstant* t1 = type()->as_LongConstant();
        LongConstant* t2 = v->type()->as_LongConstant();
        return (t1 != nullptr && t2 != nullptr &&
                t1->value() == t2->value());
      }
    case floatTag:
      {
        FloatConstant* t1 = type()->as_FloatConstant();
        FloatConstant* t2 = v->type()->as_FloatConstant();
        return (t1 != nullptr && t2 != nullptr &&
                jint_cast(t1->value()) == jint_cast(t2->value()));
      }
    case doubleTag:
      {
        DoubleConstant* t1 = type()->as_DoubleConstant();
        DoubleConstant* t2 = v->type()->as_DoubleConstant();
        return (t1 != nullptr && t2 != nullptr &&
                jlong_cast(t1->value()) == jlong_cast(t2->value()));
      }
    case objectTag:
      {
        ObjectType* t1 = type()->as_ObjectType();
        ObjectType* t2 = v->type()->as_ObjectType();
        return (t1 != nullptr && t2 != nullptr &&
                t1->is_loaded() && t2->is_loaded() &&
                t1->constant_value() == t2->constant_value());
      }
    case metaDataTag:
      {
        MetadataType* t1 = type()->as_MetadataType();
        MetadataType* t2 = v->type()->as_MetadataType();
        return (t1 != nullptr && t2 != nullptr &&
                t1->is_loaded() && t2->is_loaded() &&
                t1->constant_value() == t2->constant_value());
      }
    default:
      return false;
  }
}

Constant::CompareResult Constant::compare(Instruction::Condition cond, Value right) const {
  Constant* rc = right->as_Constant();
  // other is not a constant
  if (rc == nullptr) return not_comparable;

  ValueType* lt = type();
  ValueType* rt = rc->type();
  // different types
  if (lt->base() != rt->base()) return not_comparable;
  switch (lt->tag()) {
    case intTag: {
      int x = lt->as_IntConstant()->value();
      int y = rt->as_IntConstant()->value();
      switch (cond) {
        case If::eql: return x == y ? cond_true : cond_false;
        case If::neq: return x != y ? cond_true : cond_false;
        case If::lss: return x <  y ? cond_true : cond_false;
        case If::leq: return x <= y ? cond_true : cond_false;
        case If::gtr: return x >  y ? cond_true : cond_false;
        case If::geq: return x >= y ? cond_true : cond_false;
        default     : break;
      }
      break;
    }
    case longTag: {
      jlong x = lt->as_LongConstant()->value();
      jlong y = rt->as_LongConstant()->value();
      switch (cond) {
        case If::eql: return x == y ? cond_true : cond_false;
        case If::neq: return x != y ? cond_true : cond_false;
        case If::lss: return x <  y ? cond_true : cond_false;
        case If::leq: return x <= y ? cond_true : cond_false;
        case If::gtr: return x >  y ? cond_true : cond_false;
        case If::geq: return x >= y ? cond_true : cond_false;
        default     : break;
      }
      break;
    }
    case objectTag: {
      ciObject* xvalue = lt->as_ObjectType()->constant_value();
      ciObject* yvalue = rt->as_ObjectType()->constant_value();
      assert(xvalue != nullptr && yvalue != nullptr, "not constants");
      if (xvalue->is_loaded() && yvalue->is_loaded()) {
        switch (cond) {
          case If::eql: return xvalue == yvalue ? cond_true : cond_false;
          case If::neq: return xvalue != yvalue ? cond_true : cond_false;
          default     : break;
        }
      }
      break;
    }
    case metaDataTag: {
      ciMetadata* xvalue = lt->as_MetadataType()->constant_value();
      ciMetadata* yvalue = rt->as_MetadataType()->constant_value();
      assert(xvalue != nullptr && yvalue != nullptr, "not constants");
      if (xvalue->is_loaded() && yvalue->is_loaded()) {
        switch (cond) {
          case If::eql: return xvalue == yvalue ? cond_true : cond_false;
          case If::neq: return xvalue != yvalue ? cond_true : cond_false;
          default     : break;
        }
      }
      break;
    }
    default:
      break;
  }
  return not_comparable;
}


// Implementation of BlockBegin

void BlockBegin::set_end(BlockEnd* new_end) { // Assumes that no predecessor of new_end still has it as its successor
  assert(new_end != nullptr, "Should not reset block new_end to null");
  if (new_end == _end) return;

  // Remove this block as predecessor of its current successors
  if (_end != nullptr) {
    for (int i = 0; i < number_of_sux(); i++) {
      sux_at(i)->remove_predecessor(this);
    }
  }

  _end = new_end;

  // Add this block as predecessor of its new successors
  for (int i = 0; i < number_of_sux(); i++) {
    sux_at(i)->add_predecessor(this);
  }
}


void BlockBegin::disconnect_edge(BlockBegin* from, BlockBegin* to) {
  // disconnect any edges between from and to
#ifndef PRODUCT
  if (PrintIR && Verbose) {
    tty->print_cr("Disconnected edge B%d -> B%d", from->block_id(), to->block_id());
  }
#endif
  for (int s = 0; s < from->number_of_sux();) {
    BlockBegin* sux = from->sux_at(s);
    if (sux == to) {
      int index = sux->_predecessors.find(from);
      if (index >= 0) {
        sux->_predecessors.remove_at(index);
      }
      from->end()->remove_sux_at(s);
    } else {
      s++;
    }
  }
}


void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
  // modify predecessors before substituting successors
  for (int i = 0; i < number_of_sux(); i++) {
    if (sux_at(i) == old_sux) {
      // remove old predecessor before adding new predecessor
      // otherwise there is a dead predecessor in the list
      new_sux->remove_predecessor(old_sux);
      new_sux->add_predecessor(this);
    }
  }
  old_sux->remove_predecessor(this);
  end()->substitute_sux(old_sux, new_sux);
}



// In general it is not possible to calculate a value for the field "depth_first_number"
// of the inserted block without recomputing the values of the other blocks
// in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless.
BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) {
  assert(!sux->is_set(critical_edge_split_flag), "sanity check");

  int bci = sux->bci();
  // critical edge splitting may introduce a goto after an if, and array
  // bound check elimination may insert a predicate between the if and
  // goto. The bci of the goto can't be the one of the if, otherwise
  // the state and bci are inconsistent and a deoptimization triggered
  // by the predicate would lead to incorrect execution/a crash.
  BlockBegin* new_sux = new BlockBegin(bci);

  // mark this block (special treatment when block order is computed)
  new_sux->set(critical_edge_split_flag);

  // This goto is not a safepoint.
  Goto* e = new Goto(sux, false);
  new_sux->set_next(e, bci);
  new_sux->set_end(e);
  // setup states
  ValueStack* s = end()->state();
  new_sux->set_state(s->copy(s->kind(), bci));
  e->set_state(s->copy(s->kind(), bci));
  assert(new_sux->state()->locals_size() == s->locals_size(), "local size mismatch!");
  assert(new_sux->state()->stack_size() == s->stack_size(), "stack size mismatch!");
  assert(new_sux->state()->locks_size() == s->locks_size(), "locks size mismatch!");

  // link predecessor to new block
  end()->substitute_sux(sux, new_sux);

  // The ordering needs to be the same, so remove the link that the
  // set_end call above added and substitute the new_sux for this
  // block.
  sux->remove_predecessor(new_sux);

  // the successor could be the target of a switch so it might have
  // multiple copies of this predecessor, so substitute the new_sux
  // for the first and delete the rest.
  bool assigned = false;
  BlockList& list = sux->_predecessors;
  for (int i = 0; i < list.length(); i++) {
    BlockBegin** b = list.adr_at(i);
    if (*b == this) {
      if (assigned) {
        list.remove_at(i);
        // reprocess this index
        i--;
      } else {
        assigned = true;
        *b = new_sux;
      }
      // link the new block back to its predecessors.
      new_sux->add_predecessor(this);
    }
  }
  assert(assigned == true, "should have assigned at least once");
  return new_sux;
}


void BlockBegin::add_predecessor(BlockBegin* pred) {
  _predecessors.append(pred);
}


void BlockBegin::remove_predecessor(BlockBegin* pred) {
  int idx;
  while ((idx = _predecessors.find(pred)) >= 0) {
    _predecessors.remove_at(idx);
  }
}


void BlockBegin::add_exception_handler(BlockBegin* b) {
  assert(b != nullptr && (b->is_set(exception_entry_flag)), "exception handler must exist");
  // add only if not in the list already
  if (!_exception_handlers.contains(b)) _exception_handlers.append(b);
}

int BlockBegin::add_exception_state(ValueStack* state) {
  assert(is_set(exception_entry_flag), "only for xhandlers");
  if (_exception_states == nullptr) {
    _exception_states = new ValueStackStack(4);
  }
  _exception_states->append(state);
  return _exception_states->length() - 1;
}


void BlockBegin::iterate_preorder(boolArray& mark, BlockClosure* closure) {
  if (!mark.at(block_id())) {
    mark.at_put(block_id(), true);
    closure->block_do(this);
    BlockEnd* e = end(); // must do this after block_do because block_do may change it!
    { for (int i = number_of_exception_handlers() - 1; i >= 0; i--) exception_handler_at(i)->iterate_preorder(mark, closure); }
    { for (int i = e->number_of_sux() - 1; i >= 0; i--) e->sux_at(i)->iterate_preorder(mark, closure); }
  }
}


void BlockBegin::iterate_postorder(boolArray& mark, BlockClosure* closure) {
  if (!mark.at(block_id())) {
    mark.at_put(block_id(), true);
    BlockEnd* e = end();
    { for (int i = number_of_exception_handlers() - 1; i >= 0; i--) exception_handler_at(i)->iterate_postorder(mark, closure); }
    { for (int i = e->number_of_sux() - 1; i >= 0; i--) e->sux_at(i)->iterate_postorder(mark, closure); }
    closure->block_do(this);
  }
}


void BlockBegin::iterate_preorder(BlockClosure* closure) {
  int mark_len = number_of_blocks();
  boolArray mark(mark_len, mark_len, false);
  iterate_preorder(mark, closure);
}


void BlockBegin::iterate_postorder(BlockClosure* closure) {
  int mark_len = number_of_blocks();
  boolArray mark(mark_len, mark_len, false);
  iterate_postorder(mark, closure);
}


void BlockBegin::block_values_do(ValueVisitor* f) {
  for (Instruction* n = this; n != nullptr; n = n->next()) n->values_do(f);
}


#ifndef PRODUCT
#define TRACE_PHI(code) if (PrintPhiFunctions) { code; }
#else
#define TRACE_PHI(code)
#endif


bool BlockBegin::try_merge(ValueStack* new_state, bool has_irreducible_loops) {
  TRACE_PHI(tty->print_cr("********** try_merge for block B%d", block_id()));

  // local variables used for state iteration
  int index;
  Value new_value, existing_value;

  ValueStack* existing_state = state();
  if (existing_state == nullptr) {
    TRACE_PHI(tty->print_cr("first call of try_merge for this block"));

    if (is_set(BlockBegin::was_visited_flag)) {
      // this actually happens for complicated jsr/ret structures
      return false; // BAILOUT in caller
    }

    // copy state because it is altered
    new_state = new_state->copy(ValueStack::BlockBeginState, bci());

    // Use method liveness to invalidate dead locals
    MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci());
    if (liveness.is_valid()) {
      assert((int)liveness.size() == new_state->locals_size(), "error in use of liveness");

      for_each_local_value(new_state, index, new_value) {
        if (!liveness.at(index) || new_value->type()->is_illegal()) {
          new_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating dead local %d", index));
        }
      }
    }

    if (is_set(BlockBegin::parser_loop_header_flag)) {
      TRACE_PHI(tty->print_cr("loop header block, initializing phi functions"));

      for_each_stack_value(new_state, index, new_value) {
        new_state->setup_phi_for_stack(this, index);
        TRACE_PHI(tty->print_cr("creating phi-function %c%d for stack %d", new_state->stack_at(index)->type()->tchar(), new_state->stack_at(index)->id(), index));
      }

      BitMap& requires_phi_function = new_state->scope()->requires_phi_function();
      for_each_local_value(new_state, index, new_value) {
        bool requires_phi = requires_phi_function.at(index) || (new_value->type()->is_double_word() && requires_phi_function.at(index + 1));
        if (requires_phi || !SelectivePhiFunctions || has_irreducible_loops) {
          new_state->setup_phi_for_local(this, index);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for local %d", new_state->local_at(index)->type()->tchar(),
                                  new_state->local_at(index)->id(), index));
        }
      }
    }

    // initialize state of block
    set_state(new_state);

  } else if (existing_state->is_same(new_state)) {
    TRACE_PHI(tty->print_cr("existing state found"));

    assert(existing_state->scope() == new_state->scope(), "not matching");
    assert(existing_state->locals_size() == new_state->locals_size(), "not matching");
    assert(existing_state->stack_size() == new_state->stack_size(), "not matching");

    if (is_set(BlockBegin::was_visited_flag)) {
      TRACE_PHI(tty->print_cr("loop header block, phis must be present"));

      if (!is_set(BlockBegin::parser_loop_header_flag)) {
        // this actually happens for complicated jsr/ret structures
        return false; // BAILOUT in caller
      }

      for_each_local_value(existing_state, index, existing_value) {
        Value new_value = new_state->local_at(index);
        if (new_value == nullptr || new_value->type()->tag() != existing_value->type()->tag()) {
          Phi* existing_phi = existing_value->as_Phi();
          if (existing_phi == nullptr) {
            return false; // BAILOUT in caller
          }
          // Invalidate the phi function here. This case is very rare except for
          // JVMTI capability "can_access_local_variables".
          // In really rare cases we will bail out in LIRGenerator::move_to_phi.
          existing_phi->make_illegal();
          existing_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating local %d because of type mismatch", index));
        }

        if (existing_value != new_state->local_at(index) && existing_value->as_Phi() == nullptr) {
          TRACE_PHI(tty->print_cr("required phi for local %d is missing, irreducible loop?", index));
          return false; // BAILOUT in caller
        }
      }

#ifdef ASSERT
      // check that all necessary phi functions are present
      for_each_stack_value(existing_state, index, existing_value) {
        assert(existing_value->as_Phi() != nullptr && existing_value->as_Phi()->block() == this, "phi function required");
      }
      for_each_local_value(existing_state, index, existing_value) {
        assert(existing_value == new_state->local_at(index) || (existing_value->as_Phi() != nullptr && existing_value->as_Phi()->as_Phi()->block() == this), "phi function required");
      }
#endif

    } else {
      TRACE_PHI(tty->print_cr("creating phi functions on demand"));

      // create necessary phi functions for stack
      for_each_stack_value(existing_state, index, existing_value) {
        Value new_value = new_state->stack_at(index);
        Phi* existing_phi = existing_value->as_Phi();

        if (new_value != existing_value && (existing_phi == nullptr || existing_phi->block() != this)) {
          existing_state->setup_phi_for_stack(this, index);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for stack %d", existing_state->stack_at(index)->type()->tchar(), existing_state->stack_at(index)->id(), index));
        }
      }

      // create necessary phi functions for locals
      for_each_local_value(existing_state, index, existing_value) {
        Value new_value = new_state->local_at(index);
        Phi* existing_phi = existing_value->as_Phi();

        if (new_value == nullptr || new_value->type()->tag() != existing_value->type()->tag()) {
          existing_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating local %d because of type mismatch", index));
        } else if (new_value != existing_value && (existing_phi == nullptr || existing_phi->block() != this)) {
          existing_state->setup_phi_for_local(this, index);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for local %d", existing_state->local_at(index)->type()->tchar(), existing_state->local_at(index)->id(), index));
        }
      }
    }

    assert(existing_state->caller_state() == new_state->caller_state(), "caller states must be equal");

  } else {
    assert(false, "stack or locks not matching (invalid bytecodes)");
    return false;
  }

  TRACE_PHI(tty->print_cr("********** try_merge for block B%d successful", block_id()));

  return true;
}


#ifndef PRODUCT
void BlockBegin::print_block() {
  InstructionPrinter ip;
  print_block(ip, false);
}


void BlockBegin::print_block(InstructionPrinter& ip, bool live_only) {
  ip.print_instr(this); tty->cr();
  ip.print_stack(this->state()); tty->cr();
  ip.print_inline_level(this);
  ip.print_head();
  for (Instruction* n = next(); n != nullptr; n = n->next()) {
    if (!live_only || n->is_pinned() || n->use_count() > 0) {
      ip.print_line(n);
    }
  }
  tty->cr();
}
#endif // PRODUCT


// Implementation of BlockList

void BlockList::iterate_forward (BlockClosure* closure) {
  const int l = length();
  for (int i = 0; i < l; i++) closure->block_do(at(i));
}


void BlockList::iterate_backward(BlockClosure* closure) {
  for (int i = length() - 1; i >= 0; i--) closure->block_do(at(i));
}


void BlockList::values_do(ValueVisitor* f) {
  for (int i = length() - 1; i >= 0; i--) at(i)->block_values_do(f);
}


#ifndef PRODUCT
void BlockList::print(bool cfg_only, bool live_only) {
  InstructionPrinter ip;
  for (int i = 0; i < length(); i++) {
    BlockBegin* block = at(i);
    if (cfg_only) {
      ip.print_instr(block); tty->cr();
    } else {
      block->print_block(ip, live_only);
    }
  }
}
#endif // PRODUCT


// Implementation of BlockEnd

void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
  substitute(*_sux, old_sux, new_sux);
}

// Implementation of Phi

// Normal phi functions take their operands from the last instruction of the
// predecessor. Special handling is needed for xhandler entries because there
// the state of arbitrary instructions is needed.
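// For an exception handler entry block the i-th operand is therefore taken
// from the i-th recorded exception state, whereas for a normal block it is
// taken from the end state of the i-th predecessor (see operand_at() and
// operand_count() below).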

Value Phi::operand_at(int i) const {
  ValueStack* state;
  if (_block->is_set(BlockBegin::exception_entry_flag)) {
    state = _block->exception_state_at(i);
  } else {
    state = _block->pred_at(i)->end()->state();
  }
  assert(state != nullptr, "");

  if (is_local()) {
    return state->local_at(local_index());
  } else {
    return state->stack_at(stack_index());
  }
}


int Phi::operand_count() const {
  if (_block->is_set(BlockBegin::exception_entry_flag)) {
    return _block->number_of_exception_states();
  } else {
    return _block->number_of_preds();
  }
}

#ifdef ASSERT
// Constructor of Assert
Assert::Assert(Value x, Condition cond, bool unordered_is_true, Value y) : Instruction(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
{
  set_flag(UnorderedIsTrueFlag, unordered_is_true);
  assert(x->type()->tag() == y->type()->tag(), "types must match");
  pin();

  stringStream strStream;
  Compilation::current()->method()->print_name(&strStream);

  stringStream strStream1;
  InstructionPrinter ip1(1, &strStream1);
  ip1.print_instr(x);

  stringStream strStream2;
  InstructionPrinter ip2(1, &strStream2);
  ip2.print_instr(y);

  stringStream ss;
  ss.print("Assertion %s %s %s in method %s", strStream1.freeze(), ip2.cond_name(cond), strStream2.freeze(), strStream.freeze());

  _message = ss.as_string();
}
#endif

void RangeCheckPredicate::check_state() {
  assert(state()->kind() != ValueStack::EmptyExceptionState && state()->kind() != ValueStack::ExceptionState, "will deopt with empty state");
}

void ProfileInvoke::state_values_do(ValueVisitor* f) {
  if (state() != nullptr) state()->values_do(f);
}