/*
 * Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "c1/c1_Instruction.hpp"
#include "c1/c1_InstructionPrinter.hpp"
#include "c1/c1_IR.hpp"
#include "c1/c1_ValueStack.hpp"
#include "ci/ciFlatArrayKlass.hpp"
#include "ci/ciInlineKlass.hpp"
#include "ci/ciObjArrayKlass.hpp"
#include "ci/ciTypeArrayKlass.hpp"
#include "utilities/bitMap.inline.hpp"


// Implementation of Instruction


int Instruction::dominator_depth() {
  int result = -1;
  if (block()) {
    result = block()->dominator_depth();
  }
  assert(result != -1 || this->as_Local(), "Only locals have dominator depth -1");
  return result;
}

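// Returns the condition to use when the operands of a comparison are swapped:
// for example, "x < y" is equivalent to "y > x", so mirror(lss) == gtr.
// Equality and inequality are their own mirrors.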
Instruction::Condition Instruction::mirror(Condition cond) {
  switch (cond) {
    case eql: return eql;
    case neq: return neq;
    case lss: return gtr;
    case leq: return geq;
    case gtr: return lss;
    case geq: return leq;
    case aeq: return beq;
    case beq: return aeq;
  }
  ShouldNotReachHere();
  return eql;
}

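// Returns the logical complement of a condition: for example, !(x < y) is
// equivalent to x >= y, so negate(lss) == geq. The unsigned conditions
// aeq/beq have no complement here and are rejected in debug builds.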
Instruction::Condition Instruction::negate(Condition cond) {
  switch (cond) {
    case eql: return neq;
    case neq: return eql;
    case lss: return geq;
    case leq: return gtr;
    case gtr: return leq;
    case geq: return lss;
    case aeq: assert(false, "Above equal cannot be negated");
    case beq: assert(false, "Below equal cannot be negated");
  }
  ShouldNotReachHere();
  return eql;
}

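// Remembers the given state as this instruction's exception state, but only
// if it is an (empty) exception state; any other kind of state is dropped.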
void Instruction::update_exception_state(ValueStack* state) {
  if (state != nullptr && (state->kind() == ValueStack::EmptyExceptionState || state->kind() == ValueStack::ExceptionState)) {
    assert(state->kind() == ValueStack::EmptyExceptionState || Compilation::current()->env()->should_retain_local_variables(), "unexpected state kind");
    _exception_state = state;
  } else {
    _exception_state = nullptr;
  }
}

// Returns the previous instruction by walking forward from the BlockBegin,
// since instructions carry no prev link.
Instruction* Instruction::prev() {
  Instruction* p = nullptr;
  Instruction* q = block();
  while (q != this) {
    assert(q != nullptr, "this is not in the block's instruction list");
    p = q; q = q->next();
  }
  return p;
}


void Instruction::state_values_do(ValueVisitor* f) {
  if (state_before() != nullptr) {
    state_before()->values_do(f);
  }
  if (exception_state() != nullptr) {
    exception_state()->values_do(f);
  }
}

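// Returns the unique concrete klass implied by the declared type (e.g. a
// final class), or null if the runtime type cannot be determined exactly.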
ciType* Instruction::exact_type() const {
  ciType* t = declared_type();
  if (t != nullptr && t->is_klass()) {
    return t->as_klass()->exact_klass();
  }
  return nullptr;
}

ciKlass* Instruction::as_loaded_klass_or_null() const {
  ciType* type = declared_type();
  if (type != nullptr && type->is_klass()) {
    ciKlass* klass = type->as_klass();
    if (klass->is_loaded()) {
      return klass;
    }
  }
  return nullptr;
}

bool Instruction::is_loaded_flat_array() const {
  if (UseArrayFlattening) {
    ciType* type = declared_type();
    return type != nullptr && type->is_flat_array_klass();
  }
  return false;
}

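// Conservatively answers whether this value might be a flat array at runtime.
// Only a declared type that rules out flattening (e.g. a reference array)
// returns false; missing type information (e.g. after Phi merging) returns true.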
bool Instruction::maybe_flat_array() {
  if (UseArrayFlattening) {
    ciType* type = declared_type();
    if (type != nullptr) {
      if (type->is_ref_array_klass()) {
        return false;
      } else if (type->is_flat_array_klass()) {
        return true;
      } else if (type->is_obj_array_klass()) {
        // This is the unrefined array type
        ciKlass* element_klass = type->as_obj_array_klass()->element_klass();
        if (element_klass->can_be_inline_klass() && (!element_klass->is_inlinetype() || element_klass->as_inline_klass()->maybe_flat_in_array())) {
          return true;
        }
      } else if (type->is_klass() && type->as_klass()->is_java_lang_Object()) {
        // This can happen as a parameter to System.arraycopy()
        return true;
      }
    } else {
      // Type info gets lost during Phi merging (Phi, IfOp, etc), but we might be storing into a
      // flat array, so we should do a runtime check.
      return true;
    }
  }
  return false;
}

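// Conservatively answers whether this value might be a null-free array at
// runtime; without type information we must assume that it could be.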
bool Instruction::maybe_null_free_array() {
  ciType* type = declared_type();
  if (type != nullptr) {
    if (type->is_obj_array_klass()) {
      // Due to array covariance, the runtime type might be a null-free array.
      if (type->as_obj_array_klass()->can_be_inline_array_klass()) {
        return true;
      }
    }
  } else {
    // Type info gets lost during Phi merging (Phi, IfOp, etc), but we might be storing into a
    // null-free array, so we should do a runtime check.
    return true;
  }
  return false;
}

#ifndef PRODUCT
void Instruction::check_state(ValueStack* state) {
  if (state != nullptr) {
    state->verify();
  }
}


void Instruction::print() {
  InstructionPrinter ip;
  print(ip);
}


void Instruction::print_line() {
  InstructionPrinter ip;
  ip.print_line(this);
}


void Instruction::print(InstructionPrinter& ip) {
  ip.print_head();
  ip.print_line(this);
  tty->cr();
}
#endif // PRODUCT


// perform constant and interval tests on index value
bool AccessIndexed::compute_needs_range_check() {
  if (length()) {
    Constant* clength = length()->as_Constant();
    Constant* cindex = index()->as_Constant();
    if (clength && cindex) {
      IntConstant* l = clength->type()->as_IntConstant();
      IntConstant* i = cindex->type()->as_IntConstant();
      if (l && i && i->value() < l->value() && i->value() >= 0) {
        return false;
      }
    }
  }

  if (!this->check_flag(NeedsRangeCheckFlag)) {
    return false;
  }

  return true;
}


ciType* Constant::exact_type() const {
  if (type()->is_object() && type()->as_ObjectType()->is_loaded()) {
    return type()->as_ObjectType()->exact_type();
  }
  return nullptr;
}

ciType* LoadIndexed::exact_type() const {
  ciType* array_type = array()->exact_type();
  if (delayed() == nullptr && array_type != nullptr) {
    assert(array_type->is_array_klass(), "what else?");
    ciArrayKlass* ak = (ciArrayKlass*)array_type;

    if (ak->element_type()->is_instance_klass()) {
      ciInstanceKlass* ik = (ciInstanceKlass*)ak->element_type();
      if (ik->is_loaded() && ik->is_final()) {
        return ik;
      }
    }
  }
  return Instruction::exact_type();
}

ciType* LoadIndexed::declared_type() const {
  if (delayed() != nullptr) {
    return delayed()->field()->type();
  }
  ciType* array_type = array()->declared_type();
  if (array_type == nullptr || !array_type->is_loaded()) {
    return nullptr;
  }
  assert(array_type->is_array_klass(), "what else?");
  ciArrayKlass* ak = (ciArrayKlass*)array_type;
  return ak->element_type();
}

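// Returns true if the array is known to be flat and the declared klass of the
// (non-constant) value exactly matches the array's element klass.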
bool StoreIndexed::is_exact_flat_array_store() const {
  if (array()->is_loaded_flat_array() && value()->as_Constant() == nullptr && value()->declared_type() != nullptr) {
    ciKlass* element_klass = array()->declared_type()->as_flat_array_klass()->element_klass();
    ciKlass* actual_klass = value()->declared_type()->as_klass();

    // The following check can fail with inlining:
    // void test45_inline(Object[] oa, Object o, int index) { oa[index] = o; }
    // void test45(MyValue1[] va, int index, MyValue2 v) { test45_inline(va, v, index); }
    if (element_klass == actual_klass) {
      return true;
    }
  }
  return false;
}

ciType* LoadField::declared_type() const {
  return field()->type();
}


ciType* NewTypeArray::exact_type() const {
  return ciTypeArrayKlass::make(elt_type());
}

ciType* NewObjectArray::exact_type() const {
  return ciArrayKlass::make(klass());
}

ciType* NewMultiArray::exact_type() const {
  return _klass;
}

ciType* NewArray::declared_type() const {
  return exact_type();
}

ciType* NewInstance::exact_type() const {
  return klass();
}

ciType* NewInstance::declared_type() const {
  return exact_type();
}

ciType* CheckCast::declared_type() const {
  return klass();
}

// Implementation of ArithmeticOp

bool ArithmeticOp::is_commutative() const {
  switch (op()) {
    case Bytecodes::_iadd: // fall through
    case Bytecodes::_ladd: // fall through
    case Bytecodes::_fadd: // fall through
    case Bytecodes::_dadd: // fall through
    case Bytecodes::_imul: // fall through
    case Bytecodes::_lmul: // fall through
    case Bytecodes::_fmul: // fall through
    case Bytecodes::_dmul: return true;
    default : return false;
  }
}


bool ArithmeticOp::can_trap() const {
  switch (op()) {
    case Bytecodes::_idiv: // fall through
    case Bytecodes::_ldiv: // fall through
    case Bytecodes::_irem: // fall through
    case Bytecodes::_lrem: return true;
    default : return false;
  }
}


// Implementation of LogicOp

bool LogicOp::is_commutative() const {
#ifdef ASSERT
  switch (op()) {
    case Bytecodes::_iand: // fall through
    case Bytecodes::_land: // fall through
    case Bytecodes::_ior : // fall through
    case Bytecodes::_lor : // fall through
    case Bytecodes::_ixor: // fall through
    case Bytecodes::_lxor: break;
    default : ShouldNotReachHere(); break;
  }
#endif
  // all LogicOps are commutative
  return true;
}


// Implementation of IfOp

bool IfOp::is_commutative() const {
  return cond() == eql || cond() == neq;
}


// Implementation of StateSplit

void StateSplit::substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block) {
  NOT_PRODUCT(bool assigned = false;)
  for (int i = 0; i < list.length(); i++) {
    BlockBegin** b = list.adr_at(i);
    if (*b == old_block) {
      *b = new_block;
      NOT_PRODUCT(assigned = true;)
    }
  }
  assert(assigned == true, "should have assigned at least once");
}


IRScope* StateSplit::scope() const {
  return _state->scope();
}


void StateSplit::state_values_do(ValueVisitor* f) {
  Instruction::state_values_do(f);
  if (state() != nullptr) state()->values_do(f);
}


void BlockBegin::state_values_do(ValueVisitor* f) {
  StateSplit::state_values_do(f);

  if (is_set(BlockBegin::exception_entry_flag)) {
    for (int i = 0; i < number_of_exception_states(); i++) {
      exception_state_at(i)->values_do(f);
    }
  }
}


StoreField::StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
                       ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  , _enclosing_field(nullptr)
{
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif
  pin();
}

StoreIndexed::StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value,
                           ValueStack* state_before, bool check_boolean, bool mismatched)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _value(value), _check_boolean(check_boolean)
{
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif
  pin();
}


// Implementation of Invoke


Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
               ciMethod* target, ValueStack* state_before)
  : StateSplit(result_type, state_before)
  , _code(code)
  , _recv(recv)
  , _args(args)
  , _target(target)
{
  set_flag(TargetIsLoadedFlag, target->is_loaded());
  set_flag(TargetIsFinalFlag, target_is_loaded() && target->is_final_method());

  assert(args != nullptr, "args must exist");
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif

  // provide an initial guess of signature size.
  _signature = new BasicTypeList(number_of_arguments() + (has_receiver() ? 1 : 0));
  if (has_receiver()) {
    _signature->append(as_BasicType(receiver()->type()));
  }
  for (int i = 0; i < number_of_arguments(); i++) {
    Value v = argument_at(i);
    ValueType* t = v->type();
    BasicType bt = as_BasicType(t);
    _signature->append(bt);
  }
}


void Invoke::state_values_do(ValueVisitor* f) {
  StateSplit::state_values_do(f);
  if (state_before() != nullptr) state_before()->values_do(f);
  if (state() != nullptr) state()->values_do(f);
}

ciType* Invoke::declared_type() const {
  ciSignature* declared_signature = state()->scope()->method()->get_declared_signature_at_bci(state()->bci());
  ciType* t = declared_signature->return_type();
  assert(t->basic_type() != T_VOID, "need return value of void method?");
  return t;
}

// Implementation of Constant
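// Hash used by value numbering; a constant that carries a state_before
// returns 0 and is thereby excluded from value numbering.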
intx Constant::hash() const {
  if (state_before() == nullptr) {
    switch (type()->tag()) {
      case intTag:
        return HASH2(name(), type()->as_IntConstant()->value());
      case addressTag:
        return HASH2(name(), type()->as_AddressConstant()->value());
      case longTag:
        {
          jlong temp = type()->as_LongConstant()->value();
          return HASH3(name(), high(temp), low(temp));
        }
      case floatTag:
        return HASH2(name(), jint_cast(type()->as_FloatConstant()->value()));
      case doubleTag:
        {
          jlong temp = jlong_cast(type()->as_DoubleConstant()->value());
          return HASH3(name(), high(temp), low(temp));
        }
      case objectTag:
        assert(type()->as_ObjectType()->is_loaded(), "can't handle unloaded values");
        return HASH2(name(), type()->as_ObjectType()->constant_value());
      case metaDataTag:
        assert(type()->as_MetadataType()->is_loaded(), "can't handle unloaded values");
        return HASH2(name(), type()->as_MetadataType()->constant_value());
      default:
        ShouldNotReachHere();
    }
  }
  return 0;
}

bool Constant::is_equal(Value v) const {
  if (v->as_Constant() == nullptr) return false;

  switch (type()->tag()) {
    case intTag:
      {
        IntConstant* t1 = type()->as_IntConstant();
        IntConstant* t2 = v->type()->as_IntConstant();
        return (t1 != nullptr && t2 != nullptr &&
                t1->value() == t2->value());
      }
    case longTag:
      {
        LongConstant* t1 = type()->as_LongConstant();
        LongConstant* t2 = v->type()->as_LongConstant();
        return (t1 != nullptr && t2 != nullptr &&
                t1->value() == t2->value());
      }
    case floatTag:
      {
        FloatConstant* t1 = type()->as_FloatConstant();
        FloatConstant* t2 = v->type()->as_FloatConstant();
        return (t1 != nullptr && t2 != nullptr &&
                jint_cast(t1->value()) == jint_cast(t2->value()));
      }
    case doubleTag:
      {
        DoubleConstant* t1 = type()->as_DoubleConstant();
        DoubleConstant* t2 = v->type()->as_DoubleConstant();
        return (t1 != nullptr && t2 != nullptr &&
                jlong_cast(t1->value()) == jlong_cast(t2->value()));
      }
    case objectTag:
      {
        ObjectType* t1 = type()->as_ObjectType();
        ObjectType* t2 = v->type()->as_ObjectType();
        return (t1 != nullptr && t2 != nullptr &&
                t1->is_loaded() && t2->is_loaded() &&
                t1->constant_value() == t2->constant_value());
      }
    case metaDataTag:
      {
        MetadataType* t1 = type()->as_MetadataType();
        MetadataType* t2 = v->type()->as_MetadataType();
        return (t1 != nullptr && t2 != nullptr &&
                t1->is_loaded() && t2->is_loaded() &&
                t1->constant_value() == t2->constant_value());
      }
    default:
      return false;
  }
}

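// Tries to evaluate "this cond right" at compile time. Returns cond_true or
// cond_false if both operands are comparable constants of the same type,
// and not_comparable otherwise.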
Constant::CompareResult Constant::compare(Instruction::Condition cond, Value right) const {
  Constant* rc = right->as_Constant();
  // other is not a constant
  if (rc == nullptr) return not_comparable;

  ValueType* lt = type();
  ValueType* rt = rc->type();
  // different types
  if (lt->base() != rt->base()) return not_comparable;
  switch (lt->tag()) {
    case intTag: {
      int x = lt->as_IntConstant()->value();
      int y = rt->as_IntConstant()->value();
      switch (cond) {
        case If::eql: return x == y ? cond_true : cond_false;
        case If::neq: return x != y ? cond_true : cond_false;
        case If::lss: return x < y ? cond_true : cond_false;
        case If::leq: return x <= y ? cond_true : cond_false;
        case If::gtr: return x > y ? cond_true : cond_false;
        case If::geq: return x >= y ? cond_true : cond_false;
        default : break;
      }
      break;
    }
    case longTag: {
      jlong x = lt->as_LongConstant()->value();
      jlong y = rt->as_LongConstant()->value();
      switch (cond) {
        case If::eql: return x == y ? cond_true : cond_false;
        case If::neq: return x != y ? cond_true : cond_false;
        case If::lss: return x < y ? cond_true : cond_false;
        case If::leq: return x <= y ? cond_true : cond_false;
        case If::gtr: return x > y ? cond_true : cond_false;
        case If::geq: return x >= y ? cond_true : cond_false;
        default : break;
      }
      break;
    }
    case objectTag: {
      ciObject* xvalue = lt->as_ObjectType()->constant_value();
      ciObject* yvalue = rt->as_ObjectType()->constant_value();
      assert(xvalue != nullptr && yvalue != nullptr, "not constants");
      if (xvalue->is_loaded() && yvalue->is_loaded()) {
        switch (cond) {
          case If::eql: return xvalue == yvalue ? cond_true : cond_false;
          case If::neq: return xvalue != yvalue ? cond_true : cond_false;
          default : break;
        }
      }
      break;
    }
    case metaDataTag: {
      ciMetadata* xvalue = lt->as_MetadataType()->constant_value();
      ciMetadata* yvalue = rt->as_MetadataType()->constant_value();
      assert(xvalue != nullptr && yvalue != nullptr, "not constants");
      if (xvalue->is_loaded() && yvalue->is_loaded()) {
        switch (cond) {
          case If::eql: return xvalue == yvalue ? cond_true : cond_false;
          case If::neq: return xvalue != yvalue ? cond_true : cond_false;
          default : break;
        }
      }
      break;
    }
    default:
      break;
  }
  return not_comparable;
}


// Implementation of BlockBegin

void BlockBegin::set_end(BlockEnd* new_end) { // Assumes that no predecessor of new_end still has it as its successor
  assert(new_end != nullptr, "Should not reset block new_end to null");
  if (new_end == _end) return;

  // Remove this block as predecessor of its current successors
  if (_end != nullptr) {
    for (int i = 0; i < number_of_sux(); i++) {
      sux_at(i)->remove_predecessor(this);
    }
  }

  _end = new_end;

  // Add this block as predecessor of its new successors
  for (int i = 0; i < number_of_sux(); i++) {
    sux_at(i)->add_predecessor(this);
  }
}


void BlockBegin::disconnect_edge(BlockBegin* from, BlockBegin* to) {
  // disconnect any edges between from and to
#ifndef PRODUCT
  if (PrintIR && Verbose) {
    tty->print_cr("Disconnected edge B%d -> B%d", from->block_id(), to->block_id());
  }
#endif
  for (int s = 0; s < from->number_of_sux();) {
    BlockBegin* sux = from->sux_at(s);
    if (sux == to) {
      int index = sux->_predecessors.find(from);
      if (index >= 0) {
        sux->_predecessors.remove_at(index);
      }
      from->end()->remove_sux_at(s);
    } else {
      s++;
    }
  }
}


void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
  // modify predecessors before substituting successors
  for (int i = 0; i < number_of_sux(); i++) {
    if (sux_at(i) == old_sux) {
      // remove old predecessor before adding new predecessor
      // otherwise there is a dead predecessor in the list
      new_sux->remove_predecessor(old_sux);
      new_sux->add_predecessor(this);
    }
  }
  old_sux->remove_predecessor(this);
  end()->substitute_sux(old_sux, new_sux);
}


// In general it is not possible to calculate a value for the field "depth_first_number"
// of the inserted block without recomputing the values of the other blocks
// in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless.
BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) {
  assert(!sux->is_set(critical_edge_split_flag), "sanity check");

  int bci = sux->bci();
  // Critical edge splitting may introduce a goto after an if, and array
  // bounds check elimination may insert a predicate between the if and the
  // goto. The bci of the goto must not be the bci of the if; otherwise the
  // state and bci are inconsistent and a deoptimization triggered by the
  // predicate would lead to incorrect execution/a crash.
  BlockBegin* new_sux = new BlockBegin(bci);

  // mark this block (special treatment when block order is computed)
  new_sux->set(critical_edge_split_flag);

  // This goto is not a safepoint.
  Goto* e = new Goto(sux, false);
  new_sux->set_next(e, bci);
  new_sux->set_end(e);
  // setup states
  ValueStack* s = end()->state();
  new_sux->set_state(s->copy(s->kind(), bci));
  e->set_state(s->copy(s->kind(), bci));
  assert(new_sux->state()->locals_size() == s->locals_size(), "local size mismatch!");
  assert(new_sux->state()->stack_size() == s->stack_size(), "stack size mismatch!");
  assert(new_sux->state()->locks_size() == s->locks_size(), "locks size mismatch!");

  // link predecessor to new block
  end()->substitute_sux(sux, new_sux);

  // The ordering needs to be the same, so remove the link that the
  // set_end call above added and substitute the new_sux for this
  // block.
  sux->remove_predecessor(new_sux);

  // the successor could be the target of a switch so it might have
  // multiple copies of this predecessor, so substitute the new_sux
  // for the first and delete the rest.
  bool assigned = false;
  BlockList& list = sux->_predecessors;
  for (int i = 0; i < list.length(); i++) {
    BlockBegin** b = list.adr_at(i);
    if (*b == this) {
      if (assigned) {
        list.remove_at(i);
        // reprocess this index
        i--;
      } else {
        assigned = true;
        *b = new_sux;
      }
      // link the new block back to its predecessors.
      new_sux->add_predecessor(this);
    }
  }
  assert(assigned == true, "should have assigned at least once");
  return new_sux;
}


void BlockBegin::add_predecessor(BlockBegin* pred) {
  _predecessors.append(pred);
}


void BlockBegin::remove_predecessor(BlockBegin* pred) {
  int idx;
  while ((idx = _predecessors.find(pred)) >= 0) {
    _predecessors.remove_at(idx);
  }
}


void BlockBegin::add_exception_handler(BlockBegin* b) {
  assert(b != nullptr && (b->is_set(exception_entry_flag)), "exception handler must exist");
  // add only if not in the list already
  if (!_exception_handlers.contains(b)) _exception_handlers.append(b);
}

int BlockBegin::add_exception_state(ValueStack* state) {
  assert(is_set(exception_entry_flag), "only for xhandlers");
  if (_exception_states == nullptr) {
    _exception_states = new ValueStackStack(4);
  }
  _exception_states->append(state);
  return _exception_states->length() - 1;
}


void BlockBegin::iterate_preorder(boolArray& mark, BlockClosure* closure) {
  if (!mark.at(block_id())) {
    mark.at_put(block_id(), true);
    closure->block_do(this);
    BlockEnd* e = end(); // must do this after block_do because block_do may change it!
    { for (int i = number_of_exception_handlers() - 1; i >= 0; i--) exception_handler_at(i)->iterate_preorder(mark, closure); }
    { for (int i = e->number_of_sux() - 1; i >= 0; i--) e->sux_at(i)->iterate_preorder(mark, closure); }
  }
}


void BlockBegin::iterate_postorder(boolArray& mark, BlockClosure* closure) {
  if (!mark.at(block_id())) {
    mark.at_put(block_id(), true);
    BlockEnd* e = end();
    { for (int i = number_of_exception_handlers() - 1; i >= 0; i--) exception_handler_at(i)->iterate_postorder(mark, closure); }
    { for (int i = e->number_of_sux() - 1; i >= 0; i--) e->sux_at(i)->iterate_postorder(mark, closure); }
    closure->block_do(this);
  }
}


void BlockBegin::iterate_preorder(BlockClosure* closure) {
  int mark_len = number_of_blocks();
  boolArray mark(mark_len, mark_len, false);
  iterate_preorder(mark, closure);
}


void BlockBegin::iterate_postorder(BlockClosure* closure) {
  int mark_len = number_of_blocks();
  boolArray mark(mark_len, mark_len, false);
  iterate_postorder(mark, closure);
}


void BlockBegin::block_values_do(ValueVisitor* f) {
  for (Instruction* n = this; n != nullptr; n = n->next()) n->values_do(f);
}


#ifndef PRODUCT
#define TRACE_PHI(code) if (PrintPhiFunctions) { code; }
#else
#define TRACE_PHI(code)
#endif

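// Merges new_state into this block's entry state, creating or reusing phi
// functions where the incoming values differ. Returns false if the states
// cannot be merged (the caller then bails out of the compilation).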
bool BlockBegin::try_merge(ValueStack* new_state, bool has_irreducible_loops) {
  TRACE_PHI(tty->print_cr("********** try_merge for block B%d", block_id()));

  // local variables used for state iteration
  int index;
  Value new_value, existing_value;

  ValueStack* existing_state = state();
  if (existing_state == nullptr) {
    TRACE_PHI(tty->print_cr("first call of try_merge for this block"));

    if (is_set(BlockBegin::was_visited_flag)) {
      // this actually happens for complicated jsr/ret structures
      return false; // BAILOUT in caller
    }

    // copy state because it is altered
    new_state = new_state->copy(ValueStack::BlockBeginState, bci());

    // Use method liveness to invalidate dead locals
    MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci());
    if (liveness.is_valid()) {
      assert((int)liveness.size() == new_state->locals_size(), "error in use of liveness");

      for_each_local_value(new_state, index, new_value) {
        if (!liveness.at(index) || new_value->type()->is_illegal()) {
          new_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating dead local %d", index));
        }
      }
    }

    if (is_set(BlockBegin::parser_loop_header_flag)) {
      TRACE_PHI(tty->print_cr("loop header block, initializing phi functions"));

      for_each_stack_value(new_state, index, new_value) {
        new_state->setup_phi_for_stack(this, index);
        TRACE_PHI(tty->print_cr("creating phi-function %c%d for stack %d", new_state->stack_at(index)->type()->tchar(), new_state->stack_at(index)->id(), index));
      }

      BitMap& requires_phi_function = new_state->scope()->requires_phi_function();
      for_each_local_value(new_state, index, new_value) {
        bool requires_phi = requires_phi_function.at(index) || (new_value->type()->is_double_word() && requires_phi_function.at(index + 1));
        if (requires_phi || !SelectivePhiFunctions || has_irreducible_loops) {
          new_state->setup_phi_for_local(this, index);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for local %d", new_state->local_at(index)->type()->tchar(), new_state->local_at(index)->id(), index));
        }
      }
    }

    // initialize state of block
    set_state(new_state);

  } else if (existing_state->is_same(new_state)) {
    TRACE_PHI(tty->print_cr("existing state found"));

    assert(existing_state->scope() == new_state->scope(), "not matching");
    assert(existing_state->locals_size() == new_state->locals_size(), "not matching");
    assert(existing_state->stack_size() == new_state->stack_size(), "not matching");

    if (is_set(BlockBegin::was_visited_flag)) {
      TRACE_PHI(tty->print_cr("loop header block, phis must be present"));

      if (!is_set(BlockBegin::parser_loop_header_flag)) {
        // this actually happens for complicated jsr/ret structures
        return false; // BAILOUT in caller
      }

      for_each_local_value(existing_state, index, existing_value) {
        Value new_value = new_state->local_at(index);
        if (new_value == nullptr || new_value->type()->tag() != existing_value->type()->tag()) {
          Phi* existing_phi = existing_value->as_Phi();
          if (existing_phi == nullptr) {
            return false; // BAILOUT in caller
          }
          // Invalidate the phi function here. This case is very rare except for
          // JVMTI capability "can_access_local_variables".
          // In really rare cases we will bail out in LIRGenerator::move_to_phi.
          existing_phi->make_illegal();
          existing_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating local %d because of type mismatch", index));
        }

        if (existing_value != new_state->local_at(index) && existing_value->as_Phi() == nullptr) {
          TRACE_PHI(tty->print_cr("required phi for local %d is missing, irreducible loop?", index));
          return false; // BAILOUT in caller
        }
      }

#ifdef ASSERT
      // check that all necessary phi functions are present
      for_each_stack_value(existing_state, index, existing_value) {
        assert(existing_value->as_Phi() != nullptr && existing_value->as_Phi()->block() == this, "phi function required");
      }
      for_each_local_value(existing_state, index, existing_value) {
        assert(existing_value == new_state->local_at(index) || (existing_value->as_Phi() != nullptr && existing_value->as_Phi()->block() == this), "phi function required");
      }
#endif

    } else {
      TRACE_PHI(tty->print_cr("creating phi functions on demand"));

      // create necessary phi functions for stack
      for_each_stack_value(existing_state, index, existing_value) {
        Value new_value = new_state->stack_at(index);
        Phi* existing_phi = existing_value->as_Phi();

        if (new_value != existing_value && (existing_phi == nullptr || existing_phi->block() != this)) {
          existing_state->setup_phi_for_stack(this, index);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for stack %d", existing_state->stack_at(index)->type()->tchar(), existing_state->stack_at(index)->id(), index));
        }
      }

      // create necessary phi functions for locals
      for_each_local_value(existing_state, index, existing_value) {
        Value new_value = new_state->local_at(index);
        Phi* existing_phi = existing_value->as_Phi();

        if (new_value == nullptr || new_value->type()->tag() != existing_value->type()->tag()) {
          existing_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating local %d because of type mismatch", index));
        } else if (new_value != existing_value && (existing_phi == nullptr || existing_phi->block() != this)) {
          existing_state->setup_phi_for_local(this, index);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for local %d", existing_state->local_at(index)->type()->tchar(), existing_state->local_at(index)->id(), index));
        }
      }
    }

    assert(existing_state->caller_state() == new_state->caller_state(), "caller states must be equal");

  } else {
    assert(false, "stack or locks not matching (invalid bytecodes)");
    return false;
  }

  TRACE_PHI(tty->print_cr("********** try_merge for block B%d successful", block_id()));

  return true;
}


#ifndef PRODUCT
void BlockBegin::print_block() {
  InstructionPrinter ip;
  print_block(ip, false);
}


void BlockBegin::print_block(InstructionPrinter& ip, bool live_only) {
  ip.print_instr(this); tty->cr();
  ip.print_stack(this->state()); tty->cr();
  ip.print_inline_level(this);
  ip.print_head();
  for (Instruction* n = next(); n != nullptr; n = n->next()) {
    if (!live_only || n->is_pinned() || n->use_count() > 0) {
      ip.print_line(n);
    }
  }
  tty->cr();
}
#endif // PRODUCT


// Implementation of BlockList

void BlockList::iterate_forward(BlockClosure* closure) {
  const int l = length();
  for (int i = 0; i < l; i++) closure->block_do(at(i));
}


void BlockList::iterate_backward(BlockClosure* closure) {
  for (int i = length() - 1; i >= 0; i--) closure->block_do(at(i));
}


void BlockList::values_do(ValueVisitor* f) {
  for (int i = length() - 1; i >= 0; i--) at(i)->block_values_do(f);
}


#ifndef PRODUCT
void BlockList::print(bool cfg_only, bool live_only) {
  InstructionPrinter ip;
  for (int i = 0; i < length(); i++) {
    BlockBegin* block = at(i);
    if (cfg_only) {
      ip.print_instr(block); tty->cr();
    } else {
      block->print_block(ip, live_only);
    }
  }
}
#endif // PRODUCT


// Implementation of BlockEnd

void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
  substitute(*_sux, old_sux, new_sux);
}

// Implementation of Phi

// Normal phi functions take their operands from the last instruction of the
// predecessor. Special handling is needed for xhandler entries because there
// the states of arbitrary instructions are needed.

Value Phi::operand_at(int i) const {
  ValueStack* state;
  if (_block->is_set(BlockBegin::exception_entry_flag)) {
    state = _block->exception_state_at(i);
  } else {
    state = _block->pred_at(i)->end()->state();
  }
  assert(state != nullptr, "");

  if (is_local()) {
    return state->local_at(local_index());
  } else {
    return state->stack_at(stack_index());
  }
}


int Phi::operand_count() const {
  if (_block->is_set(BlockBegin::exception_entry_flag)) {
    return _block->number_of_exception_states();
  } else {
    return _block->number_of_preds();
  }
}

#ifdef ASSERT
// Constructor of Assert
Assert::Assert(Value x, Condition cond, bool unordered_is_true, Value y) : Instruction(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
{
  set_flag(UnorderedIsTrueFlag, unordered_is_true);
  assert(x->type()->tag() == y->type()->tag(), "types must match");
  pin();

  stringStream strStream;
  Compilation::current()->method()->print_name(&strStream);

  stringStream strStream1;
  InstructionPrinter ip1(1, &strStream1);
  ip1.print_instr(x);

  stringStream strStream2;
  InstructionPrinter ip2(1, &strStream2);
  ip2.print_instr(y);

  stringStream ss;
  ss.print("Assertion %s %s %s in method %s", strStream1.freeze(), ip2.cond_name(cond), strStream2.freeze(), strStream.freeze());

  _message = ss.as_string();
}
#endif

void RangeCheckPredicate::check_state() {
  assert(state()->kind() != ValueStack::EmptyExceptionState && state()->kind() != ValueStack::ExceptionState, "will deopt with empty state");
}

void ProfileInvoke::state_values_do(ValueVisitor* f) {
  if (state() != nullptr) state()->values_do(f);
}