/*
 * Copyright (c) 1999, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_C1_C1_INSTRUCTION_HPP
#define SHARE_C1_C1_INSTRUCTION_HPP

#include "c1/c1_Compilation.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_ValueType.hpp"
#include "ci/ciField.hpp"

// Predefined classes
class ciField;
class ValueStack;
class InstructionPrinter;
class IRScope;


// Instruction class hierarchy
//
// All leaf classes in the class hierarchy are concrete classes
// (i.e., are instantiated). All other classes are abstract and
// serve for factoring out common code.

class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeGet;
class UnsafePut;
class UnsafeGetAndSet;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
typedef GrowableArray<Value> Values;
typedef GrowableArray<ValueStack*> ValueStackStack;

// BlockClosure is the base class for block traversal/iteration.
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void values_do(ValueVisitor* f);
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes themselves.

class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGet      (UnsafeGet*       x) = 0;
  virtual void do_UnsafePut      (UnsafePut*       x) = 0;
  virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};
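
// Illustrative sketch (not part of the original header): concrete closures
// and visitors are written by overriding the pure virtual hooks above. The
// name CountBlocks below is hypothetical.
//
//   class CountBlocks: public BlockClosure {
//     int _count;
//    public:
//     CountBlocks() : _count(0) {}
//     virtual void block_do(BlockBegin* block) { _count++; }
//     int count() const { return _count; }
//   };
//
//   // A full InstructionVisitor must override every do_X above; real C1
//   // examples are InstructionPrinter and LIRGenerator, which are invoked
//   // via x->visit(this)-style double dispatch.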
// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1)             ((intx)(x1))
#define HASH2(x1, x2)         ((HASH1(x1) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3)     ((HASH2(x1, x2) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4) ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that 0 is never
//       produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)                  \
  virtual intx hash() const {                              \
    return (enabled) ? HASH2(name(), f1) : 0;              \
  }                                                        \
  virtual bool is_equal(Value v) const {                   \
    if (!(enabled))    return false;                       \
    class_name* _v = v->as_##class_name();                 \
    if (_v == nullptr) return false;                       \
    if (f1 != _v->f1)  return false;                       \
    return true;                                           \
  }


#define HASHING2(class_name, enabled, f1, f2)              \
  virtual intx hash() const {                              \
    return (enabled) ? HASH3(name(), f1, f2) : 0;          \
  }                                                        \
  virtual bool is_equal(Value v) const {                   \
    if (!(enabled))    return false;                       \
    class_name* _v = v->as_##class_name();                 \
    if (_v == nullptr) return false;                       \
    if (f1 != _v->f1)  return false;                       \
    if (f2 != _v->f2)  return false;                       \
    return true;                                           \
  }


#define HASHING3(class_name, enabled, f1, f2, f3)          \
  virtual intx hash() const {                              \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0;      \
  }                                                        \
  virtual bool is_equal(Value v) const {                   \
    if (!(enabled))    return false;                       \
    class_name* _v = v->as_##class_name();                 \
    if (_v == nullptr) return false;                       \
    if (f1 != _v->f1)  return false;                       \
    if (f2 != _v->f2)  return false;                       \
    if (f3 != _v->f3)  return false;                       \
    return true;                                           \
  }
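
// Illustrative sketch (not part of the original header): for a hypothetical
// leaf class Foo with fields a() and b(), HASHING2(Foo, true, a(), b())
// expands roughly to
//
//   virtual intx hash() const { return HASH3(name(), a(), b()); }
//   virtual bool is_equal(Value v) const {
//     Foo* _v = v->as_Foo();
//     return _v != nullptr && a() == _v->a() && b() == _v->b();
//   }
//
// ValueMap buckets instructions by hash() and then calls is_equal() to decide
// whether a previously computed value can be reused (common subexpression
// elimination); a hash() of 0 simply keeps the instruction out of the map.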
// The mother of all instructions...

class Instruction: public CompilationResourceObj {
 private:
  int          _id;               // the unique instruction id
#ifndef PRODUCT
  int          _printable_bci;    // the bci of the instruction for printing
#endif
  int          _use_count;        // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1
  int          _pin_state;        // set of PinReason describing the reason for pinning
  unsigned int _flags;            // Flag bits
  ValueType*   _type;             // the instruction value type
  Instruction* _next;             // the next instruction if any (null for BlockEnd instructions)
  Instruction* _subst;            // the substitution instruction if any
  LIR_Opr      _operand;          // LIR specific information

  ValueStack*  _state_before;     // Copy of state with input operands still on stack (or null)
  ValueStack*  _exception_state;  // Copy of state for exception handling
  XHandlers*   _exception_handlers; // Flat list of exception handlers covering this instruction

  friend class UseCountComputer;

  void update_exception_state(ValueStack* state);

 protected:
  BlockBegin*  _block;            // Block that contains this instruction

  void set_type(ValueType* type) {
    assert(type != nullptr, "type must exist");
    _type = type;
  }

  // Helper class to keep track of which arguments need a null check
  class ArgsNonNullState {
   private:
    int _nonnull_state; // mask identifying which args are nonnull
   public:
    ArgsNonNullState()
      : _nonnull_state(AllBits) {}

    // Does argument number i need a null check?
    bool arg_needs_null_check(int i) const {
      // No data is kept for arguments starting at position 33 so
      // conservatively assume that they need a null check.
      if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
        return is_set_nth_bit(_nonnull_state, i);
      }
      return true;
    }

    // Set whether argument number i needs a null check or not
    void set_arg_needs_null_check(int i, bool check) {
      if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
        if (check) {
          _nonnull_state |= (int)nth_bit(i);
        } else {
          _nonnull_state &= (int)~(nth_bit(i));
        }
      }
    }
  };

 public:
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    return res;
  }

  static const int no_bci = -99;

  enum InstructionFlag {
    NeedsNullCheckFlag = 0,
    CanTrapFlag,
    DirectCompareFlag,
    IsEliminatedFlag,
    IsSafepointFlag,
    IsStaticFlag,
    NeedsStoreCheckFlag,
    NeedsWriteBarrierFlag,
    PreservesStateFlag,
    TargetIsFinalFlag,
    TargetIsLoadedFlag,
    UnorderedIsTrueFlag,
    NeedsPatchingFlag,
    ThrowIncompatibleClassChangeErrorFlag,
    InvokeSpecialReceiverCheckFlag,
    ProfileMDOFlag,
    IsLinkedInBlockFlag,
    NeedsRangeCheckFlag,
    InWorkListFlag,
    DeoptimizeOnException,
    KillsMemoryFlag,
    OmitChecksFlag,
    InstructionLastFlag
  };

 public:
  bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
  void set_flag(InstructionFlag id, bool f)  { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); };
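
  // Illustrative sketch (not part of the original header): each
  // InstructionFlag names a bit position in _flags, so
  //
  //   set_flag(CanTrapFlag, true);    // _flags |= (1 << CanTrapFlag)
  //   check_flag(CanTrapFlag);        // (_flags & (1 << CanTrapFlag)) != 0
  //   set_flag(CanTrapFlag, false);   // _flags &= ~(1 << CanTrapFlag)
  //
  // which is why the number of flags must stay within the width of the
  // unsigned int _flags field (at most 32 flags).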

  // 'globally' used condition values
  enum Condition {
    eql, neq, lss, leq, gtr, geq, aeq, beq
  };

  // Instructions may be pinned for many reasons and under certain conditions
  // with enough knowledge it's possible to safely unpin them.
  enum PinReason {
      PinUnknown                = 1 << 0
    , PinExplicitNullCheck      = 1 << 3
    , PinStackForStateSplit     = 1 << 12
    , PinStateSplitConstructor  = 1 << 13
    , PinGlobalValueNumbering   = 1 << 14
  };

  static Condition mirror(Condition cond);
  static Condition negate(Condition cond);

  // initialization
  static int number_of_instructions() {
    return Compilation::current()->number_of_instructions();
  }

  // creation
  Instruction(ValueType* type, ValueStack* state_before = nullptr, bool type_is_constant = false)
  : _id(Compilation::current()->get_next_id()),
#ifndef PRODUCT
    _printable_bci(-99),
#endif
    _use_count(0)
  , _pin_state(0)
  , _flags(0)
  , _type(type)
  , _next(nullptr)
  , _subst(nullptr)
  , _operand(LIR_OprFact::illegalOpr)
  , _state_before(state_before)
  , _exception_handlers(nullptr)
  , _block(nullptr)
  {
    check_state(state_before);
    assert(type != nullptr && (!type->is_constant() || type_is_constant), "type must exist");
    update_exception_state(_state_before);
  }

  // accessors
  int id() const { return _id; }
#ifndef PRODUCT
  bool has_printable_bci() const { return _printable_bci != -99; }
  int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
  void set_printable_bci(int bci) { _printable_bci = bci; }
#endif
  int dominator_depth();
  int use_count() const { return _use_count; }
  int pin_state() const { return _pin_state; }
  bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
  ValueType* type() const { return _type; }
  BlockBegin* block() const { return _block; }
  Instruction* prev(); // use carefully, expensive operation
  Instruction* next() const { return _next; }
  bool has_subst() const { return _subst != nullptr; }
  Instruction* subst() { return _subst == nullptr ? this : _subst->subst(); }
  LIR_Opr operand() const { return _operand; }

  void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
  bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
  bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
  bool can_be_linked() { return as_Local() == nullptr && as_Phi() == nullptr; }

  bool is_null_obj() { return as_Constant() != nullptr && type()->as_ObjectType()->constant_value()->is_null_object(); }

  bool has_uses() const { return use_count() > 0; }
  ValueStack* state_before() const { return _state_before; }
  ValueStack* exception_state() const { return _exception_state; }
  virtual bool needs_exception_state() const { return true; }
  XHandlers* exception_handlers() const { return _exception_handlers; }

  // manipulation
  void pin(PinReason reason) { _pin_state |= reason; }
  void pin() { _pin_state |= PinUnknown; }
  // DANGEROUS: only used by EliminateStores
  void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }

  Instruction* set_next(Instruction* next) {
    assert(next->has_printable_bci(), "_printable_bci should have been set");
    assert(next != nullptr, "must not be null");
    assert(as_BlockEnd() == nullptr, "BlockEnd instructions must have no next");
    assert(next->can_be_linked(), "shouldn't link these instructions into list");

    BlockBegin* block = this->block();
    next->_block = block;

    next->set_flag(Instruction::IsLinkedInBlockFlag, true);
    _next = next;
    return next;
  }

  Instruction* set_next(Instruction* next, int bci) {
#ifndef PRODUCT
    next->set_printable_bci(bci);
#endif
    return set_next(next);
  }

  // when blocks are merged
  void fixup_block_pointers() {
    Instruction* cur = next()->next(); // next()'s block is set in set_next
    while (cur && cur->_block != block()) {
      cur->_block = block();
      cur = cur->next();
    }
  }

  Instruction* insert_after(Instruction* i) {
    Instruction* n = _next;
    set_next(i);
    i->set_next(n);
    return _next;
  }

  Instruction* insert_after_same_bci(Instruction* i) {
#ifndef PRODUCT
    i->set_printable_bci(printable_bci());
#endif
    return insert_after(i);
  }
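
  // Illustrative sketch (not part of the original header): instructions form
  // a singly linked list per block, threaded through _next. Splicing a new
  // instruction after an existing one is just
  //
  //   Instruction* cur   = ...;            // already linked in a block
  //   Instruction* extra = ...;            // freshly created, not yet linked
  //   cur->insert_after_same_bci(extra);   // cur -> extra -> old successor
  //
  // insert_after() re-links _next on both sides, and set_next() stamps the
  // block pointer and IsLinkedInBlockFlag on the newly linked instruction.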
  void set_subst(Instruction* subst) {
    assert(subst == nullptr ||
           type()->base() == subst->type()->base() ||
           subst->type()->base() == illegalType, "type can't change");
    _subst = subst;
  }
  void set_exception_handlers(XHandlers* xhandlers) { _exception_handlers = xhandlers; }
  void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
  void set_state_before(ValueStack* s) { check_state(s); _state_before = s; }

  // machine-specifics
  void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
  void clear_operand() { _operand = LIR_OprFact::illegalOpr; }

  // generic
  virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
  virtual Phi* as_Phi() { return nullptr; }
  virtual Local* as_Local() { return nullptr; }
  virtual Constant* as_Constant() { return nullptr; }
  virtual AccessField* as_AccessField() { return nullptr; }
  virtual LoadField* as_LoadField() { return nullptr; }
  virtual StoreField* as_StoreField() { return nullptr; }
  virtual AccessArray* as_AccessArray() { return nullptr; }
  virtual ArrayLength* as_ArrayLength() { return nullptr; }
  virtual AccessIndexed* as_AccessIndexed() { return nullptr; }
  virtual LoadIndexed* as_LoadIndexed() { return nullptr; }
  virtual StoreIndexed* as_StoreIndexed() { return nullptr; }
  virtual NegateOp* as_NegateOp() { return nullptr; }
  virtual Op2* as_Op2() { return nullptr; }
  virtual ArithmeticOp* as_ArithmeticOp() { return nullptr; }
  virtual ShiftOp* as_ShiftOp() { return nullptr; }
  virtual LogicOp* as_LogicOp() { return nullptr; }
  virtual CompareOp* as_CompareOp() { return nullptr; }
  virtual IfOp* as_IfOp() { return nullptr; }
  virtual Convert* as_Convert() { return nullptr; }
  virtual NullCheck* as_NullCheck() { return nullptr; }
  virtual OsrEntry* as_OsrEntry() { return nullptr; }
  virtual StateSplit* as_StateSplit() { return nullptr; }
  virtual Invoke* as_Invoke() { return nullptr; }
  virtual NewInstance* as_NewInstance() { return nullptr; }
  virtual NewArray* as_NewArray() { return nullptr; }
  virtual NewTypeArray* as_NewTypeArray() { return nullptr; }
  virtual NewObjectArray* as_NewObjectArray() { return nullptr; }
  virtual NewMultiArray* as_NewMultiArray() { return nullptr; }
  virtual TypeCheck* as_TypeCheck() { return nullptr; }
  virtual CheckCast* as_CheckCast() { return nullptr; }
  virtual InstanceOf* as_InstanceOf() { return nullptr; }
  virtual TypeCast* as_TypeCast() { return nullptr; }
  virtual AccessMonitor* as_AccessMonitor() { return nullptr; }
  virtual MonitorEnter* as_MonitorEnter() { return nullptr; }
  virtual MonitorExit* as_MonitorExit() { return nullptr; }
  virtual Intrinsic* as_Intrinsic() { return nullptr; }
  virtual BlockBegin* as_BlockBegin() { return nullptr; }
  virtual BlockEnd* as_BlockEnd() { return nullptr; }
  virtual Goto* as_Goto() { return nullptr; }
  virtual If* as_If() { return nullptr; }
  virtual TableSwitch* as_TableSwitch() { return nullptr; }
  virtual LookupSwitch* as_LookupSwitch() { return nullptr; }
  virtual Return* as_Return() { return nullptr; }
  virtual Throw* as_Throw() { return nullptr; }
  virtual Base* as_Base() { return nullptr; }
  virtual RoundFP* as_RoundFP() { return nullptr; }
  virtual ExceptionObject* as_ExceptionObject() { return nullptr; }
  virtual UnsafeOp* as_UnsafeOp() { return nullptr; }
  virtual ProfileInvoke* as_ProfileInvoke() { return nullptr; }
  virtual RangeCheckPredicate* as_RangeCheckPredicate() { return nullptr; }

#ifdef ASSERT
  virtual Assert* as_Assert() { return nullptr; }
#endif

  virtual void visit(InstructionVisitor* v) = 0;

  virtual bool can_trap() const { return false; }

  virtual void input_values_do(ValueVisitor* f) = 0;
  virtual void state_values_do(ValueVisitor* f);
  virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
  void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }

  virtual ciType* exact_type() const;
  virtual ciType* declared_type() const { return nullptr; }

  // hashing
  virtual const char* name() const = 0;
  HASHING1(Instruction, false, id()) // hashing disabled by default

  // debugging
  static void check_state(ValueStack* state) PRODUCT_RETURN;
  void print() PRODUCT_RETURN;
  void print_line() PRODUCT_RETURN;
  void print(InstructionPrinter& ip) PRODUCT_RETURN;
};
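
// Illustrative sketch (not part of the original header): the as_X() methods
// provide a checked downcast without RTTI. A typical pattern in the
// optimizer is
//
//   Constant* c = value->as_Constant();
//   if (c != nullptr) {
//     // value is known to be a Constant here
//   }
//
// Only the class itself (and, for the abstract bases, its subclasses)
// overrides the corresponding as_X() to return 'this'; every other
// instruction keeps the default that returns nullptr.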

// The following macros are used to define base (i.e., non-leaf)
// and leaf instruction classes. They define class-name related
// generic functionality in one place.

#define BASE(class_name, super_class_name)                  \
  class class_name: public super_class_name {               \
   public:                                                  \
    virtual class_name* as_##class_name() { return this; }  \


#define LEAF(class_name, super_class_name)                  \
  BASE(class_name, super_class_name)                        \
   public:                                                  \
    virtual const char* name() const { return #class_name; }                \
    virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); }  \


// Debugging support


#ifdef ASSERT
class AssertValues: public ValueVisitor {
  void visit(Value* x) { assert((*x) != nullptr, "value must exist"); }
};
#define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); }
#else
#define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int _pf_flags; // the flags of the phi function
  int _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool is_local() const { return _index >= 0; }
  bool is_on_stack() const { return !is_local(); }
  int local_index() const { assert(is_local(), ""); return _index; }
  int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;
  int operand_count() const;

  void set(Flag f) { _pf_flags |= f; }
  void clear(Flag f) { _pf_flags &= ~f; }
  bool is_set(Flag f) const { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
  }
};
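
// Illustrative sketch (not part of the original header): _index encodes where
// the phi's value lives in the ValueStack. A non-negative index refers to a
// local variable slot, a negative one to an operand-stack slot:
//
//   _index =  3  ->  is_local()    , local_index() == 3
//   _index = -1  ->  is_on_stack() , stack_index() == -(-1 + 1) == 0
//   _index = -4  ->  is_on_stack() , stack_index() == 3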

// A local is a placeholder for an incoming argument to a function call.

LEAF(Local, Instruction)
 private:
  int      _java_index;    // the local index within the method to which the local belongs
  bool     _is_receiver;   // if local variable holds the receiver: "this" for non-static methods
  ciType*  _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index, bool receiver)
    : Instruction(type)
    , _java_index(index)
    , _is_receiver(receiver)
    , _declared_type(declared)
  {
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const { return _java_index; }
  bool is_receiver() const { return _is_receiver; }

  virtual ciType* declared_type() const { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
};


LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, nullptr, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before, bool kills_memory = false):
      Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != nullptr, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    set_flag(KillsMemoryFlag, kills_memory);
    pin(); // since it's patching it needs to be pinned
  }

  // generic
  virtual bool can_trap() const { return state_before() != nullptr; }
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  bool kills_memory() const { return check_flag(KillsMemoryFlag); }

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return nullptr;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return nullptr;
    }
  }
};
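
// Illustrative sketch (not part of the original header): when both operands of
// a conditional branch are constants, the block-returning compare() lets the
// optimizer fold the branch to a single successor; the names below are
// hypothetical:
//
//   // constant 3 < constant 7 is known to be true
//   BlockBegin* sux = lhs_constant->compare(Instruction::lss, rhs_constant,
//                                           true_sux, false_sux);
//   // sux == true_sux; a nullptr result means the fold is not possible
//   // (not_comparable), e.g. for constants of unrelated types.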

BASE(AccessField, Instruction)
 private:
  Value      _obj;
  int        _offset;
  ciField*   _field;
  NullCheck* _explicit_null_check; // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(nullptr)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const { return _obj; }
  int offset() const { return _offset; }
  ciField* field() const { return _field; }
  BasicType field_type() const { return _field->type()->basic_type(); }
  bool is_static() const { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const { return _explicit_null_check; }
  bool needs_patching() const { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
};


LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic; cannot be eliminated if needs patching or if volatile.
  HASHING3(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset(), declared_type())
};


LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const { return _value; }
  bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); }
};


BASE(AccessArray, Instruction)
 private:
  Value _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const { return _array; }

  // generic
  virtual bool can_trap() const { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); }
};


LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check; // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(nullptr) {}

  // accessors
  NullCheck* explicit_null_check() const { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};

BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;
  BasicType _elt_type;
  bool      _mismatched;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  , _mismatched(mismatched)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const { return _index; }
  Value length() const { return _length; }
  BasicType elt_type() const { return _elt_type; }
  bool mismatched() const { return _mismatched; }

  void clear_length() { _length = nullptr; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != nullptr) f->visit(&_length); }
};


LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check; // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _explicit_null_check(nullptr) {}

  // accessors
  NullCheck* explicit_null_check() const { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING3(LoadIndexed, true, elt_type(), array()->subst(), index()->subst())
};


LEAF(StoreIndexed, AccessIndexed)
 private:
  Value _value;

  ciMethod* _profiled_method;
  int       _profiled_bci;
  bool      _check_boolean;

 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before,
               bool check_boolean, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _value(value), _profiled_method(nullptr), _profiled_bci(0), _check_boolean(check_boolean)
  {
    set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
    set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object()));
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const { return _value; }
  bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); }
  bool check_boolean() const { return _check_boolean; }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const { return _profiled_bci; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); }
};
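
// Illustrative sketch (not part of the original header): every indexed access
// starts out with NeedsRangeCheckFlag set in the AccessIndexed constructor.
// compute_needs_range_check() covers the "range checks involving constants"
// case mentioned above: if both the index and the array length are
// compile-time constants and the index is provably in bounds (say, index 3
// against a known length of 10), it reports that the check is redundant so
// the flag can be dropped and no bounds check needs to be emitted; otherwise
// the flag stays set.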

LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
};


BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = nullptr)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value x() const { return _x; }
  Value y() const { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const { return false; }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
};


LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    if (can_trap()) pin();
  }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const { return (Condition)Op2::op(); }
  Value tval() const { return _tval; }
  Value fval() const { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};
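
// Illustrative sketch (not part of the original header): IfOp is a conditional
// move, i.e. the value "x cond y ? tval : fval" without any control flow. It
// reuses the Op2 slot that normally holds a bytecode to store the Condition
// instead, which is why op() is hidden (ShouldNotCallThis) and cond() casts
// Op2::op() back to a Condition. IfOps are typically created by the optimizer
// from an If whose two branches merely select between two values.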

LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value value() const { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const { return _obj; }

  // setters
  void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const { return _declared_type; }
  Value obj() const { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
};


BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = nullptr)
  : Instruction(type, state_before)
  , _state(nullptr)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const { return _state; }
  IRScope* scope() const; // the state's scope

  // manipulation
  void set_state(ValueStack* state) { assert(_state == nullptr, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;
  Values*         _args;
  BasicTypeList*  _signature;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const { return _code; }
  Value receiver() const { return _recv; }
  bool has_receiver() const { return receiver() != nullptr; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }
  BasicTypeList* signature() const { return _signature; }
  ciMethod* target() const { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); }

  // JSR 292 support
  bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor* f);
};


LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;
  bool _is_unresolved;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const { return _klass; }
  bool is_unresolved() const { return _is_unresolved; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};


BASE(NewArray, StateSplit)
 private:
  Value _length;

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is null for NewMultiArray
  }

  // accessors
  Value length() const { return _length; }

  virtual bool needs_exception_state() const { return false; }

  ciType* exact_type() const { return nullptr; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};


LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;
  bool _zero_array;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before, bool zero_array)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  , _zero_array(zero_array)
  {}

  // accessors
  BasicType elt_type() const { return _elt_type; }
  bool zero_array() const { return _zero_array; }
  ciType* exact_type() const;
};


LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const { return _klass; }
  ciType* exact_type() const;
};


LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(nullptr, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Values* dims() const { return _dims; }
  int rank() const { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array;
    // passing the zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};


BASE(TypeCheck, StateSplit)
 private:
  ciKlass* _klass;
  Value    _obj;

  ciMethod* _profiled_method;
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(nullptr), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Value obj() const { return _obj; }
  bool is_loaded() const { return klass() != nullptr; }
  bool direct_compare() const { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const { return _profiled_bci; }
};


LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const { return false; }
};


BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = nullptr)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const { return _obj; }
  int monitor_no() const { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
};


LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const { return true; }
};


LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, nullptr)
  {
    ASSERT_VALUES
  }
};


LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  ArgsNonNullState _nonnull_state;
  Values*          _args;
  Value            _recv;

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic. The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(nullptr)
  {
    assert(args != nullptr, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const { return _id; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }

  bool has_receiver() const { return (_recv != nullptr); }
  Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

LEAF(BlockBegin, StateSplit)
 private:
  int _block_id;            // the unique block id
  int _bci;                 // start-bci of block
  int _depth_first_number;  // number of this block in a depth-first ordering
  int _linear_scan_number;  // number of this block in linear-scan ordering
  int _dominator_depth;
  int _loop_depth;          // the loop nesting level of this block
  int _loop_index;          // number of the innermost loop of this block
  int _flags;               // the flags associated with this block

  // fields used by BlockListBuilder
  int            _total_preds;      // number of predecessors found by BlockListBuilder
  ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList   _predecessors;          // the predecessors of this block
  BlockList   _dominates;             // list of blocks that are dominated by this block
  BlockBegin* _dominator;             // the dominator of this block
  // SSA specific ends
  BlockEnd*   _end;                   // the last instruction of this block
  BlockList   _exception_handlers;    // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int         _exception_handler_pco; // if this block is the start of an exception handler,
                                      // this records the PC offset in the assembly code of the
                                      // first instruction in this block
  Label       _label;                 // the label associated with this block
  LIR_List*   _lir;                   // the low level intermediate representation for this block

  ResourceBitMap _live_in;            // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;           // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;           // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill;          // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;    // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id; // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;  // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _block_id(Compilation::current()->get_next_block_id())
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _predecessors(2)
  , _dominates(2)
  , _dominator(nullptr)
  , _end(nullptr)
  , _exception_handlers(1)
  , _exception_states(nullptr)
  , _exception_handler_pco(-1)
  , _lir(nullptr)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(nullptr)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }
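
  // Illustrative sketch (not part of the original header): the four bitmaps
  // above are the classic per-block liveness sets used by LinearScan. With
  // gen = _live_gen and kill = _live_kill computed locally, the global fixed
  // point satisfies
  //
  //   live_out(B) = union of live_in(S) over all successors S of B
  //   live_in(B)  = gen(B) | (live_out(B) & ~kill(B))
  //
  // i.e. a register is live on entry if the block reads it before writing it,
  // or if it is live on exit and the block does not redefine it.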

  // accessors
  int block_id() const { return _block_id; }
  int bci() const { return _bci; }
  BlockList* dominates() { return &_dominates; }
  BlockBegin* dominator() const { return _dominator; }
  int loop_depth() const { return _loop_depth; }
  int dominator_depth() const { return _dominator_depth; }
  int depth_first_number() const { return _depth_first_number; }
  int linear_scan_number() const { return _linear_scan_number; }
  BlockEnd* end() const { return _end; }
  Label* label() { return &_label; }
  LIR_List* lir() const { return _lir; }
  int exception_handler_pco() const { return _exception_handler_pco; }
  ResourceBitMap& live_in() { return _live_in; }
  ResourceBitMap& live_out() { return _live_out; }
  ResourceBitMap& live_gen() { return _live_gen; }
  ResourceBitMap& live_kill() { return _live_kill; }
  ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; }
  intArray* fpu_stack_state() const { return _fpu_stack_state; }
  int first_lir_instruction_id() const { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const { return _last_lir_instruction_id; }
  int total_preds() const { return _total_preds; }
  BitMap& stores_to_locals() { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom) { _dominator = dom; }
  void set_loop_depth(int d) { _loop_depth = d; }
  void set_dominator_depth(int d) { _dominator_depth = d; }
  void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
  void set_end(BlockEnd* new_end);
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir) { _lir = lir; }
  void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
  void set_live_in (const ResourceBitMap& map) { _live_in = map; }
  void set_live_out (const ResourceBitMap& map) { _live_out = map; }
  void set_live_gen (const ResourceBitMap& map) { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map) { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1) { _total_preds += n; }
  void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
  int number_of_preds() const { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int number_of_exception_handlers() const { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == nullptr ? 0 : _exception_states->length(); }
0 : _exception_states->length(); } 1747 ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); } 1748 int add_exception_state(ValueStack* state); 1749 1750 // flags 1751 enum Flag { 1752 no_flag = 0, 1753 std_entry_flag = 1 << 0, 1754 osr_entry_flag = 1 << 1, 1755 exception_entry_flag = 1 << 2, 1756 subroutine_entry_flag = 1 << 3, 1757 backward_branch_target_flag = 1 << 4, 1758 is_on_work_list_flag = 1 << 5, 1759 was_visited_flag = 1 << 6, 1760 parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions cannot be created on demand 1761 critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split 1762 linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan 1763 linear_scan_loop_end_flag = 1 << 10, // set during loop-detection for LinearScan 1764 donot_eliminate_range_checks = 1 << 11 // Should we try to eliminate range checks in this block? 1765 }; 1766 1767 void set(Flag f) { _flags |= f; } 1768 void clear(Flag f) { _flags &= ~f; } 1769 bool is_set(Flag f) const { return (_flags & f) != 0; } 1770 bool is_entry_block() const { 1771 const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag; 1772 return (_flags & entry_mask) != 0; 1773 } 1774 1775 // iteration 1776 void iterate_preorder (BlockClosure* closure); 1777 void iterate_postorder (BlockClosure* closure); 1778 1779 void block_values_do(ValueVisitor* f); 1780 1781 // loops 1782 void set_loop_index(int ix) { _loop_index = ix; } 1783 int loop_index() const { return _loop_index; } 1784 1785 // merging 1786 bool try_merge(ValueStack* state, bool has_irreducible_loops); // try to merge states at block begin 1787 void merge(ValueStack* state, bool has_irreducible_loops) { 1788 bool b = try_merge(state, has_irreducible_loops); 1789 assert(b, "merge failed"); 1790 } 1791 1792 // debugging 1793 void print_block() PRODUCT_RETURN; 1794 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN; 1795 1796 }; 1797 1798 1799 BASE(BlockEnd, StateSplit) 1800 private: 1801 BlockList* _sux; 1802 1803 protected: 1804 BlockList* sux() const { return _sux; } 1805 1806 void set_sux(BlockList* sux) { 1807 #ifdef ASSERT 1808 assert(sux != nullptr, "sux must exist"); 1809 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != nullptr, "sux must exist"); 1810 #endif 1811 _sux = sux; 1812 } 1813 1814 public: 1815 // creation 1816 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1817 : StateSplit(type, state_before) 1818 , _sux(nullptr) 1819 { 1820 set_flag(IsSafepointFlag, is_safepoint); 1821 } 1822 1823 // accessors 1824 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1825 // For compatibility with old code; for new code use block() 1826 BlockBegin* begin() const { return _block; } 1827 1828 // manipulation 1829 inline void remove_sux_at(int i) { _sux->remove_at(i); } 1830 inline int find_sux(BlockBegin* sux) { return _sux->find(sux); } 1831 1832 // successors 1833 int number_of_sux() const { return _sux != nullptr ? _sux->length() : 0; } 1834 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1835 bool is_sux(BlockBegin* sux) const { return _sux == nullptr ?
false : _sux->contains(sux); } 1836 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1837 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1838 }; 1839 1840 1841 LEAF(Goto, BlockEnd) 1842 public: 1843 enum Direction { 1844 none, // Just a regular goto 1845 taken, not_taken // Goto produced from If 1846 }; 1847 private: 1848 ciMethod* _profiled_method; 1849 int _profiled_bci; 1850 Direction _direction; 1851 public: 1852 // creation 1853 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1854 : BlockEnd(illegalType, state_before, is_safepoint) 1855 , _profiled_method(nullptr) 1856 , _profiled_bci(0) 1857 , _direction(none) { 1858 BlockList* s = new BlockList(1); 1859 s->append(sux); 1860 set_sux(s); 1861 } 1862 1863 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, nullptr, is_safepoint) 1864 , _profiled_method(nullptr) 1865 , _profiled_bci(0) 1866 , _direction(none) { 1867 BlockList* s = new BlockList(1); 1868 s->append(sux); 1869 set_sux(s); 1870 } 1871 1872 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1873 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1874 int profiled_bci() const { return _profiled_bci; } 1875 Direction direction() const { return _direction; } 1876 1877 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1878 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1879 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1880 void set_direction(Direction d) { _direction = d; } 1881 }; 1882 1883 #ifdef ASSERT 1884 LEAF(Assert, Instruction) 1885 private: 1886 Value _x; 1887 Condition _cond; 1888 Value _y; 1889 char *_message; 1890 1891 public: 1892 // creation 1893 // unordered_is_true is valid for float/double compares only 1894 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1895 1896 // accessors 1897 Value x() const { return _x; } 1898 Condition cond() const { return _cond; } 1899 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1900 Value y() const { return _y; } 1901 const char *message() const { return _message; } 1902 1903 // generic 1904 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1905 }; 1906 #endif 1907 1908 LEAF(RangeCheckPredicate, StateSplit) 1909 private: 1910 Value _x; 1911 Condition _cond; 1912 Value _y; 1913 1914 void check_state(); 1915 1916 public: 1917 // creation 1918 // unordered_is_true is valid for float/double compares only 1919 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 1920 , _x(x) 1921 , _cond(cond) 1922 , _y(y) 1923 { 1924 ASSERT_VALUES 1925 set_flag(UnorderedIsTrueFlag, unordered_is_true); 1926 assert(x->type()->tag() == y->type()->tag(), "types must match"); 1927 this->set_state(state); 1928 check_state(); 1929 } 1930 1931 // Always deoptimize 1932 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 1933 { 1934 this->set_state(state); 1935 _x = _y = nullptr; 1936 check_state(); 1937 } 1938 1939 // accessors 1940 Value x() const { return _x; } 1941 Condition cond() const { return _cond; } 1942 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1943 Value y() const { return _y; } 1944 1945 void always_fail() { _x = _y = nullptr; } 1946 1947 // generic 1948 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); 
f->visit(&_y); } 1949 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 1950 }; 1951 1952 LEAF(If, BlockEnd) 1953 private: 1954 Value _x; 1955 Condition _cond; 1956 Value _y; 1957 ciMethod* _profiled_method; 1958 int _profiled_bci; // Canonicalizer may alter bci of If node 1959 bool _swapped; // Is the order reversed with respect to the original If in the 1960 // bytecode stream? 1961 public: 1962 // creation 1963 // unordered_is_true is valid for float/double compares only 1964 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint) 1965 : BlockEnd(illegalType, state_before, is_safepoint) 1966 , _x(x) 1967 , _cond(cond) 1968 , _y(y) 1969 , _profiled_method(nullptr) 1970 , _profiled_bci(0) 1971 , _swapped(false) 1972 { 1973 ASSERT_VALUES 1974 set_flag(UnorderedIsTrueFlag, unordered_is_true); 1975 assert(x->type()->tag() == y->type()->tag(), "types must match"); 1976 BlockList* s = new BlockList(2); 1977 s->append(tsux); 1978 s->append(fsux); 1979 set_sux(s); 1980 } 1981 1982 // accessors 1983 Value x() const { return _x; } 1984 Condition cond() const { return _cond; } 1985 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1986 Value y() const { return _y; } 1987 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 1988 BlockBegin* tsux() const { return sux_for(true); } 1989 BlockBegin* fsux() const { return sux_for(false); } 1990 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 1991 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1992 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1993 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 1994 bool is_swapped() const { return _swapped; } 1995 1996 // manipulation 1997 void swap_operands() { 1998 Value t = _x; _x = _y; _y = t; 1999 _cond = mirror(_cond); 2000 } 2001 2002 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2003 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2004 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2005 void set_swapped(bool value) { _swapped = value; } 2006 // generic 2007 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2008 }; 2009 2010 2011 BASE(Switch, BlockEnd) 2012 private: 2013 Value _tag; 2014 2015 public: 2016 // creation 2017 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2018 : BlockEnd(illegalType, state_before, is_safepoint) 2019 , _tag(tag) { 2020 ASSERT_VALUES 2021 set_sux(sux); 2022 } 2023 2024 // accessors 2025 Value tag() const { return _tag; } 2026 int length() const { return number_of_sux() - 1; } 2027 2028 virtual bool needs_exception_state() const { return false; } 2029 2030 // generic 2031 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } 2032 }; 2033 2034 2035 LEAF(TableSwitch, Switch) 2036 private: 2037 int _lo_key; 2038 2039 public: 2040 // creation 2041 TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint) 2042 : Switch(tag, sux, state_before, is_safepoint) 2043 , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); } 2044 2045 // accessors 2046 int lo_key() const { return _lo_key; } 2047 int hi_key() const { return _lo_key + (length() - 1); } 2048 }; 2049 2050 2051 
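// Illustration only, a hypothetical helper that is not part of the C1 IR: it shows how
// the accessors above relate a TableSwitch's key range to its successor list. Keys
// lo_key()..hi_key() correspond to sux_at(0)..sux_at(length() - 1); the last successor,
// default_sux(), is the target taken when the switch tag falls outside that range.
inline BlockBegin* table_switch_target_for_key(TableSwitch* sw, int key) {
  if (key < sw->lo_key() || key > sw->hi_key()) {
    return sw->default_sux();             // out-of-range keys take the default edge
  }
  return sw->sux_at(key - sw->lo_key());  // in-range keys index the successor list directly
}
// For example, table_switch_target_for_key(sw, sw->lo_key()) yields sw->sux_at(0).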
LEAF(LookupSwitch, Switch) 2052 private: 2053 intArray* _keys; 2054 2055 public: 2056 // creation 2057 LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint) 2058 : Switch(tag, sux, state_before, is_safepoint) 2059 , _keys(keys) { 2060 assert(keys != nullptr, "keys must exist"); 2061 assert(keys->length() == length(), "sux & keys have incompatible lengths"); 2062 } 2063 2064 // accessors 2065 int key_at(int i) const { return _keys->at(i); } 2066 }; 2067 2068 2069 LEAF(Return, BlockEnd) 2070 private: 2071 Value _result; 2072 2073 public: 2074 // creation 2075 Return(Value result) : 2076 BlockEnd(result == nullptr ? voidType : result->type()->base(), nullptr, true), 2077 _result(result) {} 2078 2079 // accessors 2080 Value result() const { return _result; } 2081 bool has_result() const { return result() != nullptr; } 2082 2083 // generic 2084 virtual void input_values_do(ValueVisitor* f) { 2085 BlockEnd::input_values_do(f); 2086 if (has_result()) f->visit(&_result); 2087 } 2088 }; 2089 2090 2091 LEAF(Throw, BlockEnd) 2092 private: 2093 Value _exception; 2094 2095 public: 2096 // creation 2097 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) { 2098 ASSERT_VALUES 2099 } 2100 2101 // accessors 2102 Value exception() const { return _exception; } 2103 2104 // generic 2105 virtual bool can_trap() const { return true; } 2106 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } 2107 }; 2108 2109 2110 LEAF(Base, BlockEnd) 2111 public: 2112 // creation 2113 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, nullptr, false) { 2114 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged"); 2115 assert(osr_entry == nullptr || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged"); 2116 BlockList* s = new BlockList(2); 2117 if (osr_entry != nullptr) s->append(osr_entry); 2118 s->append(std_entry); // must be default sux! 2119 set_sux(s); 2120 } 2121 2122 // accessors 2123 BlockBegin* std_entry() const { return default_sux(); } 2124 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? nullptr : sux_at(0); } 2125 }; 2126 2127 2128 LEAF(OsrEntry, Instruction) 2129 public: 2130 // creation 2131 #ifdef _LP64 2132 OsrEntry() : Instruction(longType) { pin(); } 2133 #else 2134 OsrEntry() : Instruction(intType) { pin(); } 2135 #endif 2136 2137 // generic 2138 virtual void input_values_do(ValueVisitor* f) { } 2139 }; 2140 2141 2142 // Models the incoming exception at a catch site 2143 LEAF(ExceptionObject, Instruction) 2144 public: 2145 // creation 2146 ExceptionObject() : Instruction(objectType) { 2147 pin(); 2148 } 2149 2150 // generic 2151 virtual void input_values_do(ValueVisitor* f) { } 2152 }; 2153 2154 2155 // Models needed rounding for floating-point values on Intel. 2156 // Currently only used to represent rounding of double-precision 2157 // values stored into local variables, but could be used to model 2158 // intermediate rounding of single-precision values as well. 
2159 LEAF(RoundFP, Instruction) 2160 private: 2161 Value _input; // floating-point value to be rounded 2162 2163 public: 2164 RoundFP(Value input) 2165 : Instruction(input->type()) // Note: should not be used for constants 2166 , _input(input) 2167 { 2168 ASSERT_VALUES 2169 } 2170 2171 // accessors 2172 Value input() const { return _input; } 2173 2174 // generic 2175 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); } 2176 }; 2177 2178 2179 BASE(UnsafeOp, Instruction) 2180 private: 2181 Value _object; // Object to be fetched from or mutated 2182 Value _offset; // Offset within object 2183 bool _is_volatile; // true if volatile - dl/JSR166 2184 BasicType _basic_type; // ValueType cannot express byte-sized integers 2185 2186 protected: 2187 // creation 2188 UnsafeOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile) 2189 : Instruction(is_put ? voidType : as_ValueType(basic_type)), 2190 _object(object), _offset(offset), _is_volatile(is_volatile), _basic_type(basic_type) 2191 { 2192 // Note: Unsafe ops are not guaranteed to throw NPE. 2193 // Conservatively, Unsafe operations must be pinned, though we could be 2194 // looser about this if we wanted to. 2195 pin(); 2196 } 2197 2198 public: 2199 // accessors 2200 BasicType basic_type() { return _basic_type; } 2201 Value object() { return _object; } 2202 Value offset() { return _offset; } 2203 bool is_volatile() { return _is_volatile; } 2204 2205 // generic 2206 virtual void input_values_do(ValueVisitor* f) { f->visit(&_object); 2207 f->visit(&_offset); } 2208 }; 2209 2210 LEAF(UnsafeGet, UnsafeOp) 2211 private: 2212 bool _is_raw; 2213 public: 2214 UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile) 2215 : UnsafeOp(basic_type, object, offset, false, is_volatile) 2216 { 2217 ASSERT_VALUES 2218 _is_raw = false; 2219 } 2220 UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile, bool is_raw) 2221 : UnsafeOp(basic_type, object, offset, false, is_volatile), _is_raw(is_raw) 2222 { 2223 ASSERT_VALUES 2224 } 2225 2226 // accessors 2227 bool is_raw() { return _is_raw; } 2228 }; 2229 2230 2231 LEAF(UnsafePut, UnsafeOp) 2232 private: 2233 Value _value; // Value to be stored 2234 public: 2235 UnsafePut(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile) 2236 : UnsafeOp(basic_type, object, offset, true, is_volatile) 2237 , _value(value) 2238 { 2239 ASSERT_VALUES 2240 } 2241 2242 // accessors 2243 Value value() { return _value; } 2244 2245 // generic 2246 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2247 f->visit(&_value); } 2248 }; 2249 2250 LEAF(UnsafeGetAndSet, UnsafeOp) 2251 private: 2252 Value _value; // Value to be stored 2253 bool _is_add; 2254 public: 2255 UnsafeGetAndSet(BasicType basic_type, Value object, Value offset, Value value, bool is_add) 2256 : UnsafeOp(basic_type, object, offset, false, false) 2257 , _value(value) 2258 , _is_add(is_add) 2259 { 2260 ASSERT_VALUES 2261 } 2262 2263 // accessors 2264 bool is_add() const { return _is_add; } 2265 Value value() { return _value; } 2266 2267 // generic 2268 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2269 f->visit(&_value); } 2270 }; 2271 2272 LEAF(ProfileCall, Instruction) 2273 private: 2274 ciMethod* _method; 2275 int _bci_of_invoke; 2276 ciMethod* _callee; // the method that is called at the given bci 2277 Value _recv; 2278 ciKlass* _known_holder; 2279 Values* _obj_args; // arguments for type profiling
2280 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null? 2281 bool _inlined; // Are we profiling a call that is inlined? 2282 2283 public: 2284 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) 2285 : Instruction(voidType) 2286 , _method(method) 2287 , _bci_of_invoke(bci) 2288 , _callee(callee) 2289 , _recv(recv) 2290 , _known_holder(known_holder) 2291 , _obj_args(obj_args) 2292 , _inlined(inlined) 2293 { 2294 // The ProfileCall has side-effects and must occur precisely where located 2295 pin(); 2296 } 2297 2298 ciMethod* method() const { return _method; } 2299 int bci_of_invoke() const { return _bci_of_invoke; } 2300 ciMethod* callee() const { return _callee; } 2301 Value recv() const { return _recv; } 2302 ciKlass* known_holder() const { return _known_holder; } 2303 int nb_profiled_args() const { return _obj_args == nullptr ? 0 : _obj_args->length(); } 2304 Value profiled_arg_at(int i) const { return _obj_args->at(i); } 2305 bool arg_needs_null_check(int i) const { 2306 return _nonnull_state.arg_needs_null_check(i); 2307 } 2308 bool inlined() const { return _inlined; } 2309 2310 void set_arg_needs_null_check(int i, bool check) { 2311 _nonnull_state.set_arg_needs_null_check(i, check); 2312 } 2313 2314 virtual void input_values_do(ValueVisitor* f) { 2315 if (_recv != nullptr) { 2316 f->visit(&_recv); 2317 } 2318 for (int i = 0; i < nb_profiled_args(); i++) { 2319 f->visit(_obj_args->adr_at(i)); 2320 } 2321 } 2322 }; 2323 2324 LEAF(ProfileReturnType, Instruction) 2325 private: 2326 ciMethod* _method; 2327 ciMethod* _callee; 2328 int _bci_of_invoke; 2329 Value _ret; 2330 2331 public: 2332 ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret) 2333 : Instruction(voidType) 2334 , _method(method) 2335 , _callee(callee) 2336 , _bci_of_invoke(bci) 2337 , _ret(ret) 2338 { 2339 set_needs_null_check(true); 2340 // The ProfileReturnType has side-effects and must occur precisely where located 2341 pin(); 2342 } 2343 2344 ciMethod* method() const { return _method; } 2345 ciMethod* callee() const { return _callee; } 2346 int bci_of_invoke() const { return _bci_of_invoke; } 2347 Value ret() const { return _ret; } 2348 2349 virtual void input_values_do(ValueVisitor* f) { 2350 if (_ret != nullptr) { 2351 f->visit(&_ret); 2352 } 2353 } 2354 }; 2355 2356 // Call some C runtime function that doesn't safepoint, 2357 // optionally passing the current thread as the first argument.
2358 LEAF(RuntimeCall, Instruction) 2359 private: 2360 const char* _entry_name; 2361 address _entry; 2362 Values* _args; 2363 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2364 2365 public: 2366 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2367 : Instruction(type) 2368 , _entry_name(entry_name) 2369 , _entry(entry) 2370 , _args(args) 2371 , _pass_thread(pass_thread) { 2372 ASSERT_VALUES 2373 pin(); 2374 } 2375 2376 const char* entry_name() const { return _entry_name; } 2377 address entry() const { return _entry; } 2378 int number_of_arguments() const { return _args->length(); } 2379 Value argument_at(int i) const { return _args->at(i); } 2380 bool pass_thread() const { return _pass_thread; } 2381 2382 virtual void input_values_do(ValueVisitor* f) { 2383 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2384 } 2385 }; 2386 2387 // Used to trip the invocation counter of an inlined method 2388 2389 LEAF(ProfileInvoke, Instruction) 2390 private: 2391 ciMethod* _inlinee; 2392 ValueStack* _state; 2393 2394 public: 2395 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2396 : Instruction(voidType) 2397 , _inlinee(inlinee) 2398 , _state(state) 2399 { 2400 // The ProfileInvoke has side-effects and must occur precisely where located 2401 pin(); 2402 } 2403 2404 ciMethod* inlinee() { return _inlinee; } 2405 ValueStack* state() { return _state; } 2406 virtual void input_values_do(ValueVisitor*) {} 2407 virtual void state_values_do(ValueVisitor*); 2408 }; 2409 2410 LEAF(MemBar, Instruction) 2411 private: 2412 LIR_Code _code; 2413 2414 public: 2415 MemBar(LIR_Code code) 2416 : Instruction(voidType) 2417 , _code(code) 2418 { 2419 pin(); 2420 } 2421 2422 LIR_Code code() { return _code; } 2423 2424 virtual void input_values_do(ValueVisitor*) {} 2425 }; 2426 2427 class BlockPair: public CompilationResourceObj { 2428 private: 2429 BlockBegin* _from; 2430 int _index; // sux index of 'to' block 2431 public: 2432 BlockPair(BlockBegin* from, int index): _from(from), _index(index) {} 2433 BlockBegin* from() const { return _from; } 2434 int index() const { return _index; } 2435 }; 2436 2437 typedef GrowableArray<BlockPair*> BlockPairList; 2438 2439 inline int BlockBegin::number_of_sux() const { assert(_end != nullptr, "need end"); return _end->number_of_sux(); } 2440 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end != nullptr, "need end"); return _end->sux_at(i); } 2441 2442 #undef ASSERT_VALUES 2443 2444 #endif // SHARE_C1_C1_INSTRUCTION_HPP
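// Illustration (hypothetical, not used by the compiler): a minimal sketch of how the
// traversal hooks declared above are typically combined: subclass BlockClosure and hand
// the closure to BlockBegin::iterate_preorder(). The closure name is an assumption for
// this sketch; it relies only on block_do(), block_id(), number_of_sux() and sux_at().
//
//   class PrintCfgClosure: public BlockClosure {
//    public:
//     virtual void block_do(BlockBegin* block) {
//       tty->print("B%d ->", block->block_id());
//       for (int i = 0; i < block->number_of_sux(); i++) {
//         tty->print(" B%d", block->sux_at(i)->block_id());
//       }
//       tty->cr();
//     }
//   };
//
//   // usage:
//   //   PrintCfgClosure closure;
//   //   start_block->iterate_preorder(&closure);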