/*
 * Copyright (c) 1999, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_C1_C1_INSTRUCTION_HPP
#define SHARE_C1_C1_INSTRUCTION_HPP

#include "c1/c1_Compilation.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_ValueType.hpp"
#include "ci/ciField.hpp"

// Predefined classes
class ciField;
class ValueStack;
class InstructionPrinter;
class IRScope;


// Instruction class hierarchy
//
// All leaf classes in the class hierarchy are concrete classes
// (i.e., are instantiated). All other classes are abstract and
// serve for factoring.

class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeGet;
class UnsafePut;
class UnsafeGetAndSet;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
typedef GrowableArray<Value> Values;
typedef GrowableArray<ValueStack*> ValueStackStack;

// BlockClosure is the base class for block traversal/iteration.

class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void values_do(ValueVisitor* f);
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of being added to the Instruction classes themselves.

class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGet      (UnsafeGet*       x) = 0;
  virtual void do_UnsafePut      (UnsafePut*       x) = 0;
  virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1            ) ((intx)(x1))
#define HASH2(x1, x2        ) ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    ) ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4) ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default
// implementation disables value numbering. Each instruction which can be
// value-numbered should define corresponding hash() and is_equal(Value)
// functions via the macros below. The f arguments specify all the values/op
// codes, etc. that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that a 0 is never
//       produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)                   \
  virtual intx hash() const {                               \
    return (enabled) ? HASH2(name(), f1) : 0;               \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)) return false;                           \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr) return false;                        \
    if (f1 != _v->f1) return false;                         \
    return true;                                            \
  }


#define HASHING2(class_name, enabled, f1, f2)               \
  virtual intx hash() const {                               \
    return (enabled) ? HASH3(name(), f1, f2) : 0;           \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)) return false;                           \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr) return false;                        \
    if (f1 != _v->f1) return false;                         \
    if (f2 != _v->f2) return false;                         \
    return true;                                            \
  }


#define HASHING3(class_name, enabled, f1, f2, f3)           \
  virtual intx hash() const {                               \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0;       \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)) return false;                           \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr) return false;                        \
    if (f1 != _v->f1) return false;                         \
    if (f2 != _v->f2) return false;                         \
    if (f3 != _v->f3) return false;                         \
    return true;                                            \
  }
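
// Illustrative sketch (not part of this file): how a value-numberable
// instruction is expected to use the HASHING macros above. Expanding
// HASHING2 inside a leaf class emits hash() and is_equal(Value) overrides,
// so ValueMap can treat two nodes as the same value when name() and the
// listed inputs match. The class below is hypothetical and only shows the
// shape of such a use; compare with Convert and LoadField further down,
// which use HASHING2/HASHING3 in exactly this way.
//
//   LEAF(ExampleOp, Instruction)   // hypothetical leaf class
//    public:
//     Value input() const { return _input; }
//     int   mode()  const { return _mode; }
//     // value numbering enabled; nodes are equal iff input and mode match
//     HASHING2(ExampleOp, true, input()->subst(), mode())
//   };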

// The mother of all instructions...

class Instruction: public CompilationResourceObj {
 private:
  int          _id;                 // the unique instruction id
#ifndef PRODUCT
  int          _printable_bci;      // the bci of the instruction for printing
#endif
  int          _use_count;          // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1
  int          _pin_state;          // set of PinReason describing the reason for pinning
  unsigned int _flags;              // Flag bits
  ValueType*   _type;               // the instruction value type
  Instruction* _next;               // the next instruction if any (null for BlockEnd instructions)
  Instruction* _subst;              // the substitution instruction if any
  LIR_Opr      _operand;            // LIR specific information

  ValueStack*  _state_before;       // Copy of state with input operands still on stack (or null)
  ValueStack*  _exception_state;    // Copy of state for exception handling
  XHandlers*   _exception_handlers; // Flat list of exception handlers covering this instruction

  friend class UseCountComputer;

  void update_exception_state(ValueStack* state);

 protected:
  BlockBegin*  _block;              // Block that contains this instruction

  void set_type(ValueType* type) {
    assert(type != nullptr, "type must exist");
    _type = type;
  }

  // Helper class to keep track of which arguments need a null check
  class ArgsNonNullState {
   private:
    int _nonnull_state; // mask identifying which args are nonnull
   public:
    ArgsNonNullState()
      : _nonnull_state(AllBits) {}

    // Does argument number i need a null check?
    bool arg_needs_null_check(int i) const {
      // No data is kept for arguments starting at position 33 so
      // conservatively assume that they need a null check.
      if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
        return is_set_nth_bit(_nonnull_state, i);
      }
      return true;
    }

    // Set whether argument number i needs a null check or not
    void set_arg_needs_null_check(int i, bool check) {
      if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) {
        if (check) {
          _nonnull_state |= (int)nth_bit(i);
        } else {
          _nonnull_state &= (int)~(nth_bit(i));
        }
      }
    }
  };

 public:
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    return res;
  }

  static const int no_bci = -99;

  enum InstructionFlag {
    NeedsNullCheckFlag = 0,
    CanTrapFlag,
    DirectCompareFlag,
    IsSafepointFlag,
    IsStaticFlag,
    PreservesStateFlag,
    TargetIsFinalFlag,
    TargetIsLoadedFlag,
    UnorderedIsTrueFlag,
    NeedsPatchingFlag,
    ThrowIncompatibleClassChangeErrorFlag,
    InvokeSpecialReceiverCheckFlag,
    ProfileMDOFlag,
    IsLinkedInBlockFlag,
    NeedsRangeCheckFlag,
    DeoptimizeOnException,
    KillsMemoryFlag,
    OmitChecksFlag,
    InstructionLastFlag
  };

 public:
  bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; }
  void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }

  // 'globally' used condition values
  enum Condition {
    eql, neq, lss, leq, gtr, geq, aeq, beq
  };
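
  // Illustrative note (assumption, not taken from this file): mirror() and
  // negate(), declared below, are expected to return the condition with the
  // operands swapped and the logical complement, respectively; the concrete
  // mapping lives in c1_Instruction.cpp. For example, for signed conditions:
  //
  //   x < y  is the same as  y > x       =>  mirror(lss) is gtr
  //   x < y  is the opposite of  x >= y  =>  negate(lss) is geq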

  // Instructions may be pinned for many reasons and under certain conditions
  // with enough knowledge it's possible to safely unpin them.
  enum PinReason {
      PinUnknown               = 1 << 0
    , PinExplicitNullCheck     = 1 << 3
    , PinStackForStateSplit    = 1 << 12
    , PinStateSplitConstructor = 1 << 13
    , PinGlobalValueNumbering  = 1 << 14
  };

  static Condition mirror(Condition cond);
  static Condition negate(Condition cond);

  // initialization
  static int number_of_instructions() {
    return Compilation::current()->number_of_instructions();
  }

  // creation
  Instruction(ValueType* type, ValueStack* state_before = nullptr, bool type_is_constant = false)
  : _id(Compilation::current()->get_next_id()),
#ifndef PRODUCT
    _printable_bci(-99),
#endif
    _use_count(0)
  , _pin_state(0)
  , _flags(0)
  , _type(type)
  , _next(nullptr)
  , _subst(nullptr)
  , _operand(LIR_OprFact::illegalOpr)
  , _state_before(state_before)
  , _exception_handlers(nullptr)
  , _block(nullptr)
  {
    check_state(state_before);
    assert(type != nullptr && (!type->is_constant() || type_is_constant), "type must exist");
    update_exception_state(_state_before);
  }

  // accessors
  int id() const { return _id; }
#ifndef PRODUCT
  bool has_printable_bci() const { return _printable_bci != -99; }
  int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
  void set_printable_bci(int bci) { _printable_bci = bci; }
#endif
  int dominator_depth();
  int use_count() const { return _use_count; }
  int pin_state() const { return _pin_state; }
  bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
  ValueType* type() const { return _type; }
  BlockBegin* block() const { return _block; }
  Instruction* prev();            // use carefully, expensive operation
  Instruction* next() const { return _next; }
  bool has_subst() const { return _subst != nullptr; }
  Instruction* subst() { return _subst == nullptr ? this : _subst->subst(); }
  LIR_Opr operand() const { return _operand; }

  void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
  bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
  bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
  bool can_be_linked() { return as_Local() == nullptr && as_Phi() == nullptr; }

  bool is_null_obj() { return as_Constant() != nullptr && type()->as_ObjectType()->constant_value()->is_null_object(); }

  bool has_uses() const { return use_count() > 0; }
  ValueStack* state_before() const { return _state_before; }
  ValueStack* exception_state() const { return _exception_state; }
  virtual bool needs_exception_state() const { return true; }
  XHandlers* exception_handlers() const { return _exception_handlers; }

  // manipulation
  void pin(PinReason reason) { _pin_state |= reason; }
  void pin() { _pin_state |= PinUnknown; }
  // DANGEROUS: only used by EliminateStores
  void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }

  Instruction* set_next(Instruction* next) {
    assert(next->has_printable_bci(), "_printable_bci should have been set");
    assert(next != nullptr, "must not be null");
    assert(as_BlockEnd() == nullptr, "BlockEnd instructions must have no next");
    assert(next->can_be_linked(), "shouldn't link these instructions into list");

    BlockBegin* block = this->block();
    next->_block = block;

    next->set_flag(Instruction::IsLinkedInBlockFlag, true);
    _next = next;
    return next;
  }

  Instruction* set_next(Instruction* next, int bci) {
#ifndef PRODUCT
    next->set_printable_bci(bci);
#endif
    return set_next(next);
  }

  // when blocks are merged
  void fixup_block_pointers() {
    Instruction* cur = next()->next(); // next()'s block is set in set_next
    while (cur && cur->_block != block()) {
      cur->_block = block();
      cur = cur->next();
    }
  }

  Instruction* insert_after(Instruction* i) {
    Instruction* n = _next;
    set_next(i);
    i->set_next(n);
    return _next;
  }

  Instruction* insert_after_same_bci(Instruction* i) {
#ifndef PRODUCT
    i->set_printable_bci(printable_bci());
#endif
    return insert_after(i);
  }

  void set_subst(Instruction* subst) {
    assert(subst == nullptr ||
           type()->base() == subst->type()->base() ||
           subst->type()->base() == illegalType, "type can't change");
    _subst = subst;
  }
  void set_exception_handlers(XHandlers* xhandlers) { _exception_handlers = xhandlers; }
  void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
  void set_state_before(ValueStack* s) { check_state(s); _state_before = s; }

  // machine-specifics
  void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
  void clear_operand() { _operand = LIR_OprFact::illegalOpr; }

  // generic
  virtual Instruction*      as_Instruction()     { return this; } // to satisfy HASHING1 macro
  virtual Phi*              as_Phi()             { return nullptr; }
  virtual Local*            as_Local()           { return nullptr; }
  virtual Constant*         as_Constant()        { return nullptr; }
  virtual AccessField*      as_AccessField()     { return nullptr; }
  virtual LoadField*        as_LoadField()       { return nullptr; }
  virtual StoreField*       as_StoreField()      { return nullptr; }
  virtual AccessArray*      as_AccessArray()     { return nullptr; }
  virtual ArrayLength*      as_ArrayLength()     { return nullptr; }
  virtual AccessIndexed*    as_AccessIndexed()   { return nullptr; }
  virtual LoadIndexed*      as_LoadIndexed()     { return nullptr; }
  virtual StoreIndexed*     as_StoreIndexed()    { return nullptr; }
  virtual NegateOp*         as_NegateOp()        { return nullptr; }
  virtual Op2*              as_Op2()             { return nullptr; }
  virtual ArithmeticOp*     as_ArithmeticOp()    { return nullptr; }
  virtual ShiftOp*          as_ShiftOp()         { return nullptr; }
  virtual LogicOp*          as_LogicOp()         { return nullptr; }
  virtual CompareOp*        as_CompareOp()       { return nullptr; }
  virtual IfOp*             as_IfOp()            { return nullptr; }
  virtual Convert*          as_Convert()         { return nullptr; }
  virtual NullCheck*        as_NullCheck()       { return nullptr; }
  virtual OsrEntry*         as_OsrEntry()        { return nullptr; }
  virtual StateSplit*       as_StateSplit()      { return nullptr; }
  virtual Invoke*           as_Invoke()          { return nullptr; }
  virtual NewInstance*      as_NewInstance()     { return nullptr; }
  virtual NewArray*         as_NewArray()        { return nullptr; }
  virtual NewTypeArray*     as_NewTypeArray()    { return nullptr; }
  virtual NewObjectArray*   as_NewObjectArray()  { return nullptr; }
  virtual NewMultiArray*    as_NewMultiArray()   { return nullptr; }
  virtual TypeCheck*        as_TypeCheck()       { return nullptr; }
  virtual CheckCast*        as_CheckCast()       { return nullptr; }
  virtual InstanceOf*       as_InstanceOf()      { return nullptr; }
  virtual TypeCast*         as_TypeCast()        { return nullptr; }
  virtual AccessMonitor*    as_AccessMonitor()   { return nullptr; }
  virtual MonitorEnter*     as_MonitorEnter()    { return nullptr; }
  virtual MonitorExit*      as_MonitorExit()     { return nullptr; }
  virtual Intrinsic*        as_Intrinsic()       { return nullptr; }
  virtual BlockBegin*       as_BlockBegin()      { return nullptr; }
  virtual BlockEnd*         as_BlockEnd()        { return nullptr; }
  virtual Goto*             as_Goto()            { return nullptr; }
  virtual If*               as_If()              { return nullptr; }
  virtual TableSwitch*      as_TableSwitch()     { return nullptr; }
  virtual LookupSwitch*     as_LookupSwitch()    { return nullptr; }
  virtual Return*           as_Return()          { return nullptr; }
  virtual Throw*            as_Throw()           { return nullptr; }
  virtual Base*             as_Base()            { return nullptr; }
  virtual RoundFP*          as_RoundFP()         { return nullptr; }
  virtual ExceptionObject*  as_ExceptionObject() { return nullptr; }
  virtual UnsafeOp*         as_UnsafeOp()        { return nullptr; }
  virtual ProfileInvoke*    as_ProfileInvoke()   { return nullptr; }
  virtual RangeCheckPredicate* as_RangeCheckPredicate() { return nullptr; }

#ifdef ASSERT
  virtual Assert*           as_Assert()          { return nullptr; }
#endif

  virtual void visit(InstructionVisitor* v) = 0;

  virtual bool can_trap() const { return false; }

  virtual void input_values_do(ValueVisitor* f) = 0;
  virtual void state_values_do(ValueVisitor* f);
  virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
          void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }

  virtual ciType* exact_type() const;
  virtual ciType* declared_type() const { return nullptr; }

  // hashing
  virtual const char* name() const = 0;
  HASHING1(Instruction, false, id()) // hashing disabled by default

  // debugging
  static void check_state(ValueStack* state) PRODUCT_RETURN;
  void print() PRODUCT_RETURN;
  void print_line() PRODUCT_RETURN;
  void print(InstructionPrinter& ip) PRODUCT_RETURN;
};
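
// Illustrative sketch (not part of the compiler): typical use of the generic
// Instruction interface above. Instructions of a block form a singly linked
// list traversed via next(), the as_X() functions perform checked downcasts,
// and values_do() feeds all input/state values to a ValueVisitor. The helper
// below is hypothetical and is shown only to make that protocol concrete.
//
//   static int count_trapping(Instruction* first) {
//     int n = 0;
//     for (Instruction* cur = first; cur != nullptr; cur = cur->next()) {
//       if (cur->can_trap()) n++;                  // generic query
//       if (Constant* c = cur->as_Constant()) {    // checked downcast
//         assert(c->type()->is_constant(), "constants carry constant types");
//       }
//     }
//     return n;
//   }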

// The following macros are used to define base (i.e., non-leaf)
// and leaf instruction classes. They define class-name related
// generic functionality in one place.

#define BASE(class_name, super_class_name)                  \
  class class_name: public super_class_name {               \
   public:                                                  \
    virtual class_name* as_##class_name() { return this; }


#define LEAF(class_name, super_class_name)                  \
  BASE(class_name, super_class_name)                        \
   public:                                                  \
    virtual const char* name() const { return #class_name; }              \
    virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); }


// Debugging support


#ifdef ASSERT
class AssertValues: public ValueVisitor {
  void visit(Value* x) { assert((*x) != nullptr, "value must exist"); }
};
#define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); }
#else
#define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int _pf_flags; // the flags of the phi function
  int _index;    // to value on operand stack (index < 0) or to local
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  {
    _block = b;
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool is_local() const    { return _index >= 0; }
  bool is_on_stack() const { return !is_local(); }
  int local_index() const  { assert(is_local(), ""); return _index; }
  int stack_index() const  { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;
  int operand_count() const;

  void set(Flag f)          { _pf_flags |=  f; }
  void clear(Flag f)        { _pf_flags &= ~f; }
  bool is_set(Flag f) const { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
  }
};
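
// Illustrative sketch (not part of the compiler): roughly what the
// LEAF(Phi, Instruction) line above expands to, so that the macro-based
// class headers read as ordinary class definitions. The expansion shown is
// only an approximation of what BASE/LEAF generate.
//
//   class Phi: public Instruction {
//    public:
//     virtual Phi*        as_Phi()                     { return this; }
//     virtual const char* name() const                 { return "Phi"; }
//     virtual void        visit(InstructionVisitor* v) { v->do_Phi(this); }
//     ... // members as written between LEAF(Phi, Instruction) and };
//   };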

// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int     _java_index;    // the local index within the method to which the local belongs
  bool    _is_receiver;   // if local variable holds the receiver: "this" for non-static methods
  ciType* _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index, bool receiver)
    : Instruction(type)
    , _java_index(index)
    , _is_receiver(receiver)
    , _declared_type(declared)
  {
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const   { return _java_index; }
  bool is_receiver() const { return _is_receiver; }

  virtual ciType* declared_type() const { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
};


LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, nullptr, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before, bool kills_memory = false):
      Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != nullptr, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    set_flag(KillsMemoryFlag, kills_memory);
    pin(); // since it's patching it needs to be pinned
  }

  // generic
  virtual bool can_trap() const { return state_before() != nullptr; }
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  bool kills_memory() const { return check_flag(KillsMemoryFlag); }

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return nullptr;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return nullptr;
    }
  }
};


BASE(AccessField, Instruction)
 private:
  Value      _obj;
  int        _offset;
  ciField*   _field;
  NullCheck* _explicit_null_check; // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(nullptr)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const            { return _obj; }
  int offset() const           { return _offset; }
  ciField* field() const       { return _field; }
  BasicType field_type() const { return _field->type()->basic_type(); }
  bool is_static() const       { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const { return _explicit_null_check; }
  bool needs_patching() const  { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
};


LEAF(LoadField, AccessField)
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  {}

  ciType* declared_type() const;

  // generic; cannot be eliminated if needs patching or if volatile.
  HASHING3(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset(), declared_type())
};


LEAF(StoreField, AccessField)
 private:
  Value _value;

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); }
};


BASE(AccessArray, Instruction)
 private:
  Value _array;

 public:
  // creation
  AccessArray(ValueType* type, Value array, ValueStack* state_before)
  : Instruction(type, state_before)
  , _array(array)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
    pin(); // instruction with side effect (null exception or range check throwing)
  }

  Value array() const { return _array; }

  // generic
  virtual bool can_trap() const { return needs_null_check(); }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); }
};


LEAF(ArrayLength, AccessArray)
 private:
  NullCheck* _explicit_null_check; // For explicit null check elimination

 public:
  // creation
  ArrayLength(Value array, ValueStack* state_before)
  : AccessArray(intType, array, state_before)
  , _explicit_null_check(nullptr) {}

  // accessors
  NullCheck* explicit_null_check() const { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;
  BasicType _elt_type;
  bool      _mismatched;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  , _mismatched(mismatched)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const      { return _index; }
  Value length() const     { return _length; }
  BasicType elt_type() const { return _elt_type; }
  bool mismatched() const  { return _mismatched; }

  void clear_length() { _length = nullptr; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != nullptr) f->visit(&_length); }
};


LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check; // For explicit null check elimination

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _explicit_null_check(nullptr) {}

  // accessors
  NullCheck* explicit_null_check() const { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  // generic
  HASHING3(LoadIndexed, true, elt_type(), array()->subst(), index()->subst())
};


LEAF(StoreIndexed, AccessIndexed)
 private:
  Value _value;

  ciMethod* _profiled_method;
  int       _profiled_bci;
  bool      _check_boolean;

 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before,
               bool check_boolean, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _value(value), _profiled_method(nullptr), _profiled_bci(0), _check_boolean(check_boolean)
  {
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const         { return _value; }
  bool check_boolean() const  { return _check_boolean; }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)        { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci)             { _profiled_bci = bci; }
  bool should_profile() const       { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const          { return _profiled_bci; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
};


BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = nullptr)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value x() const { return _x; }
  Value y() const { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const { return false; }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
};


LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    if (can_trap()) pin();
  }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
  }

  // accessors
  virtual bool is_commutative() const;
  Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const { return (Condition)Op2::op(); }
  Value tval() const     { return _tval; }
  Value fval() const     { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value value() const        { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const { return _obj; }

  // setters
  void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const { return _declared_type; }
  Value   obj() const           { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
};


BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = nullptr)
  : Instruction(type, state_before)
  , _state(nullptr)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const { return _state; }
  IRScope* scope() const;   // the state's scope

  // manipulation
  void set_state(ValueStack* state) { assert(_state == nullptr, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;
  Values*         _args;
  BasicTypeList*  _signature;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const    { return _code; }
  Value receiver() const          { return _recv; }
  bool has_receiver() const       { return receiver() != nullptr; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const  { return _args->at(i); }
  BasicTypeList* signature() const { return _signature; }
  ciMethod* target() const        { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const  { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); }

  // JSR 292 support
  bool is_invokedynamic() const           { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor* f);
};


LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;
  bool _is_unresolved;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const { return _klass; }
  bool is_unresolved() const     { return _is_unresolved; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};


BASE(NewArray, StateSplit)
 private:
  Value _length;

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is null for NewMultiArray
  }

  // accessors
  Value length() const { return _length; }

  virtual bool needs_exception_state() const { return false; }

  ciType* exact_type() const { return nullptr; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};


LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;
  bool      _zero_array;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before, bool zero_array)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  , _zero_array(zero_array)
  {}

  // accessors
  BasicType elt_type() const { return _elt_type; }
  bool zero_array() const    { return _zero_array; }
  ciType* exact_type() const;
};


LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const { return _klass; }
  ciType* exact_type() const;
};


LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(nullptr, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Values* dims() const   { return _dims; }
  int rank() const       { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice.
    // Was bug - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }
};


BASE(TypeCheck, StateSplit)
 private:
  ciKlass* _klass;
  Value    _obj;

  ciMethod* _profiled_method;
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(nullptr), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const      { return _klass; }
  Value obj() const           { return _obj; }
  bool is_loaded() const      { return klass() != nullptr; }
  bool direct_compare() const { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)        { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci)             { _profiled_bci = bci; }
  bool should_profile() const       { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const          { return _profiled_bci; }
};


LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const { return false; }
};


BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = nullptr)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const      { return _obj; }
  int monitor_no() const { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
};


LEAF(MonitorEnter, AccessMonitor)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
  : AccessMonitor(obj, monitor_no, state_before)
  {
    ASSERT_VALUES
  }

  // generic
  virtual bool can_trap() const { return true; }
};


LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, nullptr)
  {
    ASSERT_VALUES
  }
};


LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  ArgsNonNullState _nonnull_state;
  Values*          _args;
  Value            _recv;

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic. The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(nullptr)
  {
    assert(args != nullptr, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const     { return _id; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const  { return _args->at(i); }

  bool has_receiver() const    { return (_recv != nullptr); }
  Value receiver() const       { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

LEAF(BlockBegin, StateSplit)
 private:
  int _block_id;            // the unique block id
  int _bci;                 // start-bci of block
  int _depth_first_number;  // number of this block in a depth-first ordering
  int _linear_scan_number;  // number of this block in linear-scan ordering
  int _dominator_depth;
  int _loop_depth;          // the loop nesting level of this block
  int _loop_index;          // number of the innermost loop of this block
  int _flags;               // the flags associated with this block

  // fields used by BlockListBuilder
  int            _total_preds;      // number of predecessors found by BlockListBuilder
  ResourceBitMap _stores_to_locals; // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList _predecessors;  // the predecessors of this block
  BlockList _dominates;     // list of blocks that are dominated by this block
  BlockBegin* _dominator;   // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                    // the last instruction of this block
  BlockList  _exception_handlers;     // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;  // if this block is the start of an exception handler,
                                      // this records the PC offset in the assembly code of the
                                      // first instruction in this block
  Label      _label;                  // the label associated with this block
  LIR_List*  _lir;                    // the low level intermediate representation for this block

  ResourceBitMap _live_in;   // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;  // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;  // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill; // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;          // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id; // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;  // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _block_id(Compilation::current()->get_next_block_id())
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _predecessors(2)
  , _dominates(2)
  , _dominator(nullptr)
  , _end(nullptr)
  , _exception_handlers(1)
  , _exception_states(nullptr)
  , _exception_handler_pco(-1)
  , _lir(nullptr)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(nullptr)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const            { return _block_id; }
  int bci() const                 { return _bci; }
  BlockList* dominates()          { return &_dominates; }
  BlockBegin* dominator() const   { return _dominator; }
  int loop_depth() const          { return _loop_depth; }
  int dominator_depth() const     { return _dominator_depth; }
  int depth_first_number() const  { return _depth_first_number; }
  int linear_scan_number() const  { return _linear_scan_number; }
  BlockEnd* end() const           { return _end; }
  Label* label()                  { return &_label; }
  LIR_List* lir() const           { return _lir; }
  int exception_handler_pco() const { return _exception_handler_pco; }
  ResourceBitMap& live_in()       { return _live_in; }
  ResourceBitMap& live_out()      { return _live_out; }
  ResourceBitMap& live_gen()      { return _live_gen; }
  ResourceBitMap& live_kill()     { return _live_kill; }
  ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; }
  intArray* fpu_stack_state() const    { return _fpu_stack_state; }
  int first_lir_instruction_id() const { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const  { return _last_lir_instruction_id; }
  int total_preds() const         { return _total_preds; }
  BitMap& stores_to_locals()      { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)     { _dominator = dom; }
  void set_loop_depth(int d)              { _loop_depth = d; }
  void set_dominator_depth(int d)         { _dominator_depth = d; }
  void set_depth_first_number(int dfn)    { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)    { _linear_scan_number = lsn; }
  void set_end(BlockEnd* new_end);
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)             { _lir = lir; }
  void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
  void set_live_in  (const ResourceBitMap& map) { _live_in = map; }
  void set_live_out (const ResourceBitMap& map) { _live_out = map; }
  void set_live_gen (const ResourceBitMap& map) { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map) { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)     { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)     { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)      { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)         { _total_preds += n; }
  void init_stores_to_locals(int locals_count)  { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
  int number_of_preds() const                 { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const            { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const      { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == nullptr ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions cannot be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // should we try to eliminate range checks in this block?
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder (BlockClosure* closure);
  void iterate_postorder(BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix; }
  int  loop_index() const                        { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state, bool has_irreducible_loops);  // try to merge states at block begin
  void merge(ValueStack* state, bool has_irreducible_loops) {
    bool b = try_merge(state, has_irreducible_loops);
    assert(b, "merge failed");
  }

  // debugging
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};

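
// Illustrative sketch (not part of this header): a BlockClosure that counts the
// blocks reachable from a given BlockBegin via iterate_preorder(). The class
// name CountBlocks and the variable names are made up for this example.
//
//   class CountBlocks: public BlockClosure {
//    private:
//     int _count;
//    public:
//     CountBlocks() : _count(0) {}
//     virtual void block_do(BlockBegin* block) { _count++; }
//     int count() const { return _count; }
//   };
//
//   CountBlocks* counter = new CountBlocks();
//   entry->iterate_preorder(counter);              // 'entry' is some BlockBegin*
//   tty->print_cr("reachable blocks: %d", counter->count());
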
BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != nullptr, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != nullptr, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(nullptr)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  inline void remove_sux_at(int i)               { _sux->remove_at(i); }
  inline int find_sux(BlockBegin* sux)           { return _sux->find(sux); }

  // successors
  int number_of_sux() const                      { return _sux != nullptr ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  bool is_sux(BlockBegin* sux) const             { return _sux == nullptr ? false : _sux->contains(sux); }
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};


LEAF(Goto, BlockEnd)
 public:
  enum Direction {
    none,             // Just a regular goto
    taken, not_taken  // Goto produced from If
  };
 private:
  ciMethod*   _profiled_method;
  int         _profiled_bci;
  Direction   _direction;
 public:
  // creation
  Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _profiled_method(nullptr)
  , _profiled_bci(0)
  , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  Goto(BlockBegin* sux, bool is_safepoint)
  : BlockEnd(illegalType, nullptr, is_safepoint)
  , _profiled_method(nullptr)
  , _profiled_bci(0)
  , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }
  Direction direction() const                    { return _direction; }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_direction(Direction d)                { _direction = d; }
};

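
// Illustrative sketch (assumed usage mirroring how the front end wires the CFG;
// the names 'from' and 'to' are made up): a Goto both terminates its block and
// records the successor edge, and BlockBegin::set_end() attaches it to the
// block it ends.
//
//   Goto* g = new Goto(to, /*is_safepoint*/ false);   // single successor 'to'
//   from->set_end(g);                                 // 'g' becomes from->end()
//   assert(from->number_of_sux() == 1 && from->sux_at(0) == to, "one edge");
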
#ifdef ASSERT
LEAF(Assert, Instruction)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  char*       _message;

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  const char* message() const                    { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
#endif


LEAF(RangeCheckPredicate, StateSplit)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;

  void check_state();

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state)
  : StateSplit(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize
  RangeCheckPredicate(ValueStack* state)
  : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = nullptr;
    check_state();
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }

  void always_fail()                             { _x = _y = nullptr; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};


LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;   // Canonicalizer may alter bci of If node
  bool        _swapped;        // Is the order reversed with respect to the original If in the
                               // bytecode stream?
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(nullptr)
  , _profiled_bci(0)
  , _swapped(false)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_swapped(bool value)                   { _swapped = value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  int length() const                             { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};

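
// Illustrative note (follows directly from the accessors above; 'sw' and 'k'
// are made-up names): a TableSwitch with lo_key() == 3 and length() == 4 covers
// the keys 3..6. A key k inside the range selects sux_at(k - lo_key()); any key
// outside the range falls through to default_sux(), which is always the last
// successor.
//
//   // given the tag value k at run time:
//   BlockBegin* target = (k >= sw->lo_key() && k <= sw->hi_key())
//                          ? sw->sux_at(k - sw->lo_key())
//                          : sw->default_sux();
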
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != nullptr, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result)
  : BlockEnd(result == nullptr ? voidType : result->type()->base(), nullptr, true)
  , _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != nullptr; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before)
  : BlockEnd(illegalType, state_before, true)
  , _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, nullptr, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == nullptr || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != nullptr) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? nullptr : sux_at(0); }
};


LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};

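
// Illustrative note on the successor layout established by Base's constructor
// above: when an OSR entry exists it is appended first, so the standard entry
// is always the last successor and therefore the default_sux(); osr_entry(),
// when present, is sux_at(0).
//
//   // with both entries:   sux_at(0) == osr_entry, sux_at(1) == std_entry
//   // with no OSR entry:   sux_at(0) == std_entry  (and osr_entry() == nullptr)
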
// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;   // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


BASE(UnsafeOp, Instruction)
 private:
  Value     _object;      // Object to be fetched from or mutated
  Value     _offset;      // Offset within object
  bool      _is_volatile; // true if volatile - dl/JSR166
  BasicType _basic_type;  // ValueType cannot express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _object(object), _offset(offset), _is_volatile(is_volatile), _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned, though we could be
    // looser about this if we wanted to.
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_object); f->visit(&_offset); }
};


LEAF(UnsafeGet, UnsafeOp)
 private:
  bool _is_raw;
 public:
  UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
    _is_raw = false;
  }
  UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile, bool is_raw)
  : UnsafeOp(basic_type, object, offset, false, is_volatile), _is_raw(is_raw)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_raw()                                  { return _is_raw; }
};


LEAF(UnsafePut, UnsafeOp)
 private:
  Value _value;   // Value to be stored
 public:
  UnsafePut(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f); f->visit(&_value); }
};


LEAF(UnsafeGetAndSet, UnsafeOp)
 private:
  Value _value;   // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSet(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f); f->visit(&_value); }
};

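
// Illustrative sketch (assumed shape of what the front end might append for an
// Unsafe.getIntVolatile(obj, off) intrinsic; 'obj' and 'off' stand for the
// already-built Values of the base object and offset):
//
//   UnsafeGet* load = new UnsafeGet(T_INT, obj, off, /*is_volatile*/ true);
//   // load->type() is intType (derived from T_INT), and the op is pinned by
//   // the UnsafeOp constructor, so it stays where it was appended.
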
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined?

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
  : Instruction(voidType)
  , _method(method)
  , _bci_of_invoke(bci)
  , _callee(callee)
  , _recv(recv)
  , _known_holder(known_holder)
  , _obj_args(obj_args)
  , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method() const                       { return _method; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  ciMethod* callee() const                       { return _callee; }
  Value recv() const                             { return _recv; }
  ciKlass* known_holder() const                  { return _known_holder; }
  int nb_profiled_args() const                   { return _obj_args == nullptr ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i) const             { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const                           { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != nullptr) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};


LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;
  ciMethod* _callee;
  int       _bci_of_invoke;
  Value     _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
  : Instruction(voidType)
  , _method(method)
  , _callee(callee)
  , _bci_of_invoke(bci)
  , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileReturnType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method() const                       { return _method; }
  ciMethod* callee() const                       { return _callee; }
  int bci_of_invoke() const                      { return _bci_of_invoke; }
  Value ret() const                              { return _ret; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != nullptr) {
      f->visit(&_ret);
    }
  }
};

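
// Illustrative sketch (hypothetical visitor, not part of the VM): the
// input_values_do() methods above expose the receiver and the profiled
// arguments to a ValueVisitor, e.g. one that simply prints each input's id.
//
//   class PrintInputs: public ValueVisitor {
//    public:
//     virtual void visit(Value* v) { tty->print_cr("input id %d", (*v)->id()); }
//   };
//
//   PrintInputs pi;
//   profile_call->input_values_do(&pi);   // 'profile_call' is some ProfileCall*
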
// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread;   // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
  : Instruction(type)
  , _entry_name(entry_name)
  , _entry(entry)
  , _args(args)
  , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  const char* entry_name() const                 { return _entry_name; }
  address entry() const                          { return _entry; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  bool pass_thread() const                       { return _pass_thread; }

  virtual void input_values_do(ValueVisitor* f) {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


// Used to trip the invocation counter of an inlined method
LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
  : Instruction(voidType)
  , _inlinee(inlinee)
  , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* inlinee()                            { return _inlinee; }
  ValueStack* state()                            { return _state; }
  virtual void input_values_do(ValueVisitor*)    {}
  virtual void state_values_do(ValueVisitor*);
};


LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
  : Instruction(voidType)
  , _code(code)
  {
    pin();
  }

  LIR_Code code()                                { return _code; }

  virtual void input_values_do(ValueVisitor*)    {}
};


class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  int         _index;   // sux index of 'to' block
 public:
  BlockPair(BlockBegin* from, int index): _from(from), _index(index) {}
  BlockBegin* from() const { return _from; }
  int index() const        { return _index; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

inline int         BlockBegin::number_of_sux() const { assert(_end != nullptr, "need end"); return _end->number_of_sux(); }
inline BlockBegin* BlockBegin::sux_at(int i) const   { assert(_end != nullptr, "need end"); return _end->sux_at(i); }

#undef ASSERT_VALUES

#endif // SHARE_C1_C1_INSTRUCTION_HPP