1 /* 2 * Copyright (c) 1999, 2024, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_C1_C1_INSTRUCTION_HPP 26 #define SHARE_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 39 40 // Instruction class hierarchy 41 // 42 // All leaf classes in the class hierarchy are concrete classes 43 // (i.e., are instantiated). All other classes are abstract and 44 // serve factoring. 45 46 class Instruction; 47 class Phi; 48 class Local; 49 class Constant; 50 class AccessField; 51 class LoadField; 52 class StoreField; 53 class AccessArray; 54 class ArrayLength; 55 class AccessIndexed; 56 class LoadIndexed; 57 class StoreIndexed; 58 class NegateOp; 59 class Op2; 60 class ArithmeticOp; 61 class ShiftOp; 62 class LogicOp; 63 class CompareOp; 64 class IfOp; 65 class Convert; 66 class NullCheck; 67 class TypeCast; 68 class OsrEntry; 69 class ExceptionObject; 70 class StateSplit; 71 class Invoke; 72 class NewInstance; 73 class NewArray; 74 class NewTypeArray; 75 class NewObjectArray; 76 class NewMultiArray; 77 class Deoptimize; 78 class TypeCheck; 79 class CheckCast; 80 class InstanceOf; 81 class AccessMonitor; 82 class MonitorEnter; 83 class MonitorExit; 84 class Intrinsic; 85 class BlockBegin; 86 class BlockEnd; 87 class Goto; 88 class If; 89 class Switch; 90 class TableSwitch; 91 class LookupSwitch; 92 class Return; 93 class Throw; 94 class Base; 95 class RoundFP; 96 class UnsafeOp; 97 class UnsafeGet; 98 class UnsafePut; 99 class UnsafeGetAndSet; 100 class ProfileCall; 101 class ProfileReturnType; 102 class ProfileACmpTypes; 103 class ProfileInvoke; 104 class RuntimeCall; 105 class MemBar; 106 class RangeCheckPredicate; 107 #ifdef ASSERT 108 class Assert; 109 #endif 110 111 // A Value is a reference to the instruction creating the value 112 typedef Instruction* Value; 113 typedef GrowableArray<Value> Values; 114 typedef GrowableArray<ValueStack*> ValueStackStack; 115 116 // BlockClosure is the base class for block traversal/iteration. 
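//
// Illustrative sketch (not part of the original header): a minimal
// BlockClosure that counts the blocks visited during a CFG traversal.
// The closure is handed to an iteration entry point such as
// BlockBegin::iterate_preorder() or BlockList::iterate_forward(), which
// invoke block_do() once per block.
//
//   class BlockCounter: public BlockClosure {
//    private:
//     int _count;
//    public:
//     BlockCounter() : _count(0) {}
//     virtual void block_do(BlockBegin* block) { _count++; }
//     int count() const { return _count; }
//   };
//
//   // usage: BlockCounter bc; start_block->iterate_preorder(&bc);
//   // 'start_block' stands in for any BlockBegin, e.g. the IR start block.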
117 118 class BlockClosure: public CompilationResourceObj { 119 public: 120 virtual void block_do(BlockBegin* block) = 0; 121 }; 122 123 124 // A simple closure class for visiting the values of an Instruction 125 class ValueVisitor: public StackObj { 126 public: 127 virtual void visit(Value* v) = 0; 128 }; 129 130 131 // Some array and list classes 132 typedef GrowableArray<BlockBegin*> BlockBeginArray; 133 134 class BlockList: public GrowableArray<BlockBegin*> { 135 public: 136 BlockList(): GrowableArray<BlockBegin*>() {} 137 BlockList(const int size): GrowableArray<BlockBegin*>(size) {} 138 BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {} 139 140 void iterate_forward(BlockClosure* closure); 141 void iterate_backward(BlockClosure* closure); 142 void values_do(ValueVisitor* f); 143 void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN; 144 }; 145 146 147 // InstructionVisitors provide type-based dispatch for instructions. 148 // For each concrete Instruction class X, a virtual function do_X is 149 // provided. Functionality that needs to be implemented for all classes 150 // (e.g., printing, code generation) is factored out into a specialised 151 // visitor instead of added to the Instruction classes itself. 152 153 class InstructionVisitor: public StackObj { 154 public: 155 virtual void do_Phi (Phi* x) = 0; 156 virtual void do_Local (Local* x) = 0; 157 virtual void do_Constant (Constant* x) = 0; 158 virtual void do_LoadField (LoadField* x) = 0; 159 virtual void do_StoreField (StoreField* x) = 0; 160 virtual void do_ArrayLength (ArrayLength* x) = 0; 161 virtual void do_LoadIndexed (LoadIndexed* x) = 0; 162 virtual void do_StoreIndexed (StoreIndexed* x) = 0; 163 virtual void do_NegateOp (NegateOp* x) = 0; 164 virtual void do_ArithmeticOp (ArithmeticOp* x) = 0; 165 virtual void do_ShiftOp (ShiftOp* x) = 0; 166 virtual void do_LogicOp (LogicOp* x) = 0; 167 virtual void do_CompareOp (CompareOp* x) = 0; 168 virtual void do_IfOp (IfOp* x) = 0; 169 virtual void do_Convert (Convert* x) = 0; 170 virtual void do_NullCheck (NullCheck* x) = 0; 171 virtual void do_TypeCast (TypeCast* x) = 0; 172 virtual void do_Invoke (Invoke* x) = 0; 173 virtual void do_NewInstance (NewInstance* x) = 0; 174 virtual void do_NewTypeArray (NewTypeArray* x) = 0; 175 virtual void do_NewObjectArray (NewObjectArray* x) = 0; 176 virtual void do_NewMultiArray (NewMultiArray* x) = 0; 177 virtual void do_CheckCast (CheckCast* x) = 0; 178 virtual void do_InstanceOf (InstanceOf* x) = 0; 179 virtual void do_MonitorEnter (MonitorEnter* x) = 0; 180 virtual void do_MonitorExit (MonitorExit* x) = 0; 181 virtual void do_Intrinsic (Intrinsic* x) = 0; 182 virtual void do_BlockBegin (BlockBegin* x) = 0; 183 virtual void do_Goto (Goto* x) = 0; 184 virtual void do_If (If* x) = 0; 185 virtual void do_TableSwitch (TableSwitch* x) = 0; 186 virtual void do_LookupSwitch (LookupSwitch* x) = 0; 187 virtual void do_Return (Return* x) = 0; 188 virtual void do_Throw (Throw* x) = 0; 189 virtual void do_Base (Base* x) = 0; 190 virtual void do_OsrEntry (OsrEntry* x) = 0; 191 virtual void do_ExceptionObject(ExceptionObject* x) = 0; 192 virtual void do_RoundFP (RoundFP* x) = 0; 193 virtual void do_UnsafeGet (UnsafeGet* x) = 0; 194 virtual void do_UnsafePut (UnsafePut* x) = 0; 195 virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x) = 0; 196 virtual void do_ProfileCall (ProfileCall* x) = 0; 197 virtual void do_ProfileReturnType (ProfileReturnType* x) = 0; 198 virtual void 
do_ProfileACmpTypes(ProfileACmpTypes* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1                )                    ((intx)(x1))
#define HASH2(x1, x2            )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3        )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4    )                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))
#define HASH5(x1, x2, x3, x4, x5)                    ((HASH4(x1, x2, x3, x4) << 7) ^ HASH1(x5))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default
// implementation disables value numbering. Each instruction that can be
// value-numbered should define corresponding hash() and is_equal(Value)
// functions via the macros below. The f arguments specify all the
// values/op codes, etc. that need to be identical for two instructions
// to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that a 0 is never
//       produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely that we are not going to handle it specially.

#define HASHING1(class_name, enabled, f1)                   \
  virtual intx hash() const {                               \
    return (enabled) ? HASH2(name(), f1) : 0;               \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)  ) return false;                         \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr) return false;                        \
    if (f1 != _v->f1) return false;                         \
    return true;                                            \
  }                                                         \


#define HASHING2(class_name, enabled, f1, f2)               \
  virtual intx hash() const {                               \
    return (enabled) ? HASH3(name(), f1, f2) : 0;           \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)  ) return false;                         \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr) return false;                        \
    if (f1 != _v->f1) return false;                         \
    if (f2 != _v->f2) return false;                         \
    return true;                                            \
  }                                                         \


#define HASHING3(class_name, enabled, f1, f2, f3)           \
  virtual intx hash() const {                               \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0;       \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)  ) return false;                         \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr) return false;                        \
    if (f1 != _v->f1) return false;                         \
    if (f2 != _v->f2) return false;                         \
    if (f3 != _v->f3) return false;                         \
    return true;                                            \
  }                                                         \

#define HASHING4(class_name, enabled, f1, f2, f3, f4)       \
  virtual intx hash() const {                               \
    return (enabled) ? HASH5(name(), f1, f2, f3, f4) : 0;   \
  }                                                         \
  virtual bool is_equal(Value v) const {                    \
    if (!(enabled)  ) return false;                         \
    class_name* _v = v->as_##class_name();                  \
    if (_v == nullptr ) return false;                       \
    if (f1 != _v->f1) return false;                         \
    if (f2 != _v->f2) return false;                         \
    if (f3 != _v->f3) return false;                         \
    if (f4 != _v->f4) return false;                         \
    return true;                                            \
  }                                                         \


// The mother of all instructions...

class Instruction: public CompilationResourceObj {
 private:
  int          _id;                            // the unique instruction id
#ifndef PRODUCT
  int          _printable_bci;                 // the bci of the instruction for printing
#endif
  int          _use_count;                     // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1
  int          _pin_state;                     // set of PinReason describing the reason for pinning
  unsigned int _flags;                         // Flag bits
  ValueType*   _type;                          // the instruction value type
  Instruction* _next;                          // the next instruction if any (null for BlockEnd instructions)
  Instruction* _subst;                         // the substitution instruction if any
  LIR_Opr      _operand;                       // LIR specific information

  ValueStack*  _state_before;                  // Copy of state with input operands still on stack (or null)
  ValueStack*  _exception_state;               // Copy of state for exception handling
  XHandlers*   _exception_handlers;            // Flat list of exception handlers covering this instruction

  friend class UseCountComputer;
  friend class GraphBuilder;

  void update_exception_state(ValueStack* state);

 protected:
  BlockBegin*  _block;                         // Block that contains this instruction

  void set_type(ValueType* type) {
    assert(type != nullptr, "type must exist");
    _type = type;
  }

  // Helper class to keep track of which arguments need a null check
  class ArgsNonNullState {
  private:
    int _nonnull_state; // mask identifying which args are nonnull
  public:
    ArgsNonNullState()
      : _nonnull_state(AllBits) {}

    // Does argument number i need a null check?
    bool arg_needs_null_check(int i) const {
      // No data is kept for arguments starting at position 33 so
      // conservatively assume that they need a null check.
338 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 339 return is_set_nth_bit(_nonnull_state, i); 340 } 341 return true; 342 } 343 344 // Set whether argument number i needs a null check or not 345 void set_arg_needs_null_check(int i, bool check) { 346 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 347 if (check) { 348 _nonnull_state |= (int)nth_bit(i); 349 } else { 350 _nonnull_state &= (int)~(nth_bit(i)); 351 } 352 } 353 } 354 }; 355 356 public: 357 void* operator new(size_t size) throw() { 358 Compilation* c = Compilation::current(); 359 void* res = c->arena()->Amalloc(size); 360 return res; 361 } 362 363 static const int no_bci = -99; 364 365 enum InstructionFlag { 366 NeedsNullCheckFlag = 0, 367 NeverNullFlag, // For "Q" signatures 368 CanTrapFlag, 369 DirectCompareFlag, 370 IsEliminatedFlag, 371 IsSafepointFlag, 372 IsStaticFlag, 373 NeedsStoreCheckFlag, 374 NeedsWriteBarrierFlag, 375 PreservesStateFlag, 376 TargetIsFinalFlag, 377 TargetIsLoadedFlag, 378 UnorderedIsTrueFlag, 379 NeedsPatchingFlag, 380 ThrowIncompatibleClassChangeErrorFlag, 381 InvokeSpecialReceiverCheckFlag, 382 ProfileMDOFlag, 383 IsLinkedInBlockFlag, 384 NeedsRangeCheckFlag, 385 InWorkListFlag, 386 DeoptimizeOnException, 387 KillsMemoryFlag, 388 OmitChecksFlag, 389 InstructionLastFlag 390 }; 391 392 public: 393 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 394 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 395 396 // 'globally' used condition values 397 enum Condition { 398 eql, neq, lss, leq, gtr, geq, aeq, beq 399 }; 400 401 // Instructions may be pinned for many reasons and under certain conditions 402 // with enough knowledge it's possible to safely unpin them. 
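  // For example (illustrative note, not part of the original header):
  // NullCheck pins itself with PinExplicitNullCheck in its constructor,
  // StateSplit pins with PinStateSplitConstructor, and an Intrinsic that
  // cannot trap calls unpin(PinStateSplitConstructor) again (see the
  // Intrinsic constructor below). The basic shape is:
  //
  //   pin(Instruction::PinStateSplitConstructor);    // keep the instruction rooted in its block
  //   ...
  //   if (!can_trap()) unpin(Instruction::PinStateSplitConstructor);  // safe to treat as unpinned again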
403 enum PinReason { 404 PinUnknown = 1 << 0 405 , PinExplicitNullCheck = 1 << 3 406 , PinStackForStateSplit= 1 << 12 407 , PinStateSplitConstructor= 1 << 13 408 , PinGlobalValueNumbering= 1 << 14 409 }; 410 411 static Condition mirror(Condition cond); 412 static Condition negate(Condition cond); 413 414 // initialization 415 static int number_of_instructions() { 416 return Compilation::current()->number_of_instructions(); 417 } 418 419 // creation 420 Instruction(ValueType* type, ValueStack* state_before = nullptr, bool type_is_constant = false) 421 : _id(Compilation::current()->get_next_id()), 422 #ifndef PRODUCT 423 _printable_bci(-99), 424 #endif 425 _use_count(0) 426 , _pin_state(0) 427 , _flags(0) 428 , _type(type) 429 , _next(nullptr) 430 , _subst(nullptr) 431 , _operand(LIR_OprFact::illegalOpr) 432 , _state_before(state_before) 433 , _exception_handlers(nullptr) 434 , _block(nullptr) 435 { 436 check_state(state_before); 437 assert(type != nullptr && (!type->is_constant() || type_is_constant), "type must exist"); 438 update_exception_state(_state_before); 439 } 440 441 // accessors 442 int id() const { return _id; } 443 #ifndef PRODUCT 444 bool has_printable_bci() const { return _printable_bci != -99; } 445 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 446 void set_printable_bci(int bci) { _printable_bci = bci; } 447 #endif 448 int dominator_depth(); 449 int use_count() const { return _use_count; } 450 int pin_state() const { return _pin_state; } 451 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 452 ValueType* type() const { return _type; } 453 BlockBegin *block() const { return _block; } 454 Instruction* prev(); // use carefully, expensive operation 455 Instruction* next() const { return _next; } 456 bool has_subst() const { return _subst != nullptr; } 457 Instruction* subst() { return _subst == nullptr ? 
this : _subst->subst(); } 458 LIR_Opr operand() const { return _operand; } 459 460 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 461 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 462 void set_null_free(bool f) { set_flag(NeverNullFlag, f); } 463 bool is_null_free() const { return check_flag(NeverNullFlag); } 464 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 465 bool can_be_linked() { return as_Local() == nullptr && as_Phi() == nullptr; } 466 467 bool is_null_obj() { return as_Constant() != nullptr && type()->as_ObjectType()->constant_value()->is_null_object(); } 468 469 bool has_uses() const { return use_count() > 0; } 470 ValueStack* state_before() const { return _state_before; } 471 ValueStack* exception_state() const { return _exception_state; } 472 virtual bool needs_exception_state() const { return true; } 473 XHandlers* exception_handlers() const { return _exception_handlers; } 474 ciKlass* as_loaded_klass_or_null() const; 475 476 // manipulation 477 void pin(PinReason reason) { _pin_state |= reason; } 478 void pin() { _pin_state |= PinUnknown; } 479 // DANGEROUS: only used by EliminateStores 480 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 481 482 Instruction* set_next(Instruction* next) { 483 assert(next->has_printable_bci(), "_printable_bci should have been set"); 484 assert(next != nullptr, "must not be null"); 485 assert(as_BlockEnd() == nullptr, "BlockEnd instructions must have no next"); 486 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 487 488 BlockBegin *block = this->block(); 489 next->_block = block; 490 491 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 492 _next = next; 493 return next; 494 } 495 496 Instruction* set_next(Instruction* next, int bci) { 497 #ifndef PRODUCT 498 next->set_printable_bci(bci); 499 #endif 500 return set_next(next); 501 } 502 503 // when blocks are merged 504 void fixup_block_pointers() { 505 Instruction *cur = next()->next(); // next()'s block is set in set_next 506 while (cur && cur->_block != block()) { 507 cur->_block = block(); 508 cur = cur->next(); 509 } 510 } 511 512 Instruction *insert_after(Instruction *i) { 513 Instruction* n = _next; 514 set_next(i); 515 i->set_next(n); 516 return _next; 517 } 518 519 bool is_loaded_flat_array() const; 520 bool maybe_flat_array(); 521 bool maybe_null_free_array(); 522 523 Instruction *insert_after_same_bci(Instruction *i) { 524 #ifndef PRODUCT 525 i->set_printable_bci(printable_bci()); 526 #endif 527 return insert_after(i); 528 } 529 530 void set_subst(Instruction* subst) { 531 assert(subst == nullptr || 532 type()->base() == subst->type()->base() || 533 subst->type()->base() == illegalType, "type can't change"); 534 _subst = subst; 535 } 536 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 537 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 538 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 539 540 // machine-specifics 541 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 542 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 543 544 // generic 545 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 546 virtual Phi* as_Phi() { return nullptr; } 547 virtual Local* as_Local() { return nullptr; } 548 
virtual Constant* as_Constant() { return nullptr; } 549 virtual AccessField* as_AccessField() { return nullptr; } 550 virtual LoadField* as_LoadField() { return nullptr; } 551 virtual StoreField* as_StoreField() { return nullptr; } 552 virtual AccessArray* as_AccessArray() { return nullptr; } 553 virtual ArrayLength* as_ArrayLength() { return nullptr; } 554 virtual AccessIndexed* as_AccessIndexed() { return nullptr; } 555 virtual LoadIndexed* as_LoadIndexed() { return nullptr; } 556 virtual StoreIndexed* as_StoreIndexed() { return nullptr; } 557 virtual NegateOp* as_NegateOp() { return nullptr; } 558 virtual Op2* as_Op2() { return nullptr; } 559 virtual ArithmeticOp* as_ArithmeticOp() { return nullptr; } 560 virtual ShiftOp* as_ShiftOp() { return nullptr; } 561 virtual LogicOp* as_LogicOp() { return nullptr; } 562 virtual CompareOp* as_CompareOp() { return nullptr; } 563 virtual IfOp* as_IfOp() { return nullptr; } 564 virtual Convert* as_Convert() { return nullptr; } 565 virtual NullCheck* as_NullCheck() { return nullptr; } 566 virtual OsrEntry* as_OsrEntry() { return nullptr; } 567 virtual StateSplit* as_StateSplit() { return nullptr; } 568 virtual Invoke* as_Invoke() { return nullptr; } 569 virtual NewInstance* as_NewInstance() { return nullptr; } 570 virtual NewArray* as_NewArray() { return nullptr; } 571 virtual NewTypeArray* as_NewTypeArray() { return nullptr; } 572 virtual NewObjectArray* as_NewObjectArray() { return nullptr; } 573 virtual NewMultiArray* as_NewMultiArray() { return nullptr; } 574 virtual TypeCheck* as_TypeCheck() { return nullptr; } 575 virtual CheckCast* as_CheckCast() { return nullptr; } 576 virtual InstanceOf* as_InstanceOf() { return nullptr; } 577 virtual TypeCast* as_TypeCast() { return nullptr; } 578 virtual AccessMonitor* as_AccessMonitor() { return nullptr; } 579 virtual MonitorEnter* as_MonitorEnter() { return nullptr; } 580 virtual MonitorExit* as_MonitorExit() { return nullptr; } 581 virtual Intrinsic* as_Intrinsic() { return nullptr; } 582 virtual BlockBegin* as_BlockBegin() { return nullptr; } 583 virtual BlockEnd* as_BlockEnd() { return nullptr; } 584 virtual Goto* as_Goto() { return nullptr; } 585 virtual If* as_If() { return nullptr; } 586 virtual TableSwitch* as_TableSwitch() { return nullptr; } 587 virtual LookupSwitch* as_LookupSwitch() { return nullptr; } 588 virtual Return* as_Return() { return nullptr; } 589 virtual Throw* as_Throw() { return nullptr; } 590 virtual Base* as_Base() { return nullptr; } 591 virtual RoundFP* as_RoundFP() { return nullptr; } 592 virtual ExceptionObject* as_ExceptionObject() { return nullptr; } 593 virtual UnsafeOp* as_UnsafeOp() { return nullptr; } 594 virtual ProfileInvoke* as_ProfileInvoke() { return nullptr; } 595 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return nullptr; } 596 597 #ifdef ASSERT 598 virtual Assert* as_Assert() { return nullptr; } 599 #endif 600 601 virtual void visit(InstructionVisitor* v) = 0; 602 603 virtual bool can_trap() const { return false; } 604 605 virtual void input_values_do(ValueVisitor* f) = 0; 606 virtual void state_values_do(ValueVisitor* f); 607 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 608 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 609 610 virtual ciType* exact_type() const; 611 virtual ciType* declared_type() const { return nullptr; } 612 613 // hashing 614 virtual const char* name() const = 0; 615 HASHING1(Instruction, false, id()) // hashing disabled by default 
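  // Illustrative note (not in the original header): given the macro
  // definitions above, HASHING1(Instruction, false, id()) behaves as if it
  // were written out as the following, i.e. the base class opts out of
  // value numbering until a subclass re-enables it via HASHING1..HASHING4:
  //
  //   virtual intx hash() const             { return 0; }      // never value-numbered
  //   virtual bool is_equal(Value v) const  { return false; }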
616 617 // debugging 618 static void check_state(ValueStack* state) PRODUCT_RETURN; 619 void print() PRODUCT_RETURN; 620 void print_line() PRODUCT_RETURN; 621 void print(InstructionPrinter& ip) PRODUCT_RETURN; 622 }; 623 624 625 // The following macros are used to define base (i.e., non-leaf) 626 // and leaf instruction classes. They define class-name related 627 // generic functionality in one place. 628 629 #define BASE(class_name, super_class_name) \ 630 class class_name: public super_class_name { \ 631 public: \ 632 virtual class_name* as_##class_name() { return this; } \ 633 634 635 #define LEAF(class_name, super_class_name) \ 636 BASE(class_name, super_class_name) \ 637 public: \ 638 virtual const char* name() const { return #class_name; } \ 639 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 640 641 642 // Debugging support 643 644 645 #ifdef ASSERT 646 class AssertValues: public ValueVisitor { 647 void visit(Value* x) { assert((*x) != nullptr, "value must exist"); } 648 }; 649 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 650 #else 651 #define ASSERT_VALUES 652 #endif // ASSERT 653 654 655 // A Phi is a phi function in the sense of SSA form. It stands for 656 // the value of a local variable at the beginning of a join block. 657 // A Phi consists of n operands, one for every incoming branch. 658 659 LEAF(Phi, Instruction) 660 private: 661 int _pf_flags; // the flags of the phi function 662 int _index; // to value on operand stack (index < 0) or to local 663 public: 664 // creation 665 Phi(ValueType* type, BlockBegin* b, int index) 666 : Instruction(type->base()) 667 , _pf_flags(0) 668 , _index(index) 669 { 670 _block = b; 671 NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci())); 672 if (type->is_illegal()) { 673 make_illegal(); 674 } 675 } 676 677 // flags 678 enum Flag { 679 no_flag = 0, 680 visited = 1 << 0, 681 cannot_simplify = 1 << 1 682 }; 683 684 // accessors 685 bool is_local() const { return _index >= 0; } 686 bool is_on_stack() const { return !is_local(); } 687 int local_index() const { assert(is_local(), ""); return _index; } 688 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); } 689 690 Value operand_at(int i) const; 691 int operand_count() const; 692 693 void set(Flag f) { _pf_flags |= f; } 694 void clear(Flag f) { _pf_flags &= ~f; } 695 bool is_set(Flag f) const { return (_pf_flags & f) != 0; } 696 697 // Invalidates phis corresponding to merges of locals of two different types 698 // (these should never be referenced, otherwise the bytecodes are illegal) 699 void make_illegal() { 700 set(cannot_simplify); 701 set_type(illegalType); 702 } 703 704 bool is_illegal() const { 705 return type()->is_illegal(); 706 } 707 708 // generic 709 virtual void input_values_do(ValueVisitor* f) { 710 } 711 }; 712 713 714 // A local is a placeholder for an incoming argument to a function call. 
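//
// Illustrative sketch (not from the original sources): for an instance
// method void m(int a), the incoming parameters could be modelled as
//
//   Local* recv = new Local(holder_klass,      objectType, 0, true,  false);  // "this"
//   Local* a    = new Local(declared_int_type, intType,    1, false, false);
//
// where holder_klass and declared_int_type stand in for the real ciType
// objects; index 0 holds the receiver for non-static methods (cf. the
// _is_receiver field below).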
715 LEAF(Local, Instruction) 716 private: 717 int _java_index; // the local index within the method to which the local belongs 718 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 719 ciType* _declared_type; 720 public: 721 // creation 722 Local(ciType* declared, ValueType* type, int index, bool receiver, bool null_free) 723 : Instruction(type) 724 , _java_index(index) 725 , _is_receiver(receiver) 726 , _declared_type(declared) 727 { 728 set_null_free(null_free); 729 NOT_PRODUCT(set_printable_bci(-1)); 730 } 731 732 // accessors 733 int java_index() const { return _java_index; } 734 bool is_receiver() const { return _is_receiver; } 735 736 virtual ciType* declared_type() const { return _declared_type; } 737 738 // generic 739 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 740 }; 741 742 743 LEAF(Constant, Instruction) 744 public: 745 // creation 746 Constant(ValueType* type): 747 Instruction(type, nullptr, /*type_is_constant*/ true) 748 { 749 assert(type->is_constant(), "must be a constant"); 750 } 751 752 Constant(ValueType* type, ValueStack* state_before, bool kills_memory = false): 753 Instruction(type, state_before, /*type_is_constant*/ true) 754 { 755 assert(state_before != nullptr, "only used for constants which need patching"); 756 assert(type->is_constant(), "must be a constant"); 757 set_flag(KillsMemoryFlag, kills_memory); 758 pin(); // since it's patching it needs to be pinned 759 } 760 761 // generic 762 virtual bool can_trap() const { return state_before() != nullptr; } 763 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 764 765 virtual intx hash() const; 766 virtual bool is_equal(Value v) const; 767 768 virtual ciType* exact_type() const; 769 770 bool kills_memory() const { return check_flag(KillsMemoryFlag); } 771 772 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 773 774 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 775 BlockBegin* compare(Instruction::Condition cond, Value right, 776 BlockBegin* true_sux, BlockBegin* false_sux) const { 777 switch (compare(cond, right)) { 778 case not_comparable: 779 return nullptr; 780 case cond_false: 781 return false_sux; 782 case cond_true: 783 return true_sux; 784 default: 785 ShouldNotReachHere(); 786 return nullptr; 787 } 788 } 789 }; 790 791 792 BASE(AccessField, Instruction) 793 private: 794 Value _obj; 795 int _offset; 796 ciField* _field; 797 NullCheck* _explicit_null_check; // For explicit null check elimination 798 799 public: 800 // creation 801 AccessField(Value obj, int offset, ciField* field, bool is_static, 802 ValueStack* state_before, bool needs_patching) 803 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 804 , _obj(obj) 805 , _offset(offset) 806 , _field(field) 807 , _explicit_null_check(nullptr) 808 { 809 set_needs_null_check(!is_static); 810 set_flag(IsStaticFlag, is_static); 811 set_flag(NeedsPatchingFlag, needs_patching); 812 ASSERT_VALUES 813 // pin of all instructions with memory access 814 pin(); 815 } 816 817 // accessors 818 Value obj() const { return _obj; } 819 int offset() const { return _offset; } 820 ciField* field() const { return _field; } 821 BasicType field_type() const { return _field->type()->basic_type(); } 822 bool is_static() const { return check_flag(IsStaticFlag); } 823 NullCheck* explicit_null_check() const { return _explicit_null_check; } 824 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 825 826 // Unresolved 
getstatic and putstatic can cause initialization. 827 // Technically it occurs at the Constant that materializes the base 828 // of the static fields but it's simpler to model it here. 829 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 830 831 // manipulation 832 833 // Under certain circumstances, if a previous NullCheck instruction 834 // proved the target object non-null, we can eliminate the explicit 835 // null check and do an implicit one, simply specifying the debug 836 // information from the NullCheck. This field should only be consulted 837 // if needs_null_check() is true. 838 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 839 840 // generic 841 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 842 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 843 }; 844 845 846 LEAF(LoadField, AccessField) 847 public: 848 // creation 849 LoadField(Value obj, int offset, ciField* field, bool is_static, 850 ValueStack* state_before, bool needs_patching, 851 ciInlineKlass* inline_klass = nullptr, Value default_value = nullptr ) 852 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 853 { 854 set_null_free(field->is_null_free()); 855 } 856 857 ciType* declared_type() const; 858 859 // generic; cannot be eliminated if needs patching or if volatile. 860 HASHING3(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset(), declared_type()) 861 }; 862 863 864 LEAF(StoreField, AccessField) 865 private: 866 Value _value; 867 ciField* _enclosing_field; // enclosing field (the flat one) for nested fields 868 869 public: 870 // creation 871 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 872 ValueStack* state_before, bool needs_patching); 873 874 // accessors 875 Value value() const { return _value; } 876 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 877 ciField* enclosing_field() const { return _enclosing_field; } 878 void set_enclosing_field(ciField* field) { _enclosing_field = field; } 879 880 // generic 881 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 882 }; 883 884 885 BASE(AccessArray, Instruction) 886 private: 887 Value _array; 888 889 public: 890 // creation 891 AccessArray(ValueType* type, Value array, ValueStack* state_before) 892 : Instruction(type, state_before) 893 , _array(array) 894 { 895 set_needs_null_check(true); 896 ASSERT_VALUES 897 pin(); // instruction with side effect (null exception or range check throwing) 898 } 899 900 Value array() const { return _array; } 901 902 // generic 903 virtual bool can_trap() const { return needs_null_check(); } 904 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 905 }; 906 907 908 LEAF(ArrayLength, AccessArray) 909 private: 910 NullCheck* _explicit_null_check; // For explicit null check elimination 911 912 public: 913 // creation 914 ArrayLength(Value array, ValueStack* state_before) 915 : AccessArray(intType, array, state_before) 916 , _explicit_null_check(nullptr) {} 917 918 // accessors 919 NullCheck* explicit_null_check() const { return _explicit_null_check; } 920 921 // setters 922 // See LoadField::set_explicit_null_check for documentation 923 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 924 925 // generic 926 HASHING1(ArrayLength, true, array()->subst()) 927 }; 928 929 930 
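// Illustrative note (not part of the original header): because ArrayLength
// uses HASHING1(ArrayLength, true, array()->subst()), two ArrayLength
// instructions whose (substituted) array operands are the same Value hash
// identically and report is_equal(), so ValueMap-based value numbering can
// commonize them. Roughly, for Java source like
//
//   int n = a.length;   // first ArrayLength on 'a'
//   int m = a.length;   // second ArrayLength, may be replaced by the first
//
// the second instruction can be substituted by the first as long as 'a' has
// not been redefined in between.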
BASE(AccessIndexed, AccessArray) 931 private: 932 Value _index; 933 Value _length; 934 BasicType _elt_type; 935 bool _mismatched; 936 ciMethod* _profiled_method; 937 int _profiled_bci; 938 939 public: 940 // creation 941 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 942 : AccessArray(as_ValueType(elt_type), array, state_before) 943 , _index(index) 944 , _length(length) 945 , _elt_type(elt_type) 946 , _mismatched(mismatched) 947 , _profiled_method(nullptr), _profiled_bci(0) 948 { 949 set_flag(Instruction::NeedsRangeCheckFlag, true); 950 ASSERT_VALUES 951 } 952 953 // accessors 954 Value index() const { return _index; } 955 Value length() const { return _length; } 956 BasicType elt_type() const { return _elt_type; } 957 bool mismatched() const { return _mismatched; } 958 959 void clear_length() { _length = nullptr; } 960 // perform elimination of range checks involving constants 961 bool compute_needs_range_check(); 962 963 // Helpers for MethodData* profiling 964 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 965 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 966 void set_profiled_bci(int bci) { _profiled_bci = bci; } 967 bool should_profile() const { return check_flag(ProfileMDOFlag); } 968 ciMethod* profiled_method() const { return _profiled_method; } 969 int profiled_bci() const { return _profiled_bci; } 970 971 972 // generic 973 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != nullptr) f->visit(&_length); } 974 }; 975 976 class DelayedLoadIndexed; 977 978 LEAF(LoadIndexed, AccessIndexed) 979 private: 980 NullCheck* _explicit_null_check; // For explicit null check elimination 981 NewInstance* _vt; 982 DelayedLoadIndexed* _delayed; 983 984 public: 985 // creation 986 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 987 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 988 , _explicit_null_check(nullptr), _vt(nullptr), _delayed(nullptr) {} 989 990 // accessors 991 NullCheck* explicit_null_check() const { return _explicit_null_check; } 992 993 // setters 994 // See LoadField::set_explicit_null_check for documentation 995 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 996 997 ciType* exact_type() const; 998 ciType* declared_type() const; 999 1000 NewInstance* vt() const { return _vt; } 1001 void set_vt(NewInstance* vt) { _vt = vt; } 1002 1003 DelayedLoadIndexed* delayed() const { return _delayed; } 1004 void set_delayed(DelayedLoadIndexed* delayed) { _delayed = delayed; } 1005 1006 // generic; 1007 HASHING4(LoadIndexed, delayed() == nullptr && !should_profile(), elt_type(), array()->subst(), index()->subst(), vt()) 1008 }; 1009 1010 class DelayedLoadIndexed : public CompilationResourceObj { 1011 private: 1012 LoadIndexed* _load_instr; 1013 ValueStack* _state_before; 1014 ciField* _field; 1015 int _offset; 1016 public: 1017 DelayedLoadIndexed(LoadIndexed* load, ValueStack* state_before) 1018 : _load_instr(load) 1019 , _state_before(state_before) 1020 , _field(nullptr) 1021 , _offset(0) { } 1022 1023 void update(ciField* field, int offset) { 1024 _field = field; 1025 _offset += offset; 1026 } 1027 1028 LoadIndexed* load_instr() const { return _load_instr; } 1029 ValueStack* state_before() const { return _state_before; } 1030 ciField* field() const { return _field; } 1031 int 
offset() const { return _offset; } 1032 }; 1033 1034 LEAF(StoreIndexed, AccessIndexed) 1035 private: 1036 Value _value; 1037 1038 bool _check_boolean; 1039 1040 public: 1041 // creation 1042 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1043 bool check_boolean, bool mismatched = false); 1044 1045 // accessors 1046 Value value() const { return _value; } 1047 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1048 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1049 bool check_boolean() const { return _check_boolean; } 1050 1051 // Flattened array support 1052 bool is_exact_flat_array_store() const; 1053 // generic 1054 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); } 1055 }; 1056 1057 1058 LEAF(NegateOp, Instruction) 1059 private: 1060 Value _x; 1061 1062 public: 1063 // creation 1064 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) { 1065 ASSERT_VALUES 1066 } 1067 1068 // accessors 1069 Value x() const { return _x; } 1070 1071 // generic 1072 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); } 1073 }; 1074 1075 1076 BASE(Op2, Instruction) 1077 private: 1078 Bytecodes::Code _op; 1079 Value _x; 1080 Value _y; 1081 1082 public: 1083 // creation 1084 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = nullptr) 1085 : Instruction(type, state_before) 1086 , _op(op) 1087 , _x(x) 1088 , _y(y) 1089 { 1090 ASSERT_VALUES 1091 } 1092 1093 // accessors 1094 Bytecodes::Code op() const { return _op; } 1095 Value x() const { return _x; } 1096 Value y() const { return _y; } 1097 1098 // manipulators 1099 void swap_operands() { 1100 assert(is_commutative(), "operation must be commutative"); 1101 Value t = _x; _x = _y; _y = t; 1102 } 1103 1104 // generic 1105 virtual bool is_commutative() const { return false; } 1106 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1107 }; 1108 1109 1110 LEAF(ArithmeticOp, Op2) 1111 public: 1112 // creation 1113 ArithmeticOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) 1114 : Op2(x->type()->meet(y->type()), op, x, y, state_before) 1115 { 1116 if (can_trap()) pin(); 1117 } 1118 1119 // generic 1120 virtual bool is_commutative() const; 1121 virtual bool can_trap() const; 1122 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1123 }; 1124 1125 1126 LEAF(ShiftOp, Op2) 1127 public: 1128 // creation 1129 ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {} 1130 1131 // generic 1132 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1133 }; 1134 1135 1136 LEAF(LogicOp, Op2) 1137 public: 1138 // creation 1139 LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {} 1140 1141 // generic 1142 virtual bool is_commutative() const; 1143 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1144 }; 1145 1146 1147 LEAF(CompareOp, Op2) 1148 public: 1149 // creation 1150 CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) 1151 : Op2(intType, op, x, y, state_before) 1152 {} 1153 1154 // generic 1155 HASHING3(Op2, true, op(), x()->subst(), y()->subst()) 1156 }; 1157 1158 1159 LEAF(IfOp, Op2) 1160 private: 1161 Value _tval; 1162 Value _fval; 1163 bool _substitutability_check; 1164 1165 public: 1166 // creation 1167 IfOp(Value x, Condition cond, Value y, Value tval, Value fval, ValueStack* state_before, bool 
substitutability_check) 1168 : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y) 1169 , _tval(tval) 1170 , _fval(fval) 1171 , _substitutability_check(substitutability_check) 1172 { 1173 ASSERT_VALUES 1174 assert(tval->type()->tag() == fval->type()->tag(), "types must match"); 1175 set_state_before(state_before); 1176 } 1177 1178 // accessors 1179 virtual bool is_commutative() const; 1180 Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; } 1181 Condition cond() const { return (Condition)Op2::op(); } 1182 Value tval() const { return _tval; } 1183 Value fval() const { return _fval; } 1184 bool substitutability_check() const { return _substitutability_check; } 1185 // generic 1186 virtual void input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); } 1187 }; 1188 1189 1190 LEAF(Convert, Instruction) 1191 private: 1192 Bytecodes::Code _op; 1193 Value _value; 1194 1195 public: 1196 // creation 1197 Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) { 1198 ASSERT_VALUES 1199 } 1200 1201 // accessors 1202 Bytecodes::Code op() const { return _op; } 1203 Value value() const { return _value; } 1204 1205 // generic 1206 virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); } 1207 HASHING2(Convert, true, op(), value()->subst()) 1208 }; 1209 1210 1211 LEAF(NullCheck, Instruction) 1212 private: 1213 Value _obj; 1214 1215 public: 1216 // creation 1217 NullCheck(Value obj, ValueStack* state_before) 1218 : Instruction(obj->type()->base(), state_before) 1219 , _obj(obj) 1220 { 1221 ASSERT_VALUES 1222 set_can_trap(true); 1223 assert(_obj->type()->is_object(), "null check must be applied to objects only"); 1224 pin(Instruction::PinExplicitNullCheck); 1225 } 1226 1227 // accessors 1228 Value obj() const { return _obj; } 1229 1230 // setters 1231 void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } 1232 1233 // generic 1234 virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } 1235 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1236 HASHING1(NullCheck, true, obj()->subst()) 1237 }; 1238 1239 1240 // This node is supposed to cast the type of another node to a more precise 1241 // declared type. 1242 LEAF(TypeCast, Instruction) 1243 private: 1244 ciType* _declared_type; 1245 Value _obj; 1246 1247 public: 1248 // The type of this node is the same type as the object type (and it might be constant). 
1249 TypeCast(ciType* type, Value obj, ValueStack* state_before) 1250 : Instruction(obj->type(), state_before, obj->type()->is_constant()), 1251 _declared_type(type), 1252 _obj(obj) {} 1253 1254 // accessors 1255 ciType* declared_type() const { return _declared_type; } 1256 Value obj() const { return _obj; } 1257 1258 // generic 1259 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 1260 }; 1261 1262 1263 BASE(StateSplit, Instruction) 1264 private: 1265 ValueStack* _state; 1266 1267 protected: 1268 static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block); 1269 1270 public: 1271 // creation 1272 StateSplit(ValueType* type, ValueStack* state_before = nullptr) 1273 : Instruction(type, state_before) 1274 , _state(nullptr) 1275 { 1276 pin(PinStateSplitConstructor); 1277 } 1278 1279 // accessors 1280 ValueStack* state() const { return _state; } 1281 IRScope* scope() const; // the state's scope 1282 1283 // manipulation 1284 void set_state(ValueStack* state) { assert(_state == nullptr, "overwriting existing state"); check_state(state); _state = state; } 1285 1286 // generic 1287 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 1288 virtual void state_values_do(ValueVisitor* f); 1289 }; 1290 1291 1292 LEAF(Invoke, StateSplit) 1293 private: 1294 Bytecodes::Code _code; 1295 Value _recv; 1296 Values* _args; 1297 BasicTypeList* _signature; 1298 ciMethod* _target; 1299 1300 public: 1301 // creation 1302 Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, 1303 ciMethod* target, ValueStack* state_before); 1304 1305 // accessors 1306 Bytecodes::Code code() const { return _code; } 1307 Value receiver() const { return _recv; } 1308 bool has_receiver() const { return receiver() != nullptr; } 1309 int number_of_arguments() const { return _args->length(); } 1310 Value argument_at(int i) const { return _args->at(i); } 1311 BasicTypeList* signature() const { return _signature; } 1312 ciMethod* target() const { return _target; } 1313 1314 ciType* declared_type() const; 1315 1316 // Returns false if target is not loaded 1317 bool target_is_final() const { return check_flag(TargetIsFinalFlag); } 1318 bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); } 1319 1320 // JSR 292 support 1321 bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } 1322 bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); } 1323 1324 virtual bool needs_exception_state() const { return false; } 1325 1326 // generic 1327 virtual bool can_trap() const { return true; } 1328 virtual void input_values_do(ValueVisitor* f) { 1329 StateSplit::input_values_do(f); 1330 if (has_receiver()) f->visit(&_recv); 1331 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1332 } 1333 virtual void state_values_do(ValueVisitor *f); 1334 }; 1335 1336 1337 LEAF(NewInstance, StateSplit) 1338 private: 1339 ciInstanceKlass* _klass; 1340 bool _is_unresolved; 1341 bool _needs_state_before; 1342 1343 public: 1344 // creation 1345 NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved, bool needs_state_before) 1346 : StateSplit(instanceType, state_before) 1347 , _klass(klass), _is_unresolved(is_unresolved), _needs_state_before(needs_state_before) 1348 {} 1349 1350 // accessors 1351 ciInstanceKlass* klass() const { return _klass; } 1352 bool is_unresolved() const { return _is_unresolved; } 1353 bool needs_state_before() const { return _needs_state_before; } 1354 
1355 virtual bool needs_exception_state() const { return false; } 1356 1357 // generic 1358 virtual bool can_trap() const { return true; } 1359 ciType* exact_type() const; 1360 ciType* declared_type() const; 1361 }; 1362 1363 BASE(NewArray, StateSplit) 1364 private: 1365 Value _length; 1366 1367 public: 1368 // creation 1369 NewArray(Value length, ValueStack* state_before) 1370 : StateSplit(objectType, state_before) 1371 , _length(length) 1372 { 1373 // Do not ASSERT_VALUES since length is null for NewMultiArray 1374 } 1375 1376 // accessors 1377 Value length() const { return _length; } 1378 1379 virtual bool needs_exception_state() const { return false; } 1380 1381 ciType* exact_type() const { return nullptr; } 1382 ciType* declared_type() const; 1383 1384 // generic 1385 virtual bool can_trap() const { return true; } 1386 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } 1387 }; 1388 1389 1390 LEAF(NewTypeArray, NewArray) 1391 private: 1392 BasicType _elt_type; 1393 bool _zero_array; 1394 1395 public: 1396 // creation 1397 NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before, bool zero_array) 1398 : NewArray(length, state_before) 1399 , _elt_type(elt_type) 1400 , _zero_array(zero_array) 1401 {} 1402 1403 // accessors 1404 BasicType elt_type() const { return _elt_type; } 1405 bool zero_array() const { return _zero_array; } 1406 ciType* exact_type() const; 1407 }; 1408 1409 1410 LEAF(NewObjectArray, NewArray) 1411 private: 1412 ciKlass* _klass; 1413 1414 public: 1415 // creation 1416 NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) 1417 : NewArray(length, state_before), _klass(klass) { } 1418 1419 // accessors 1420 ciKlass* klass() const { return _klass; } 1421 ciType* exact_type() const; 1422 }; 1423 1424 1425 LEAF(NewMultiArray, NewArray) 1426 private: 1427 ciKlass* _klass; 1428 Values* _dims; 1429 1430 public: 1431 // creation 1432 NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(nullptr, state_before), _klass(klass), _dims(dims) { 1433 ASSERT_VALUES 1434 } 1435 1436 // accessors 1437 ciKlass* klass() const { return _klass; } 1438 Values* dims() const { return _dims; } 1439 int rank() const { return dims()->length(); } 1440 1441 // generic 1442 virtual void input_values_do(ValueVisitor* f) { 1443 // NOTE: we do not call NewArray::input_values_do since "length" 1444 // is meaningless for a multi-dimensional array; passing the 1445 // zeroth element down to NewArray as its length is a bad idea 1446 // since there will be a copy in the "dims" array which doesn't 1447 // get updated, and the value must not be traversed twice. 
Was bug 1448 // - kbr 4/10/2001 1449 StateSplit::input_values_do(f); 1450 for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i)); 1451 } 1452 1453 ciType* exact_type() const; 1454 }; 1455 1456 1457 BASE(TypeCheck, StateSplit) 1458 private: 1459 ciKlass* _klass; 1460 Value _obj; 1461 1462 ciMethod* _profiled_method; 1463 int _profiled_bci; 1464 1465 public: 1466 // creation 1467 TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) 1468 : StateSplit(type, state_before), _klass(klass), _obj(obj), 1469 _profiled_method(nullptr), _profiled_bci(0) { 1470 ASSERT_VALUES 1471 set_direct_compare(false); 1472 } 1473 1474 // accessors 1475 ciKlass* klass() const { return _klass; } 1476 Value obj() const { return _obj; } 1477 bool is_loaded() const { return klass() != nullptr; } 1478 bool direct_compare() const { return check_flag(DirectCompareFlag); } 1479 1480 // manipulation 1481 void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); } 1482 1483 // generic 1484 virtual bool can_trap() const { return true; } 1485 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1486 1487 // Helpers for MethodData* profiling 1488 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1489 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1490 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1491 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1492 ciMethod* profiled_method() const { return _profiled_method; } 1493 int profiled_bci() const { return _profiled_bci; } 1494 }; 1495 1496 1497 LEAF(CheckCast, TypeCheck) 1498 public: 1499 // creation 1500 CheckCast(ciKlass* klass, Value obj, ValueStack* state_before) 1501 : TypeCheck(klass, obj, objectType, state_before) { } 1502 1503 void set_incompatible_class_change_check() { 1504 set_flag(ThrowIncompatibleClassChangeErrorFlag, true); 1505 } 1506 bool is_incompatible_class_change_check() const { 1507 return check_flag(ThrowIncompatibleClassChangeErrorFlag); 1508 } 1509 void set_invokespecial_receiver_check() { 1510 set_flag(InvokeSpecialReceiverCheckFlag, true); 1511 } 1512 bool is_invokespecial_receiver_check() const { 1513 return check_flag(InvokeSpecialReceiverCheckFlag); 1514 } 1515 1516 virtual bool needs_exception_state() const { 1517 return !is_invokespecial_receiver_check(); 1518 } 1519 1520 ciType* declared_type() const; 1521 }; 1522 1523 1524 LEAF(InstanceOf, TypeCheck) 1525 public: 1526 // creation 1527 InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} 1528 1529 virtual bool needs_exception_state() const { return false; } 1530 }; 1531 1532 1533 BASE(AccessMonitor, StateSplit) 1534 private: 1535 Value _obj; 1536 int _monitor_no; 1537 1538 public: 1539 // creation 1540 AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = nullptr) 1541 : StateSplit(illegalType, state_before) 1542 , _obj(obj) 1543 , _monitor_no(monitor_no) 1544 { 1545 set_needs_null_check(true); 1546 ASSERT_VALUES 1547 } 1548 1549 // accessors 1550 Value obj() const { return _obj; } 1551 int monitor_no() const { return _monitor_no; } 1552 1553 // generic 1554 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } 1555 }; 1556 1557 1558 LEAF(MonitorEnter, AccessMonitor) 1559 bool _maybe_inlinetype; 1560 public: 1561 // creation 1562 MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool 
maybe_inlinetype) 1563 : AccessMonitor(obj, monitor_no, state_before) 1564 , _maybe_inlinetype(maybe_inlinetype) 1565 { 1566 ASSERT_VALUES 1567 } 1568 1569 // accessors 1570 bool maybe_inlinetype() const { return _maybe_inlinetype; } 1571 1572 // generic 1573 virtual bool can_trap() const { return true; } 1574 }; 1575 1576 1577 LEAF(MonitorExit, AccessMonitor) 1578 public: 1579 // creation 1580 MonitorExit(Value obj, int monitor_no) 1581 : AccessMonitor(obj, monitor_no, nullptr) 1582 { 1583 ASSERT_VALUES 1584 } 1585 }; 1586 1587 1588 LEAF(Intrinsic, StateSplit) 1589 private: 1590 vmIntrinsics::ID _id; 1591 ArgsNonNullState _nonnull_state; 1592 Values* _args; 1593 Value _recv; 1594 1595 public: 1596 // preserves_state can be set to true for Intrinsics 1597 // which are guaranteed to preserve register state across any slow 1598 // cases; setting it to true does not mean that the Intrinsic can 1599 // not trap, only that if we continue execution in the same basic 1600 // block after the Intrinsic, all of the registers are intact. This 1601 // allows load elimination and common expression elimination to be 1602 // performed across the Intrinsic. The default value is false. 1603 Intrinsic(ValueType* type, 1604 vmIntrinsics::ID id, 1605 Values* args, 1606 bool has_receiver, 1607 ValueStack* state_before, 1608 bool preserves_state, 1609 bool cantrap = true) 1610 : StateSplit(type, state_before) 1611 , _id(id) 1612 , _args(args) 1613 , _recv(nullptr) 1614 { 1615 assert(args != nullptr, "args must exist"); 1616 ASSERT_VALUES 1617 set_flag(PreservesStateFlag, preserves_state); 1618 set_flag(CanTrapFlag, cantrap); 1619 if (has_receiver) { 1620 _recv = argument_at(0); 1621 } 1622 set_needs_null_check(has_receiver); 1623 1624 // some intrinsics can't trap, so don't force them to be pinned 1625 if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) { 1626 unpin(PinStateSplitConstructor); 1627 } 1628 } 1629 1630 // accessors 1631 vmIntrinsics::ID id() const { return _id; } 1632 int number_of_arguments() const { return _args->length(); } 1633 Value argument_at(int i) const { return _args->at(i); } 1634 1635 bool has_receiver() const { return (_recv != nullptr); } 1636 Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } 1637 bool preserves_state() const { return check_flag(PreservesStateFlag); } 1638 1639 bool arg_needs_null_check(int i) const { 1640 return _nonnull_state.arg_needs_null_check(i); 1641 } 1642 1643 void set_arg_needs_null_check(int i, bool check) { 1644 _nonnull_state.set_arg_needs_null_check(i, check); 1645 } 1646 1647 // generic 1648 virtual bool can_trap() const { return check_flag(CanTrapFlag); } 1649 virtual void input_values_do(ValueVisitor* f) { 1650 StateSplit::input_values_do(f); 1651 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 1652 } 1653 }; 1654 1655 1656 class LIR_List; 1657 1658 LEAF(BlockBegin, StateSplit) 1659 private: 1660 int _block_id; // the unique block id 1661 int _bci; // start-bci of block 1662 int _depth_first_number; // number of this block in a depth-first ordering 1663 int _linear_scan_number; // number of this block in linear-scan ordering 1664 int _dominator_depth; 1665 int _loop_depth; // the loop nesting level of this block 1666 int _loop_index; // number of the innermost loop of this block 1667 int _flags; // the flags associated with this block 1668 1669 // fields used by BlockListBuilder 1670 int _total_preds; // number of predecessors found by BlockListBuilder 1671 ResourceBitMap 
_stores_to_locals; // bit is set when a local variable is stored in the block 1672 1673 // SSA specific fields: (factor out later) 1674 BlockList _predecessors; // the predecessors of this block 1675 BlockList _dominates; // list of blocks that are dominated by this block 1676 BlockBegin* _dominator; // the dominator of this block 1677 // SSA specific ends 1678 BlockEnd* _end; // the last instruction of this block 1679 BlockList _exception_handlers; // the exception handlers potentially invoked by this block 1680 ValueStackStack* _exception_states; // only for xhandler entries: states of all instructions that have an edge to this xhandler 1681 int _exception_handler_pco; // if this block is the start of an exception handler, 1682 // this records the PC offset in the assembly code of the 1683 // first instruction in this block 1684 Label _label; // the label associated with this block 1685 LIR_List* _lir; // the low level intermediate representation for this block 1686 1687 ResourceBitMap _live_in; // set of live LIR_Opr registers at entry to this block 1688 ResourceBitMap _live_out; // set of live LIR_Opr registers at exit from this block 1689 ResourceBitMap _live_gen; // set of registers used before any redefinition in this block 1690 ResourceBitMap _live_kill; // set of registers defined in this block 1691 1692 ResourceBitMap _fpu_register_usage; 1693 intArray* _fpu_stack_state; // For x86 FPU code generation with UseLinearScan 1694 int _first_lir_instruction_id; // ID of first LIR instruction in this block 1695 int _last_lir_instruction_id; // ID of last LIR instruction in this block 1696 1697 void iterate_preorder (boolArray& mark, BlockClosure* closure); 1698 void iterate_postorder(boolArray& mark, BlockClosure* closure); 1699 1700 friend class SuxAndWeightAdjuster; 1701 1702 public: 1703 void* operator new(size_t size) throw() { 1704 Compilation* c = Compilation::current(); 1705 void* res = c->arena()->Amalloc(size); 1706 return res; 1707 } 1708 1709 // initialization/counting 1710 static int number_of_blocks() { 1711 return Compilation::current()->number_of_blocks(); 1712 } 1713 1714 // creation 1715 BlockBegin(int bci) 1716 : StateSplit(illegalType) 1717 , _block_id(Compilation::current()->get_next_block_id()) 1718 , _bci(bci) 1719 , _depth_first_number(-1) 1720 , _linear_scan_number(-1) 1721 , _dominator_depth(-1) 1722 , _loop_depth(0) 1723 , _loop_index(-1) 1724 , _flags(0) 1725 , _total_preds(0) 1726 , _stores_to_locals() 1727 , _predecessors(2) 1728 , _dominates(2) 1729 , _dominator(nullptr) 1730 , _end(nullptr) 1731 , _exception_handlers(1) 1732 , _exception_states(nullptr) 1733 , _exception_handler_pco(-1) 1734 , _lir(nullptr) 1735 , _live_in() 1736 , _live_out() 1737 , _live_gen() 1738 , _live_kill() 1739 , _fpu_register_usage() 1740 , _fpu_stack_state(nullptr) 1741 , _first_lir_instruction_id(-1) 1742 , _last_lir_instruction_id(-1) 1743 { 1744 _block = this; 1745 #ifndef PRODUCT 1746 set_printable_bci(bci); 1747 #endif 1748 } 1749 1750 // accessors 1751 int block_id() const { return _block_id; } 1752 int bci() const { return _bci; } 1753 BlockList* dominates() { return &_dominates; } 1754 BlockBegin* dominator() const { return _dominator; } 1755 int loop_depth() const { return _loop_depth; } 1756 int dominator_depth() const { return _dominator_depth; } 1757 int depth_first_number() const { return _depth_first_number; } 1758 int linear_scan_number() const { return _linear_scan_number; } 1759 BlockEnd* end() const { return _end; } 1760 Label* label() { return &_label; } 1761 
  LIR_List* lir() const                          { return _lir; }
  int exception_handler_pco() const              { return _exception_handler_pco; }
  ResourceBitMap& live_in()                      { return _live_in; }
  ResourceBitMap& live_out()                     { return _live_out; }
  ResourceBitMap& live_gen()                     { return _live_gen; }
  ResourceBitMap& live_kill()                    { return _live_kill; }
  ResourceBitMap& fpu_register_usage()           { return _fpu_register_usage; }
  intArray* fpu_stack_state() const              { return _fpu_stack_state; }
  int first_lir_instruction_id() const           { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const            { return _last_lir_instruction_id; }
  int total_preds() const                        { return _total_preds; }
  BitMap& stores_to_locals()                     { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)            { _dominator = dom; }
  void set_loop_depth(int d)                     { _loop_depth = d; }
  void set_dominator_depth(int d)                { _dominator_depth = d; }
  void set_depth_first_number(int dfn)           { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)           { _linear_scan_number = lsn; }
  void set_end(BlockEnd* new_end);
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                    { _lir = lir; }
  void set_exception_handler_pco(int pco)        { _exception_handler_pco = pco; }
  void set_live_in  (const ResourceBitMap& map)  { _live_in = map; }
  void set_live_out (const ResourceBitMap& map)  { _live_out = map; }
  void set_live_gen (const ResourceBitMap& map)  { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map)  { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)      { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)      { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)       { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)          { _total_preds += n; }
  void init_stores_to_locals(int locals_count)   { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const    { return _predecessors.contains(pred); }
  int number_of_preds() const                    { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const               { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int number_of_exception_handlers() const       { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == nullptr ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // Should we try to eliminate range checks in this block
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder (BlockClosure* closure);
  void iterate_postorder(BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix; }
  int  loop_index() const                        { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state, bool has_irreducible_loops);  // try to merge states at block begin
  void merge(ValueStack* state, bool has_irreducible_loops) {
    bool b = try_merge(state, has_irreducible_loops);
    assert(b, "merge failed");
  }

  // debugging
  void print_block() PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};
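
// Illustrative sketch (not part of this header): clients typically combine the
// flag and iteration interfaces above, e.g. to count loop headers reachable
// from a start block. The closure subclass and variable names below are
// hypothetical.
//
//   class LoopHeaderCounter: public BlockClosure {
//    public:
//     int count;
//     LoopHeaderCounter() : count(0) {}
//     virtual void block_do(BlockBegin* block) {
//       if (block->is_set(BlockBegin::parser_loop_header_flag)) count++;
//     }
//   };
//
//   LoopHeaderCounter counter;
//   start_block->iterate_preorder(&counter);   // visits each reachable block exactly once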

BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != nullptr, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != nullptr, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(nullptr)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  inline void remove_sux_at(int i)               { _sux->remove_at(i); }
  inline int find_sux(BlockBegin* sux)           { return _sux->find(sux); }

  // successors
  int number_of_sux() const                      { return _sux != nullptr ? _sux->length() : 0; }
  BlockBegin* sux_at(int i) const                { return _sux->at(i); }
  bool is_sux(BlockBegin* sux) const             { return _sux == nullptr ? false : _sux->contains(sux); }
  BlockBegin* default_sux() const                { return sux_at(number_of_sux() - 1); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};
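
// Illustrative sketch (not part of this header): the successor list of a
// finished block is reached through its BlockEnd, e.g. when wiring up the CFG.
// 'block' below is a hypothetical BlockBegin* whose end has already been set.
//
//   BlockEnd* end = block->end();
//   for (int i = 0; i < end->number_of_sux(); i++) {
//     BlockBegin* sux = end->sux_at(i);
//     sux->add_predecessor(block);               // keep predecessor lists in sync
//   }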

LEAF(Goto, BlockEnd)
 public:
  enum Direction {
    none,            // Just a regular goto
    taken, not_taken // Goto produced from If
  };
 private:
  ciMethod*   _profiled_method;
  int         _profiled_bci;
  Direction   _direction;
 public:
  // creation
  Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
    : BlockEnd(illegalType, state_before, is_safepoint)
    , _profiled_method(nullptr)
    , _profiled_bci(0)
    , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  Goto(BlockBegin* sux, bool is_safepoint)
    : BlockEnd(illegalType, nullptr, is_safepoint)
    , _profiled_method(nullptr)
    , _profiled_bci(0)
    , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }
  Direction direction() const                    { return _direction; }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_direction(Direction d)                { _direction = d; }
};

#ifdef ASSERT
LEAF(Assert, Instruction)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  char*       _message;

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  const char* message() const                    { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};
#endif

LEAF(RangeCheckPredicate, StateSplit)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;

  void check_state();

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state)
  : StateSplit(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize
  RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = nullptr;
    check_state();
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }

  void always_fail()                             { _x = _y = nullptr; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};

LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci; // Canonicalizer may alter bci of If node
  bool        _swapped;      // Is the order reversed with respect to the original If in the
                             // bytecode stream?
  bool        _substitutability_check;
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint, bool substitutability_check = false)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(nullptr)
  , _profiled_bci(0)
  , _swapped(false)
  , _substitutability_check(substitutability_check)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                                { return _x; }
  Condition cond() const                         { return _cond; }
  bool unordered_is_true() const                 { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                                { return _y; }
  BlockBegin* sux_for(bool is_true) const        { return sux_at(is_true ? 0 : 1); }
  BlockBegin* tsux() const                       { return sux_for(true); }
  BlockBegin* fsux() const                       { return sux_for(false); }
  BlockBegin* usux() const                       { return sux_for(unordered_is_true()); }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const                       { return _profiled_bci; }    // set for profiled branches and tiered
  bool is_swapped() const                        { return _swapped; }

  // manipulation
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  void set_swapped(bool value)                   { _swapped = value; }
  bool substitutability_check() const            { return _substitutability_check; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};
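
// Illustrative sketch (not part of this header): how the successor accessors
// relate to swap_operands(). 'cmp' is a hypothetical If*.
//
//   BlockBegin* taken     = cmp->tsux();   // branch taken when x() cond y() holds
//   BlockBegin* not_taken = cmp->fsux();
//   cmp->swap_operands();                  // now compares y() cond' x(), with cond' = mirror(cond)
//   assert(cmp->tsux() == taken, "swapping operands does not change the successors");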

BASE(Switch, BlockEnd)
 private:
  Value       _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const                              { return _tag; }
  int length() const                             { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};


LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != nullptr, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};
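
// Illustrative sketch (not part of this header): for both switch forms the
// default successor is appended last, so length() == number_of_sux() - 1 is the
// number of explicit cases. Hypothetical lookup of a case target for a
// TableSwitch* 'ts' and a constant key 'k':
//
//   BlockBegin* target;
//   if (k >= ts->lo_key() && k <= ts->hi_key()) {
//     target = ts->sux_at(k - ts->lo_key());    // case successor
//   } else {
//     target = ts->default_sux();               // default successor (last in the list)
//   }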

LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == nullptr ? voidType : result->type()->base(), nullptr, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != nullptr; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, nullptr, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == nullptr || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != nullptr) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? nullptr : sux_at(0); }
};


LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


BASE(UnsafeOp, Instruction)
 private:
  Value     _object;        // Object to be fetched from or mutated
  Value     _offset;        // Offset within object
  bool      _is_volatile;   // true if volatile - dl/JSR166
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : Instruction(is_put ? voidType : as_ValueType(basic_type)),
    _object(object), _offset(offset), _is_volatile(is_volatile), _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned, though we could be
    // looser about this if we wanted to.
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_object); f->visit(&_offset); }
};

LEAF(UnsafeGet, UnsafeOp)
 private:
  bool _is_raw;
 public:
  UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
    _is_raw = false;
  }
  UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile, bool is_raw)
  : UnsafeOp(basic_type, object, offset, false, is_volatile), _is_raw(is_raw)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_raw()                                  { return _is_raw; }
};


LEAF(UnsafePut, UnsafeOp)
 private:
  Value _value;             // Value to be stored
 public:
  UnsafePut(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f); f->visit(&_value); }
};

LEAF(UnsafeGetAndSet, UnsafeOp)
 private:
  Value _value;             // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSet(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f); f->visit(&_value); }
};
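
// Illustrative sketch (not part of this header): a builder creating the IR for
// a volatile int read via Unsafe could append something like the following.
// 'obj' and 'off' are hypothetical Values already in the graph.
//
//   UnsafeGet* load = new UnsafeGet(T_INT, obj, off, /*is_volatile*/ true);
//   // load->type() is intType (derived from T_INT); the node is pinned because
//   // Unsafe accesses must not be moved or eliminated speculatively.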

LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  ciMethod* callee()             const { return _callee; }
  Value recv()                   const { return _recv; }
  ciKlass* known_holder()        const { return _known_holder; }
  int nb_profiled_args()         const { return _obj_args == nullptr ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i)   const { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined()                 const { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != nullptr) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;
  ciMethod* _callee;
  int       _bci_of_invoke;
  Value     _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileReturnType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()    const { return _method; }
  ciMethod* callee()    const { return _callee; }
  int bci_of_invoke()   const { return _bci_of_invoke; }
  Value ret()           const { return _ret; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != nullptr) {
      f->visit(&_ret);
    }
  }
};

LEAF(ProfileACmpTypes, Instruction)
 private:
  ciMethod* _method;
  int       _bci;
  Value     _left;
  Value     _right;
  bool      _left_maybe_null;
  bool      _right_maybe_null;

 public:
  ProfileACmpTypes(ciMethod* method, int bci, Value left, Value right)
    : Instruction(voidType)
    , _method(method)
    , _bci(bci)
    , _left(left)
    , _right(right)
  {
    // The ProfileACmp has side-effects and must occur precisely where located
    pin();
    _left_maybe_null = true;
    _right_maybe_null = true;
  }

  ciMethod* method()          const { return _method; }
  int bci()                   const { return _bci; }
  Value left()                const { return _left; }
  Value right()               const { return _right; }
  bool left_maybe_null()      const { return _left_maybe_null; }
  bool right_maybe_null()     const { return _right_maybe_null; }
  void set_left_maybe_null(bool v)  { _left_maybe_null = v; }
  void set_right_maybe_null(bool v) { _right_maybe_null = v; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_left != nullptr) {
      f->visit(&_left);
    }
    if (_right != nullptr) {
      f->visit(&_right);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread;  // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  const char* entry_name() const  { return _entry_name; }
  address entry() const           { return _entry; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const  { return _args->at(i); }
  bool pass_thread() const        { return _pass_thread; }

  virtual void input_values_do(ValueVisitor* f) {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};
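
// Illustrative sketch (not part of this header): an intrinsic expansion might
// call into the C runtime through a leaf (non-safepointing) entry point. The
// entry point 'my_leaf_entry' and the argument Values are hypothetical.
//
//   Values* args = new Values(1);
//   args->append(obj);
//   RuntimeCall* call = new RuntimeCall(voidType, "my_leaf_entry",
//                                       CAST_FROM_FN_PTR(address, my_leaf_entry),
//                                       args);   // pass_thread defaults to true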

// Used to trip the invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);
};

LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  LIR_Code code()          { return _code; }

  virtual void input_values_do(ValueVisitor*)   {}
};

class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  int         _index; // sux index of 'to' block
 public:
  BlockPair(BlockBegin* from, int index): _from(from), _index(index) {}
  BlockBegin* from() const { return _from; }
  int index() const        { return _index; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

inline int         BlockBegin::number_of_sux() const { assert(_end != nullptr, "need end"); return _end->number_of_sux(); }
inline BlockBegin* BlockBegin::sux_at(int i) const   { assert(_end != nullptr, "need end"); return _end->sux_at(i); }

#undef ASSERT_VALUES

#endif // SHARE_C1_C1_INSTRUCTION_HPP