1 /* 2 * Copyright (c) 1999, 2023, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_C1_C1_INSTRUCTION_HPP 26 #define SHARE_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 39 40 // Instruction class hierarchy 41 // 42 // All leaf classes in the class hierarchy are concrete classes 43 // (i.e., are instantiated). All other classes are abstract and 44 // serve factoring. 
// Forward declarations for every HIR instruction class. The visitor
// interface and the as_XXX() downcast methods below refer to all of
// them by pointer, so only these incomplete declarations are needed here.
class Instruction;
class   Phi;
class   Local;
class   Constant;
class   AccessField;
class     LoadField;
class     StoreField;
class   AccessArray;
class     ArrayLength;
class     AccessIndexed;
class       LoadIndexed;
class       StoreIndexed;
class   NegateOp;
class   Op2;
class     ArithmeticOp;
class     ShiftOp;
class     LogicOp;
class     CompareOp;
class     IfOp;
class   Convert;
class   NullCheck;
class   TypeCast;
class   OsrEntry;
class   ExceptionObject;
class   StateSplit;
class     Invoke;
class     NewInstance;
class     NewArray;
class       NewTypeArray;
class       NewObjectArray;
class       NewMultiArray;
class     Deoptimize;
class     TypeCheck;
class       CheckCast;
class       InstanceOf;
class     AccessMonitor;
class       MonitorEnter;
class       MonitorExit;
class     Intrinsic;
class     BlockBegin;
class     BlockEnd;
class       Goto;
class       If;
class       Switch;
class         TableSwitch;
class         LookupSwitch;
class       Return;
class       Throw;
class       Base;
class   RoundFP;
class   UnsafeOp;
class     UnsafeGet;
class     UnsafePut;
class     UnsafeGetAndSet;
class   ProfileCall;
class   ProfileReturnType;
class   ProfileACmpTypes;
class   ProfileInvoke;
class   RuntimeCall;
class   MemBar;
class   RangeCheckPredicate;
// Assert is only emitted in debug builds (used for internal consistency checks).
#ifdef ASSERT
class   Assert;
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
typedef GrowableArray<Value> Values;
typedef GrowableArray<ValueStack*> ValueStackStack;

// BlockClosure is the base class for block traversal/iteration.
class BlockClosure: public CompilationResourceObj {
 public:
  // Called once per basic block during traversal; subclasses supply the work.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
// Note: the visitor receives a Value* (pointer to the edge), which
// allows it to substitute the referenced instruction in place.
class ValueVisitor: public StackObj {
 public:
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with convenience traversal helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  // Pre-filled list: 'size' elements, each initialized to 'init'.
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  // Apply 'closure' to each block in list order / reverse list order.
  void iterate_forward(BlockClosure* closure);
  void iterate_backward(BlockClosure* closure);
  void values_do(ValueVisitor* f);
  // Debug printing; compiled out in PRODUCT builds.
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
class InstructionVisitor: public StackObj {
 public:
  // One do_X per concrete (leaf) instruction class; dispatch happens via
  // Instruction::visit(), generated by the LEAF macro below.
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGet      (UnsafeGet*       x) = 0;
  virtual void do_UnsafePut      (UnsafePut*       x) = 0;
  virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileACmpTypes(ProfileACmpTypes* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!
//
// HASHn combines n values by shift-and-xor; the shift of 7 spreads
// the low bits of successive operands across the intx result.

#define HASH1(x1                 )                    ((intx)(x1))
#define HASH2(x1, x2             )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3         )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                         ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))
#define HASH5(x1, x2, x3, x4, x5)                     ((HASH4(x1, x2, x3, x4) << 7) ^ HASH1(x5))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that a 0 is
//       never produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.
//
// Note: no comments may appear inside the macro bodies - the trailing
//       backslashes splice lines before comments are recognized.

#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == nullptr) return false;                  \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == nullptr) return false;                  \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == nullptr) return false;                  \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \

#define HASHING4(class_name, enabled, f1, f2, f3, f4)     \
  virtual intx hash() const {                             \
    return (enabled) ? HASH5(name(), f1, f2, f3, f4) : 0; \
  }                                                       \
  virtual bool is_equal(Value v) const {                  \
    if (!(enabled)  ) return false;                       \
    class_name* _v = v->as_##class_name();                \
    if (_v == nullptr  ) return false;                    \
    if (f1 != _v->f1) return false;                       \
    if (f2 != _v->f2) return false;                       \
    if (f3 != _v->f3) return false;                       \
    if (f4 != _v->f4) return false;                       \
    return true;                                          \
  }                                                       \


// The mother of all instructions...
294 295 class Instruction: public CompilationResourceObj { 296 private: 297 int _id; // the unique instruction id 298 #ifndef PRODUCT 299 int _printable_bci; // the bci of the instruction for printing 300 #endif 301 int _use_count; // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1 302 int _pin_state; // set of PinReason describing the reason for pinning 303 ValueType* _type; // the instruction value type 304 Instruction* _next; // the next instruction if any (null for BlockEnd instructions) 305 Instruction* _subst; // the substitution instruction if any 306 LIR_Opr _operand; // LIR specific information 307 unsigned int _flags; // Flag bits 308 309 ValueStack* _state_before; // Copy of state with input operands still on stack (or null) 310 ValueStack* _exception_state; // Copy of state for exception handling 311 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 312 313 friend class UseCountComputer; 314 friend class GraphBuilder; 315 316 void update_exception_state(ValueStack* state); 317 318 protected: 319 BlockBegin* _block; // Block that contains this instruction 320 321 void set_type(ValueType* type) { 322 assert(type != nullptr, "type must exist"); 323 _type = type; 324 } 325 326 // Helper class to keep track of which arguments need a null check 327 class ArgsNonNullState { 328 private: 329 int _nonnull_state; // mask identifying which args are nonnull 330 public: 331 ArgsNonNullState() 332 : _nonnull_state(AllBits) {} 333 334 // Does argument number i needs a null check? 335 bool arg_needs_null_check(int i) const { 336 // No data is kept for arguments starting at position 33 so 337 // conservatively assume that they need a null check. 
338 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 339 return is_set_nth_bit(_nonnull_state, i); 340 } 341 return true; 342 } 343 344 // Set whether argument number i needs a null check or not 345 void set_arg_needs_null_check(int i, bool check) { 346 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 347 if (check) { 348 _nonnull_state |= (int)nth_bit(i); 349 } else { 350 _nonnull_state &= (int)~(nth_bit(i)); 351 } 352 } 353 } 354 }; 355 356 public: 357 void* operator new(size_t size) throw() { 358 Compilation* c = Compilation::current(); 359 void* res = c->arena()->Amalloc(size); 360 return res; 361 } 362 363 static const int no_bci = -99; 364 365 enum InstructionFlag { 366 NeedsNullCheckFlag = 0, 367 NeverNullFlag, // For "Q" signatures 368 CanTrapFlag, 369 DirectCompareFlag, 370 IsEliminatedFlag, 371 IsSafepointFlag, 372 IsStaticFlag, 373 NeedsStoreCheckFlag, 374 NeedsWriteBarrierFlag, 375 PreservesStateFlag, 376 TargetIsFinalFlag, 377 TargetIsLoadedFlag, 378 UnorderedIsTrueFlag, 379 NeedsPatchingFlag, 380 ThrowIncompatibleClassChangeErrorFlag, 381 InvokeSpecialReceiverCheckFlag, 382 ProfileMDOFlag, 383 IsLinkedInBlockFlag, 384 NeedsRangeCheckFlag, 385 InWorkListFlag, 386 DeoptimizeOnException, 387 KillsMemoryFlag, 388 InstructionLastFlag 389 }; 390 391 public: 392 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 393 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 394 395 // 'globally' used condition values 396 enum Condition { 397 eql, neq, lss, leq, gtr, geq, aeq, beq 398 }; 399 400 // Instructions may be pinned for many reasons and under certain conditions 401 // with enough knowledge it's possible to safely unpin them. 
402 enum PinReason { 403 PinUnknown = 1 << 0 404 , PinExplicitNullCheck = 1 << 3 405 , PinStackForStateSplit= 1 << 12 406 , PinStateSplitConstructor= 1 << 13 407 , PinGlobalValueNumbering= 1 << 14 408 }; 409 410 static Condition mirror(Condition cond); 411 static Condition negate(Condition cond); 412 413 // initialization 414 static int number_of_instructions() { 415 return Compilation::current()->number_of_instructions(); 416 } 417 418 // creation 419 Instruction(ValueType* type, ValueStack* state_before = nullptr, bool type_is_constant = false) 420 : _id(Compilation::current()->get_next_id()), 421 #ifndef PRODUCT 422 _printable_bci(-99), 423 #endif 424 _use_count(0) 425 , _pin_state(0) 426 , _type(type) 427 , _next(nullptr) 428 , _subst(nullptr) 429 , _operand(LIR_OprFact::illegalOpr) 430 , _flags(0) 431 , _state_before(state_before) 432 , _exception_handlers(nullptr) 433 , _block(nullptr) 434 { 435 check_state(state_before); 436 assert(type != nullptr && (!type->is_constant() || type_is_constant), "type must exist"); 437 update_exception_state(_state_before); 438 } 439 440 // accessors 441 int id() const { return _id; } 442 #ifndef PRODUCT 443 bool has_printable_bci() const { return _printable_bci != -99; } 444 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 445 void set_printable_bci(int bci) { _printable_bci = bci; } 446 #endif 447 int dominator_depth(); 448 int use_count() const { return _use_count; } 449 int pin_state() const { return _pin_state; } 450 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 451 ValueType* type() const { return _type; } 452 BlockBegin *block() const { return _block; } 453 Instruction* prev(); // use carefully, expensive operation 454 Instruction* next() const { return _next; } 455 bool has_subst() const { return _subst != nullptr; } 456 Instruction* subst() { return _subst == nullptr ? 
this : _subst->subst(); } 457 LIR_Opr operand() const { return _operand; } 458 459 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 460 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 461 void set_null_free(bool f) { set_flag(NeverNullFlag, f); } 462 bool is_null_free() const { return check_flag(NeverNullFlag); } 463 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 464 bool can_be_linked() { return as_Local() == nullptr && as_Phi() == nullptr; } 465 466 bool is_null_obj() { return as_Constant() != nullptr && type()->as_ObjectType()->constant_value()->is_null_object(); } 467 468 bool has_uses() const { return use_count() > 0; } 469 ValueStack* state_before() const { return _state_before; } 470 ValueStack* exception_state() const { return _exception_state; } 471 virtual bool needs_exception_state() const { return true; } 472 XHandlers* exception_handlers() const { return _exception_handlers; } 473 ciKlass* as_loaded_klass_or_null() const; 474 475 // manipulation 476 void pin(PinReason reason) { _pin_state |= reason; } 477 void pin() { _pin_state |= PinUnknown; } 478 // DANGEROUS: only used by EliminateStores 479 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 480 481 Instruction* set_next(Instruction* next) { 482 assert(next->has_printable_bci(), "_printable_bci should have been set"); 483 assert(next != nullptr, "must not be null"); 484 assert(as_BlockEnd() == nullptr, "BlockEnd instructions must have no next"); 485 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 486 487 BlockBegin *block = this->block(); 488 next->_block = block; 489 490 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 491 _next = next; 492 return next; 493 } 494 495 Instruction* set_next(Instruction* next, int bci) { 496 #ifndef PRODUCT 497 next->set_printable_bci(bci); 498 #endif 499 return set_next(next); 500 } 501 502 // 
when blocks are merged 503 void fixup_block_pointers() { 504 Instruction *cur = next()->next(); // next()'s block is set in set_next 505 while (cur && cur->_block != block()) { 506 cur->_block = block(); 507 cur = cur->next(); 508 } 509 } 510 511 Instruction *insert_after(Instruction *i) { 512 Instruction* n = _next; 513 set_next(i); 514 i->set_next(n); 515 return _next; 516 } 517 518 bool is_loaded_flat_array() const; 519 bool maybe_flat_array(); 520 bool maybe_null_free_array(); 521 522 Instruction *insert_after_same_bci(Instruction *i) { 523 #ifndef PRODUCT 524 i->set_printable_bci(printable_bci()); 525 #endif 526 return insert_after(i); 527 } 528 529 void set_subst(Instruction* subst) { 530 assert(subst == nullptr || 531 type()->base() == subst->type()->base() || 532 subst->type()->base() == illegalType, "type can't change"); 533 _subst = subst; 534 } 535 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 536 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 537 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 538 539 // machine-specifics 540 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 541 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 542 543 // generic 544 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 545 virtual Phi* as_Phi() { return nullptr; } 546 virtual Local* as_Local() { return nullptr; } 547 virtual Constant* as_Constant() { return nullptr; } 548 virtual AccessField* as_AccessField() { return nullptr; } 549 virtual LoadField* as_LoadField() { return nullptr; } 550 virtual StoreField* as_StoreField() { return nullptr; } 551 virtual AccessArray* as_AccessArray() { return nullptr; } 552 virtual ArrayLength* as_ArrayLength() { return nullptr; } 553 virtual AccessIndexed* as_AccessIndexed() { return nullptr; } 554 virtual 
LoadIndexed* as_LoadIndexed() { return nullptr; } 555 virtual StoreIndexed* as_StoreIndexed() { return nullptr; } 556 virtual NegateOp* as_NegateOp() { return nullptr; } 557 virtual Op2* as_Op2() { return nullptr; } 558 virtual ArithmeticOp* as_ArithmeticOp() { return nullptr; } 559 virtual ShiftOp* as_ShiftOp() { return nullptr; } 560 virtual LogicOp* as_LogicOp() { return nullptr; } 561 virtual CompareOp* as_CompareOp() { return nullptr; } 562 virtual IfOp* as_IfOp() { return nullptr; } 563 virtual Convert* as_Convert() { return nullptr; } 564 virtual NullCheck* as_NullCheck() { return nullptr; } 565 virtual OsrEntry* as_OsrEntry() { return nullptr; } 566 virtual StateSplit* as_StateSplit() { return nullptr; } 567 virtual Invoke* as_Invoke() { return nullptr; } 568 virtual NewInstance* as_NewInstance() { return nullptr; } 569 virtual NewArray* as_NewArray() { return nullptr; } 570 virtual NewTypeArray* as_NewTypeArray() { return nullptr; } 571 virtual NewObjectArray* as_NewObjectArray() { return nullptr; } 572 virtual NewMultiArray* as_NewMultiArray() { return nullptr; } 573 virtual TypeCheck* as_TypeCheck() { return nullptr; } 574 virtual CheckCast* as_CheckCast() { return nullptr; } 575 virtual InstanceOf* as_InstanceOf() { return nullptr; } 576 virtual TypeCast* as_TypeCast() { return nullptr; } 577 virtual AccessMonitor* as_AccessMonitor() { return nullptr; } 578 virtual MonitorEnter* as_MonitorEnter() { return nullptr; } 579 virtual MonitorExit* as_MonitorExit() { return nullptr; } 580 virtual Intrinsic* as_Intrinsic() { return nullptr; } 581 virtual BlockBegin* as_BlockBegin() { return nullptr; } 582 virtual BlockEnd* as_BlockEnd() { return nullptr; } 583 virtual Goto* as_Goto() { return nullptr; } 584 virtual If* as_If() { return nullptr; } 585 virtual TableSwitch* as_TableSwitch() { return nullptr; } 586 virtual LookupSwitch* as_LookupSwitch() { return nullptr; } 587 virtual Return* as_Return() { return nullptr; } 588 virtual Throw* as_Throw() { return 
nullptr; } 589 virtual Base* as_Base() { return nullptr; } 590 virtual RoundFP* as_RoundFP() { return nullptr; } 591 virtual ExceptionObject* as_ExceptionObject() { return nullptr; } 592 virtual UnsafeOp* as_UnsafeOp() { return nullptr; } 593 virtual ProfileInvoke* as_ProfileInvoke() { return nullptr; } 594 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return nullptr; } 595 596 #ifdef ASSERT 597 virtual Assert* as_Assert() { return nullptr; } 598 #endif 599 600 virtual void visit(InstructionVisitor* v) = 0; 601 602 virtual bool can_trap() const { return false; } 603 604 virtual void input_values_do(ValueVisitor* f) = 0; 605 virtual void state_values_do(ValueVisitor* f); 606 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 607 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 608 609 virtual ciType* exact_type() const; 610 virtual ciType* declared_type() const { return nullptr; } 611 612 // hashing 613 virtual const char* name() const = 0; 614 HASHING1(Instruction, false, id()) // hashing disabled by default 615 616 // debugging 617 static void check_state(ValueStack* state) PRODUCT_RETURN; 618 void print() PRODUCT_RETURN; 619 void print_line() PRODUCT_RETURN; 620 void print(InstructionPrinter& ip) PRODUCT_RETURN; 621 }; 622 623 624 // The following macros are used to define base (i.e., non-leaf) 625 // and leaf instruction classes. They define class-name related 626 // generic functionality in one place. 
627 628 #define BASE(class_name, super_class_name) \ 629 class class_name: public super_class_name { \ 630 public: \ 631 virtual class_name* as_##class_name() { return this; } \ 632 633 634 #define LEAF(class_name, super_class_name) \ 635 BASE(class_name, super_class_name) \ 636 public: \ 637 virtual const char* name() const { return #class_name; } \ 638 virtual void visit(InstructionVisitor* v) { v->do_##class_name(this); } \ 639 640 641 // Debugging support 642 643 644 #ifdef ASSERT 645 class AssertValues: public ValueVisitor { 646 void visit(Value* x) { assert((*x) != nullptr, "value must exist"); } 647 }; 648 #define ASSERT_VALUES { AssertValues assert_value; values_do(&assert_value); } 649 #else 650 #define ASSERT_VALUES 651 #endif // ASSERT 652 653 654 // A Phi is a phi function in the sense of SSA form. It stands for 655 // the value of a local variable at the beginning of a join block. 656 // A Phi consists of n operands, one for every incoming branch. 657 658 LEAF(Phi, Instruction) 659 private: 660 int _pf_flags; // the flags of the phi function 661 int _index; // to value on operand stack (index < 0) or to local 662 public: 663 // creation 664 Phi(ValueType* type, BlockBegin* b, int index) 665 : Instruction(type->base()) 666 , _pf_flags(0) 667 , _index(index) 668 { 669 _block = b; 670 NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci())); 671 if (type->is_illegal()) { 672 make_illegal(); 673 } 674 } 675 676 // flags 677 enum Flag { 678 no_flag = 0, 679 visited = 1 << 0, 680 cannot_simplify = 1 << 1 681 }; 682 683 // accessors 684 bool is_local() const { return _index >= 0; } 685 bool is_on_stack() const { return !is_local(); } 686 int local_index() const { assert(is_local(), ""); return _index; } 687 int stack_index() const { assert(is_on_stack(), ""); return -(_index+1); } 688 689 Value operand_at(int i) const; 690 int operand_count() const; 691 692 void set(Flag f) { _pf_flags |= f; } 693 void clear(Flag f) { _pf_flags &= ~f; } 694 bool is_set(Flag 
f) const { return (_pf_flags & f) != 0; } 695 696 // Invalidates phis corresponding to merges of locals of two different types 697 // (these should never be referenced, otherwise the bytecodes are illegal) 698 void make_illegal() { 699 set(cannot_simplify); 700 set_type(illegalType); 701 } 702 703 bool is_illegal() const { 704 return type()->is_illegal(); 705 } 706 707 // generic 708 virtual void input_values_do(ValueVisitor* f) { 709 } 710 }; 711 712 713 // A local is a placeholder for an incoming argument to a function call. 714 LEAF(Local, Instruction) 715 private: 716 int _java_index; // the local index within the method to which the local belongs 717 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 718 ciType* _declared_type; 719 public: 720 // creation 721 Local(ciType* declared, ValueType* type, int index, bool receiver, bool null_free) 722 : Instruction(type) 723 , _java_index(index) 724 , _is_receiver(receiver) 725 , _declared_type(declared) 726 { 727 set_null_free(null_free); 728 NOT_PRODUCT(set_printable_bci(-1)); 729 } 730 731 // accessors 732 int java_index() const { return _java_index; } 733 bool is_receiver() const { return _is_receiver; } 734 735 virtual ciType* declared_type() const { return _declared_type; } 736 737 // generic 738 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 739 }; 740 741 742 LEAF(Constant, Instruction) 743 public: 744 // creation 745 Constant(ValueType* type): 746 Instruction(type, nullptr, /*type_is_constant*/ true) 747 { 748 assert(type->is_constant(), "must be a constant"); 749 } 750 751 Constant(ValueType* type, ValueStack* state_before, bool kills_memory = false): 752 Instruction(type, state_before, /*type_is_constant*/ true) 753 { 754 assert(state_before != nullptr, "only used for constants which need patching"); 755 assert(type->is_constant(), "must be a constant"); 756 set_flag(KillsMemoryFlag, kills_memory); 757 pin(); // since it's patching it needs to be 
pinned 758 } 759 760 // generic 761 virtual bool can_trap() const { return state_before() != nullptr; } 762 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 763 764 virtual intx hash() const; 765 virtual bool is_equal(Value v) const; 766 767 virtual ciType* exact_type() const; 768 769 bool kills_memory() const { return check_flag(KillsMemoryFlag); } 770 771 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 772 773 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 774 BlockBegin* compare(Instruction::Condition cond, Value right, 775 BlockBegin* true_sux, BlockBegin* false_sux) const { 776 switch (compare(cond, right)) { 777 case not_comparable: 778 return nullptr; 779 case cond_false: 780 return false_sux; 781 case cond_true: 782 return true_sux; 783 default: 784 ShouldNotReachHere(); 785 return nullptr; 786 } 787 } 788 }; 789 790 791 BASE(AccessField, Instruction) 792 private: 793 Value _obj; 794 int _offset; 795 ciField* _field; 796 NullCheck* _explicit_null_check; // For explicit null check elimination 797 798 public: 799 // creation 800 AccessField(Value obj, int offset, ciField* field, bool is_static, 801 ValueStack* state_before, bool needs_patching) 802 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 803 , _obj(obj) 804 , _offset(offset) 805 , _field(field) 806 , _explicit_null_check(nullptr) 807 { 808 set_needs_null_check(!is_static); 809 set_flag(IsStaticFlag, is_static); 810 set_flag(NeedsPatchingFlag, needs_patching); 811 ASSERT_VALUES 812 // pin of all instructions with memory access 813 pin(); 814 } 815 816 // accessors 817 Value obj() const { return _obj; } 818 int offset() const { return _offset; } 819 ciField* field() const { return _field; } 820 BasicType field_type() const { return _field->type()->basic_type(); } 821 bool is_static() const { return check_flag(IsStaticFlag); } 822 NullCheck* explicit_null_check() const { return _explicit_null_check; } 823 bool 
needs_patching() const { return check_flag(NeedsPatchingFlag); } 824 825 // Unresolved getstatic and putstatic can cause initialization. 826 // Technically it occurs at the Constant that materializes the base 827 // of the static fields but it's simpler to model it here. 828 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 829 830 // manipulation 831 832 // Under certain circumstances, if a previous NullCheck instruction 833 // proved the target object non-null, we can eliminate the explicit 834 // null check and do an implicit one, simply specifying the debug 835 // information from the NullCheck. This field should only be consulted 836 // if needs_null_check() is true. 837 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 838 839 // generic 840 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 841 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 842 }; 843 844 845 LEAF(LoadField, AccessField) 846 public: 847 // creation 848 LoadField(Value obj, int offset, ciField* field, bool is_static, 849 ValueStack* state_before, bool needs_patching, 850 ciInlineKlass* inline_klass = nullptr, Value default_value = nullptr ) 851 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 852 { 853 set_null_free(field->is_null_free()); 854 } 855 856 ciType* declared_type() const; 857 858 // generic; cannot be eliminated if needs patching or if volatile. 
859 HASHING3(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset(), declared_type()) 860 }; 861 862 863 LEAF(StoreField, AccessField) 864 private: 865 Value _value; 866 ciField* _enclosing_field; // enclosing field (the flat one) for nested fields 867 868 public: 869 // creation 870 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 871 ValueStack* state_before, bool needs_patching); 872 873 // accessors 874 Value value() const { return _value; } 875 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 876 ciField* enclosing_field() const { return _enclosing_field; } 877 void set_enclosing_field(ciField* field) { _enclosing_field = field; } 878 879 // generic 880 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 881 }; 882 883 884 BASE(AccessArray, Instruction) 885 private: 886 Value _array; 887 888 public: 889 // creation 890 AccessArray(ValueType* type, Value array, ValueStack* state_before) 891 : Instruction(type, state_before) 892 , _array(array) 893 { 894 set_needs_null_check(true); 895 ASSERT_VALUES 896 pin(); // instruction with side effect (null exception or range check throwing) 897 } 898 899 Value array() const { return _array; } 900 901 // generic 902 virtual bool can_trap() const { return needs_null_check(); } 903 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 904 }; 905 906 907 LEAF(ArrayLength, AccessArray) 908 private: 909 NullCheck* _explicit_null_check; // For explicit null check elimination 910 911 public: 912 // creation 913 ArrayLength(Value array, ValueStack* state_before) 914 : AccessArray(intType, array, state_before) 915 , _explicit_null_check(nullptr) {} 916 917 // accessors 918 NullCheck* explicit_null_check() const { return _explicit_null_check; } 919 920 // setters 921 // See LoadField::set_explicit_null_check for documentation 922 void set_explicit_null_check(NullCheck* check) { 
_explicit_null_check = check; }

  // generic
  HASHING1(ArrayLength, true, array()->subst())
};


// Base class for array element accesses (array[index]); carries the index,
// an optional explicit length operand, and the element's basic type.
BASE(AccessIndexed, AccessArray)
 private:
  Value     _index;
  Value     _length;     // may be null when the range check has been eliminated (see clear_length())
  BasicType _elt_type;
  bool      _mismatched; // access does not match the array's declared element type
  ciMethod* _profiled_method;
  int       _profiled_bci;

 public:
  // creation
  AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched)
  : AccessArray(as_ValueType(elt_type), array, state_before)
  , _index(index)
  , _length(length)
  , _elt_type(elt_type)
  , _mismatched(mismatched)
  , _profiled_method(nullptr), _profiled_bci(0)
  {
    set_flag(Instruction::NeedsRangeCheckFlag, true);
    ASSERT_VALUES
  }

  // accessors
  Value index() const { return _index; }
  Value length() const { return _length; }
  BasicType elt_type() const { return _elt_type; }
  bool mismatched() const { return _mismatched; }

  void clear_length() { _length = nullptr; }
  // perform elimination of range checks involving constants
  bool compute_needs_range_check();

  // Helpers for MethodData* profiling
  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const { return _profiled_bci; }


  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != nullptr) f->visit(&_length); }
};

class DelayedLoadIndexed;

// Loads an element from an array.
LEAF(LoadIndexed, AccessIndexed)
 private:
  NullCheck* _explicit_null_check;  // For explicit null check elimination
  NewInstance* _vt;                 // buffered object for a flat-array element load, if any
  DelayedLoadIndexed* _delayed;     // set when the element load is deferred (see DelayedLoadIndexed)

 public:
  // creation
  LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false)
  : AccessIndexed(array, index, length, elt_type, state_before, mismatched)
  , _explicit_null_check(nullptr), _vt(nullptr), _delayed(nullptr) {}

  // accessors
  NullCheck* explicit_null_check() const { return _explicit_null_check; }

  // setters
  // See LoadField::set_explicit_null_check for documentation
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  ciType* exact_type() const;
  ciType* declared_type() const;

  NewInstance* vt() const { return _vt; }
  void set_vt(NewInstance* vt) { _vt = vt; }

  DelayedLoadIndexed* delayed() const { return _delayed; }
  void set_delayed(DelayedLoadIndexed* delayed) { _delayed = delayed; }

  // generic; only value-numberable when the load is not delayed and not profiled
  HASHING4(LoadIndexed, delayed() == nullptr && !should_profile(), elt_type(), array()->subst(), index()->subst(), vt())
};

// Bookkeeping for a LoadIndexed whose field access is deferred: remembers
// the state before the load and accumulates the (possibly nested) field
// offset via update().
class DelayedLoadIndexed : public CompilationResourceObj {
 private:
  LoadIndexed* _load_instr;
  ValueStack* _state_before;
  ciField* _field;
  int _offset;
 public:
  DelayedLoadIndexed(LoadIndexed* load, ValueStack* state_before)
  : _load_instr(load)
  , _state_before(state_before)
  , _field(nullptr)
  , _offset(0) { }

  // Record the innermost field and add its offset to the running total.
  void update(ciField* field, int offset) {
    _field = field;
    _offset += offset;
  }

  LoadIndexed* load_instr() const { return _load_instr; }
  ValueStack* state_before() const { return _state_before; }
  ciField* field() const { return _field; }
  int offset() const { return _offset; }
};

// Stores a value into an array element.
LEAF(StoreIndexed, AccessIndexed)
 private:
  Value _value;

  bool  _check_boolean;  // value must be masked to 0/1 for boolean arrays

 public:
  // creation
  StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before,
               bool
check_boolean, bool mismatched = false);

  // accessors
  Value value() const { return _value; }
  bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); }
  bool check_boolean() const { return _check_boolean; }

  // Flattened array support
  bool is_exact_flat_array_store() const;
  // generic
  virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single operand; result type is the operand's base type.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); }
};


// Base class for binary operations: records the bytecode and the two operands.
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = nullptr)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value x() const { return _x; }
  Value y() const { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const { return false; }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
};


// Arithmetic binary operation (add, sub, mul, div, rem, ...); pinned when it can trap
// (e.g. integer division).
LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    if (can_trap()) pin();
  }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Shift operation; second operand is the shift distance.
LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Bitwise logic operation (and, or, xor).
LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way comparison (lcmp, fcmpl/fcmpg, dcmpl/dcmpg); always produces an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional expression: (x cond y) ? tval : fval. The condition code is
// stored in Op2's bytecode slot; use cond(), not op().
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;
  bool  _substitutability_check;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval, ValueStack* state_before, bool substitutability_check)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  , _substitutability_check(substitutability_check)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
    set_state_before(state_before);
  }

  // accessors
  virtual bool is_commutative() const;
  Bytecodes::Code op() const { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const { return (Condition)Op2::op(); }
  Value tval() const { return _tval; }
  Value fval() const { return _fval; }
  bool substitutability_check() const { return _substitutability_check; }
  // generic
  virtual void
input_values_do(ValueVisitor* f) { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, f2d, ...); result has the target type.
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const { return _op; }
  Value value() const { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check on an object value; pinned because it may throw.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const { return _obj; }

  // setters
  void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const { return _declared_type; }
  Value   obj() const { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
};


// Base class for instructions that carry a complete ValueStack state
// (e.g. because they can deoptimize or branch); pins itself on construction.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = nullptr)
  : Instruction(type, state_before)
  , _state(nullptr)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const { return _state; }
  IRScope* scope() const; // the state's scope

  // manipulation
  void set_state(ValueStack* state) { assert(_state == nullptr, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// A method invocation (invokevirtual/special/static/interface/dynamic).
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;       // null for static calls
  Values*         _args;
  BasicTypeList*  _signature;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const { return _code; }
  Value receiver() const { return _recv; }
  bool has_receiver() const { return receiver() != nullptr; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }
  BasicTypeList* signature() const { return _signature; }
  ciMethod*
target() const { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const { return check_flag(TargetIsLoadedFlag); }

  // JSR 292 support
  bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Allocation of a new (non-array) object instance.
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;
  bool _is_unresolved;       // klass was not resolved at parse time
  bool _needs_state_before;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved, bool needs_state_before)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved), _needs_state_before(needs_state_before)
  {}

  // accessors
  ciInstanceKlass* klass() const { return _klass; }
  bool is_unresolved() const { return _is_unresolved; }
  bool needs_state_before() const { return _needs_state_before; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual bool can_trap() const { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};

// Base class for array allocations; holds the length operand
// (null for NewMultiArray, which keeps its dimensions separately).
BASE(NewArray, StateSplit)
 private:
  Value _length;

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  ,
_length(length)
  {
    // Do not ASSERT_VALUES since length is null for NewMultiArray
  }

  // accessors
  Value length() const { return _length; }

  virtual bool needs_exception_state() const { return false; }

  ciType* exact_type() const { return nullptr; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of a one-dimensional array of a primitive element type.
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of a one-dimensional array of an object element type.
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before)
  : NewArray(length, state_before), _klass(klass) { }

  // accessors
  ciKlass* klass() const { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array; dimensions are kept in _dims.
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(nullptr, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Values* dims() const { return _dims; }
  int rank() const { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length
is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }

  ciType* exact_type() const;
};


// Base class for checkcast/instanceof style checks of an object against a klass.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass* _klass;
  Value    _obj;

  ciMethod* _profiled_method;
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(nullptr), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const { return _klass; }
  Value obj() const { return _obj; }
  bool is_loaded() const { return klass() != nullptr; }
  bool direct_compare() const { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag) { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci) { _profiled_bci = bci; }
  bool should_profile() const { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; }
  int profiled_bci() const { return _profiled_bci; }
};


// The checkcast bytecode: throws if the object is not of the given klass.
LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before)
  : TypeCheck(klass, obj, objectType, state_before) { }

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


// The instanceof bytecode: produces an int (0 or 1) and never throws.
LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const { return false; }
};


// Base class for monitorenter/monitorexit; records the object and monitor slot.
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = nullptr)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const { return _obj; }
  int monitor_no() const { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
};


// The monitorenter bytecode.
LEAF(MonitorEnter, AccessMonitor)
  bool _maybe_inlinetype;  // receiver might be an inline type (which cannot be locked)
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_inlinetype)
  : AccessMonitor(obj, monitor_no, state_before)
  , _maybe_inlinetype(maybe_inlinetype)
  {
    ASSERT_VALUES
  }

  // accessors
  bool maybe_inlinetype() const { return _maybe_inlinetype; }

  // generic
  virtual bool can_trap() const { return
true; }
};


// The monitorexit bytecode.
LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, nullptr)
  {
    ASSERT_VALUES
  }
};


// A call to a VM intrinsic, identified by its vmIntrinsics::ID.
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  Values*          _args;
  Value            _recv;           // null when the intrinsic has no receiver
  ArgsNonNullState _nonnull_state;  // per-argument null-check requirements

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(nullptr)
  {
    assert(args != nullptr, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);  // receiver is passed as the first argument
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const { return _id; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const { return _args->at(i); }

  bool has_receiver() const { return (_recv != nullptr); }
  Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

// The start of a basic block: carries block identity, CFG edges, dominator
// and loop information, liveness bitmaps and the generated LIR for the block.
LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                          // the unique block id
  int        _bci;                               // start-bci of block
  int        _depth_first_number;                // number of this block in a depth-first ordering
  int        _linear_scan_number;                // number of this block in linear-scan ordering
  int        _dominator_depth;
  int        _loop_depth;                        // the loop nesting level of this block
  int        _loop_index;                        // number of the innermost loop of this block
  int        _flags;                             // the flags associated with this block

  // fields used by BlockListBuilder
  int            _total_preds;                   // number of predecessors found by BlockListBuilder
  ResourceBitMap _stores_to_locals;              // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList   _predecessors;                     // the predecessors of this block
  BlockList   _dominates;                        // list of blocks that are dominated by this block
  BlockBegin* _dominator;                        // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                               // the last instruction of this block
  BlockList  _exception_handlers;                // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;            // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;             // if this block is the start of an exception handler,
                                                 // this
records the PC offset in the assembly code of the
                                                 // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  ResourceBitMap _live_in;                       // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;                      // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;                      // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill;                     // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;               // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id;      // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;       // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // Blocks are allocated in the compilation arena (never freed individually).
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _block_id(Compilation::current()->get_next_block_id())
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _predecessors(2)
  , _dominates(2)
  , _dominator(nullptr)
  , _end(nullptr)
  , _exception_handlers(1)
  , _exception_states(nullptr)
  , _exception_handler_pco(-1)
  , _lir(nullptr)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(nullptr)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;  // a BlockBegin is its own containing block
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const { return _block_id; }
  int bci() const { return _bci; }
  BlockList* dominates() { return &_dominates; }
  BlockBegin* dominator() const { return _dominator; }
  int loop_depth() const { return _loop_depth; }
  int dominator_depth() const { return _dominator_depth; }
  int depth_first_number() const { return _depth_first_number; }
  int linear_scan_number() const { return _linear_scan_number; }
  BlockEnd* end() const { return _end; }
  Label* label() { return &_label; }
  LIR_List* lir() const { return _lir; }
  int exception_handler_pco() const { return _exception_handler_pco; }
  ResourceBitMap& live_in() { return _live_in; }
  ResourceBitMap& live_out() { return _live_out; }
  ResourceBitMap& live_gen() { return _live_gen; }
  ResourceBitMap& live_kill() { return _live_kill; }
  ResourceBitMap& fpu_register_usage() { return _fpu_register_usage; }
  intArray* fpu_stack_state() const { return _fpu_stack_state; }
  int first_lir_instruction_id() const { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const { return _last_lir_instruction_id; }
  int total_preds() const { return _total_preds; }
  BitMap& stores_to_locals() { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom) { _dominator = dom; }
  void set_loop_depth(int d) { _loop_depth = d; }
  void set_dominator_depth(int d) { _dominator_depth = d; }
  void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn) { _linear_scan_number = lsn; }
  void set_end(BlockEnd* new_end);
  static void disconnect_edge(BlockBegin* from,
BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir) { _lir = lir; }
  void set_exception_handler_pco(int pco) { _exception_handler_pco = pco; }
  void set_live_in (const ResourceBitMap& map) { _live_in = map; }
  void set_live_out (const ResourceBitMap& map) { _live_out = map; }
  void set_live_gen (const ResourceBitMap& map) { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map) { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state) { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id) { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id) { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1) { _total_preds += n; }
  void init_stores_to_locals(int locals_count) { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const { return _predecessors.contains(pred); }
  int number_of_preds() const { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int number_of_exception_handlers() const { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states() { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == nullptr ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // when set, do not try to eliminate range checks in this block
  };

  void set(Flag f) { _flags |= f; }
  void clear(Flag f) { _flags &= ~f; }
  bool is_set(Flag f) const { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder (BlockClosure* closure);
  void iterate_postorder (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix) { _loop_index = ix; }
  int loop_index() const { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state, bool has_irreducible_loops); // try to merge states at block begin
  void merge(ValueStack* state,
bool has_irreducible_loops) { 1854 bool b = try_merge(state, has_irreducible_loops); 1855 assert(b, "merge failed"); 1856 } 1857 1858 // debugging 1859 void print_block() PRODUCT_RETURN; 1860 void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN; 1861 1862 }; 1863 1864 1865 BASE(BlockEnd, StateSplit) 1866 private: 1867 BlockList* _sux; 1868 1869 protected: 1870 BlockList* sux() const { return _sux; } 1871 1872 void set_sux(BlockList* sux) { 1873 #ifdef ASSERT 1874 assert(sux != nullptr, "sux must exist"); 1875 for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != nullptr, "sux must exist"); 1876 #endif 1877 _sux = sux; 1878 } 1879 1880 public: 1881 // creation 1882 BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) 1883 : StateSplit(type, state_before) 1884 , _sux(nullptr) 1885 { 1886 set_flag(IsSafepointFlag, is_safepoint); 1887 } 1888 1889 // accessors 1890 bool is_safepoint() const { return check_flag(IsSafepointFlag); } 1891 // For compatibility with old code, for new code use block() 1892 BlockBegin* begin() const { return _block; } 1893 1894 // manipulation 1895 inline void remove_sux_at(int i) { _sux->remove_at(i);} 1896 inline int find_sux(BlockBegin* sux) {return _sux->find(sux);} 1897 1898 // successors 1899 int number_of_sux() const { return _sux != nullptr ? _sux->length() : 0; } 1900 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1901 bool is_sux(BlockBegin* sux) const { return _sux == nullptr ? 
false : _sux->contains(sux); }
  // by convention the default successor is the last entry of the successor list
  BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); }
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
};


// Unconditional branch to a single successor block.
LEAF(Goto, BlockEnd)
 public:
  enum Direction {
    none,            // Just a regular goto
    taken, not_taken // Goto produced from If
  };
 private:
  ciMethod*   _profiled_method;
  int         _profiled_bci;
  Direction   _direction;
 public:
  // creation
  Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false)
    : BlockEnd(illegalType, state_before, is_safepoint)
    , _profiled_method(nullptr)
    , _profiled_bci(0)
    , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  Goto(BlockBegin* sux, bool is_safepoint)
    : BlockEnd(illegalType, nullptr, is_safepoint)
    , _profiled_method(nullptr)
    , _profiled_bci(0)
    , _direction(none) {
    BlockList* s = new BlockList(1);
    s->append(sux);
    set_sux(s);
  }

  // accessors
  bool should_profile() const           { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const     { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const              { return _profiled_bci; }
  Direction direction() const           { return _direction; }

  void set_should_profile(bool value)        { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci)             { _profiled_bci = bci; }
  void set_direction(Direction d)            { _direction = d; }
};

#ifdef ASSERT
// Debug-build-only instruction asserting that x <cond> y holds.
LEAF(Assert, Instruction)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  char       *_message;

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  Assert(Value x, Condition cond, bool unordered_is_true, Value y);

  // accessors
  Value x() const { return _x; }
Condition cond() const { return _cond; }
  bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
  Value y() const { return _y; }
  const char *message() const { return _message; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); }
};
#endif

// Predicate of the form x <cond> y; the no-operand form always fails.
// NOTE(review): appears to guard eliminated range checks (deoptimizing when
// the predicate does not hold) — confirm against c1_RangeCheckElimination.
LEAF(RangeCheckPredicate, StateSplit)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;

  void check_state();

 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state)
  : StateSplit(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    this->set_state(state);
    check_state();
  }

  // Always deoptimize
  RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType)
  {
    this->set_state(state);
    _x = _y = nullptr;
    check_state();
  }

  // accessors
  Value x() const                 { return _x; }
  Condition cond() const          { return _cond; }
  bool unordered_is_true() const  { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                 { return _y; }

  // turn the predicate into the always-deoptimize form
  void always_fail() { _x = _y = nullptr; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); }
  HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond())
};

// Two-way conditional branch on x <cond> y with a true and a false successor.
LEAF(If, BlockEnd)
 private:
  Value       _x;
  Condition   _cond;
  Value       _y;
  ciMethod*   _profiled_method;
  int         _profiled_bci;  // Canonicalizer may alter bci of If node
  bool        _swapped;       // Is the order reversed with respect to the original If in the
                              // bytecode stream?
  bool        _substitutability_check;
 public:
  // creation
  // unordered_is_true is valid for float/double compares only
  If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint, bool substitutability_check=false)
    : BlockEnd(illegalType, state_before, is_safepoint)
  , _x(x)
  , _cond(cond)
  , _y(y)
  , _profiled_method(nullptr)
  , _profiled_bci(0)
  , _swapped(false)
  , _substitutability_check(substitutability_check)
  {
    ASSERT_VALUES
    set_flag(UnorderedIsTrueFlag, unordered_is_true);
    assert(x->type()->tag() == y->type()->tag(), "types must match");
    // successor list: index 0 = taken branch, index 1 = not-taken branch
    BlockList* s = new BlockList(2);
    s->append(tsux);
    s->append(fsux);
    set_sux(s);
  }

  // accessors
  Value x() const                { return _x; }
  Condition cond() const         { return _cond; }
  bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); }
  Value y() const                { return _y; }
  BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ?
0 : 1); }
  BlockBegin* tsux() const       { return sux_for(true); }
  BlockBegin* fsux() const       { return sux_for(false); }
  // successor taken for an unordered float/double comparison result
  BlockBegin* usux() const       { return sux_for(unordered_is_true()); }
  bool should_profile() const    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches
  int profiled_bci() const       { return _profiled_bci; }       // set for profiled branches and tiered
  bool is_swapped() const        { return _swapped; }

  // manipulation
  // exchange x and y and mirror the condition so the branch meaning is preserved
  void swap_operands() {
    Value t = _x; _x = _y; _y = t;
    _cond = mirror(_cond);
  }

  void set_should_profile(bool value)        { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method) { _profiled_method = method; }
  void set_profiled_bci(int bci)             { _profiled_bci = bci; }
  void set_swapped(bool value)               { _swapped = value; }
  bool substitutability_check() const        { return _substitutability_check; }
  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); }
};


// Base class for table/lookup switches; dispatches on _tag.
BASE(Switch, BlockEnd)
 private:
  Value _tag;

 public:
  // creation
  Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint)
  : BlockEnd(illegalType, state_before, is_safepoint)
  , _tag(tag) {
    ASSERT_VALUES
    set_sux(sux);
  }

  // accessors
  Value tag() const  { return _tag; }
  // number of case successors (the extra successor is the default target)
  int length() const { return number_of_sux() - 1; }

  virtual bool needs_exception_state() const { return false; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
};


// Switch over a dense key range [lo_key, hi_key]; successor i handles lo_key + i.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux,
state_before, is_safepoint)
    , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const { return _lo_key; }
  int hi_key() const { return _lo_key + (length() - 1); }
};


// Switch over a sparse, explicit key list; key_at(i) maps to successor i.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != nullptr, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const { return _keys->at(i); }
};


// Method return; result is nullptr for void returns.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == nullptr ? voidType : result->type()->base(), nullptr, true),
    _result(result) {}

  // accessors
  Value result() const    { return _result; }
  bool has_result() const { return result() != nullptr; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Explicit athrow of the given exception object.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const { return _exception; }

  // generic
  virtual bool can_trap() const { return true; }
  virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// Synthetic entry node of the IR; branches to the std (and optional OSR) entry.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, nullptr, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == nullptr || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != nullptr) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const { return default_sux(); }
  // nullptr when the method has no OSR entry (only one successor appended)
  BlockBegin* osr_entry() const { return number_of_sux() < 2 ? nullptr : sux_at(0); }
};


// Models the incoming OSR buffer pointer at an on-stack-replacement entry;
// its type is pointer-sized (long on 64-bit, int on 32-bit).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input; // floating-point value to be rounded

 public:
  // creation
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); }
};


// Base class for jdk.internal.misc.Unsafe memory accesses
// (object + byte offset addressing).
BASE(UnsafeOp, Instruction)
 private:
  Value     _object;      // Object to be fetched from or mutated
  Value     _offset;      // Offset within object
  bool      _is_volatile; // true if volatile - dl/JSR166
  BasicType _basic_type;  // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : Instruction(is_put ? voidType : as_ValueType(basic_type)),
    _object(object), _offset(offset), _is_volatile(is_volatile), _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to..
    pin();
  }

 public:
  // accessors
  BasicType basic_type()  { return _basic_type; }
  Value object()          { return _object; }
  Value offset()          { return _offset; }
  bool  is_volatile()     { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { f->visit(&_object);
                                                    f->visit(&_offset); }
};

// Unsafe load; _is_raw marks loads from a raw (absolute) address.
LEAF(UnsafeGet, UnsafeOp)
 private:
  bool _is_raw;
 public:
  // creation
  UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
    _is_raw = false;
  }
  UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile, bool is_raw)
  : UnsafeOp(basic_type, object, offset, false, is_volatile), _is_raw(is_raw)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_raw() { return _is_raw; }
};


// Unsafe store of _value at object + offset.
LEAF(UnsafePut, UnsafeOp)
 private:
  Value _value; // Value to be stored
 public:
  // creation
  UnsafePut(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value() { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
                                                  f->visit(&_value); }
};

// Atomic get-and-set / get-and-add (when _is_add) on object + offset.
LEAF(UnsafeGetAndSet, UnsafeOp)
 private:
  Value _value; // Value to be stored
  bool  _is_add;
 public:
  // creation
  UnsafeGetAndSet(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const { return _is_add; }
  Value value()       { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    UnsafeOp::input_values_do(f);
    f->visit(&_value); }
};

// Records receiver/argument type profile for a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;        // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;      // arguments for type profiling
  ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null?
  bool             _inlined;       // Are we profiling a call that is inlined

 public:
  // creation
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method()      const { return _method; }
  int bci_of_invoke()     const { return _bci_of_invoke; }
  ciMethod* callee()      const { return _callee; }
  Value recv()            const { return _recv; }
  ciKlass* known_holder() const { return _known_holder; }
  int nb_profiled_args()  const { return _obj_args == nullptr ?
0 : _obj_args->length(); }
  Value profiled_arg_at(int i) const { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined() const { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != nullptr) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};

// Records return-type profile for a call site.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;
  ciMethod* _callee;
  int       _bci_of_invoke;
  Value     _ret;

 public:
  // creation
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileReturnType has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* method()  const { return _method; }
  ciMethod* callee()  const { return _callee; }
  int bci_of_invoke() const { return _bci_of_invoke; }
  Value ret()         const { return _ret; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != nullptr) {
      f->visit(&_ret);
    }
  }
};

// Records operand type profile for an acmp (reference comparison) bytecode.
LEAF(ProfileACmpTypes, Instruction)
 private:
  ciMethod* _method;
  int       _bci;
  Value     _left;
  Value     _right;
  bool      _left_maybe_null;
  bool      _right_maybe_null;

 public:
  // creation
  ProfileACmpTypes(ciMethod* method, int bci, Value left, Value right)
    : Instruction(voidType)
    , _method(method)
    , _bci(bci)
    , _left(left)
    , _right(right)
  {
    // The ProfileACmp has side-effects and must occur precisely where located
    pin();
    // conservatively assume both operands may be null until proven otherwise
    _left_maybe_null = true;
    _right_maybe_null = true;
  }

  // accessors
  ciMethod* method()        const { return _method; }
  int bci()                 const { return _bci; }
  Value left()              const { return _left; }
  Value right()             const { return _right; }
  bool left_maybe_null()    const { return _left_maybe_null; }
  bool right_maybe_null()   const { return _right_maybe_null; }
  void set_left_maybe_null(bool v)  { _left_maybe_null = v; }
  void set_right_maybe_null(bool v) { _right_maybe_null = v; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    if (_left != nullptr) {
      f->visit(&_left);
    }
    if (_right != nullptr) {
      f->visit(&_right);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread; // Pass the JavaThread* as an implicit first argument

 public:
  // creation
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  // accessors
  const char* entry_name() const  { return _entry_name; }
  address entry() const           { return _entry; }
  int number_of_arguments() const { return _args->length(); }
  Value argument_at(int i) const  { return _args->at(i); }
  bool pass_thread() const        { return _pass_thread; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};

// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  // creation
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
    : Instruction(voidType)
    ,
      _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located
    pin();
  }

  // accessors
  ciMethod* inlinee()  { return _inlinee; }
  ValueStack* state()  { return _state; }
  // generic
  virtual void input_values_do(ValueVisitor*) {}
  virtual void state_values_do(ValueVisitor*);
};

// Explicit memory barrier of the given LIR membar kind.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  // creation
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  // accessors
  LIR_Code code() { return _code; }

  // generic
  virtual void input_values_do(ValueVisitor*) {}
};

// A (from, to) edge in the control-flow graph.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const { return _from; }
  BlockBegin* to() const   { return _to; }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; }
  bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)   { _to = b; }
  void set_from(BlockBegin* b) { _from = b; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

// BlockBegin delegates its successor queries to the block's end instruction.
inline int         BlockBegin::number_of_sux() const { assert(_end != nullptr, "need end"); return _end->number_of_sux(); }
inline BlockBegin* BlockBegin::sux_at(int i)   const { assert(_end != nullptr , "need end"); return _end->sux_at(i); }

#undef ASSERT_VALUES

#endif // SHARE_C1_C1_INSTRUCTION_HPP