/*
 * Copyright (c) 1999, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_C1_C1_CODESTUBS_HPP
#define SHARE_C1_C1_CODESTUBS_HPP

#include "c1/c1_FrameMap.hpp"
#include "c1/c1_IR.hpp"
#include "c1/c1_Instruction.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_Runtime1.hpp"
#include "code/nativeInst.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"

class CodeEmitInfo;
class LIR_Assembler;
class LIR_OpVisitState;

// CodeStubs are little 'out-of-line' pieces of code that
// usually handle slow cases of operations. All code stubs
// are collected and code is emitted at the end of the
// nmethod.

class CodeStub: public CompilationResourceObj {
 protected:
  Label _entry;                                  // label at the stub entry point
  Label _continuation;                           // label where stub continues, if any

 public:
  CodeStub() {}

  // code generation
  void assert_no_unbound_labels()                { assert(!_entry.is_unbound() && !_continuation.is_unbound(), "unbound label"); }
  virtual void emit_code(LIR_Assembler* e) = 0;
  virtual CodeEmitInfo* info() const             { return nullptr; }
  virtual bool is_exception_throw_stub() const   { return false; }
  virtual bool is_simple_exception_stub() const  { return false; }
  virtual int nr_immediate_oops_patched() const  { return 0; }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const = 0;
#endif

  // label access
  Label* entry()                                 { return &_entry; }
  Label* continuation()                          { return &_continuation; }
  // for LIR
  virtual void visit(LIR_OpVisitState* visit) = 0;
};

class CodeStubList: public GrowableArray<CodeStub*> {
 public:
  CodeStubList(): GrowableArray<CodeStub*>() {}

  void append(CodeStub* stub) {
    if (!contains(stub)) {
      GrowableArray<CodeStub*>::append(stub);
    }
  }
};
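
// Typical life cycle of a code stub: the LIR generator allocates a stub and
// attaches it to a LIR operation; the fast path branches to entry() for the
// slow case; the LIR assembler collects all stubs and calls emit_code() after
// the method body, where the stub binds _entry, performs the slow-path work
// (usually a Runtime1 call) and jumps back to continuation(). A minimal,
// purely illustrative sketch of a concrete stub (MySlowCaseStub is a
// hypothetical name, not a stub defined in this file):
//
//   class MySlowCaseStub: public CodeStub {
//    private:
//     CodeEmitInfo* _info;
//    public:
//     MySlowCaseStub(CodeEmitInfo* info) : _info(info) {}
//     virtual void emit_code(LIR_Assembler* e);   // bind _entry, call into the runtime, jump to _continuation
//     virtual CodeEmitInfo* info() const             { return _info; }
//     virtual void visit(LIR_OpVisitState* visitor)  { visitor->do_slow_case(_info); }
//   #ifndef PRODUCT
//     virtual void print_name(outputStream* out) const { out->print("MySlowCaseStub"); }
//   #endif // PRODUCT
//   };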

class C1SafepointPollStub: public CodeStub {
 private:
  uintptr_t _safepoint_offset;

 public:
  C1SafepointPollStub() :
    _safepoint_offset(0) {
  }

  uintptr_t safepoint_offset()                           { return _safepoint_offset; }
  void set_safepoint_offset(uintptr_t safepoint_offset)  { _safepoint_offset = safepoint_offset; }

  virtual void emit_code(LIR_Assembler* e);
  virtual void visit(LIR_OpVisitState* visitor) {
    // don't pass in the code emit info since it's processed in the fast path
    visitor->do_slow_case();
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("C1SafepointPollStub"); }
#endif // PRODUCT
};

class CounterOverflowStub: public CodeStub {
 private:
  CodeEmitInfo* _info;
  int           _bci;
  LIR_Opr       _method;

 public:
  CounterOverflowStub(CodeEmitInfo* info, int bci, LIR_Opr method) : _info(info), _bci(bci), _method(method) {
    FrameMap* f = Compilation::current()->frame_map();
    f->update_reserved_argument_area_size(2 * BytesPerWord);
  }

  virtual void emit_code(LIR_Assembler* e);

  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
    visitor->do_input(_method);
  }

#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("CounterOverflowStub"); }
#endif // PRODUCT
};

class ConversionStub: public CodeStub {
 private:
  Bytecodes::Code _bytecode;
  LIR_Opr         _input;
  LIR_Opr         _result;

  static float  float_zero;
  static double double_zero;
 public:
  ConversionStub(Bytecodes::Code bytecode, LIR_Opr input, LIR_Opr result)
    : _bytecode(bytecode), _input(input), _result(result) {
    NOT_IA32( ShouldNotReachHere(); ) // used only on x86-32
  }

  Bytecodes::Code bytecode() { return _bytecode; }
  LIR_Opr         input()    { return _input; }
  LIR_Opr         result()   { return _result; }

  virtual void emit_code(LIR_Assembler* e);
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case();
    visitor->do_input(_input);
    visitor->do_output(_result);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("ConversionStub"); }
#endif // PRODUCT
};


// Throws ArrayIndexOutOfBoundsException by default but can be
// configured to throw IndexOutOfBoundsException in constructor
class RangeCheckStub: public CodeStub {
 private:
  CodeEmitInfo* _info;
  LIR_Opr       _index;
  LIR_Opr       _array;
  bool          _throw_index_out_of_bounds_exception;

 public:
  // For ArrayIndexOutOfBoundsException.
  RangeCheckStub(CodeEmitInfo* info, LIR_Opr index, LIR_Opr array)
    : _index(index), _array(array), _throw_index_out_of_bounds_exception(false) {
    assert(info != nullptr, "must have info");
    _info = new CodeEmitInfo(info);
    FrameMap* f = Compilation::current()->frame_map();
    f->update_reserved_argument_area_size(2 * BytesPerWord);
  }
  // For IndexOutOfBoundsException.
  RangeCheckStub(CodeEmitInfo* info, LIR_Opr index)
    : _index(index), _array(), _throw_index_out_of_bounds_exception(true) {
    assert(info != nullptr, "must have info");
    _info = new CodeEmitInfo(info);
    FrameMap* f = Compilation::current()->frame_map();
    f->update_reserved_argument_area_size(2 * BytesPerWord);
  }
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual bool is_exception_throw_stub() const   { return true; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
    visitor->do_input(_index);
    if (_array) { visitor->do_input(_array); }
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("RangeCheckStub"); }
#endif // PRODUCT
};

// stub used when predicate fails and deoptimization is needed
class PredicateFailedStub: public CodeStub {
 private:
  CodeEmitInfo* _info;

 public:
  PredicateFailedStub(CodeEmitInfo* info);
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("PredicateFailedStub"); }
#endif // PRODUCT
};

class DivByZeroStub: public CodeStub {
 private:
  CodeEmitInfo* _info;
  int           _offset;

 public:
  DivByZeroStub(CodeEmitInfo* info)
    : _info(info), _offset(-1) {
  }
  DivByZeroStub(int offset, CodeEmitInfo* info)
    : _info(info), _offset(offset) {
  }
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual bool is_exception_throw_stub() const   { return true; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("DivByZeroStub"); }
#endif // PRODUCT
};


class ImplicitNullCheckStub: public CodeStub {
 private:
  CodeEmitInfo* _info;
  int           _offset;

 public:
  ImplicitNullCheckStub(int offset, CodeEmitInfo* info)
    : _info(info), _offset(offset) {
  }
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual bool is_exception_throw_stub() const   { return true; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("ImplicitNullCheckStub"); }
#endif // PRODUCT
};


class NewInstanceStub: public CodeStub {
 private:
  ciInstanceKlass* _klass;
  LIR_Opr          _klass_reg;
  LIR_Opr          _result;
  CodeEmitInfo*    _info;
  C1StubId         _stub_id;

 public:
  NewInstanceStub(LIR_Opr klass_reg, LIR_Opr result, ciInstanceKlass* klass, CodeEmitInfo* info, C1StubId stub_id);
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
    visitor->do_input(_klass_reg);
    visitor->do_output(_result);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("NewInstanceStub"); }
#endif // PRODUCT
};


class NewTypeArrayStub: public CodeStub {
 private:
  LIR_Opr       _klass_reg;
  LIR_Opr       _length;
  LIR_Opr       _result;
  CodeEmitInfo* _info;

 public:
  NewTypeArrayStub(LIR_Opr klass_reg, LIR_Opr length, LIR_Opr result, CodeEmitInfo* info);
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
    visitor->do_input(_klass_reg);
    visitor->do_input(_length);
    assert(_result->is_valid(), "must be valid"); visitor->do_output(_result);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("NewTypeArrayStub"); }
#endif // PRODUCT
};


class NewObjectArrayStub: public CodeStub {
 private:
  LIR_Opr       _klass_reg;
  LIR_Opr       _length;
  LIR_Opr       _result;
  CodeEmitInfo* _info;

 public:
  NewObjectArrayStub(LIR_Opr klass_reg, LIR_Opr length, LIR_Opr result, CodeEmitInfo* info);
  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
    visitor->do_input(_klass_reg);
    visitor->do_input(_length);
    assert(_result->is_valid(), "must be valid"); visitor->do_output(_result);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("NewObjectArrayStub"); }
#endif // PRODUCT
};


class MonitorAccessStub: public CodeStub {
 protected:
  LIR_Opr _obj_reg;
  LIR_Opr _lock_reg;

 public:
  MonitorAccessStub(LIR_Opr obj_reg, LIR_Opr lock_reg) {
    _obj_reg  = obj_reg;
    _lock_reg = lock_reg;
  }

#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("MonitorAccessStub"); }
#endif // PRODUCT
};


class MonitorEnterStub: public MonitorAccessStub {
 private:
  CodeEmitInfo* _info;

 public:
  MonitorEnterStub(LIR_Opr obj_reg, LIR_Opr lock_reg, CodeEmitInfo* info)
    : MonitorAccessStub(obj_reg, lock_reg) {
    _info = new CodeEmitInfo(info);
    FrameMap* f = Compilation::current()->frame_map();
    f->update_reserved_argument_area_size(2 * BytesPerWord);
  }

  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_input(_obj_reg);
    visitor->do_input(_lock_reg);
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("MonitorEnterStub"); }
#endif // PRODUCT
};


class MonitorExitStub: public MonitorAccessStub {
 private:
  bool _compute_lock;
  int  _monitor_ix;

 public:
  MonitorExitStub(LIR_Opr lock_reg, bool compute_lock, int monitor_ix)
    : MonitorAccessStub(LIR_OprFact::illegalOpr, lock_reg),
      _compute_lock(compute_lock), _monitor_ix(monitor_ix) { }
  virtual void emit_code(LIR_Assembler* e);
  virtual void visit(LIR_OpVisitState* visitor) {
    assert(_obj_reg->is_illegal(), "unused");
    if (_compute_lock) {
      visitor->do_temp(_lock_reg);
    } else {
      visitor->do_input(_lock_reg);
    }
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("MonitorExitStub"); }
#endif // PRODUCT
};


class PatchingStub: public CodeStub {
 public:
  enum PatchID {
    access_field_id,
    load_klass_id,
    load_mirror_id,
    load_appendix_id
  };
  enum constants {
    patch_info_size = 3
  };
 private:
  PatchID       _id;
  address       _pc_start;
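  // number of bytes at the patch site that must be preserved (copied into the stub) until patching completes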
  int           _bytes_to_copy;
  Label         _patched_code_entry;
  Label         _patch_site_entry;
  Label         _patch_site_continuation;
  Register      _obj;
  CodeEmitInfo* _info;
  int           _index;  // index of the patchable oop or Klass* in nmethod or metadata table if needed
  static int    _patch_info_offset;

  void align_patch_site(MacroAssembler* masm);

 public:
  static int patch_info_offset() { return _patch_info_offset; }

  PatchingStub(MacroAssembler* masm, PatchID id, int index = -1):
      _id(id)
    , _info(nullptr)
    , _index(index) {
    // force alignment of patch sites so we
    // can guarantee atomic writes to the patch site.
    align_patch_site(masm);
    _pc_start = masm->pc();
    masm->bind(_patch_site_entry);
  }

  virtual int nr_immediate_oops_patched() const {
    if (_id == load_mirror_id || _id == load_appendix_id) {
      return 1;
    }
    return 0;
  }

  void install(MacroAssembler* masm, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {
    _info = info;
    _obj = obj;
    masm->bind(_patch_site_continuation);
    _bytes_to_copy = pointer_delta_as_int(masm->pc(), pc_start());
    if (_id == PatchingStub::access_field_id) {
      // embed a fixed offset to handle long patches which need to be offset by a word.
      // the patching code will just add the field offset field to this offset so
      // that we can reference either the high or low word of a double word field.
      int field_offset = 0;
      switch (patch_code) {
      case lir_patch_low:    field_offset = lo_word_offset_in_bytes; break;
      case lir_patch_high:   field_offset = hi_word_offset_in_bytes; break;
      case lir_patch_normal: field_offset = 0;                       break;
      default: ShouldNotReachHere();
      }
      NativeMovRegMem* n_move = nativeMovRegMem_at(pc_start());
      n_move->set_offset(field_offset);
      // Copy will never get executed, so only copy the part which is required for patching.
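      // The first instruction at the patch site is overwritten with a jump to the stub,
      // so at least NativeGeneralJump::instruction_size bytes have to be kept around.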
      _bytes_to_copy = MAX2(n_move->num_bytes_to_end_of_patch(), (int)NativeGeneralJump::instruction_size);
    } else if (_id == load_klass_id || _id == load_mirror_id || _id == load_appendix_id) {
      assert(_obj != noreg, "must have register object for load_klass/load_mirror");
#ifdef ASSERT
      // verify that we're pointing at a NativeMovConstReg
      nativeMovConstReg_at(pc_start());
#endif
    } else {
      ShouldNotReachHere();
    }
    assert(_bytes_to_copy <= (masm->pc() - pc_start()), "not enough bytes");
  }

  address pc_start() const                       { return _pc_start; }
  PatchID id() const                             { return _id; }

  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("PatchingStub"); }
#endif // PRODUCT
};


//------------------------------------------------------------------------------
// DeoptimizeStub
//
class DeoptimizeStub : public CodeStub {
 private:
  CodeEmitInfo* _info;
  jint          _trap_request;

 public:
  DeoptimizeStub(CodeEmitInfo* info, Deoptimization::DeoptReason reason, Deoptimization::DeoptAction action) :
    _info(new CodeEmitInfo(info)), _trap_request(Deoptimization::make_trap_request(reason, action)) {
    FrameMap* f = Compilation::current()->frame_map();
    f->update_reserved_argument_area_size(2 * BytesPerWord);
  }

  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual bool is_exception_throw_stub() const   { return true; }
  virtual void visit(LIR_OpVisitState* visitor) {
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("DeoptimizeStub"); }
#endif // PRODUCT
};


class SimpleExceptionStub: public CodeStub {
 private:
  LIR_Opr       _obj;
  C1StubId      _stub;
  CodeEmitInfo* _info;

 public:
  SimpleExceptionStub(C1StubId stub, LIR_Opr obj, CodeEmitInfo* info):
    _obj(obj), _stub(stub), _info(info) {
    FrameMap* f = Compilation::current()->frame_map();
    f->update_reserved_argument_area_size(2 * BytesPerWord);
  }

  void set_obj(LIR_Opr obj) {
    _obj = obj;
  }

  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _info; }
  virtual bool is_exception_throw_stub() const   { return true; }
  virtual bool is_simple_exception_stub() const  { return true; }
  virtual void visit(LIR_OpVisitState* visitor) {
    if (_obj->is_valid()) visitor->do_input(_obj);
    visitor->do_slow_case(_info);
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("SimpleExceptionStub"); }
#endif // PRODUCT
};



class ArrayStoreExceptionStub: public SimpleExceptionStub {
 public:
  ArrayStoreExceptionStub(LIR_Opr obj, CodeEmitInfo* info): SimpleExceptionStub(C1StubId::throw_array_store_exception_id, obj, info) {}
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("ArrayStoreExceptionStub"); }
#endif // PRODUCT
};


class ArrayCopyStub: public CodeStub {
 private:
  LIR_OpArrayCopy* _op;

 public:
  ArrayCopyStub(LIR_OpArrayCopy* op): _op(op) {
    FrameMap* f = Compilation::current()->frame_map();
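    // reserve frame space for the outgoing arguments of the slow-path arraycopy runtime call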
    f->update_reserved_argument_area_size(arraycopystub_reserved_argument_area_size * BytesPerWord);
  }

  LIR_Opr src() const                            { return _op->src(); }
  LIR_Opr src_pos() const                        { return _op->src_pos(); }
  LIR_Opr dst() const                            { return _op->dst(); }
  LIR_Opr dst_pos() const                        { return _op->dst_pos(); }
  LIR_Opr length() const                         { return _op->length(); }
  LIR_Opr tmp() const                            { return _op->tmp(); }

  virtual void emit_code(LIR_Assembler* e);
  virtual CodeEmitInfo* info() const             { return _op->info(); }
  virtual void visit(LIR_OpVisitState* visitor) {
    // don't pass in the code emit info since it's processed in the fast path
    visitor->do_slow_case();
  }
#ifndef PRODUCT
  virtual void print_name(outputStream* out) const { out->print("ArrayCopyStub"); }
#endif // PRODUCT
};

#endif // SHARE_C1_C1_CODESTUBS_HPP