/*
 * Copyright (c) 1998, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_CODE_CODEBLOB_HPP
#define SHARE_CODE_CODEBLOB_HPP

#include "asm/codeBuffer.hpp"
#include "compiler/compilerDefinitions.hpp"
#include "runtime/javaFrameAnchor.hpp"
#include "runtime/frame.hpp"
#include "runtime/handles.hpp"
#include "utilities/align.hpp"
#include "utilities/macros.hpp"

class ImmutableOopMap;
class ImmutableOopMapSet;
class JNIHandleBlock;
class OopMapSet;

// CodeBlob Types
// Used in the CodeCache to assign CodeBlobs to different CodeHeaps
struct CodeBlobType {
  enum {
    MethodNonProfiled   = 0,    // Execution level 1 and 4 (non-profiled) nmethods (including native nmethods)
    MethodProfiled      = 1,    // Execution level 2 and 3 (profiled) nmethods
    NonNMethod          = 2,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
    All                 = 3,    // All types (No code cache segmentation)
    NumTypes            = 4     // Number of CodeBlobTypes
  };
};

// CodeBlob - superclass for all entries in the CodeCache.
//
// Subtypes are:
//  CompiledMethod       : Compiled Java methods (including methods that call into native code)
//   nmethod             : JIT compiled Java methods
//  RuntimeBlob          : Non-compiled method code; generated glue code
//   BufferBlob          : Used for non-relocatable code such as the interpreter, stubroutines, etc.
//    AdapterBlob        : Used to hold C2I/I2C adapters
//    VtableBlob         : Used for holding vtable chunks
//    MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
//    OptimizedEntryBlob : Used for upcalls from native code
//   RuntimeStub         : Call to VM runtime methods
//   SingletonBlob       : Super-class for all blobs that exist in only one instance
//    DeoptimizationBlob : Used for deoptimization
//    ExceptionBlob      : Used for stack unwinding
//    SafepointBlob      : Used to handle illegal instruction exceptions
//    UncommonTrapBlob   : Used to handle uncommon traps
//
//
// Layout (contiguous in the CodeCache):
//   - header
//   - relocation
//   - content space
//     - instruction space
//     - data space
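//
// The region accessors declared on CodeBlob below (header_begin(), relocation_begin(),
// content_begin(), code_begin(), code_end(), data_end()) expose this layout. As an
// illustrative, non-normative sketch (not part of the VM sources, helper name is
// hypothetical), the regions of a well-formed blob are ordered like this:
//
//   void check_layout_sketch(const CodeBlob* blob) {
//     assert(blob->header_begin() <= (address) blob->relocation_begin(), "header comes first");
//     assert((address) blob->relocation_end() <= blob->content_begin(),  "relocation precedes content");
//     assert(blob->code_begin() <= blob->code_end(),                     "code region is well-formed");
//     assert(blob->code_end()   <= blob->data_end(),                     "data region ends the blob");
//   }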

class CodeBlobLayout;
class OptimizedEntryBlob; // for as_optimized_entry_blob()
class JavaFrameAnchor;    // for OptimizedEntryBlob::jfa_for_frame

class CodeBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
  friend class CodeCacheDumper;

protected:

  const CompilerType _type;         // CompilerType
  int        _size;                 // total size of CodeBlob in bytes
  int        _header_size;          // size of header (depends on subclass)
  int        _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
                                     // not finished setting up their frame. Beware of pc's in
                                     // that range. There are similar ranges on returns
                                     // which we don't detect.
  int        _data_offset;          // offset to where data region begins
  int        _frame_size;           // size of stack frame

  address    _code_begin;
  address    _code_end;
  address    _content_begin;        // address to where content region begins (this includes consts, insts, stubs)
                                    // address _content_end - not required, for all CodeBlobs _code_end == _content_end for now
  address    _data_end;
  address    _relocation_begin;
  address    _relocation_end;

  ImmutableOopMapSet* _oop_maps;    // OopMap for this CodeBlob
  bool                _caller_must_gc_arguments;

  const char*         _name;
  S390_ONLY(int       _ctable_offset;)

#ifndef PRODUCT
  AsmRemarks _asm_remarks;
  DbgStrings _dbg_strings;

  ~CodeBlob() {
    _asm_remarks.clear();
    _dbg_strings.clear();
  }
#endif // not PRODUCT

  CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments);
  CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, CodeBuffer* cb, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments);

public:
  // Only used by unit test.
  CodeBlob() : _type(compiler_none) {}

  // Returns the space needed for CodeBlob
  static unsigned int allocation_size(CodeBuffer* cb, int header_size);
  static unsigned int align_code_offset(int offset);

  // Deletion
  virtual void flush();

  // Typing
  virtual bool is_buffer_blob() const                 { return false; }
  virtual bool is_nmethod() const                     { return false; }
  virtual bool is_runtime_stub() const                { return false; }
  virtual bool is_deoptimization_stub() const         { return false; }
  virtual bool is_uncommon_trap_stub() const          { return false; }
  virtual bool is_exception_stub() const              { return false; }
  virtual bool is_safepoint_stub() const              { return false; }
  virtual bool is_adapter_blob() const                { return false; }
  virtual bool is_vtable_blob() const                 { return false; }
  virtual bool is_method_handles_adapter_blob() const { return false; }
  virtual bool is_compiled() const                    { return false; }
  virtual bool is_optimized_entry_blob() const        { return false; }

  inline bool is_compiled_by_c1() const    { return _type == compiler_c1; };
  inline bool is_compiled_by_c2() const    { return _type == compiler_c2; };
  inline bool is_compiled_by_jvmci() const { return _type == compiler_jvmci; };
  const char* compiler_name() const;
  CompilerType compiler_type() const { return _type; }

  // Casting
  nmethod* as_nmethod_or_null()                { return is_nmethod() ? (nmethod*) this : NULL; }
  nmethod* as_nmethod()                        { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
  CompiledMethod* as_compiled_method_or_null() { return is_compiled() ? (CompiledMethod*) this : NULL; }
  CompiledMethod* as_compiled_method()         { assert(is_compiled(), "must be compiled"); return (CompiledMethod*) this; }
  CodeBlob* as_codeblob_or_null() const        { return (CodeBlob*) this; }
  OptimizedEntryBlob* as_optimized_entry_blob() const { assert(is_optimized_entry_blob(), "must be entry blob"); return (OptimizedEntryBlob*) this; }

  // Boundaries
  address    header_begin() const     { return (address)    this; }
  relocInfo* relocation_begin() const { return (relocInfo*) _relocation_begin; };
  relocInfo* relocation_end() const   { return (relocInfo*) _relocation_end; }
  address    content_begin() const    { return _content_begin; }
  address    content_end() const      { return _code_end; } // _code_end == _content_end is true for all types of blobs for now, it is also checked in the constructor
  address    code_begin() const       { return _code_begin; }
  address    code_end() const         { return _code_end; }
  address    data_end() const         { return _data_end; }

  // This field holds the beginning of the const section in the old code buffer.
  // It is needed to fix relocations of pc-relative loads when resizing or
  // moving the constant pool.
  S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
  void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }

  // Sizes
  int size() const            { return _size; }
  int header_size() const     { return _header_size; }
  int relocation_size() const { return (address) relocation_end() - (address) relocation_begin(); }
  int content_size() const    { return content_end() - content_begin(); }
  int code_size() const       { return code_end() - code_begin(); }
  // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
  void adjust_size(size_t used) {
    _size = (int)used;
    _data_offset = (int)used;
    _code_end = (address)this + used;
    _data_end = (address)this + used;
  }

  // Containment
  bool blob_contains(address addr) const        { return header_begin()  <= addr && addr < data_end();    }
  bool code_contains(address addr) const        { return code_begin()    <= addr && addr < code_end();    }
  bool contains(address addr) const             { return content_begin() <= addr && addr < content_end(); }
  bool is_frame_complete_at(address addr) const { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
                                                         code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
  int frame_complete_offset() const             { return _frame_complete_offset; }

  // CodeCache support: really only used by the nmethods, but in order to get
  // asserts and certain bookkeeping to work in the CodeCache they are defined
  // virtual here.
  virtual bool is_zombie() const                 { return false; }
  virtual bool is_locked_by_vm() const           { return false; }

  virtual bool is_unloaded() const               { return false; }
  virtual bool is_not_entrant() const            { return false; }

  // GC support
  virtual bool is_alive() const                  = 0;

  // OopMap for frame
  ImmutableOopMapSet* oop_maps() const           { return _oop_maps; }
  void set_oop_maps(OopMapSet* p);
  const ImmutableOopMap* oop_map_for_return_address(address return_address);
  virtual void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) = 0;

  // Frame support. Sizes are in word units.
  int  frame_size() const                        { return _frame_size; }
  void set_frame_size(int size)                  { _frame_size = size; }

  // Returns true if the next frame is responsible for GC'ing oops passed as arguments
  bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }

  // Naming
  const char* name() const                       { return _name; }
  void set_name(const char* name)                { _name = name; }

  // Debugging
  virtual void verify() = 0;
  virtual void print() const;
  virtual void print_on(outputStream* st) const;
  virtual void print_value_on(outputStream* st) const;
  void dump_for_addr(address addr, outputStream* st, bool verbose) const;
  void print_code();
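
  // Illustrative, non-normative sketch (not part of the VM sources; the helper name is
  // hypothetical): a stack walker that finds a pc inside a blob can combine the
  // containment queries above with is_frame_complete_at() to decide whether the frame
  // at that pc has been fully set up before trusting its layout.
  //
  //   bool frame_is_walkable_sketch(const CodeBlob* blob, address pc) {
  //     return blob->code_contains(pc) &&       // pc lies in the instruction region
  //            blob->is_frame_complete_at(pc);  // and the prologue has finished building the frame
  //   }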

  // Print to the stream any comments associated with offset.
  virtual void print_block_comment(outputStream* stream, address block_begin) const {
#ifndef PRODUCT
    ptrdiff_t offset = block_begin - code_begin();
    assert(offset >= 0, "Expecting non-negative offset!");
    _asm_remarks.print(uint(offset), stream);
#endif
  }

#ifndef PRODUCT
  AsmRemarks &asm_remarks() { return _asm_remarks; }
  DbgStrings &dbg_strings() { return _dbg_strings; }

  void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
  void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
#endif
};

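// Illustrative, non-normative sketch (not part of the VM sources; the helper name is
// hypothetical): code that walks the code cache typically narrows a CodeBlob* with the
// typing and casting queries declared above before using subtype-specific functionality.
//
//   void visit_blob_sketch(CodeBlob* cb) {
//     if (CompiledMethod* cm = cb->as_compiled_method_or_null()) {
//       // cm is a compiled Java method; CompiledMethod/nmethod API may be used here.
//     } else if (cb->is_runtime_stub() || cb->is_buffer_blob()) {
//       // non-method blobs (stubs, buffers, adapters) are handled separately.
//     }
//   }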

class CodeBlobLayout : public StackObj {
private:
  int _size;
  int _header_size;
  int _relocation_size;
  int _content_offset;
  int _code_offset;
  int _data_offset;
  address _code_begin;
  address _code_end;
  address _content_begin;
  address _content_end;
  address _data_end;
  address _relocation_begin;
  address _relocation_end;

public:
  CodeBlobLayout(address code_begin, address code_end, address content_begin, address content_end, address data_end, address relocation_begin, address relocation_end) :
    _size(0),
    _header_size(0),
    _relocation_size(0),
    _content_offset(0),
    _code_offset(0),
    _data_offset(0),
    _code_begin(code_begin),
    _code_end(code_end),
    _content_begin(content_begin),
    _content_end(content_end),
    _data_end(data_end),
    _relocation_begin(relocation_begin),
    _relocation_end(relocation_end)
  {
  }

  CodeBlobLayout(const address start, int size, int header_size, int relocation_size, int data_offset) :
    _size(size),
    _header_size(header_size),
    _relocation_size(relocation_size),
    _content_offset(CodeBlob::align_code_offset(_header_size + _relocation_size)),
    _code_offset(_content_offset),
    _data_offset(data_offset)
  {
    assert(is_aligned(_relocation_size, oopSize), "unaligned size");

    _code_begin = (address) start + _code_offset;
    _code_end = (address) start + _data_offset;

    _content_begin = (address) start + _content_offset;
    _content_end = (address) start + _data_offset;

    _data_end = (address) start + _size;
    _relocation_begin = (address) start + _header_size;
    _relocation_end = _relocation_begin + _relocation_size;
  }

  CodeBlobLayout(const address start, int size, int header_size, const CodeBuffer* cb) :
    _size(size),
    _header_size(header_size),
    _relocation_size(align_up(cb->total_relocation_size(), oopSize)),
    _content_offset(CodeBlob::align_code_offset(_header_size + _relocation_size)),
    _code_offset(_content_offset + cb->total_offset_of(cb->insts())),
    _data_offset(_content_offset + align_up(cb->total_content_size(), oopSize))
  {
    assert(is_aligned(_relocation_size, oopSize), "unaligned size");

    _code_begin = (address) start + _code_offset;
    _code_end = (address) start + _data_offset;

    _content_begin = (address) start + _content_offset;
    _content_end = (address) start + _data_offset;

    _data_end = (address) start + _size;
    _relocation_begin = (address) start + _header_size;
    _relocation_end = _relocation_begin + _relocation_size;
  }

  int size() const             { return _size; }
  int header_size() const      { return _header_size; }
  int relocation_size() const  { return _relocation_size; }
  int content_offset() const   { return _content_offset; }
  int code_offset() const      { return _code_offset; }
  int data_offset() const      { return _data_offset; }
  address code_begin() const       { return _code_begin; }
  address code_end() const         { return _code_end; }
  address data_end() const         { return _data_end; }
  address relocation_begin() const { return _relocation_begin; }
  address relocation_end() const   { return _relocation_end; }
  address content_begin() const    { return _content_begin; }
  address content_end() const      { return _content_end; }
};

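// Illustrative, non-normative worked example (not part of the VM sources) of how the
// offset-based CodeBlobLayout constructor above derives region boundaries. The sizes are
// assumptions chosen only for the example, and align_code_offset() is assumed to round
// 152 up to 160 on the target platform:
//
//   header_size    = 104, relocation_size = 48 (both oopSize-aligned)
//   content_offset = align_code_offset(104 + 48) = 160
//   code_offset    = content_offset              = 160
//   relocation     = [start + 104, start + 152)
//   code/content   = [start + 160, start + data_offset)
//   data           = [start + data_offset, start + size)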

class RuntimeBlob : public CodeBlob {
  friend class VMStructs;
 public:

  // Creation
  // a) simple CodeBlob
  // frame_complete is the offset from the beginning of the instructions
  // to where the frame setup (from stackwalk viewpoint) is complete.
  RuntimeBlob(const char* name, int header_size, int size, int frame_complete, int locs_size);

  // b) full CodeBlob
  RuntimeBlob(
    const char* name,
    CodeBuffer* cb,
    int         header_size,
    int         size,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments = false
  );

  // GC support
  virtual bool is_alive() const                  = 0;

  void verify();

  // OopMap for frame
  virtual void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { ShouldNotReachHere(); }

  // Debugging
  virtual void print_on(outputStream* st) const { CodeBlob::print_on(st); }
  virtual void print_value_on(outputStream* st) const { CodeBlob::print_value_on(st); }

  // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
  static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");
};

class WhiteBox;
//----------------------------------------------------------------------------------------------------
// BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.

class BufferBlob: public RuntimeBlob {
  friend class VMStructs;
  friend class AdapterBlob;
  friend class VtableBlob;
  friend class MethodHandlesAdapterBlob;
  friend class OptimizedEntryBlob;
  friend class WhiteBox;

 private:
  // Creation support
  BufferBlob(const char* name, int size);
  BufferBlob(const char* name, int size, CodeBuffer* cb);

  // This ordinary operator delete is needed even though not used, so the
  // below two-argument operator delete will be treated as a placement
  // delete rather than an ordinary sized delete; see C++14 3.7.4.2/p2.
  void operator delete(void* p);
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static BufferBlob* create(const char* name, int buffer_size);
  static BufferBlob* create(const char* name, CodeBuffer* cb);

  static void free(BufferBlob* buf);

  // Typing
  virtual bool is_buffer_blob() const            { return true; }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }
  bool is_alive() const                          { return true; }

  void verify();
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};


//----------------------------------------------------------------------------------------------------
// AdapterBlob: used to hold C2I/I2C adapters

class AdapterBlob: public BufferBlob {
 private:
  AdapterBlob(int size, CodeBuffer* cb);

 public:
  // Creation
  static AdapterBlob* create(CodeBuffer* cb);

  // Typing
  virtual bool is_adapter_blob() const { return true; }
};

//---------------------------------------------------------------------------------------------------
class VtableBlob: public BufferBlob {
 private:
  VtableBlob(const char*, int);

  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static VtableBlob* create(const char* name, int buffer_size);

  // Typing
  virtual bool is_vtable_blob() const { return true; }
};

//----------------------------------------------------------------------------------------------------
// MethodHandlesAdapterBlob: used to hold MethodHandles adapters

class MethodHandlesAdapterBlob: public BufferBlob {
 private:
  MethodHandlesAdapterBlob(int size) : BufferBlob("MethodHandles adapters", size) {}

 public:
  // Creation
  static MethodHandlesAdapterBlob* create(int buffer_size);

  // Typing
  virtual bool is_method_handles_adapter_blob() const { return true; }
};

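// Illustrative, non-normative sketch (not part of the VM sources) of the typical lifecycle
// of a BufferBlob created from a plain size request; the blob name and size below are
// assumptions chosen only for the example.
//
//   BufferBlob* blob = BufferBlob::create("example blob", 1024);  // allocates in the code cache
//   if (blob != NULL) {
//     // ... emit non-relocatable code into [blob->code_begin(), blob->code_end()) ...
//     BufferBlob::free(blob);                                     // releases the code cache space
//   }
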
//----------------------------------------------------------------------------------------------------
// RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine

class RuntimeStub: public RuntimeBlob {
  friend class VMStructs;
 private:
  // Creation support
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int         size,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // This ordinary operator delete is needed even though not used, so the
  // below two-argument operator delete will be treated as a placement
  // delete rather than an ordinary sized delete; see C++14 3.7.4.2/p2.
  void operator delete(void* p);
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // Typing
  bool is_runtime_stub() const                   { return true; }

  address entry_point() const                    { return code_begin(); }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }
  bool is_alive() const                          { return true; }

  void verify();
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};


//----------------------------------------------------------------------------------------------------
// Super-class for all blobs that exist in only one instance. Implements default behaviour.

class SingletonBlob: public RuntimeBlob {
  friend class VMStructs;

 protected:
  // This ordinary operator delete is needed even though not used, so the
  // below two-argument operator delete will be treated as a placement
  // delete rather than an ordinary sized delete; see C++14 3.7.4.2/p2.
  void operator delete(void* p);
  void* operator new(size_t s, unsigned size) throw();

 public:
  SingletonBlob(
    const char* name,
    CodeBuffer* cb,
    int         header_size,
    int         size,
    int         frame_size,
    OopMapSet*  oop_maps
  )
  : RuntimeBlob(name, cb, header_size, size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point()                          { return code_begin(); }

  bool is_alive() const                          { return true; }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }
  void verify(); // does nothing
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};


//----------------------------------------------------------------------------------------------------
// DeoptimizationBlob

class DeoptimizationBlob: public SingletonBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
 private:
  int _unpack_offset;
  int _unpack_with_exception;
  int _unpack_with_reexecution;

  int _unpack_with_exception_in_tls;

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  int _uncommon_trap_offset;
  int _implicit_exception_uncommon_trap_offset;
#endif

  // Creation support
  DeoptimizationBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

 public:
  // Creation
  static DeoptimizationBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

  // Typing
  bool is_deoptimization_stub() const { return true; }

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) { /* Nothing to do */ }

  // Printing
  void print_value_on(outputStream* st) const;

  address unpack() const                         { return code_begin() + _unpack_offset;           }
  address unpack_with_exception() const          { return code_begin() + _unpack_with_exception;   }
  address unpack_with_reexecution() const        { return code_begin() + _unpack_with_reexecution; }

  // Alternate entry point for C1 where the exception and issuing pc
  // are in JavaThread::_exception_oop and JavaThread::_exception_pc
  // instead of being in registers. This is needed because C1 doesn't
  // model exception paths in a way that keeps these registers free, so
  // there may be live values in those registers during deopt.
  void set_unpack_with_exception_in_tls_offset(int offset) {
    _unpack_with_exception_in_tls = offset;
    assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
  }
  address unpack_with_exception_in_tls() const   { return code_begin() + _unpack_with_exception_in_tls; }

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  void set_uncommon_trap_offset(int offset) {
    _uncommon_trap_offset = offset;
    assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
  }
  address uncommon_trap() const                  { return code_begin() + _uncommon_trap_offset; }

  void set_implicit_exception_uncommon_trap_offset(int offset) {
    _implicit_exception_uncommon_trap_offset = offset;
    assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
  }
  address implicit_exception_uncommon_trap() const { return code_begin() + _implicit_exception_uncommon_trap_offset; }
#endif // INCLUDE_JVMCI
};

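// Illustrative, non-normative sketch (not part of the VM sources): every DeoptimizationBlob
// entry point is stored as an offset from code_begin() and resolved on demand, so a caller
// redirecting a frame into the deopt handler picks the entry that matches how the exception
// state is passed; the variable names below are assumptions for the example.
//
//   DeoptimizationBlob* deopt_blob = /* the single deoptimization blob */;
//   address plain_entry = deopt_blob->unpack();                        // normal deopt entry
//   address c1_entry    = deopt_blob->unpack_with_exception_in_tls();  // exception oop/pc already in thread-local state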

//----------------------------------------------------------------------------------------------------
// UncommonTrapBlob (currently only used by Compiler 2)

#ifdef COMPILER2

class UncommonTrapBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support
  UncommonTrapBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_uncommon_trap_stub() const             { return true; }
};


//----------------------------------------------------------------------------------------------------
// ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)

class ExceptionBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support
  ExceptionBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_exception_stub() const                 { return true; }
};
#endif // COMPILER2


//----------------------------------------------------------------------------------------------------
// SafepointBlob: handles illegal_instruction exceptions during a safepoint

class SafepointBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support
  SafepointBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_safepoint_stub() const                 { return true; }
};

//----------------------------------------------------------------------------------------------------

class ProgrammableUpcallHandler;

class OptimizedEntryBlob: public BufferBlob {
  friend class ProgrammableUpcallHandler;
 private:
  intptr_t _exception_handler_offset;
  jobject _receiver;
  ByteSize _frame_data_offset;

  OptimizedEntryBlob(const char* name, int size, CodeBuffer* cb, intptr_t exception_handler_offset,
                     jobject receiver, ByteSize frame_data_offset);

  struct FrameData {
    JavaFrameAnchor jfa;
    JavaThread* thread;
    JNIHandleBlock* old_handles;
    JNIHandleBlock* new_handles;
    bool should_detach;
  };

  // defined in frame_ARCH.cpp
  FrameData* frame_data_for_frame(const frame& frame) const;
 public:
  // Creation
  static OptimizedEntryBlob* create(const char* name, CodeBuffer* cb,
                                    intptr_t exception_handler_offset, jobject receiver,
                                    ByteSize frame_data_offset);

  address exception_handler() { return code_begin() + _exception_handler_offset; }
  jobject receiver() { return _receiver; }

  JavaFrameAnchor* jfa_for_frame(const frame& frame) const;

  void oops_do(OopClosure* f, const frame& frame);

  // Typing
  virtual bool is_optimized_entry_blob() const override { return true; }
};

#endif // SHARE_CODE_CODEBLOB_HPP