1 /*
2 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_CODE_CODEBLOB_HPP
26 #define SHARE_CODE_CODEBLOB_HPP
27
28 #include "asm/codeBuffer.hpp"
29 #include "compiler/compilerDefinitions.hpp"
30 #include "compiler/oopMap.hpp"
31 #include "runtime/frame.hpp"
32 #include "runtime/handles.hpp"
33 #include "runtime/javaFrameAnchor.hpp"
34 #include "utilities/align.hpp"
35 #include "utilities/macros.hpp"
36
37 class AOTCodeReader;
38 class ImmutableOopMap;
39 class ImmutableOopMapSet;
40 class JNIHandleBlock;
41 class OopMapSet;
42
43 // CodeBlob Types
44 // Used in the CodeCache to assign CodeBlobs to different CodeHeaps
// Each nmethod/blob is placed in the CodeHeap matching its CodeBlobType
// (unless segmentation is disabled, in which case 'All' is used).
enum class CodeBlobType {
  MethodNonProfiled   = 0,    // Execution level 1 and 4 (non-profiled) nmethods (including native nmethods)
  MethodProfiled      = 1,    // Execution level 2 and 3 (profiled) nmethods
  MethodHot           = 2,    // Nmethods predicted to be always hot
  NonNMethod          = 3,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
  All                 = 4,    // All types (No code cache segmentation)
  NumTypes            = 5     // Number of CodeBlobTypes
};
53
54 // CodeBlob - superclass for all entries in the CodeCache.
55 //
56 // Subtypes are:
57 // nmethod : JIT Compiled Java methods
58 // RuntimeBlob : Non-compiled method code; generated glue code
59 // BufferBlob : Used for non-relocatable code such as interpreter, stubroutines, etc.
60 // AdapterBlob : Used to hold C2I/I2C adapters
61 // VtableBlob : Used for holding vtable chunks
62 // MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
63 // RuntimeStub : Call to VM runtime methods
64 // SingletonBlob : Super-class for all blobs that exist in only one instance
65 // DeoptimizationBlob : Used for deoptimization
66 // SafepointBlob : Used to handle illegal instruction exceptions
67 // ExceptionBlob : Used for stack unrolling
68 // UncommonTrapBlob : Used to handle uncommon traps
69 // UpcallStub : Used for upcalls from native code
70 //
71 //
72 // Layout in the CodeCache:
73 // - header
74 // - content space
75 // - instruction space
76 // Outside of the CodeCache:
77 // - mutable_data
78 // - relocation info
79 // - additional data for subclasses
80
// Discriminator stored in CodeBlob::_kind. CodeBlob has no C++ vtable;
// type tests (is_*) and polymorphic printing/restoring are dispatched on
// this value (see CodeBlob::vptr(CodeBlobKind)).
enum class CodeBlobKind : u1 {
  None,             // invalid/unset kind
  Nmethod,          // nmethod: JIT-compiled Java method
  Buffer,           // BufferBlob: non-relocatable code (interpreter, stub routines, ...)
  Adapter,          // AdapterBlob: C2I/I2C adapters
  Vtable,           // VtableBlob: vtable chunks
  MHAdapter,        // MethodHandlesAdapterBlob
  RuntimeStub,      // RuntimeStub: calls to VM runtime methods
  Deoptimization,   // DeoptimizationBlob
  Safepoint,        // SafepointBlob
#ifdef COMPILER2
  Exception,        // ExceptionBlob (C2 only)
  UncommonTrap,     // UncommonTrapBlob (C2 only)
#endif
  Upcall,           // UpcallStub: (Panama) upcalls from native code
  Number_Of_Kinds   // count of kinds; must remain last
};
98
99 class UpcallStub; // for as_upcall_stub()
100 class RuntimeStub; // for as_runtime_stub()
101 class JavaFrameAnchor; // for UpcallStub::jfa_for_frame
102 class BufferBlob;
103 class AdapterBlob;
104 class SingletonBlob;
105 class ExceptionBlob;
106 class DeoptimizationBlob;
107 class SafepointBlob;
108 class UncommonTrapBlob;
109
class CodeBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;

protected:
  // order fields from large to small to minimize padding between fields
  ImmutableOopMapSet* _oop_maps;   // OopMap for this CodeBlob
  const char*         _name;       // human-readable blob name (not owned by the blob)
  address             _mutable_data; // start of the mutable data area, allocated outside the CodeCache
                                     // (holds relocation info plus additional subclass data; see layout above)

  int      _size;            // total size of CodeBlob in bytes
  int      _relocation_size; // size of relocation (could be bigger than 64Kb)
  int      _content_offset;  // offset to where content region begins (this includes consts, insts, stubs)
  int      _code_offset;     // offset to where instructions region begins (this includes insts, stubs)
  int      _data_offset;     // offset to where data region begins
  int      _frame_size;      // size of stack frame in words (NOT slots. On x64 these are 64bit words)
  int      _mutable_data_size; // size in bytes of the mutable data area

  S390_ONLY(int _ctable_offset;)

  uint16_t _header_size;           // size of header (depends on subclass)
  int16_t  _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
                                   // not finished setting up their frame. Beware of pc's in
                                   // that range. There is a similar range(s) on returns
                                   // which we don't detect.

  CodeBlobKind _kind;              // Kind of this code blob

  bool _caller_must_gc_arguments;  // see caller_must_gc_arguments() below

#ifndef PRODUCT
  AsmRemarks _asm_remarks;         // assembler remarks collected during code generation
  DbgStrings _dbg_strings;         // debug strings referenced by the generated code
#endif

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  // Manual dispatch table: CodeBlob itself has no C++ virtual functions, so
  // subclass-specific printing/archiving is routed through a per-kind Vptr
  // instance looked up via vptr(CodeBlobKind).
  class Vptr {
  public:
    virtual void print_on(const CodeBlob* instance, outputStream* st) const = 0;
    virtual void print_value_on(const CodeBlob* instance, outputStream* st) const = 0;
    virtual void prepare_for_archiving(CodeBlob* instance) const {
      instance->prepare_for_archiving_impl();
    };
    virtual void post_restore(CodeBlob* instance) const {
      instance->post_restore_impl();
    };
  };

  static const Vptr* vptr(CodeBlobKind kind);
  const Vptr* vptr() const;

  CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
           int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
           int mutable_data_size);

  // Simple CodeBlob used for simple BufferBlob.
  CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);

  // No-op: blobs live in the CodeCache and are released via purge(), not
  // through operator delete.
  void operator delete(void* p) { }

  void prepare_for_archiving_impl() NOT_CDS_RETURN;
  void post_restore_impl() NOT_CDS_RETURN;

public:

  ~CodeBlob() {
    // Callers must have released the oop maps (via purge()) before destruction.
    assert(_oop_maps == nullptr, "Not flushed");
  }

  // Returns the space needed for CodeBlob
  static unsigned int allocation_size(CodeBuffer* cb, int header_size);
  static unsigned int align_code_offset(int offset);

  // Deletion
  void purge();

  // Typing
  bool is_nmethod() const                     { return _kind == CodeBlobKind::Nmethod; }
  // we may want to check for an actual buffer blob or subtype instance
  bool is_buffer_blob(bool strict=true) const {
    if (strict) {
      return _kind == CodeBlobKind::Buffer;
    } else {
      // non-strict: accept any BufferBlob subclass kind as well
      return (_kind == CodeBlobKind::Buffer ||
              _kind == CodeBlobKind::Adapter ||
              _kind == CodeBlobKind::Vtable ||
              _kind == CodeBlobKind::MHAdapter);
    }
  }
  bool is_runtime_stub() const                { return _kind == CodeBlobKind::RuntimeStub; }
  // singleton blobs are never directly implemented
  bool is_deoptimization_stub() const         { return _kind == CodeBlobKind::Deoptimization; }
#ifdef COMPILER2
  bool is_uncommon_trap_stub() const          { return _kind == CodeBlobKind::UncommonTrap; }
  bool is_exception_stub() const              { return _kind == CodeBlobKind::Exception; }
#else
  // These kinds only exist with C2; without it the checks are trivially false.
  bool is_uncommon_trap_stub() const          { return false; }
  bool is_exception_stub() const              { return false; }
#endif
  bool is_safepoint_stub() const              { return _kind == CodeBlobKind::Safepoint; }
  bool is_singleton_blob() const              {
    return (is_deoptimization_stub() ||
            is_uncommon_trap_stub() ||
            is_exception_stub() ||
            is_safepoint_stub());
  }
  bool is_adapter_blob() const                { return _kind == CodeBlobKind::Adapter; }
  bool is_vtable_blob() const                 { return _kind == CodeBlobKind::Vtable; }
  bool is_method_handles_adapter_blob() const { return _kind == CodeBlobKind::MHAdapter; }
  bool is_upcall_stub() const                 { return _kind == CodeBlobKind::Upcall; }

  // Casting
  // Each as_* asserts the corresponding is_* predicate in debug builds.
  nmethod* as_nmethod_or_null() const         { return is_nmethod() ? (nmethod*) this : nullptr; }
  nmethod* as_nmethod() const                 { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
  CodeBlob* as_codeblob() const               { return (CodeBlob*) this; }
  // we may want to force an actual buffer blob or subtype instance
  BufferBlob* as_buffer_blob(bool strict = true) const { assert(is_buffer_blob(strict), "must be %sbuffer blob", (strict ? "strict " : "")); return (BufferBlob*) this; }
  AdapterBlob* as_adapter_blob() const        { assert(is_adapter_blob(), "must be adapter blob"); return (AdapterBlob*) this; }
  ExceptionBlob* as_exception_blob() const    { assert(is_exception_stub(), "must be exception stub"); return (ExceptionBlob*) this; }
  // this will always return a subtype instance
  SingletonBlob* as_singleton_blob() const    { assert(is_singleton_blob(), "must be singleton blob"); return (SingletonBlob*) this; }
  DeoptimizationBlob* as_deoptimization_blob() const { assert(is_deoptimization_stub(), "must be deopt stub"); return (DeoptimizationBlob*) this; }
  SafepointBlob* as_safepoint_blob() const    { assert(is_safepoint_stub(), "must be safepoint stub"); return (SafepointBlob*) this; }
  UpcallStub* as_upcall_stub() const          { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; }
  RuntimeStub* as_runtime_stub() const        { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; }
  UncommonTrapBlob* as_uncommon_trap_blob() const { assert(is_uncommon_trap_stub(), "must be uncommon trap stub"); return (UncommonTrapBlob*) this; }

  // Boundaries
  // All region boundaries are expressed as offsets from the blob header
  // (i.e. from 'this'); see the layout comment at the top of this file.
  address header_begin() const                { return (address) this; }
  address header_end() const                  { return ((address) this) + _header_size; }
  address content_begin() const               { return (address) header_begin() + _content_offset; }
  address content_end() const                 { return (address) header_begin() + _data_offset; }
  address code_begin() const                  { return (address) header_begin() + _code_offset; }
  address code_end() const                    { return (address) header_begin() + _data_offset; }
  address data_begin() const                  { return (address) header_begin() + _data_offset; }
  address data_end() const                    { return (address) header_begin() + _size; }
  address blob_end() const                    { return (address) header_begin() + _size; }
  // code_end == content_end is true for all types of blobs for now, it is also checked in the constructor

  // Mutable data lives outside the CodeCache (see _mutable_data above).
  int mutable_data_size() const               { return _mutable_data_size; }
  address mutable_data_begin() const          { return _mutable_data; }
  address mutable_data_end() const            { return _mutable_data + _mutable_data_size; }

  // Relocation info is stored at the start of the mutable data area.
  relocInfo* relocation_begin() const         { return (relocInfo*)_mutable_data; }
  relocInfo* relocation_end() const           { return (relocInfo*)((address)relocation_begin() + _relocation_size); }

  // Offsets
  int content_offset() const                  { return _content_offset; }
  int code_offset() const                     { return _code_offset; }

  // This field holds the beginning of the const section in the old code buffer.
  // It is needed to fix relocations of pc-relative loads when resizing
  // the constant pool or moving it.
  S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
  void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }

  // Sizes
  int size() const               { return _size; }
  int header_size() const        { return _header_size; }
  int relocation_size() const    { return _relocation_size; }
  int content_size() const       { return pointer_delta_as_int(content_end(), content_begin()); }
  int code_size() const          { return pointer_delta_as_int(code_end(), code_begin()); }

  // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
  void adjust_size(size_t used) {
    _size = (int)used;
    _data_offset = _size;
  }

  // Containment
  bool blob_contains(address addr) const         { return header_begin()       <= addr && addr < blob_end(); }
  bool code_contains(address addr) const         { return code_begin()         <= addr && addr < code_end(); }
  bool contains(address addr) const              { return content_begin()      <= addr && addr < content_end(); }
  bool is_frame_complete_at(address addr) const  { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
                                                          code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
  int frame_complete_offset() const              { return _frame_complete_offset; }

  // OopMap for frame
  ImmutableOopMapSet* oop_maps() const           { return _oop_maps; }
  void set_oop_maps(OopMapSet* p);
  void set_oop_maps(ImmutableOopMapSet* p)       { _oop_maps = p; }

  const ImmutableOopMap* oop_map_for_slot(int slot, address return_address) const;
  const ImmutableOopMap* oop_map_for_return_address(address return_address) const;

  // Frame support. Sizes are in word units.
  int  frame_size() const                        { return _frame_size; }
  void set_frame_size(int size)                  { _frame_size = size; }

  // Returns true, if the next frame is responsible for GC'ing oops passed as arguments
  bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }

  // Naming
  const char* name() const                       { return _name; }
  void set_name(const char* name)                { _name = name; }

  // Debugging
  void verify();
  void print() const;
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;

  void dump_for_addr(address addr, outputStream* st, bool verbose) const;
  void print_code_on(outputStream* st);

  // Print to stream, any comments associated with offset.
  void print_block_comment(outputStream* stream, address block_begin) const;

#ifndef PRODUCT
  AsmRemarks &asm_remarks() { return _asm_remarks; }
  DbgStrings &dbg_strings() { return _dbg_strings; }

  // Share remark/string storage with another collector (e.g. the CodeBuffer).
  void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
  void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
#endif

#if INCLUDE_CDS
  void restore_mutable_data(address reloc_data);

  // Raw copy of the in-CodeCache part of the blob (header + code + data);
  // the mutable data area is handled separately (see restore_mutable_data).
  void copy_to(address buffer) {
    memcpy(buffer, this, this->size());
  }

  // methods to archive a blob into AOT code cache
  void prepare_for_archiving();
  static void archive_blob(CodeBlob* blob, address archive_buffer);

  // methods to restore a blob from AOT code cache into the CodeCache
  void post_restore();
  CodeBlob* restore(address code_cache_buffer, AOTCodeReader* reader);
  static CodeBlob* create(CodeBlob* archived_blob, AOTCodeReader* reader);
#endif
};
346
347 //----------------------------------------------------------------------------------------------------
348 // RuntimeBlob: used for non-compiled method code (adapters, stubs, blobs)
349
class RuntimeBlob : public CodeBlob {
 public:

  // Creation
  // a) simple CodeBlob
  RuntimeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size)
    : CodeBlob(name, kind, size, header_size)
  {}

  // b) full CodeBlob
  // frame_complete is the offset from the beginning of the instructions
  // to where the frame setup (from stackwalk viewpoint) is complete.
  RuntimeBlob(
    const char* name,
    CodeBlobKind kind,
    CodeBuffer* cb,
    int         size,
    uint16_t    header_size,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments = false
  );

  // Releases the blob's storage (defined in codeBlob.cpp).
  static void free(RuntimeBlob* blob);

  // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
  static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");

  // No extra dispatch entries beyond CodeBlob::Vptr; subclasses override.
  class Vptr : public CodeBlob::Vptr {
  };
};
382
383 class WhiteBox;
384 //----------------------------------------------------------------------------------------------------
385 // BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.
386
class BufferBlob: public RuntimeBlob {
  friend class VMStructs;
  friend class AdapterBlob;
  friend class VtableBlob;
  friend class MethodHandlesAdapterBlob;
  friend class UpcallStub;
  friend class WhiteBox;

 private:
  // Creation support
  // header_size defaults to sizeof(BufferBlob); subclasses pass their own size.
  BufferBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size = sizeof(BufferBlob));
  BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size = sizeof(BufferBlob));

  // Placement allocation in the CodeCache; 'size' is the full blob size.
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static BufferBlob* create(const char* name, uint buffer_size);
  static BufferBlob* create(const char* name, CodeBuffer* cb);

  static void free(BufferBlob* buf);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    // Non-strict cast: this Vptr is shared by BufferBlob subclasses too.
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_buffer_blob(false)->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_buffer_blob(false)->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
423
424
425 //----------------------------------------------------------------------------------------------------
426 // AdapterBlob: used to hold C2I/I2C adapters
427
class AdapterBlob: public BufferBlob {
public:
  // Indices into the entry_offset[] array passed at creation.
  enum Entry {
    I2C,
    C2I,
    C2I_Unverified,
    C2I_No_Clinit_Check,
    ENTRY_COUNT
  };
private:
  AdapterBlob(int size, CodeBuffer* cb, int entry_offset[ENTRY_COUNT]);
  // _i2c_offset is always 0 so no need to store it
  int _c2i_offset;
  int _c2i_unverified_offset;
  int _c2i_no_clinit_check_offset;  // -1 means this adapter has no such entry
public:
  // Creation
  static AdapterBlob* create(CodeBuffer* cb, int entry_offset[ENTRY_COUNT]);
  // Entry points, all expressed relative to the start of the code region.
  address i2c_entry()                 { return code_begin(); }
  address c2i_entry()                 { return i2c_entry() + _c2i_offset; }
  address c2i_unverified_entry()      { return i2c_entry() + _c2i_unverified_offset; }
  address c2i_no_clinit_check_entry() { return _c2i_no_clinit_check_offset == -1 ? nullptr : i2c_entry() + _c2i_no_clinit_check_offset; }
};
451
452 //---------------------------------------------------------------------------------------------------
// VtableBlob: holds vtable chunks (see class hierarchy comment above).
class VtableBlob: public BufferBlob {
 private:
  VtableBlob(const char*, int);

  // Placement allocation in the CodeCache; 'size' is the full blob size.
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static VtableBlob* create(const char* name, int buffer_size);
};
463
464 //----------------------------------------------------------------------------------------------------
465 // MethodHandlesAdapterBlob: used to hold MethodHandles adapters
466
class MethodHandlesAdapterBlob: public BufferBlob {
 private:
  MethodHandlesAdapterBlob(int size): BufferBlob("MethodHandles adapters", CodeBlobKind::MHAdapter, size) {}

 public:
  // Creation
  static MethodHandlesAdapterBlob* create(int buffer_size);
};
475
476
477 //----------------------------------------------------------------------------------------------------
478 // RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine
479
class RuntimeStub: public RuntimeBlob {
  friend class VMStructs;
 private:
  // Creation support
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int         size,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // Placement allocation in the CodeCache; 'size' is the full blob size.
  void* operator new(size_t s, unsigned size) throw();

 public:
  static const int ENTRY_COUNT = 1;
  // Creation
  // Returns nullptr on allocation failure when alloc_fail_is_fatal is false.
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments,
    bool        alloc_fail_is_fatal=true
  );

  static void free(RuntimeStub* stub) { RuntimeBlob::free(stub); }

  address entry_point() const         { return code_begin(); }

  // Re-announce the stub to tracing/profiling after AOT cache restore.
  void post_restore_impl() {
    trace_new_stub(this, "RuntimeStub - ", name());
  }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_runtime_stub()->post_restore_impl();
    }
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
534
535
536 //----------------------------------------------------------------------------------------------------
537 // Super-class for all blobs that exist in only one instance. Implements default behaviour.
538
class SingletonBlob: public RuntimeBlob {
  friend class VMStructs;

 protected:
  // Placement allocation in the CodeCache; returns nullptr on failure
  // when alloc_fail_is_fatal is false.
  void* operator new(size_t s, unsigned size, bool alloc_fail_is_fatal=true) throw();

 public:
  SingletonBlob(
    const char*  name,
    CodeBlobKind kind,
    CodeBuffer*  cb,
    int          size,
    uint16_t     header_size,
    int          frame_size,
    OopMapSet*   oop_maps
  )
  // frame_never_safe: singleton blobs are not considered frame-complete at any pc.
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point()                          { return code_begin(); }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_singleton_blob()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_singleton_blob()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
574
575
576 //----------------------------------------------------------------------------------------------------
577 // DeoptimizationBlob
578
class DeoptimizationBlob: public SingletonBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
 private:
  // Entry-point offsets within the code region (see accessors below).
  int _unpack_offset;
  int _unpack_with_exception;
  int _unpack_with_reexecution;

  int _unpack_with_exception_in_tls;

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  int _uncommon_trap_offset;
  int _implicit_exception_uncommon_trap_offset;
#endif

  // Creation support
  DeoptimizationBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

 public:
  static const int ENTRY_COUNT = 4 JVMCI_ONLY(+ 2);
  // Creation
  static DeoptimizationBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

  // Printing
  address unpack() const                         { return code_begin() + _unpack_offset;           }
  address unpack_with_exception() const          { return code_begin() + _unpack_with_exception;   }
  address unpack_with_reexecution() const        { return code_begin() + _unpack_with_reexecution; }

  // Alternate entry point for C1 where the exception and issuing pc
  // are in JavaThread::_exception_oop and JavaThread::_exception_pc
  // instead of being in registers.  This is needed because C1 doesn't
  // model exception paths in a way that keeps these registers free so
  // there may be live values in those registers during deopt.
  void set_unpack_with_exception_in_tls_offset(int offset) {
    _unpack_with_exception_in_tls = offset;
    assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
  }
  address unpack_with_exception_in_tls() const   { return code_begin() + _unpack_with_exception_in_tls; }

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  void set_uncommon_trap_offset(int offset) {
    _uncommon_trap_offset = offset;
    assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
  }
  // Returns nullptr unless JVMCI is enabled at runtime.
  address uncommon_trap() const                  { return (EnableJVMCI ? code_begin() + _uncommon_trap_offset : nullptr); }

  void set_implicit_exception_uncommon_trap_offset(int offset) {
    _implicit_exception_uncommon_trap_offset = offset;
    assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
  }
  // Returns nullptr unless JVMCI is enabled at runtime.
  address implicit_exception_uncommon_trap() const { return (EnableJVMCI ? code_begin() + _implicit_exception_uncommon_trap_offset : nullptr); }
#endif // INCLUDE_JVMCI

  // Re-announce the blob to tracing/profiling after AOT cache restore.
  void post_restore_impl() {
    trace_new_stub(this, "DeoptimizationBlob");
  }

  void print_value_on_impl(outputStream* st) const;

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_deoptimization_blob()->post_restore_impl();
    }

    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_deoptimization_blob()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
666
667
668 //----------------------------------------------------------------------------------------------------
669 // UncommonTrapBlob (currently only used by Compiler 2)
670
671 #ifdef COMPILER2
672
class UncommonTrapBlob: public SingletonBlob {
 private:
  // Creation support
  UncommonTrapBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // Re-announce the blob to tracing/profiling after AOT cache restore.
  void post_restore_impl() {
    trace_new_stub(this, "UncommonTrapBlob");
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_uncommon_trap_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
701
702
703 //----------------------------------------------------------------------------------------------------
704 // ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)
705
class ExceptionBlob: public SingletonBlob {
 private:
  // Creation support
  ExceptionBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // Re-announce the blob to tracing/profiling after AOT cache restore.
  void post_restore_impl() {
    trace_new_stub(this, "ExceptionBlob");
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_exception_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
736 #endif // COMPILER2
737
738
739 //----------------------------------------------------------------------------------------------------
740 // SafepointBlob: handles illegal_instruction exceptions during a safepoint
741
class SafepointBlob: public SingletonBlob {
 private:
  // Creation support
  SafepointBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  static const int ENTRY_COUNT = 1;
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // Re-announce the blob to tracing/profiling after AOT cache restore.
  void post_restore_impl() {
    trace_new_stub(this, "SafepointBlob - ", name());
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_safepoint_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
772
773 //----------------------------------------------------------------------------------------------------
774
775 class UpcallLinker;
776
777 // A (Panama) upcall stub. Not used by JNI.
class UpcallStub: public RuntimeBlob {
  friend class VMStructs;
  friend class UpcallLinker;
 private:
  jobject _receiver;               // handle to the Java receiver object of this upcall
  ByteSize _frame_data_offset;     // offset of the FrameData area within the stub's frame

  UpcallStub(const char* name, CodeBuffer* cb, int size, jobject receiver, ByteSize frame_data_offset);

  // Placement allocation in the CodeCache; 'size' is the full blob size.
  void* operator new(size_t s, unsigned size) throw();

  // Per-invocation bookkeeping stored in the stub's stack frame.
  struct FrameData {
    JavaFrameAnchor jfa;           // last-Java-frame anchor for stack walking
    JavaThread* thread;
    JNIHandleBlock* old_handles;
    JNIHandleBlock* new_handles;
  };

  // defined in frame_ARCH.cpp
  FrameData* frame_data_for_frame(const frame& frame) const;
 public:
  // Creation
  static UpcallStub* create(const char* name, CodeBuffer* cb, jobject receiver, ByteSize frame_data_offset);

  static void free(UpcallStub* blob);

  jobject receiver() { return _receiver; }

  JavaFrameAnchor* jfa_for_frame(const frame& frame) const;

  // GC support
  void oops_do(OopClosure* f, const frame& frame);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
825
826 #endif // SHARE_CODE_CODEBLOB_HPP