1 /*
2 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_CODE_CODEBLOB_HPP
26 #define SHARE_CODE_CODEBLOB_HPP
27
28 #include "asm/codeBuffer.hpp"
29 #include "compiler/compilerDefinitions.hpp"
30 #include "compiler/oopMap.hpp"
31 #include "runtime/frame.hpp"
32 #include "runtime/handles.hpp"
33 #include "runtime/javaFrameAnchor.hpp"
34 #include "utilities/align.hpp"
35 #include "utilities/macros.hpp"
36
37 class AOTCodeReader;
38 class ImmutableOopMap;
39 class ImmutableOopMapSet;
40 class JNIHandleBlock;
41 class OopMapSet;
42
43 // CodeBlob Types
44 // Used in the CodeCache to assign CodeBlobs to different CodeHeaps
enum class CodeBlobType {
  MethodNonProfiled   = 0,    // Execution level 1 and 4 (non-profiled) nmethods (including native nmethods)
  MethodProfiled      = 1,    // Execution level 2 and 3 (profiled) nmethods
  NonNMethod          = 2,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
  All                 = 3,    // All types (No code cache segmentation)
  NumTypes            = 4     // Number of CodeBlobTypes; keep last
};
52
53 // CodeBlob - superclass for all entries in the CodeCache.
54 //
55 // Subtypes are:
56 // nmethod : JIT Compiled Java methods
57 // RuntimeBlob : Non-compiled method code; generated glue code
58 // BufferBlob : Used for non-relocatable code such as interpreter, stubroutines, etc.
59 // AdapterBlob : Used to hold C2I/I2C adapters
60 // VtableBlob : Used for holding vtable chunks
61 // MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
62 // RuntimeStub : Call to VM runtime methods
63 // SingletonBlob : Super-class for all blobs that exist in only one instance
64 // DeoptimizationBlob : Used for deoptimization
65 // SafepointBlob : Used to handle illegal instruction exceptions
66 // ExceptionBlob : Used for stack unrolling
67 // UncommonTrapBlob : Used to handle uncommon traps
68 // UpcallStub : Used for upcalls from native code
69 //
70 //
71 // Layout in the CodeCache:
72 // - header
73 // - content space
74 // - instruction space
75 // Outside of the CodeCache:
76 // - mutable_data
77 // - relocation info
78 // - additional data for subclasses
79
// Discriminator stored in CodeBlob::_kind. It selects the per-kind Vptr used
// for kind-specific behavior (printing, archiving) and drives the is_*()/as_*()
// queries on CodeBlob. Backed by u1 to keep the CodeBlob header small.
enum class CodeBlobKind : u1 {
  None,             // no/uninitialized kind
  Nmethod,          // nmethod: JIT-compiled Java method
  Buffer,           // BufferBlob
  Adapter,          // AdapterBlob: C2I/I2C adapters
  Vtable,           // VtableBlob: vtable chunks
  MHAdapter,        // MethodHandlesAdapterBlob
  RuntimeStub,      // RuntimeStub: calls to VM runtime methods
  Deoptimization,   // DeoptimizationBlob
  Safepoint,        // SafepointBlob
#ifdef COMPILER2
  Exception,        // ExceptionBlob (C2 only)
  UncommonTrap,     // UncommonTrapBlob (C2 only)
#endif
  Upcall,           // UpcallStub: Panama upcalls from native code
  Number_Of_Kinds   // keep last: number of kinds
};
97
98 class UpcallStub; // for as_upcall_stub()
99 class RuntimeStub; // for as_runtime_stub()
100 class JavaFrameAnchor; // for UpcallStub::jfa_for_frame
101 class BufferBlob;
102 class AdapterBlob;
103 class SingletonBlob;
104 class ExceptionBlob;
105 class DeoptimizationBlob;
106 class SafepointBlob;
107 class UncommonTrapBlob;
108
109 class CodeBlob {
110 friend class VMStructs;
111 friend class JVMCIVMStructs;
112
113 protected:
114 // order fields from large to small to minimize padding between fields
115 ImmutableOopMapSet* _oop_maps; // OopMap for this CodeBlob
116 const char* _name;
117 address _mutable_data;
118
119 int _size; // total size of CodeBlob in bytes
120 int _relocation_size; // size of relocation (could be bigger than 64Kb)
121 int _content_offset; // offset to where content region begins (this includes consts, insts, stubs)
122 int _code_offset; // offset to where instructions region begins (this includes insts, stubs)
123 int _data_offset; // offset to where data region begins
124 int _frame_size; // size of stack frame in words (NOT slots. On x64 these are 64bit words)
125 int _mutable_data_size;
126
127 S390_ONLY(int _ctable_offset;)
128
129 uint16_t _header_size; // size of header (depends on subclass)
130 int16_t _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
131 // not finished setting up their frame. Beware of pc's in
132 // that range. There is a similar range(s) on returns
133 // which we don't detect.
134
135 CodeBlobKind _kind; // Kind of this code blob
136
137 bool _caller_must_gc_arguments;
138
139 #ifndef PRODUCT
140 AsmRemarks _asm_remarks;
141 DbgStrings _dbg_strings;
142 #endif
143
144 void print_on_impl(outputStream* st) const;
145 void print_value_on_impl(outputStream* st) const;
146
147 class Vptr {
148 public:
149 virtual void print_on(const CodeBlob* instance, outputStream* st) const = 0;
150 virtual void print_value_on(const CodeBlob* instance, outputStream* st) const = 0;
151 virtual void prepare_for_archiving(CodeBlob* instance) const {
152 instance->prepare_for_archiving_impl();
153 };
154 virtual void post_restore(CodeBlob* instance) const {
155 instance->post_restore_impl();
156 };
157 };
158
159 static const Vptr* vptr(CodeBlobKind kind);
160 const Vptr* vptr() const;
161
162 CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
163 int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
164 int mutable_data_size);
165
166 // Simple CodeBlob used for simple BufferBlob.
167 CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);
168
169
170 void operator delete(void* p) { }
171
172 void prepare_for_archiving_impl() NOT_CDS_RETURN;
173 void post_restore_impl() NOT_CDS_RETURN;
174
175 public:
176
177 ~CodeBlob() {
178 assert(_oop_maps == nullptr, "Not flushed");
179 }
180
181 // Returns the space needed for CodeBlob
182 static unsigned int allocation_size(CodeBuffer* cb, int header_size);
183 static unsigned int align_code_offset(int offset);
184
185 // Deletion
186 void purge();
187
188 // Typing
189 bool is_nmethod() const { return _kind == CodeBlobKind::Nmethod; }
190 // we may want to check for an actual buffer blob or subtype instance
191 bool is_buffer_blob(bool strict=true) const {
192 if (strict) {
193 return _kind == CodeBlobKind::Buffer;
194 } else {
195 return (_kind == CodeBlobKind::Buffer ||
196 _kind == CodeBlobKind::Adapter ||
197 _kind == CodeBlobKind::Vtable ||
198 _kind == CodeBlobKind::MHAdapter);
199 }
200 }
201 bool is_runtime_stub() const { return _kind == CodeBlobKind::RuntimeStub; }
202 // singleton blobs are never directly implemented
203 bool is_deoptimization_stub() const { return _kind == CodeBlobKind::Deoptimization; }
204 #ifdef COMPILER2
205 bool is_uncommon_trap_stub() const { return _kind == CodeBlobKind::UncommonTrap; }
206 bool is_exception_stub() const { return _kind == CodeBlobKind::Exception; }
207 #else
208 bool is_uncommon_trap_stub() const { return false; }
209 bool is_exception_stub() const { return false; }
210 #endif
211 bool is_safepoint_stub() const { return _kind == CodeBlobKind::Safepoint; }
212 bool is_singleton_blob() const {
213 return (is_deoptimization_stub() ||
214 is_uncommon_trap_stub() ||
215 is_exception_stub() ||
216 is_safepoint_stub());
217 }
218 bool is_adapter_blob() const { return _kind == CodeBlobKind::Adapter; }
219 bool is_vtable_blob() const { return _kind == CodeBlobKind::Vtable; }
220 bool is_method_handles_adapter_blob() const { return _kind == CodeBlobKind::MHAdapter; }
221 bool is_upcall_stub() const { return _kind == CodeBlobKind::Upcall; }
222
223 // Casting
224 nmethod* as_nmethod_or_null() const { return is_nmethod() ? (nmethod*) this : nullptr; }
225 nmethod* as_nmethod() const { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
226 CodeBlob* as_codeblob() const { return (CodeBlob*) this; }
227 // we may want to force an actual buffer blob or subtype instance
228 BufferBlob* as_buffer_blob(bool strict = true) const { assert(is_buffer_blob(), "must be %sbuffer blob", (strict ? "strict " : "")); return (BufferBlob*) this; }
229 AdapterBlob* as_adapter_blob() const { assert(is_adapter_blob(), "must be adapter blob"); return (AdapterBlob*) this; }
230 ExceptionBlob* as_exception_blob() const { assert(is_exception_stub(), "must be exception stub"); return (ExceptionBlob*) this; }
231 // this will always return a subtype instance
232 SingletonBlob* as_singleton_blob() const { assert(is_singleton_blob(), "must be singleton blob"); return (SingletonBlob*) this; }
233 DeoptimizationBlob* as_deoptimization_blob() const { assert(is_deoptimization_stub(), "must be deopt stub"); return (DeoptimizationBlob*) this; }
234 SafepointBlob* as_safepoint_blob() const { assert(is_safepoint_stub(), "must be safepoint stub"); return (SafepointBlob*) this; }
235 UpcallStub* as_upcall_stub() const { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; }
236 RuntimeStub* as_runtime_stub() const { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; }
237 UncommonTrapBlob* as_uncommon_trap_blob() const { assert(is_uncommon_trap_stub(), "must be uncommon trap stub"); return (UncommonTrapBlob*) this; }
238
239 // Boundaries
240 address header_begin() const { return (address) this; }
241 address header_end() const { return ((address) this) + _header_size; }
242 address content_begin() const { return (address) header_begin() + _content_offset; }
243 address content_end() const { return (address) header_begin() + _data_offset; }
244 address code_begin() const { return (address) header_begin() + _code_offset; }
245 address code_end() const { return (address) header_begin() + _data_offset; }
246 address data_begin() const { return (address) header_begin() + _data_offset; }
247 address data_end() const { return (address) header_begin() + _size; }
248 address blob_end() const { return (address) header_begin() + _size; }
249 // code_end == content_end is true for all types of blobs for now, it is also checked in the constructor
250
251 int mutable_data_size() const { return _mutable_data_size; }
252 address mutable_data_begin() const { return _mutable_data; }
253 address mutable_data_end() const { return _mutable_data + _mutable_data_size; }
254
255 relocInfo* relocation_begin() const { return (relocInfo*)_mutable_data; }
256 relocInfo* relocation_end() const { return (relocInfo*)((address)relocation_begin() + _relocation_size); }
257
258 // Offsets
259 int content_offset() const { return _content_offset; }
260 int code_offset() const { return _code_offset; }
261
262 // This field holds the beginning of the const section in the old code buffer.
263 // It is needed to fix relocations of pc-relative loads when resizing the
264 // the constant pool or moving it.
265 S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
266 void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }
267
268 // Sizes
269 int size() const { return _size; }
270 int header_size() const { return _header_size; }
271 int relocation_size() const { return _relocation_size; }
272 int content_size() const { return pointer_delta_as_int(content_end(), content_begin()); }
273 int code_size() const { return pointer_delta_as_int(code_end(), code_begin()); }
274
275 // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
276 void adjust_size(size_t used) {
277 _size = (int)used;
278 _data_offset = _size;
279 }
280
281 // Containment
282 bool blob_contains(address addr) const { return header_begin() <= addr && addr < blob_end(); }
283 bool code_contains(address addr) const { return code_begin() <= addr && addr < code_end(); }
284 bool contains(address addr) const { return content_begin() <= addr && addr < content_end(); }
285 bool is_frame_complete_at(address addr) const { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
286 code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
287 int frame_complete_offset() const { return _frame_complete_offset; }
288
289 // OopMap for frame
290 ImmutableOopMapSet* oop_maps() const { return _oop_maps; }
291 void set_oop_maps(OopMapSet* p);
292 void set_oop_maps(ImmutableOopMapSet* p) { _oop_maps = p; }
293
294 const ImmutableOopMap* oop_map_for_slot(int slot, address return_address) const;
295 const ImmutableOopMap* oop_map_for_return_address(address return_address) const;
296
297 // Frame support. Sizes are in word units.
298 int frame_size() const { return _frame_size; }
299 void set_frame_size(int size) { _frame_size = size; }
300
301 // Returns true, if the next frame is responsible for GC'ing oops passed as arguments
302 bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }
303
304 // Naming
305 const char* name() const { return _name; }
306 void set_name(const char* name) { _name = name; }
307
308 // Debugging
309 void verify();
310 void print() const;
311 void print_on(outputStream* st) const;
312 void print_value_on(outputStream* st) const;
313
314 void dump_for_addr(address addr, outputStream* st, bool verbose) const;
315 void print_code_on(outputStream* st);
316
317 // Print to stream, any comments associated with offset.
318 void print_block_comment(outputStream* stream, address block_begin) const;
319
320 #ifndef PRODUCT
321 AsmRemarks &asm_remarks() { return _asm_remarks; }
322 DbgStrings &dbg_strings() { return _dbg_strings; }
323
324 void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
325 void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
326 #endif
327
328 #if INCLUDE_CDS
329 void restore_mutable_data(address reloc_data);
330
331 void copy_to(address buffer) {
332 memcpy(buffer, this, this->size());
333 }
334
335 // methods to archive a blob into AOT code cache
336 void prepare_for_archiving();
337 static void archive_blob(CodeBlob* blob, address archive_buffer);
338
339 // methods to restore a blob from AOT code cache into the CodeCache
340 void post_restore();
341 CodeBlob* restore(address code_cache_buffer, AOTCodeReader* reader);
342 static CodeBlob* create(CodeBlob* archived_blob, AOTCodeReader* reader);
343 #endif
344 };
345
346 //----------------------------------------------------------------------------------------------------
347 // RuntimeBlob: used for non-compiled method code (adapters, stubs, blobs)
348
class RuntimeBlob : public CodeBlob {
 public:

  // Creation
  // a) simple CodeBlob
  RuntimeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size)
    : CodeBlob(name, kind, size, header_size)
  {}

  // b) full CodeBlob
  // frame_complete is the offset from the beginning of the instructions
  // to where the frame setup (from stackwalk viewpoint) is complete.
  RuntimeBlob(
    const char* name,
    CodeBlobKind kind,
    CodeBuffer* cb,
    int size,
    uint16_t header_size,
    int16_t frame_complete,
    int frame_size,
    OopMapSet* oop_maps,
    bool caller_must_gc_arguments = false
  );

  // Deallocation entry point for runtime blobs (see also RuntimeStub::free).
  static void free(RuntimeBlob* blob);

  // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
  static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");

  // No kind-specific dispatch added at this level; subclasses override.
  class Vptr : public CodeBlob::Vptr {
  };
};
381
382 class WhiteBox;
383 //----------------------------------------------------------------------------------------------------
384 // BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.
385
386 class BufferBlob: public RuntimeBlob {
387 friend class VMStructs;
388 friend class AdapterBlob;
389 friend class VtableBlob;
390 friend class MethodHandlesAdapterBlob;
391 friend class UpcallStub;
392 friend class WhiteBox;
393
394 private:
395 // Creation support
396 BufferBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size = sizeof(BufferBlob));
397 BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size = sizeof(BufferBlob));
398
399 void* operator new(size_t s, unsigned size) throw();
400
401 public:
402 // Creation
403 static BufferBlob* create(const char* name, uint buffer_size);
404 static BufferBlob* create(const char* name, CodeBuffer* cb);
405
406 static void free(BufferBlob* buf);
407
408 void print_on_impl(outputStream* st) const;
409 void print_value_on_impl(outputStream* st) const;
410
411 class Vptr : public RuntimeBlob::Vptr {
412 void print_on(const CodeBlob* instance, outputStream* st) const override {
413 instance->as_buffer_blob(false)->print_on_impl(st);
414 }
415 void print_value_on(const CodeBlob* instance, outputStream* st) const override {
416 instance->as_buffer_blob(false)->print_value_on_impl(st);
417 }
418 };
419
420 static const Vptr _vpntr;
421 };
422
423
424 //----------------------------------------------------------------------------------------------------
425 // AdapterBlob: used to hold C2I/I2C adapters
426
class AdapterBlob: public BufferBlob {
 public:
  // Indices into the entry_offset array passed to create(); each offset is
  // relative to i2c_entry() (== code_begin()).
  enum Entry {
    I2C,
    C2I,
    C2I_Unverified,
    C2I_No_Clinit_Check,
    ENTRY_COUNT
  };
 private:
  AdapterBlob(int size, CodeBuffer* cb, int entry_offset[ENTRY_COUNT]);
  // _i2c_offset is always 0 so no need to store it
  int _c2i_offset;                 // offset of the (verified) C2I entry
  int _c2i_unverified_offset;      // offset of the unverified C2I entry
  int _c2i_no_clinit_check_offset; // -1 when no no-clinit-check entry exists
 public:
  // Creation
  static AdapterBlob* create(CodeBuffer* cb, int entry_offset[ENTRY_COUNT]);
  address i2c_entry()            { return code_begin(); }
  address c2i_entry()            { return i2c_entry() + _c2i_offset; }
  address c2i_unverified_entry() { return i2c_entry() + _c2i_unverified_offset; }
  // Returns nullptr when the adapter has no no-clinit-check entry (offset == -1).
  address c2i_no_clinit_check_entry() { return _c2i_no_clinit_check_offset == -1 ? nullptr : i2c_entry() + _c2i_no_clinit_check_offset; }
};
450
451 //---------------------------------------------------------------------------------------------------
// VtableBlob: BufferBlob subtype used for holding vtable chunks (CodeBlobKind::Vtable).
class VtableBlob: public BufferBlob {
 private:
  VtableBlob(const char*, int);

  // Custom allocator; 'size' is the total blob size in bytes.
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static VtableBlob* create(const char* name, int buffer_size);
};
462
463 //----------------------------------------------------------------------------------------------------
464 // MethodHandlesAdapterBlob: used to hold MethodHandles adapters
465
class MethodHandlesAdapterBlob: public BufferBlob {
 private:
  // Name and kind are fixed; only the size varies.
  MethodHandlesAdapterBlob(int size): BufferBlob("MethodHandles adapters", CodeBlobKind::MHAdapter, size) {}

 public:
  // Creation
  static MethodHandlesAdapterBlob* create(int buffer_size);
};
474
475
476 //----------------------------------------------------------------------------------------------------
477 // RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine
478
class RuntimeStub: public RuntimeBlob {
  friend class VMStructs;
 private:
  // Creation support
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int size,
    int16_t frame_complete,
    int frame_size,
    OopMapSet* oop_maps,
    bool caller_must_gc_arguments
  );

  // Custom allocator; 'size' is the total blob size in bytes.
  void* operator new(size_t s, unsigned size) throw();

 public:
  // A runtime stub exposes exactly one entry point (entry_point()).
  static const int ENTRY_COUNT = 1;
  // Creation
  // NOTE(review): alloc_fail_is_fatal=false presumably makes allocation
  // failure return nullptr instead of aborting — confirm in codeBlob.cpp.
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int16_t frame_complete,
    int frame_size,
    OopMapSet* oop_maps,
    bool caller_must_gc_arguments,
    bool alloc_fail_is_fatal=true
  );

  static void free(RuntimeStub* stub) { RuntimeBlob::free(stub); }

  address entry_point() const { return code_begin(); }

  // Called after restore from the AOT code cache; re-announces the stub
  // via trace_new_stub (Disassembler, JVMTI, etc.).
  void post_restore_impl() {
    trace_new_stub(this, "RuntimeStub - ", name());
  }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  // Kind-specific dispatch table (see CodeBlob::Vptr).
  class Vptr : public RuntimeBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_runtime_stub()->post_restore_impl();
    }
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
533
534
535 //----------------------------------------------------------------------------------------------------
536 // Super-class for all blobs that exist in only one instance. Implements default behaviour.
537
class SingletonBlob: public RuntimeBlob {
  friend class VMStructs;

 protected:
  // Custom allocator; 'size' is the total blob size in bytes.
  // NOTE(review): alloc_fail_is_fatal=false presumably permits a null result
  // on allocation failure — confirm in codeBlob.cpp.
  void* operator new(size_t s, unsigned size, bool alloc_fail_is_fatal=true) throw();

 public:
  // Singleton blobs always pass CodeOffsets::frame_never_safe, i.e. their
  // frames are never considered complete for stack walking
  // (see CodeBlob::is_frame_complete_at).
  SingletonBlob(
    const char* name,
    CodeBlobKind kind,
    CodeBuffer* cb,
    int size,
    uint16_t header_size,
    int frame_size,
    OopMapSet* oop_maps
  )
    : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point() { return code_begin(); }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_singleton_blob()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_singleton_blob()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
573
574
575 //----------------------------------------------------------------------------------------------------
576 // DeoptimizationBlob
577
578 class DeoptimizationBlob: public SingletonBlob {
579 friend class VMStructs;
580 friend class JVMCIVMStructs;
581 private:
582 int _unpack_offset;
583 int _unpack_with_exception;
584 int _unpack_with_reexecution;
585
586 int _unpack_with_exception_in_tls;
587
588 #if INCLUDE_JVMCI
589 // Offsets when JVMCI calls uncommon_trap.
590 int _uncommon_trap_offset;
591 int _implicit_exception_uncommon_trap_offset;
592 #endif
593
594 // Creation support
595 DeoptimizationBlob(
596 CodeBuffer* cb,
597 int size,
598 OopMapSet* oop_maps,
599 int unpack_offset,
600 int unpack_with_exception_offset,
601 int unpack_with_reexecution_offset,
602 int frame_size
603 );
604
605 public:
606 static const int ENTRY_COUNT = 4 JVMTI_ONLY(+ 2);
607 // Creation
608 static DeoptimizationBlob* create(
609 CodeBuffer* cb,
610 OopMapSet* oop_maps,
611 int unpack_offset,
612 int unpack_with_exception_offset,
613 int unpack_with_reexecution_offset,
614 int frame_size
615 );
616
617 address unpack() const { return code_begin() + _unpack_offset; }
618 address unpack_with_exception() const { return code_begin() + _unpack_with_exception; }
619 address unpack_with_reexecution() const { return code_begin() + _unpack_with_reexecution; }
620
621 // Alternate entry point for C1 where the exception and issuing pc
622 // are in JavaThread::_exception_oop and JavaThread::_exception_pc
623 // instead of being in registers. This is needed because C1 doesn't
624 // model exception paths in a way that keeps these registers free so
625 // there may be live values in those registers during deopt.
626 void set_unpack_with_exception_in_tls_offset(int offset) {
627 _unpack_with_exception_in_tls = offset;
628 assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
629 }
630 address unpack_with_exception_in_tls() const { return code_begin() + _unpack_with_exception_in_tls; }
631
632 #if INCLUDE_JVMCI
633 // Offsets when JVMCI calls uncommon_trap.
634 void set_uncommon_trap_offset(int offset) {
635 _uncommon_trap_offset = offset;
636 assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
637 }
638 address uncommon_trap() const { return (EnableJVMCI ? code_begin() + _uncommon_trap_offset : nullptr); }
639
640 void set_implicit_exception_uncommon_trap_offset(int offset) {
641 _implicit_exception_uncommon_trap_offset = offset;
642 assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
643 }
644 address implicit_exception_uncommon_trap() const { return (EnableJVMCI ? code_begin() + _implicit_exception_uncommon_trap_offset : nullptr); }
645 #endif // INCLUDE_JVMCI
646
647 void post_restore_impl() {
648 trace_new_stub(this, "DeoptimizationBlob");
649 }
650
651 void print_value_on_impl(outputStream* st) const;
652
653 class Vptr : public SingletonBlob::Vptr {
654 void post_restore(CodeBlob* instance) const override {
655 instance->as_deoptimization_blob()->post_restore_impl();
656 }
657
658 void print_value_on(const CodeBlob* instance, outputStream* st) const override {
659 instance->as_deoptimization_blob()->print_value_on_impl(st);
660 }
661 };
662
663 static const Vptr _vpntr;
664 };
665
666
667 //----------------------------------------------------------------------------------------------------
668 // UncommonTrapBlob (currently only used by Compiler 2)
669
670 #ifdef COMPILER2
671
class UncommonTrapBlob: public SingletonBlob {
 private:
  // Creation support
  UncommonTrapBlob(
    CodeBuffer* cb,
    int size,
    OopMapSet* oop_maps,
    int frame_size
  );

 public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet* oop_maps,
    int frame_size
  );
  // Called after restore from the AOT code cache (see trace_new_stub).
  void post_restore_impl() {
    trace_new_stub(this, "UncommonTrapBlob");
  }
  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_uncommon_trap_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
700
701
702 //----------------------------------------------------------------------------------------------------
703 // ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)
704
class ExceptionBlob: public SingletonBlob {
 private:
  // Creation support
  ExceptionBlob(
    CodeBuffer* cb,
    int size,
    OopMapSet* oop_maps,
    int frame_size
  );

 public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet* oop_maps,
    int frame_size
  );

  // Called after restore from the AOT code cache (see trace_new_stub).
  void post_restore_impl() {
    trace_new_stub(this, "ExceptionBlob");
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_exception_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
735 #endif // COMPILER2
736
737
738 //----------------------------------------------------------------------------------------------------
739 // SafepointBlob: handles illegal_instruction exceptions during a safepoint
740
class SafepointBlob: public SingletonBlob {
 private:
  // Creation support
  SafepointBlob(
    CodeBuffer* cb,
    int size,
    OopMapSet* oop_maps,
    int frame_size
  );

 public:
  // Single entry point (SingletonBlob::entry_point()).
  static const int ENTRY_COUNT = 1;
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet* oop_maps,
    int frame_size
  );

  // Called after restore from the AOT code cache (see trace_new_stub).
  void post_restore_impl() {
    trace_new_stub(this, "SafepointBlob - ", name());
  }
  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_safepoint_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
771
772 //----------------------------------------------------------------------------------------------------
773
774 class UpcallLinker;
775
776 // A (Panama) upcall stub. Not used by JNI.
class UpcallStub: public RuntimeBlob {
  friend class VMStructs;
  friend class UpcallLinker;
 private:
  jobject _receiver;           // handle to the Java-side receiver object (see receiver())
  ByteSize _frame_data_offset; // offset of the FrameData area within the stub's frame
                               // (presumably — confirm against frame_data_for_frame)

  UpcallStub(const char* name, CodeBuffer* cb, int size, jobject receiver, ByteSize frame_data_offset);

  // Custom allocator; 'size' is the total blob size in bytes.
  void* operator new(size_t s, unsigned size) throw();

  // Per-invocation bookkeeping kept in the stub's stack frame.
  struct FrameData {
    JavaFrameAnchor jfa;         // see jfa_for_frame()
    JavaThread* thread;
    JNIHandleBlock* old_handles;
    JNIHandleBlock* new_handles;
  };

  // defined in frame_ARCH.cpp
  FrameData* frame_data_for_frame(const frame& frame) const;
 public:
  // Creation
  static UpcallStub* create(const char* name, CodeBuffer* cb, jobject receiver, ByteSize frame_data_offset);

  static void free(UpcallStub* blob);

  jobject receiver() { return _receiver; }

  JavaFrameAnchor* jfa_for_frame(const frame& frame) const;

  // GC support
  void oops_do(OopClosure* f, const frame& frame);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
824
825 #endif // SHARE_CODE_CODEBLOB_HPP