1 /*
2 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_CODE_CODEBLOB_HPP
26 #define SHARE_CODE_CODEBLOB_HPP
27
28 #include "asm/codeBuffer.hpp"
29 #include "compiler/compilerDefinitions.hpp"
30 #include "compiler/oopMap.hpp"
31 #include "runtime/frame.hpp"
32 #include "runtime/handles.hpp"
33 #include "runtime/javaFrameAnchor.hpp"
34 #include "utilities/align.hpp"
35 #include "utilities/macros.hpp"
36
37 class AOTCodeReader;
38 class ImmutableOopMap;
39 class ImmutableOopMapSet;
40 class JNIHandleBlock;
41 class OopMapSet;
42
43 // CodeBlob Types
44 // Used in the CodeCache to assign CodeBlobs to different CodeHeaps
enum class CodeBlobType {
  MethodNonProfiled   = 0,    // Execution level 1 and 4 (non-profiled) nmethods (including native nmethods)
  MethodProfiled      = 1,    // Execution level 2 and 3 (profiled) nmethods
  MethodHot           = 2,    // Nmethods predicted to be always hot
  NonNMethod          = 3,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
  All                 = 4,    // All types (No code cache segmentation)
  NumTypes            = 5     // Number of CodeBlobTypes; values are explicit and contiguous,
                              // so this sentinel must remain last
};
53
54 // CodeBlob - superclass for all entries in the CodeCache.
55 //
56 // Subtypes are:
57 // nmethod : JIT Compiled Java methods
58 // RuntimeBlob : Non-compiled method code; generated glue code
59 // BufferBlob : Used for non-relocatable code such as interpreter, stubroutines, etc.
60 // AdapterBlob : Used to hold C2I/I2C adapters
61 // VtableBlob : Used for holding vtable chunks
62 // MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
63 // BufferedInlineTypeBlob : used for pack/unpack handlers
64 // RuntimeStub : Call to VM runtime methods
65 // SingletonBlob : Super-class for all blobs that exist in only one instance
66 // DeoptimizationBlob : Used for deoptimization
67 // SafepointBlob : Used to handle illegal instruction exceptions
68 // ExceptionBlob : Used for stack unrolling
69 // UncommonTrapBlob : Used to handle uncommon traps
70 // UpcallStub : Used for upcalls from native code
71 //
72 //
73 // Layout in the CodeCache:
74 // - header
75 // - content space
76 // - instruction space
77 // Outside of the CodeCache:
78 // - mutable_data
79 // - relocation info
80 // - additional data for subclasses
81
// Discriminator stored in CodeBlob::_kind; one enumerator per concrete CodeBlob
// subclass (see the is_*()/as_*() accessors on CodeBlob below).
enum class CodeBlobKind : u1 {
  None,                 // Uninitialized / no kind
  Nmethod,              // nmethod
  Buffer,               // BufferBlob
  Adapter,              // AdapterBlob
  Vtable,               // VtableBlob
  MHAdapter,            // MethodHandlesAdapterBlob
  BufferedInlineType,   // BufferedInlineTypeBlob
  RuntimeStub,          // RuntimeStub
  Deoptimization,       // DeoptimizationBlob
  Safepoint,            // SafepointBlob
#ifdef COMPILER2
  Exception,            // ExceptionBlob (C2 only)
  UncommonTrap,         // UncommonTrapBlob (C2 only)
#endif
  Upcall,               // UpcallStub
  Number_Of_Kinds       // Count sentinel; must remain last
};
100
101 class UpcallStub; // for as_upcall_stub()
102 class RuntimeStub; // for as_runtime_stub()
103 class JavaFrameAnchor; // for UpcallStub::jfa_for_frame
104 class BufferBlob;
105 class AdapterBlob;
106 class SingletonBlob;
107 class ExceptionBlob;
108 class DeoptimizationBlob;
109 class SafepointBlob;
110 class UncommonTrapBlob;
111
class CodeBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;

protected:
  // order fields from large to small to minimize padding between fields
  ImmutableOopMapSet* _oop_maps;   // OopMap for this CodeBlob
  const char*         _name;       // human-readable name (pointer not owned; see set_name())
  address             _mutable_data; // start of the mutable data area, which lives outside the
                                     // CodeCache; relocation info is at its beginning
                                     // (see relocation_begin()/relocation_end())

  int      _size;                  // total size of CodeBlob in bytes
  int      _relocation_size;       // size of relocation (could be bigger than 64Kb)
  int      _content_offset;        // offset to where content region begins (this includes consts, insts, stubs)
  int      _code_offset;           // offset to where instructions region begins (this includes insts, stubs)
  int      _data_offset;           // offset to where data region begins
  int      _frame_size;            // size of stack frame in words (NOT slots. On x64 these are 64bit words)
  int      _mutable_data_size;     // size of the mutable data area in bytes

  S390_ONLY(int _ctable_offset;)   // s390 only: offset of the constant table (see ctable_begin())

  uint16_t _header_size;           // size of header (depends on subclass)
  int16_t  _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
                                   // not finished setting up their frame. Beware of pc's in
                                   // that range. There is a similar range(s) on returns
                                   // which we don't detect.

  CodeBlobKind _kind;              // Kind of this code blob

  bool _caller_must_gc_arguments;  // returned by caller_must_gc_arguments()

#ifndef PRODUCT
  AsmRemarks _asm_remarks;         // assembly remarks attached to the code (non-product builds only)
  DbgStrings _dbg_strings;         // debug strings attached to the code (non-product builds only)
#endif

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  // CodeBlob itself carries no C++ vtable. Kind-specific behavior is dispatched
  // through a Vptr object selected by _kind (see vptr(kind) below); subclasses
  // override these hooks in their nested Vptr classes.
  class Vptr {
   public:
    virtual void print_on(const CodeBlob* instance, outputStream* st) const = 0;
    virtual void print_value_on(const CodeBlob* instance, outputStream* st) const = 0;
    virtual void prepare_for_archiving(CodeBlob* instance) const {
      instance->prepare_for_archiving_impl();
    };
    virtual void post_restore(CodeBlob* instance) const {
      instance->post_restore_impl();
    };
  };

  static const Vptr* vptr(CodeBlobKind kind); // Vptr for a given kind
  const Vptr* vptr() const;                   // Vptr for this blob's _kind

  // Full constructor: initializes from a CodeBuffer, including oop maps and
  // the externally-allocated mutable data area.
  CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
           int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
           int mutable_data_size);

  // Simple CodeBlob used for simple BufferBlob.
  CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);

  // CodeBlobs live in the CodeCache, not on the C++ heap, so ordinary
  // delete must not free memory; reclamation goes through purge().
  void operator delete(void* p) { }

  void prepare_for_archiving_impl() NOT_CDS_RETURN;
  void post_restore_impl() NOT_CDS_RETURN;

public:

  ~CodeBlob() {
    assert(_oop_maps == nullptr, "Not flushed");
  }

  // Returns the space needed for CodeBlob
  static unsigned int allocation_size(CodeBuffer* cb, int header_size);
  static unsigned int align_code_offset(int offset);

  // Deletion
  void purge();

  // Typing
  bool is_nmethod() const                     { return _kind == CodeBlobKind::Nmethod; }
  // we may want to check for an actual buffer blob or subtype instance
  bool is_buffer_blob(bool strict=true) const {
    if (strict) {
      return _kind == CodeBlobKind::Buffer;
    } else {
      // non-strict: also accept the BufferBlob subclasses
      return (_kind == CodeBlobKind::Buffer ||
              _kind == CodeBlobKind::Adapter ||
              _kind == CodeBlobKind::Vtable ||
              _kind == CodeBlobKind::MHAdapter);
    }
  }
  bool is_runtime_stub() const                { return _kind == CodeBlobKind::RuntimeStub; }
  // singleton blobs are never directly implemented
  bool is_deoptimization_stub() const         { return _kind == CodeBlobKind::Deoptimization; }
#ifdef COMPILER2
  bool is_uncommon_trap_stub() const          { return _kind == CodeBlobKind::UncommonTrap; }
  bool is_exception_stub() const              { return _kind == CodeBlobKind::Exception; }
#else
  // these blob kinds only exist with C2
  bool is_uncommon_trap_stub() const          { return false; }
  bool is_exception_stub() const              { return false; }
#endif
  bool is_safepoint_stub() const              { return _kind == CodeBlobKind::Safepoint; }
  bool is_singleton_blob() const              {
    return (is_deoptimization_stub() ||
            is_uncommon_trap_stub() ||
            is_exception_stub() ||
            is_safepoint_stub());
  }
  bool is_adapter_blob() const                { return _kind == CodeBlobKind::Adapter; }
  bool is_vtable_blob() const                 { return _kind == CodeBlobKind::Vtable; }
  bool is_method_handles_adapter_blob() const { return _kind == CodeBlobKind::MHAdapter; }
  bool is_buffered_inline_type_blob() const   { return _kind == CodeBlobKind::BufferedInlineType; }
  bool is_upcall_stub() const                 { return _kind == CodeBlobKind::Upcall; }

  // Casting. Each as_*() asserts the matching is_*() predicate; the _or_null
  // variant returns nullptr instead of asserting.
  nmethod* as_nmethod_or_null() const         { return is_nmethod() ? (nmethod*) this : nullptr; }
  nmethod* as_nmethod() const                 { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
  CodeBlob* as_codeblob() const               { return (CodeBlob*) this; }
  // we may want to force an actual buffer blob or subtype instance
  BufferBlob* as_buffer_blob(bool strict = true) const { assert(is_buffer_blob(strict), "must be %sbuffer blob", (strict ? "strict " : "")); return (BufferBlob*) this; }
  AdapterBlob* as_adapter_blob() const        { assert(is_adapter_blob(), "must be adapter blob"); return (AdapterBlob*) this; }
  ExceptionBlob* as_exception_blob() const    { assert(is_exception_stub(), "must be exception stub"); return (ExceptionBlob*) this; }
  // this will always return a subtype instance
  SingletonBlob* as_singleton_blob() const    { assert(is_singleton_blob(), "must be singleton blob"); return (SingletonBlob*) this; }
  DeoptimizationBlob* as_deoptimization_blob() const { assert(is_deoptimization_stub(), "must be deopt stub"); return (DeoptimizationBlob*) this; }
  SafepointBlob* as_safepoint_blob() const    { assert(is_safepoint_stub(), "must be safepoint stub"); return (SafepointBlob*) this; }
  UpcallStub* as_upcall_stub() const          { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; }
  RuntimeStub* as_runtime_stub() const        { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; }
  UncommonTrapBlob* as_uncommon_trap_blob() const { assert(is_uncommon_trap_stub(), "must be uncommon trap stub"); return (UncommonTrapBlob*) this; }

  // Boundaries. All offsets are relative to header_begin() (i.e. 'this').
  address header_begin() const        { return (address) this; }
  address header_end() const          { return ((address) this) + _header_size; }
  address content_begin() const       { return (address) header_begin() + _content_offset; }
  address content_end() const         { return (address) header_begin() + _data_offset; }
  address code_begin() const          { return (address) header_begin() + _code_offset; }
  address code_end() const            { return (address) header_begin() + _data_offset; }
  address data_begin() const          { return (address) header_begin() + _data_offset; }
  address data_end() const            { return (address) header_begin() + _size; }
  address blob_end() const            { return (address) header_begin() + _size; }
  // code_end == content_end is true for all types of blobs for now, it is also checked in the constructor

  // Mutable data area (outside the CodeCache); begins with relocation info.
  int mutable_data_size() const       { return _mutable_data_size; }
  address mutable_data_begin() const  { return _mutable_data; }
  address mutable_data_end() const    { return _mutable_data + _mutable_data_size; }

  relocInfo* relocation_begin() const { return (relocInfo*)_mutable_data; }
  relocInfo* relocation_end() const   { return (relocInfo*)((address)relocation_begin() + _relocation_size); }

  // Offsets
  int content_offset() const          { return _content_offset; }
  int code_offset() const             { return _code_offset; }

  // This field holds the beginning of the const section in the old code buffer.
  // It is needed to fix relocations of pc-relative loads when resizing the
  // the constant pool or moving it.
  S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
  void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }

  // Sizes
  int size() const               { return _size; }
  int header_size() const        { return _header_size; }
  int relocation_size() const    { return _relocation_size; }
  int content_size() const       { return pointer_delta_as_int(content_end(), content_begin()); }
  int code_size() const          { return pointer_delta_as_int(code_end(), code_begin()); }

  // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
  void adjust_size(size_t used) {
    _size = (int)used;
    _data_offset = _size;
  }

  // Containment
  bool blob_contains(address addr) const         { return header_begin()       <= addr && addr < blob_end(); }
  bool code_contains(address addr) const         { return code_begin()         <= addr && addr < code_end(); }
  bool contains(address addr) const              { return content_begin()      <= addr && addr < content_end(); }
  // True iff 'addr' is past the point where this blob's frame is fully set up
  // (always false if _frame_complete_offset is frame_never_safe).
  bool is_frame_complete_at(address addr) const  { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
                                                          code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
  int frame_complete_offset() const              { return _frame_complete_offset; }

  // OopMap for frame
  ImmutableOopMapSet* oop_maps() const           { return _oop_maps; }
  void set_oop_maps(OopMapSet* p);
  void set_oop_maps(ImmutableOopMapSet* p)       { _oop_maps = p; }

  // Lookup of the oop map covering a given pc within this blob.
  const ImmutableOopMap* oop_map_for_slot(int slot, address return_address) const;
  const ImmutableOopMap* oop_map_for_return_address(address return_address) const;

  // Frame support. Sizes are in word units.
  int  frame_size() const                        { return _frame_size; }
  void set_frame_size(int size)                  { _frame_size = size; }

  // Returns true, if the next frame is responsible for GC'ing oops passed as arguments
  bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }

  // Naming
  const char* name() const                       { return _name; }
  void set_name(const char* name)                { _name = name; }

  // Debugging
  void verify();
  void print() const;
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;

  void dump_for_addr(address addr, outputStream* st, bool verbose) const;
  void print_code_on(outputStream* st);

  // Print to stream, any comments associated with offset.
  void print_block_comment(outputStream* stream, address block_begin) const;

#ifndef PRODUCT
  AsmRemarks &asm_remarks() { return _asm_remarks; }
  DbgStrings &dbg_strings() { return _dbg_strings; }

  void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
  void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
#endif

#if INCLUDE_CDS
  // Re-attach the mutable data area (relocation info) after restoring from an archive.
  void restore_mutable_data(address reloc_data);

  // Copies the blob's in-CodeCache image (header through data) into 'buffer'.
  // The mutable data area is handled separately (see restore_mutable_data()).
  void copy_to(address buffer) {
    memcpy(buffer, this, this->size());
  }

  // methods to archive a blob into AOT code cache
  void prepare_for_archiving();
  static void archive_blob(CodeBlob* blob, address archive_buffer);

  // methods to restore a blob from AOT code cache into the CodeCache
  void post_restore();
  CodeBlob* restore(address code_cache_buffer, AOTCodeReader* reader);
  static CodeBlob* create(CodeBlob* archived_blob, AOTCodeReader* reader);
#endif
};
349
350 //----------------------------------------------------------------------------------------------------
351 // RuntimeBlob: used for non-compiled method code (adapters, stubs, blobs)
352
class RuntimeBlob : public CodeBlob {
 public:

  // Creation
  // a) simple CodeBlob
  RuntimeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size)
    : CodeBlob(name, kind, size, header_size)
  {}

  // b) full CodeBlob
  // frame_complete is the offset from the beginning of the instructions
  // to where the frame setup (from stackwalk viewpoint) is complete.
  RuntimeBlob(
    const char* name,
    CodeBlobKind kind,
    CodeBuffer* cb,
    int         size,
    uint16_t    header_size,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments = false
  );

  // Releases the blob's storage (blobs are not deleted via operator delete).
  static void free(RuntimeBlob* blob);

  // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
  static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");

  // No RuntimeBlob-specific dispatch hooks; subclasses extend this Vptr.
  class Vptr : public CodeBlob::Vptr {
  };
};
385
386 class WhiteBox;
387 //----------------------------------------------------------------------------------------------------
388 // BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.
389
class BufferBlob: public RuntimeBlob {
  friend class VMStructs;
  // Subclasses need access to the protected constructors and operator new.
  friend class AdapterBlob;
  friend class VtableBlob;
  friend class MethodHandlesAdapterBlob;
  friend class BufferedInlineTypeBlob;
  friend class UpcallStub;
  friend class WhiteBox;

 private:
  // Creation support; header_size defaults to this class's own size so
  // subclasses pass their larger header explicitly.
  BufferBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size = sizeof(BufferBlob));
  BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size = sizeof(BufferBlob));
  BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments = false);

  // Placement allocation of 'size' bytes (in the CodeCache; see create()).
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static BufferBlob* create(const char* name, uint buffer_size);
  static BufferBlob* create(const char* name, CodeBuffer* cb);

  static void free(BufferBlob* buf);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    // as_buffer_blob(false): non-strict, so BufferBlob subclasses reuse this printing.
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_buffer_blob(false)->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_buffer_blob(false)->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
428
429
430 //----------------------------------------------------------------------------------------------------
431 // AdapterBlob: used to hold C2I/I2C adapters
432
class AdapterBlob: public BufferBlob {
 public:
  // Indexes into the entry_offset[] array passed to the constructor/create();
  // one slot per adapter entry point.
  enum Entry {
    I2C,
    C2I,
    C2I_Inline,
    C2I_Inline_RO,
    C2I_Unverified,
    C2I_Unverified_Inline,
    C2I_No_Clinit_Check,
    ENTRY_COUNT
  };
 private:
  AdapterBlob(int size, CodeBuffer* cb, int entry_offset[ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments = false);

  // _i2c_offset is always 0 so no need to store it
  // All offsets below are relative to i2c_entry() (== code_begin()).
  int _c2i_offset;
  int _c2i_inline_offset;
  int _c2i_inline_ro_offset;
  int _c2i_unverified_offset;
  int _c2i_unverified_inline_offset;
  int _c2i_no_clinit_check_offset;  // -1 if this adapter has no clinit-check entry
 public:
  // Creation
  static AdapterBlob* create(CodeBuffer* cb,
                             int entry_offset[ENTRY_COUNT],
                             int frame_complete,
                             int frame_size,
                             OopMapSet* oop_maps,
                             bool caller_must_gc_arguments = false);

  // NOTE: hides (non-virtual) CodeBlob::caller_must_gc_arguments; for adapters
  // the answer is unconditionally true.
  bool caller_must_gc_arguments(JavaThread* thread) const { return true; }
  static AdapterBlob* create(CodeBuffer* cb, int entry_offset[ENTRY_COUNT]);
  address i2c_entry()                  { return code_begin(); }
  address c2i_entry()                  { return i2c_entry() + _c2i_offset; }
  address c2i_inline_entry()           { return i2c_entry() + _c2i_inline_offset; }
  address c2i_inline_ro_entry()        { return i2c_entry() + _c2i_inline_ro_offset; }
  address c2i_unverified_entry()       { return i2c_entry() + _c2i_unverified_offset; }
  address c2i_unverified_inline_entry() { return i2c_entry() + _c2i_unverified_inline_offset; }
  // Returns nullptr when no clinit-check entry was generated (offset == -1).
  address c2i_no_clinit_check_entry()  { return _c2i_no_clinit_check_offset == -1 ? nullptr : i2c_entry() + _c2i_no_clinit_check_offset; }
};
474
475 //---------------------------------------------------------------------------------------------------
class VtableBlob: public BufferBlob {
 private:
  VtableBlob(const char*, int);

  // Placement allocation of 'size' bytes (see create()).
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static VtableBlob* create(const char* name, int buffer_size);
};
486
487 //----------------------------------------------------------------------------------------------------
488 // MethodHandlesAdapterBlob: used to hold MethodHandles adapters
489
class MethodHandlesAdapterBlob: public BufferBlob {
 private:
  // Fixed name and kind; only the buffer size varies.
  MethodHandlesAdapterBlob(int size): BufferBlob("MethodHandles adapters", CodeBlobKind::MHAdapter, size) {}

 public:
  // Creation
  static MethodHandlesAdapterBlob* create(int buffer_size);
};
498
499 //----------------------------------------------------------------------------------------------------
500 // BufferedInlineTypeBlob : used for pack/unpack handlers
501
class BufferedInlineTypeBlob: public BufferBlob {
 private:
  // Entry-point offsets, relative to code_begin(); fixed at construction.
  const int _pack_fields_off;
  const int _pack_fields_jobject_off;
  const int _unpack_fields_off;

  BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off);

 public:
  // Creation
  static BufferedInlineTypeBlob* create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off);

  // Entry points for the pack/unpack handlers.
  address pack_fields() const { return code_begin() + _pack_fields_off; }
  address pack_fields_jobject() const { return code_begin() + _pack_fields_jobject_off; }
  address unpack_fields() const { return code_begin() + _unpack_fields_off; }
};
518
519 //----------------------------------------------------------------------------------------------------
520 // RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine
521
class RuntimeStub: public RuntimeBlob {
  friend class VMStructs;
 private:
  // Creation support
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int         size,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // Placement allocation of 'size' bytes (see new_runtime_stub()).
  void* operator new(size_t s, unsigned size) throw();

 public:
  // A runtime stub has exactly one entry point (entry_point()).
  static const int ENTRY_COUNT = 1;
  // Creation
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments,
    bool        alloc_fail_is_fatal=true
  );

  static void free(RuntimeStub* stub) { RuntimeBlob::free(stub); }

  address entry_point() const         { return code_begin(); }

  // Re-announce the stub to tracing/profiling after restore from AOT cache.
  void post_restore_impl() {
    trace_new_stub(this, "RuntimeStub - ", name());
  }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_runtime_stub()->post_restore_impl();
    }
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
576
577
578 //----------------------------------------------------------------------------------------------------
579 // Super-class for all blobs that exist in only one instance. Implements default behaviour.
580
class SingletonBlob: public RuntimeBlob {
  friend class VMStructs;

 protected:
  // Placement allocation of 'size' bytes; returns nullptr on failure
  // unless alloc_fail_is_fatal.
  void* operator new(size_t s, unsigned size, bool alloc_fail_is_fatal=true) throw();

 public:
  // Singleton blobs are never safe for stack walking before completion:
  // frame_complete is fixed to CodeOffsets::frame_never_safe.
  SingletonBlob(
    const char* name,
    CodeBlobKind kind,
    CodeBuffer* cb,
    int         size,
    uint16_t    header_size,
    int         frame_size,
    OopMapSet*  oop_maps
  )
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point()                          { return code_begin(); }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_singleton_blob()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_singleton_blob()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
616
617
618 //----------------------------------------------------------------------------------------------------
619 // DeoptimizationBlob
620
class DeoptimizationBlob: public SingletonBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
 private:
  // Entry-point offsets, relative to code_begin().
  int _unpack_offset;
  int _unpack_with_exception;
  int _unpack_with_reexecution;

  int _unpack_with_exception_in_tls;  // set late via set_unpack_with_exception_in_tls_offset()

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  int _uncommon_trap_offset;
  int _implicit_exception_uncommon_trap_offset;
#endif

  // Creation support
  DeoptimizationBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

 public:
  // Four base entry points, plus two more when JVMCI is included.
  static const int ENTRY_COUNT = 4 JVMCI_ONLY(+ 2);
  // Creation
  static DeoptimizationBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

  // Entry points into the deopt handler code.
  address unpack() const                         { return code_begin() + _unpack_offset;           }
  address unpack_with_exception() const          { return code_begin() + _unpack_with_exception;   }
  address unpack_with_reexecution() const        { return code_begin() + _unpack_with_reexecution; }

  // Alternate entry point for C1 where the exception and issuing pc
  // are in JavaThread::_exception_oop and JavaThread::_exception_pc
  // instead of being in registers.  This is needed because C1 doesn't
  // model exception paths in a way that keeps these registers free so
  // there may be live values in those registers during deopt.
  void set_unpack_with_exception_in_tls_offset(int offset) {
    _unpack_with_exception_in_tls = offset;
    assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
  }
  address unpack_with_exception_in_tls() const   { return code_begin() + _unpack_with_exception_in_tls; }

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  void set_uncommon_trap_offset(int offset) {
    _uncommon_trap_offset = offset;
    assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
  }
  // Returns nullptr when JVMCI is not enabled at runtime.
  address uncommon_trap() const                  { return (EnableJVMCI ? code_begin() + _uncommon_trap_offset : nullptr); }

  void set_implicit_exception_uncommon_trap_offset(int offset) {
    _implicit_exception_uncommon_trap_offset = offset;
    assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
  }
  // Returns nullptr when JVMCI is not enabled at runtime.
  address implicit_exception_uncommon_trap() const { return (EnableJVMCI ? code_begin() + _implicit_exception_uncommon_trap_offset : nullptr); }
#endif // INCLUDE_JVMCI

  // Re-announce the blob to tracing/profiling after restore from AOT cache.
  void post_restore_impl() {
    trace_new_stub(this, "DeoptimizationBlob");
  }

  void print_value_on_impl(outputStream* st) const;

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_deoptimization_blob()->post_restore_impl();
    }

    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_deoptimization_blob()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
708
709
710 //----------------------------------------------------------------------------------------------------
711 // UncommonTrapBlob (currently only used by Compiler 2)
712
713 #ifdef COMPILER2
714
class UncommonTrapBlob: public SingletonBlob {
 private:
  // Creation support
  UncommonTrapBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // Re-announce the blob to tracing/profiling after restore from AOT cache.
  void post_restore_impl() {
    trace_new_stub(this, "UncommonTrapBlob");
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_uncommon_trap_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
743
744
745 //----------------------------------------------------------------------------------------------------
746 // ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)
747
class ExceptionBlob: public SingletonBlob {
 private:
  // Creation support
  ExceptionBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // Re-announce the blob to tracing/profiling after restore from AOT cache.
  void post_restore_impl() {
    trace_new_stub(this, "ExceptionBlob");
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_exception_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
778 #endif // COMPILER2
779
780
781 //----------------------------------------------------------------------------------------------------
782 // SafepointBlob: handles illegal_instruction exceptions during a safepoint
783
class SafepointBlob: public SingletonBlob {
 private:
  // Creation support
  SafepointBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Single entry point (entry_point() from SingletonBlob).
  static const int ENTRY_COUNT = 1;
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // Re-announce the blob to tracing/profiling after restore from AOT cache.
  void post_restore_impl() {
    trace_new_stub(this, "SafepointBlob - ", name());
  }

  class Vptr : public SingletonBlob::Vptr {
    void post_restore(CodeBlob* instance) const override {
      instance->as_safepoint_blob()->post_restore_impl();
    }
  };

  static const Vptr _vpntr;
};
814
815 //----------------------------------------------------------------------------------------------------
816
817 class UpcallLinker;
818
819 // A (Panama) upcall stub. Not used by JNI.
// A (Panama) upcall stub. Not used by JNI.
class UpcallStub: public RuntimeBlob {
  friend class VMStructs;
  friend class UpcallLinker;
 private:
  jobject _receiver;               // JNI handle to the receiver object for this upcall
  ByteSize _frame_data_offset;     // offset of the FrameData within the stub's frame

  UpcallStub(const char* name, CodeBuffer* cb, int size, jobject receiver, ByteSize frame_data_offset);

  // Placement allocation of 'size' bytes (see create()).
  void* operator new(size_t s, unsigned size) throw();

  // Per-invocation bookkeeping stored in the stub's stack frame
  // (located via frame_data_for_frame()).
  struct FrameData {
    JavaFrameAnchor jfa;
    JavaThread* thread;
    JNIHandleBlock* old_handles;
    JNIHandleBlock* new_handles;
  };

  // defined in frame_ARCH.cpp
  FrameData* frame_data_for_frame(const frame& frame) const;
 public:
  // Creation
  static UpcallStub* create(const char* name, CodeBuffer* cb, jobject receiver, ByteSize frame_data_offset);

  static void free(UpcallStub* blob);

  jobject receiver() { return _receiver; }

  // Java frame anchor stored in the given stub frame's FrameData.
  JavaFrameAnchor* jfa_for_frame(const frame& frame) const;

  // GC support
  void oops_do(OopClosure* f, const frame& frame);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};
867
868 #endif // SHARE_CODE_CODEBLOB_HPP