1 /*
  2  * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_CODE_CODEBLOB_HPP
 26 #define SHARE_CODE_CODEBLOB_HPP
 27 
 28 #include "asm/codeBuffer.hpp"
 29 #include "compiler/compilerDefinitions.hpp"
 30 #include "compiler/oopMap.hpp"
 31 #include "runtime/javaFrameAnchor.hpp"
 32 #include "runtime/frame.hpp"
 33 #include "runtime/handles.hpp"
 34 #include "utilities/align.hpp"
 35 #include "utilities/macros.hpp"
 36 
 37 class ImmutableOopMap;
 38 class ImmutableOopMapSet;
 39 class JNIHandleBlock;
 40 class OopMapSet;
 41 
 42 // CodeBlob Types
 43 // Used in the CodeCache to assign CodeBlobs to different CodeHeaps
 44 enum class CodeBlobType {
 45   MethodNonProfiled   = 0,    // Execution levels 1 and 4 (non-profiled) nmethods (including native nmethods)
 46   MethodProfiled      = 1,    // Execution levels 2 and 3 (profiled) nmethods
 47   NonNMethod          = 2,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
 48   All                 = 3,    // All types (No code cache segmentation)
 49   NumTypes            = 4     // Number of CodeBlobTypes
 50 };
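
    // Note: with a segmented code cache the first three types are backed by
    // separate CodeHeaps (non-nmethod, profiled nmethod, non-profiled nmethod);
    // without segmentation all blobs are allocated from a single heap, which
    // corresponds to CodeBlobType::All. See CodeCache for the authoritative
    // mapping from blob type to CodeHeap.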
 51 
 52 // CodeBlob - superclass for all entries in the CodeCache.
 53 //
 54 // Subtypes are:
 55 //  nmethod              : JIT Compiled Java methods
 56 //  RuntimeBlob          : Non-compiled method code; generated glue code
 57 //   BufferBlob          : Used for non-relocatable code such as interpreter, stubroutines, etc.
 58 //    AdapterBlob        : Used to hold C2I/I2C adapters
 59 //    VtableBlob         : Used for holding vtable chunks
 60 //    MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
 61 //   RuntimeStub         : Call to VM runtime methods
 62 //   SingletonBlob       : Super-class for all blobs that exist in only one instance
 63 //    DeoptimizationBlob : Used for deoptimization
 64 //    SafepointBlob      : Used to handle illegal instruction exceptions (e.g. during a safepoint)
 65 //    ExceptionBlob      : Used for exception unwinding in compiled code
 66 //    UncommonTrapBlob   : Used to handle uncommon traps
 67 //   UpcallStub          : Used for upcalls from native code
 68 //
 69 //
 70 // Layout in the CodeCache:
 71 //   - header
 72 //   - content space
 73 //     - instruction space
 74 // Outside of the CodeCache:
 75 //   - mutable_data
 76 //     - relocation info
 77 //     - additional data for subclasses
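    //
    // An illustrative mapping of this layout onto the accessors defined below
    // (a sketch only; the concrete offsets are established by the subclass
    // constructors):
    //
    //   header_begin()              == (address) this
    //   header_end()                == header_begin() + _header_size
    //   content_begin()             == header_begin() + _content_offset
    //   code_begin()                == header_begin() + _code_offset
    //   code_end() == content_end() == data_begin() == header_begin() + _data_offset
    //   data_end() == blob_end()    == header_begin() + _size
    //
    //   [mutable_data_begin(), mutable_data_end()) holds the relocation info and
    //   any additional subclass data, allocated outside the CodeCache.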
 78 
 79 enum class CodeBlobKind : u1 {
 80   None,
 81   Nmethod,
 82   Buffer,
 83   Adapter,
 84   Vtable,
 85   MHAdapter,
 86   RuntimeStub,
 87   Deoptimization,
 88   Safepoint,
 89 #ifdef COMPILER2
 90   Exception,
 91   UncommonTrap,
 92 #endif
 93   Upcall,
 94   Number_Of_Kinds
 95 };
 96 
 97 class UpcallStub;      // for as_upcall_stub()
 98 class RuntimeStub;     // for as_runtime_stub()
 99 class JavaFrameAnchor; // for UpcallStub::jfa_for_frame
100 class AdapterBlob;
101 class ExceptionBlob;
102 
103 class CodeBlob {
104   friend class VMStructs;
105   friend class JVMCIVMStructs;
106 
107 private:
108   void restore_mutable_data(address reloc_data);
109 
110 protected:
111   // order fields from large to small to minimize padding between fields
112   ImmutableOopMapSet* _oop_maps;   // OopMap for this CodeBlob
113   const char*         _name;
114   address             _mutable_data;
115 
116   int      _size;                  // total size of CodeBlob in bytes
117   int      _relocation_size;       // size of relocation (could be bigger than 64Kb)
118   int      _content_offset;        // offset to where content region begins (this includes consts, insts, stubs)
119   int      _code_offset;           // offset to where instructions region begins (this includes insts, stubs)
120   int      _data_offset;           // offset to where data region begins
121   int      _frame_size;            // size of stack frame in words (NOT slots. On x64 these are 64bit words)
122   int      _mutable_data_size;
123 
124   S390_ONLY(int _ctable_offset;)
125 
126   uint16_t _header_size;           // size of header (depends on subclass)
127   int16_t  _frame_complete_offset; // at instruction offsets in [0.._frame_complete_offset) the
128                                    // frame is not yet fully set up. Beware of pcs in
129                                    // that range. There are similar ranges on returns
130                                    // which we don't detect.
131 
132   CodeBlobKind _kind;              // Kind of this code blob
133 
134   bool _caller_must_gc_arguments;
135 
136 #ifndef PRODUCT
137   AsmRemarks _asm_remarks;
138   DbgStrings _dbg_strings;
139 #endif
140 
141   void print_on_impl(outputStream* st) const;
142   void print_value_on_impl(outputStream* st) const;
143 
144   class Vptr {
145    public:
146     virtual void print_on(const CodeBlob* instance, outputStream* st) const = 0;
147     virtual void print_value_on(const CodeBlob* instance, outputStream* st) const = 0;
148     virtual void prepare_for_archiving(CodeBlob* instance) const {
149       instance->prepare_for_archiving_impl();
150     }
151     virtual void post_restore(CodeBlob* instance) const {
152       instance->post_restore_impl();
153     }
154   };
155 
156   static const Vptr* vptr(CodeBlobKind kind);
157   const Vptr* vptr() const;
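      // Note: printing and archiving dispatch through this Vptr table (selected
      // by _kind) rather than through C++ virtual functions; plausibly this keeps
      // CodeBlob free of a compiler-generated vtable pointer so that a blob image
      // can be copied byte-for-byte (see copy_to() and the AOT archiving methods
      // below).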
158 
159   CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
160            int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
161            int mutable_data_size);
162 
163   // Simple CodeBlob constructor, used for simple BufferBlobs.
164   CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);
165 
166 
167   void operator delete(void* p) { }
168 
169   void prepare_for_archiving_impl();
170   void post_restore_impl();
171 
172 public:
173 
174   ~CodeBlob() {
175     assert(_oop_maps == nullptr, "Not flushed");
176   }
177 
178   // Returns the space needed for CodeBlob
179   static unsigned int allocation_size(CodeBuffer* cb, int header_size);
180   static unsigned int align_code_offset(int offset);
181 
182   // Deletion
183   void purge();
184 
185   // Typing
186   bool is_nmethod() const                     { return _kind == CodeBlobKind::Nmethod; }
187   bool is_buffer_blob() const                 { return _kind == CodeBlobKind::Buffer; }
188   bool is_runtime_stub() const                { return _kind == CodeBlobKind::RuntimeStub; }
189   bool is_deoptimization_stub() const         { return _kind == CodeBlobKind::Deoptimization; }
190 #ifdef COMPILER2
191   bool is_uncommon_trap_stub() const          { return _kind == CodeBlobKind::UncommonTrap; }
192   bool is_exception_stub() const              { return _kind == CodeBlobKind::Exception; }
193 #else
194   bool is_uncommon_trap_stub() const          { return false; }
195   bool is_exception_stub() const              { return false; }
196 #endif
197   bool is_safepoint_stub() const              { return _kind == CodeBlobKind::Safepoint; }
198   bool is_adapter_blob() const                { return _kind == CodeBlobKind::Adapter; }
199   bool is_vtable_blob() const                 { return _kind == CodeBlobKind::Vtable; }
200   bool is_method_handles_adapter_blob() const { return _kind == CodeBlobKind::MHAdapter; }
201   bool is_upcall_stub() const                 { return _kind == CodeBlobKind::Upcall; }
202 
203   // Casting
204   nmethod* as_nmethod_or_null() const         { return is_nmethod() ? (nmethod*) this : nullptr; }
205   nmethod* as_nmethod() const                 { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
206   CodeBlob* as_codeblob() const               { return (CodeBlob*) this; }
207   AdapterBlob* as_adapter_blob() const        { assert(is_adapter_blob(), "must be adapter blob"); return (AdapterBlob*) this; }
208   UpcallStub* as_upcall_stub() const          { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; }
209   RuntimeStub* as_runtime_stub() const        { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; }
210 
211   // Boundaries
212   address    header_begin() const             { return (address)    this; }
213   address    header_end() const               { return ((address)   this) + _header_size; }
214   address    content_begin() const            { return (address)    header_begin() + _content_offset; }
215   address    content_end() const              { return (address)    header_begin() + _data_offset; }
216   address    code_begin() const               { return (address)    header_begin() + _code_offset; }
217   address    code_end() const                 { return (address)    header_begin() + _data_offset; }
218   address    data_begin() const               { return (address)    header_begin() + _data_offset; }
219   address    data_end() const                 { return (address)    header_begin() + _size; }
220   address    blob_end() const                 { return (address)    header_begin() + _size; }
221   // code_end() == content_end() currently holds for all blob types; this is also checked in the constructor
222 
223   int mutable_data_size() const               { return _mutable_data_size; }
224   address mutable_data_begin() const          { return _mutable_data; }
225   address mutable_data_end() const            { return _mutable_data + _mutable_data_size; }
226 
227   relocInfo* relocation_begin() const         { return (relocInfo*)_mutable_data; }
228   relocInfo* relocation_end() const           { return (relocInfo*)((address)relocation_begin() + _relocation_size); }
229 
230   // Offsets
231   int content_offset() const                  { return _content_offset; }
232   int code_offset() const                     { return _code_offset; }
233 
234   // This field holds the beginning of the const section in the old code buffer.
235   // It is needed to fix relocations of pc-relative loads when resizing
236   // the constant pool or moving it.
237   S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
238   void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }
239 
240   // Sizes
241   int size() const               { return _size; }
242   int header_size() const        { return _header_size; }
243   int relocation_size() const    { return pointer_delta_as_int((address) relocation_end(), (address) relocation_begin()); }
244   int content_size() const       { return pointer_delta_as_int(content_end(), content_begin()); }
245   int code_size() const          { return pointer_delta_as_int(code_end(), code_begin()); }
246 
247   // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
248   void adjust_size(size_t used) {
249     _size = (int)used;
250     _data_offset = _size;
251   }
252 
253   // Containment
254   bool blob_contains(address addr) const         { return header_begin()       <= addr && addr < blob_end();       }
255   bool code_contains(address addr) const         { return code_begin()         <= addr && addr < code_end();       }
256   bool contains(address addr) const              { return content_begin()      <= addr && addr < content_end();    }
257   bool is_frame_complete_at(address addr) const  { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
258                                                           code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
259   int frame_complete_offset() const              { return _frame_complete_offset; }
260 
261   // OopMap for frame
262   ImmutableOopMapSet* oop_maps() const           { return _oop_maps; }
263   void set_oop_maps(OopMapSet* p);
264   void set_oop_maps(ImmutableOopMapSet* p)       { _oop_maps = p; }
265 
266   const ImmutableOopMap* oop_map_for_slot(int slot, address return_address) const;
267   const ImmutableOopMap* oop_map_for_return_address(address return_address) const;
268 
269   // Frame support. Sizes are in word units.
270   int  frame_size() const                        { return _frame_size; }
271   void set_frame_size(int size)                  { _frame_size = size; }
272 
273   // Returns true if the next frame is responsible for GC'ing oops passed as arguments
274   bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }
275 
276   // Naming
277   const char* name() const                       { return _name; }
278   void set_name(const char* name)                { _name = name; }
279 
280   // Debugging
281   void verify();
282   void print() const;
283   void print_on(outputStream* st) const;
284   void print_value_on(outputStream* st) const;
285 
286   void dump_for_addr(address addr, outputStream* st, bool verbose) const;
287   void print_code_on(outputStream* st);
288 
289   // Print to the stream any comments associated with the given offset.
290   void print_block_comment(outputStream* stream, address block_begin) const;
291 
292 #ifndef PRODUCT
293   AsmRemarks &asm_remarks() { return _asm_remarks; }
294   DbgStrings &dbg_strings() { return _dbg_strings; }
295 
296   void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
297   void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
298 #endif
299 
300   void copy_to(address buffer) {
301     memcpy(buffer, this, this->size());
302   }
303 
304   // Methods to archive a blob into the AOT code cache
305   void prepare_for_archiving();
306   static void archive_blob(CodeBlob* blob, address archive_buffer);
307 
308   // Methods to restore a blob from the AOT code cache into the CodeCache
309   void post_restore();
310   CodeBlob* restore(address code_cache_buffer, const char* name, address archived_reloc_data, ImmutableOopMapSet* archived_oop_maps);
311   static CodeBlob* create(CodeBlob* archived_blob, const char* name, address archived_reloc_data, ImmutableOopMapSet* archived_oop_maps);
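
      // A rough sketch of the intended round trip (the AOT code cache
      // implementation is authoritative): prepare_for_archiving() is called on
      // the blob first and archive_blob() then copies it into the archive buffer;
      // on a later run, create()/restore() place a copy back into the CodeCache,
      // re-attach the archived relocation data and oop maps, and finish by
      // calling post_restore().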
312 };
313 
314 //----------------------------------------------------------------------------------------------------
315 // RuntimeBlob: used for non-compiled method code (adapters, stubs, blobs)
316 
317 class RuntimeBlob : public CodeBlob {
318   friend class VMStructs;
319  public:
320 
321   // Creation
322   // a) simple CodeBlob
323   RuntimeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size)
324     : CodeBlob(name, kind, size, header_size)
325   {}
326 
327   // b) full CodeBlob
328   // frame_complete is the offset from the beginning of the instructions
329   // to where the frame setup (from stackwalk viewpoint) is complete.
330   RuntimeBlob(
331     const char* name,
332     CodeBlobKind kind,
333     CodeBuffer* cb,
334     int         size,
335     uint16_t    header_size,
336     int16_t     frame_complete,
337     int         frame_size,
338     OopMapSet*  oop_maps,
339     bool        caller_must_gc_arguments = false
340   );
341 
342   static void free(RuntimeBlob* blob);
343 
344   // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
345   static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");
346 
347   class Vptr : public CodeBlob::Vptr {
348   };
349 };
350 
351 class WhiteBox;
352 //----------------------------------------------------------------------------------------------------
353 // BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.
354 
355 class BufferBlob: public RuntimeBlob {
356   friend class VMStructs;
357   friend class AdapterBlob;
358   friend class VtableBlob;
359   friend class MethodHandlesAdapterBlob;
360   friend class UpcallStub;
361   friend class WhiteBox;
362 
363  private:
364   // Creation support
365   BufferBlob(const char* name, CodeBlobKind kind, int size);
366   BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size);
367 
368   void* operator new(size_t s, unsigned size) throw();
369 
370  public:
371   // Creation
372   static BufferBlob* create(const char* name, uint buffer_size);
373   static BufferBlob* create(const char* name, CodeBuffer* cb);
374 
375   static void free(BufferBlob* buf);
376 
377   void print_on_impl(outputStream* st) const;
378   void print_value_on_impl(outputStream* st) const;
379 
380   class Vptr : public RuntimeBlob::Vptr {
381     void print_on(const CodeBlob* instance, outputStream* st) const override {
382       ((const BufferBlob*)instance)->print_on_impl(st);
383     }
384     void print_value_on(const CodeBlob* instance, outputStream* st) const override {
385       ((const BufferBlob*)instance)->print_value_on_impl(st);
386     }
387   };
388 
389   static const Vptr _vpntr;
390 };
391 
392 
393 //----------------------------------------------------------------------------------------------------
394 // AdapterBlob: used to hold C2I/I2C adapters
395 
396 class AdapterBlob: public BufferBlob {
397 private:
398   AdapterBlob(int size, CodeBuffer* cb);
399 
400 public:
401   // Creation
402   static AdapterBlob* create(CodeBuffer* cb);
403 };
404 
405 //---------------------------------------------------------------------------------------------------
406 class VtableBlob: public BufferBlob {
407 private:
408   VtableBlob(const char*, int);
409 
410   void* operator new(size_t s, unsigned size) throw();
411 
412 public:
413   // Creation
414   static VtableBlob* create(const char* name, int buffer_size);
415 };
416 
417 //----------------------------------------------------------------------------------------------------
418 // MethodHandlesAdapterBlob: used to hold MethodHandles adapters
419 
420 class MethodHandlesAdapterBlob: public BufferBlob {
421 private:
422   MethodHandlesAdapterBlob(int size): BufferBlob("MethodHandles adapters", CodeBlobKind::MHAdapter, size) {}
423 
424 public:
425   // Creation
426   static MethodHandlesAdapterBlob* create(int buffer_size);
427 };
428 
429 
430 //----------------------------------------------------------------------------------------------------
431 // RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine
432 
433 class RuntimeStub: public RuntimeBlob {
434   friend class VMStructs;
435  private:
436   // Creation support
437   RuntimeStub(
438     const char* name,
439     CodeBuffer* cb,
440     int         size,
441     int16_t     frame_complete,
442     int         frame_size,
443     OopMapSet*  oop_maps,
444     bool        caller_must_gc_arguments
445   );
446 
447   void* operator new(size_t s, unsigned size) throw();
448 
449  public:
450   // Creation
451   static RuntimeStub* new_runtime_stub(
452     const char* stub_name,
453     CodeBuffer* cb,
454     int16_t     frame_complete,
455     int         frame_size,
456     OopMapSet*  oop_maps,
457     bool        caller_must_gc_arguments,
458     bool        alloc_fail_is_fatal=true
459   );
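
      // Typical creation pattern, as a sketch only (the identifiers and sizes
      // below are placeholders; see the runtime stub generators, e.g. in
      // SharedRuntime, for real uses):
      //
      //   CodeBuffer code("my_stub", code_size, locs_size);
      //   MacroAssembler masm(&code);
      //   ... emit the stub, recording the frame-complete offset and oop maps ...
      //   RuntimeStub* stub = RuntimeStub::new_runtime_stub("my_stub", &code,
      //                           frame_complete, frame_size_in_words, oop_maps,
      //                           /* caller_must_gc_arguments */ false);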
460 
461   static void free(RuntimeStub* stub) { RuntimeBlob::free(stub); }
462 
463   address entry_point() const         { return code_begin(); }
464 
465   void print_on_impl(outputStream* st) const;
466   void print_value_on_impl(outputStream* st) const;
467 
468   class Vptr : public RuntimeBlob::Vptr {
469     void print_on(const CodeBlob* instance, outputStream* st) const override {
470       instance->as_runtime_stub()->print_on_impl(st);
471     }
472     void print_value_on(const CodeBlob* instance, outputStream* st) const override {
473       instance->as_runtime_stub()->print_value_on_impl(st);
474     }
475   };
476 
477   static const Vptr _vpntr;
478 };
479 
480 
481 //----------------------------------------------------------------------------------------------------
482 // Super-class for all blobs that exist in only one instance. Implements default behaviour.
483 
484 class SingletonBlob: public RuntimeBlob {
485   friend class VMStructs;
486 
487  protected:
488   void* operator new(size_t s, unsigned size, bool alloc_fail_is_fatal=true) throw();
489 
490  public:
491    SingletonBlob(
492      const char*  name,
493      CodeBlobKind kind,
494      CodeBuffer*  cb,
495      int          size,
496      uint16_t     header_size,
497      int          frame_size,
498      OopMapSet*   oop_maps
499    )
500    : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
501   {};
502 
503   address entry_point()                          { return code_begin(); }
504 
505   void print_on_impl(outputStream* st) const;
506   void print_value_on_impl(outputStream* st) const;
507 
508   class Vptr : public RuntimeBlob::Vptr {
509     void print_on(const CodeBlob* instance, outputStream* st) const override {
510       ((const SingletonBlob*)instance)->print_on_impl(st);
511     }
512     void print_value_on(const CodeBlob* instance, outputStream* st) const override {
513       ((const SingletonBlob*)instance)->print_value_on_impl(st);
514     }
515   };
516 
517   static const Vptr _vpntr;
518 };
519 
520 
521 //----------------------------------------------------------------------------------------------------
522 // DeoptimizationBlob
523 
524 class DeoptimizationBlob: public SingletonBlob {
525   friend class VMStructs;
526   friend class JVMCIVMStructs;
527  private:
528   int _unpack_offset;
529   int _unpack_with_exception;
530   int _unpack_with_reexecution;
531 
532   int _unpack_with_exception_in_tls;
533 
534 #if INCLUDE_JVMCI
535   // Offsets when JVMCI calls uncommon_trap.
536   int _uncommon_trap_offset;
537   int _implicit_exception_uncommon_trap_offset;
538 #endif
539 
540   // Creation support
541   DeoptimizationBlob(
542     CodeBuffer* cb,
543     int         size,
544     OopMapSet*  oop_maps,
545     int         unpack_offset,
546     int         unpack_with_exception_offset,
547     int         unpack_with_reexecution_offset,
548     int         frame_size
549   );
550 
551  public:
552   // Creation
553   static DeoptimizationBlob* create(
554     CodeBuffer* cb,
555     OopMapSet*  oop_maps,
556     int         unpack_offset,
557     int         unpack_with_exception_offset,
558     int         unpack_with_reexecution_offset,
559     int         frame_size
560   );
561 
562   address unpack() const                         { return code_begin() + _unpack_offset;           }
563   address unpack_with_exception() const          { return code_begin() + _unpack_with_exception;   }
564   address unpack_with_reexecution() const        { return code_begin() + _unpack_with_reexecution; }
565 
566   // Alternate entry point for C1 where the exception and issuing pc
567   // are in JavaThread::_exception_oop and JavaThread::_exception_pc
568   // instead of being in registers.  This is needed because C1 doesn't
569   // model exception paths in a way that keeps these registers free, so
570   // there may be live values in those registers during deopt.
571   void set_unpack_with_exception_in_tls_offset(int offset) {
572     _unpack_with_exception_in_tls = offset;
573     assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
574   }
575   address unpack_with_exception_in_tls() const   { return code_begin() + _unpack_with_exception_in_tls; }
576 
577 #if INCLUDE_JVMCI
578   // Offsets when JVMCI calls uncommon_trap.
579   void set_uncommon_trap_offset(int offset) {
580     _uncommon_trap_offset = offset;
581     assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
582   }
583   address uncommon_trap() const                  { return code_begin() + _uncommon_trap_offset; }
584 
585   void set_implicit_exception_uncommon_trap_offset(int offset) {
586     _implicit_exception_uncommon_trap_offset = offset;
587     assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
588   }
589   address implicit_exception_uncommon_trap() const { return code_begin() + _implicit_exception_uncommon_trap_offset; }
590 #endif // INCLUDE_JVMCI
591 
592   void print_value_on_impl(outputStream* st) const;
593 
594   class Vptr : public SingletonBlob::Vptr {
595     void print_value_on(const CodeBlob* instance, outputStream* st) const override {
596       ((const DeoptimizationBlob*)instance)->print_value_on_impl(st);
597     }
598   };
599 
600   static const Vptr _vpntr;
601 };
602 
603 
604 //----------------------------------------------------------------------------------------------------
605 // UncommonTrapBlob (currently only used by Compiler 2)
606 
607 #ifdef COMPILER2
608 
609 class UncommonTrapBlob: public SingletonBlob {
610   friend class VMStructs;
611  private:
612   // Creation support
613   UncommonTrapBlob(
614     CodeBuffer* cb,
615     int         size,
616     OopMapSet*  oop_maps,
617     int         frame_size
618   );
619 
620  public:
621   // Creation
622   static UncommonTrapBlob* create(
623     CodeBuffer* cb,
624     OopMapSet*  oop_maps,
625     int         frame_size
626   );
627 };
628 
629 
630 //----------------------------------------------------------------------------------------------------
631 // ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)
632 
633 class ExceptionBlob: public SingletonBlob {
634   friend class VMStructs;
635  private:
636   // Creation support
637   ExceptionBlob(
638     CodeBuffer* cb,
639     int         size,
640     OopMapSet*  oop_maps,
641     int         frame_size
642   );
643 
644  public:
645   // Creation
646   static ExceptionBlob* create(
647     CodeBuffer* cb,
648     OopMapSet*  oop_maps,
649     int         frame_size
650   );
651 
652   void post_restore_impl() {
653     trace_new_stub(this, "ExceptionBlob");
654   }
655 
656   class Vptr : public SingletonBlob::Vptr {
657     void post_restore(CodeBlob* instance) const override {
658       ((ExceptionBlob*)instance)->post_restore_impl();
659     }
660   };
661 
662   static const Vptr _vpntr;
663 };
664 #endif // COMPILER2
665 
666 
667 //----------------------------------------------------------------------------------------------------
668 // SafepointBlob: handles illegal_instruction exceptions during a safepoint
669 
670 class SafepointBlob: public SingletonBlob {
671   friend class VMStructs;
672  private:
673   // Creation support
674   SafepointBlob(
675     CodeBuffer* cb,
676     int         size,
677     OopMapSet*  oop_maps,
678     int         frame_size
679   );
680 
681  public:
682   // Creation
683   static SafepointBlob* create(
684     CodeBuffer* cb,
685     OopMapSet*  oop_maps,
686     int         frame_size
687   );
688 };
689 
690 //----------------------------------------------------------------------------------------------------
691 
692 class UpcallLinker;
693 
694 // A (Panama) upcall stub. Not used by JNI.
695 class UpcallStub: public RuntimeBlob {
696   friend class VMStructs;
697   friend class UpcallLinker;
698  private:
699   jobject _receiver;
700   ByteSize _frame_data_offset;
701 
702   UpcallStub(const char* name, CodeBuffer* cb, int size, jobject receiver, ByteSize frame_data_offset);
703 
704   void* operator new(size_t s, unsigned size) throw();
705 
706   struct FrameData {
707     JavaFrameAnchor jfa;
708     JavaThread* thread;
709     JNIHandleBlock* old_handles;
710     JNIHandleBlock* new_handles;
711   };
712 
713   // defined in frame_ARCH.cpp
714   FrameData* frame_data_for_frame(const frame& frame) const;
715  public:
716   // Creation
717   static UpcallStub* create(const char* name, CodeBuffer* cb, jobject receiver, ByteSize frame_data_offset);
718 
719   static void free(UpcallStub* blob);
720 
721   jobject receiver() { return _receiver; }
722 
723   JavaFrameAnchor* jfa_for_frame(const frame& frame) const;
724 
725   // GC support
726   void oops_do(OopClosure* f, const frame& frame);
727 
728   void print_on_impl(outputStream* st) const;
729   void print_value_on_impl(outputStream* st) const;
730 
731   class Vptr : public RuntimeBlob::Vptr {
732     void print_on(const CodeBlob* instance, outputStream* st) const override {
733       instance->as_upcall_stub()->print_on_impl(st);
734     }
735     void print_value_on(const CodeBlob* instance, outputStream* st) const override {
736       instance->as_upcall_stub()->print_value_on_impl(st);
737     }
738   };
739 
740   static const Vptr _vpntr;
741 };
742 
743 #endif // SHARE_CODE_CODEBLOB_HPP