/*
 * Copyright (c) 1998, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_CODE_CODEBLOB_HPP
#define SHARE_CODE_CODEBLOB_HPP

#include "asm/codeBuffer.hpp"
#include "compiler/compilerDefinitions.hpp"
#include "runtime/javaFrameAnchor.hpp"
#include "runtime/frame.hpp"
#include "runtime/handles.hpp"
#include "utilities/align.hpp"
#include "utilities/macros.hpp"

class ImmutableOopMap;
class ImmutableOopMapSet;
class JNIHandleBlock;
class OopMapSet;

// CodeBlob Types
// Used in the CodeCache to assign CodeBlobs to different CodeHeaps
struct CodeBlobType {
  enum {
    MethodNonProfiled   = 0,    // Execution level 1 and 4 (non-profiled) nmethods (including native nmethods)
    MethodProfiled      = 1,    // Execution level 2 and 3 (profiled) nmethods
    NonNMethod          = 2,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
    All                 = 3,    // All types (No code cache segmentation)
    NumTypes            = 4     // Number of CodeBlobTypes
  };
};
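
// Illustrative sketch (not part of the VM sources): how the execution levels
// noted above could be mapped to a heap type. The helper name heap_type_for
// is hypothetical; the real selection logic lives in the CodeCache.
//
//   static int heap_type_for(bool is_nmethod, int comp_level) {
//     if (!is_nmethod)                        return CodeBlobType::NonNMethod;
//     if (comp_level == 2 || comp_level == 3) return CodeBlobType::MethodProfiled;
//     return CodeBlobType::MethodNonProfiled; // levels 1 and 4, plus native nmethods
//   }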

// CodeBlob - superclass for all entries in the CodeCache.
//
// Subtypes are:
//  CompiledMethod       : Compiled Java methods (including methods that call into native code)
//   nmethod             : JIT Compiled Java methods
//  RuntimeBlob          : Non-compiled method code; generated glue code
//   BufferBlob          : Used for non-relocatable code such as interpreter, stubroutines, etc.
//    AdapterBlob        : Used to hold C2I/I2C adapters
//    VtableBlob         : Used for holding vtable chunks
//    MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
//    OptimizedEntryBlob : Used for upcalls from native code
//   RuntimeStub         : Call to VM runtime methods
//   SingletonBlob       : Super-class for all blobs that exist in only one instance
//    DeoptimizationBlob : Used for deoptimization
//    ExceptionBlob      : Used for stack unwinding
//    SafepointBlob      : Used to handle illegal instruction exceptions
//    UncommonTrapBlob   : Used to handle uncommon traps
//
//
// Layout : contiguous in the CodeCache
//   - header
//   - relocation
//   - content space
//     - instruction space
//   - data space

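// Illustrative sketch (not part of the VM sources): dispatching on a blob of
// unknown dynamic type with the is_*()/as_*() queries declared on CodeBlob
// below; 'blob' stands for any CodeBlob* obtained elsewhere.
//
//   if (CompiledMethod* cm = blob->as_compiled_method_or_null()) {
//     // compiled Java code (an nmethod in the common case)
//   } else if (blob->is_runtime_stub()) {
//     // glue code that calls into the VM runtime
//   } else if (blob->is_buffer_blob()) {
//     // non-relocatable code: adapters, MethodHandles adapters, etc.
//   }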

class CodeBlobLayout;
class OptimizedEntryBlob; // for as_optimized_entry_blob()
class JavaFrameAnchor; // for OptimizedEntryBlob::jfa_for_frame

class CodeBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
  friend class CodeCacheDumper;

protected:

  const CompilerType _type;                      // CompilerType
  int        _size;                              // total size of CodeBlob in bytes
  int        _header_size;                       // size of header (depends on subclass)
  int        _frame_complete_offset;             // instruction offsets in [0.._frame_complete_offset) have
                                                 // not finished setting up their frame. Beware of pc's in
                                                 // that range. There are similar ranges on returns
                                                 // which we don't detect.
  int        _data_offset;                       // offset to where data region begins
  int        _frame_size;                        // size of stack frame

  address    _code_begin;
  address    _code_end;
  address    _content_begin;                     // address to where content region begins (this includes consts, insts, stubs)
                                                 // address    _content_end - not required, for all CodeBlobs _code_end == _content_end for now
  address    _data_end;
  address    _relocation_begin;
  address    _relocation_end;

  ImmutableOopMapSet* _oop_maps;                 // OopMap for this CodeBlob
  bool                _caller_must_gc_arguments;

  const char*         _name;
  S390_ONLY(int       _ctable_offset;)

#ifndef PRODUCT
  AsmRemarks _asm_remarks;
  DbgStrings _dbg_strings;

 ~CodeBlob() {
    _asm_remarks.clear();
    _dbg_strings.clear();
  }
#endif // not PRODUCT

  CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments);
  CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, CodeBuffer* cb, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments);

public:
  // Only used by unit test.
  CodeBlob() : _type(compiler_none) {}

  // Returns the space needed for CodeBlob
  static unsigned int allocation_size(CodeBuffer* cb, int header_size);
  static unsigned int align_code_offset(int offset);

  // Deletion
  virtual void flush();

  // Typing
  virtual bool is_buffer_blob() const                 { return false; }
  virtual bool is_nmethod() const                     { return false; }
  virtual bool is_runtime_stub() const                { return false; }
  virtual bool is_deoptimization_stub() const         { return false; }
  virtual bool is_uncommon_trap_stub() const          { return false; }
  virtual bool is_exception_stub() const              { return false; }
  virtual bool is_safepoint_stub() const              { return false; }
  virtual bool is_adapter_blob() const                { return false; }
  virtual bool is_vtable_blob() const                 { return false; }
  virtual bool is_method_handles_adapter_blob() const { return false; }
  virtual bool is_compiled() const                    { return false; }
  virtual bool is_buffered_inline_type_blob() const   { return false; }
  virtual bool is_optimized_entry_blob() const        { return false; }

  inline bool is_compiled_by_c1() const    { return _type == compiler_c1; };
  inline bool is_compiled_by_c2() const    { return _type == compiler_c2; };
  inline bool is_compiled_by_jvmci() const { return _type == compiler_jvmci; };
  const char* compiler_name() const;
  CompilerType compiler_type() const { return _type; }

  // Casting
  nmethod* as_nmethod_or_null()                { return is_nmethod() ? (nmethod*) this : NULL; }
  nmethod* as_nmethod()                        { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
  CompiledMethod* as_compiled_method_or_null() { return is_compiled() ? (CompiledMethod*) this : NULL; }
  CompiledMethod* as_compiled_method()         { assert(is_compiled(), "must be compiled"); return (CompiledMethod*) this; }
  CodeBlob* as_codeblob_or_null() const        { return (CodeBlob*) this; }
  OptimizedEntryBlob* as_optimized_entry_blob() const { assert(is_optimized_entry_blob(), "must be entry blob"); return (OptimizedEntryBlob*) this; }

  // Boundaries
  address header_begin() const        { return (address) this; }
  relocInfo* relocation_begin() const { return (relocInfo*) _relocation_begin; };
  relocInfo* relocation_end() const   { return (relocInfo*) _relocation_end; }
  address content_begin() const       { return _content_begin; }
  address content_end() const         { return _code_end; } // _code_end == _content_end is true for all types of blobs for now, it is also checked in the constructor
  address code_begin() const          { return _code_begin;    }
  address code_end() const            { return _code_end; }
  address data_end() const            { return _data_end;      }

  // This field holds the beginning of the const section in the old code buffer.
  // It is needed to fix relocations of pc-relative loads when resizing or
  // moving the constant pool.
  S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
  void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }

  // Sizes
  int size() const                               { return _size; }
  int header_size() const                        { return _header_size; }
  int relocation_size() const                    { return (address) relocation_end() - (address) relocation_begin(); }
  int content_size() const                       { return           content_end()    -           content_begin();    }
  int code_size() const                          { return           code_end()       -           code_begin();       }
  // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
  void adjust_size(size_t used) {
    _size = (int)used;
    _data_offset = (int)used;
    _code_end = (address)this + used;
    _data_end = (address)this + used;
  }

  // Containment
  bool blob_contains(address addr) const         { return header_begin()       <= addr && addr < data_end();       }
  bool code_contains(address addr) const         { return code_begin()         <= addr && addr < code_end();       }
  bool contains(address addr) const              { return content_begin()      <= addr && addr < content_end();    }
  bool is_frame_complete_at(address addr) const  { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
                                                          code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
  int frame_complete_offset() const              { return _frame_complete_offset; }
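
  // Illustrative sketch (not part of the VM sources): classifying a pc against
  // the regions above, as a stack walker might; 'pc' stands for an address
  // taken from a frame known to be in this blob.
  //
  //   if (code_contains(pc)) {
  //     bool frame_is_set_up = is_frame_complete_at(pc);
  //     // only pcs at or past _frame_complete_offset have a fully built frame
  //   }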

  // CodeCache support: really only used by the nmethods, but in order to get
  // asserts and certain bookkeeping to work in the CodeCache they are defined
  // virtual here.
  virtual bool is_zombie() const                 { return false; }
  virtual bool is_locked_by_vm() const           { return false; }

  virtual bool is_unloaded() const               { return false; }
  virtual bool is_not_entrant() const            { return false; }

  // GC support
  virtual bool is_alive() const                  = 0;

  // OopMap for frame
  ImmutableOopMapSet* oop_maps() const           { return _oop_maps; }
  void set_oop_maps(OopMapSet* p);
  const ImmutableOopMap* oop_map_for_return_address(address return_address);
  virtual void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) = 0;

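  // Illustrative sketch (not part of the VM sources): fetching the oop map
  // that describes the frame at a given return address; 'return_pc' stands
  // for an address inside this blob's code section.
  //
  //   const ImmutableOopMap* map = oop_map_for_return_address(return_pc);
  //   // 'map' records which stack slots and registers hold oops at that pc
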
  // Frame support. Sizes are in word units.
  int  frame_size() const                        { return _frame_size; }
  void set_frame_size(int size)                  { _frame_size = size; }

  // Returns true if the next frame is responsible for GC'ing oops passed as arguments
  bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }

  // Naming
  const char* name() const                       { return _name; }
  void set_name(const char* name)                { _name = name; }

  // Debugging
  virtual void verify() = 0;
  virtual void print() const;
  virtual void print_on(outputStream* st) const;
  virtual void print_value_on(outputStream* st) const;
  void dump_for_addr(address addr, outputStream* st, bool verbose) const;
  void print_code();

  // Print to stream, any comments associated with offset.
  virtual void print_block_comment(outputStream* stream, address block_begin) const {
#ifndef PRODUCT
    ptrdiff_t offset = block_begin - code_begin();
    assert(offset >= 0, "Expecting non-negative offset!");
    _asm_remarks.print(uint(offset), stream);
#endif
  }

#ifndef PRODUCT
  AsmRemarks &asm_remarks() { return _asm_remarks; }
  DbgStrings &dbg_strings() { return _dbg_strings; }

  void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
  void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
#endif
};

class CodeBlobLayout : public StackObj {
private:
  int _size;
  int _header_size;
  int _relocation_size;
  int _content_offset;
  int _code_offset;
  int _data_offset;
  address _code_begin;
  address _code_end;
  address _content_begin;
  address _content_end;
  address _data_end;
  address _relocation_begin;
  address _relocation_end;

public:
  CodeBlobLayout(address code_begin, address code_end, address content_begin, address content_end, address data_end, address relocation_begin, address relocation_end) :
    _size(0),
    _header_size(0),
    _relocation_size(0),
    _content_offset(0),
    _code_offset(0),
    _data_offset(0),
    _code_begin(code_begin),
    _code_end(code_end),
    _content_begin(content_begin),
    _content_end(content_end),
    _data_end(data_end),
    _relocation_begin(relocation_begin),
    _relocation_end(relocation_end)
  {
  }

  CodeBlobLayout(const address start, int size, int header_size, int relocation_size, int data_offset) :
    _size(size),
    _header_size(header_size),
    _relocation_size(relocation_size),
    _content_offset(CodeBlob::align_code_offset(_header_size + _relocation_size)),
    _code_offset(_content_offset),
    _data_offset(data_offset)
  {
    assert(is_aligned(_relocation_size, oopSize), "unaligned size");

    _code_begin = (address) start + _code_offset;
    _code_end = (address) start + _data_offset;

    _content_begin = (address) start + _content_offset;
    _content_end = (address) start + _data_offset;

    _data_end = (address) start + _size;
    _relocation_begin = (address) start + _header_size;
    _relocation_end = _relocation_begin + _relocation_size;
  }

  CodeBlobLayout(const address start, int size, int header_size, const CodeBuffer* cb) :
    _size(size),
    _header_size(header_size),
    _relocation_size(align_up(cb->total_relocation_size(), oopSize)),
    _content_offset(CodeBlob::align_code_offset(_header_size + _relocation_size)),
    _code_offset(_content_offset + cb->total_offset_of(cb->insts())),
    _data_offset(_content_offset + align_up(cb->total_content_size(), oopSize))
  {
    assert(is_aligned(_relocation_size, oopSize), "unaligned size");

    _code_begin = (address) start + _code_offset;
    _code_end = (address) start + _data_offset;

    _content_begin = (address) start + _content_offset;
    _content_end = (address) start + _data_offset;

    _data_end = (address) start + _size;
    _relocation_begin = (address) start + _header_size;
    _relocation_end = _relocation_begin + _relocation_size;
  }

  int size() const { return _size; }
  int header_size() const { return _header_size; }
  int relocation_size() const { return _relocation_size; }
  int content_offset() const { return _content_offset; }
  int code_offset() const { return _code_offset; }
  int data_offset() const { return _data_offset; }
  address code_begin() const { return _code_begin; }
  address code_end() const { return _code_end; }
  address data_end() const { return _data_end; }
  address relocation_begin() const { return _relocation_begin; }
  address relocation_end() const { return _relocation_end; }
  address content_begin() const { return _content_begin; }
  address content_end() const { return _content_end; }
};
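
// Illustrative sketch (not part of the VM sources): computing a layout for a
// blob about to be placed at 'start' from a filled CodeBuffer 'cb'. The
// resulting regions follow the order documented at the top of this file:
// header, relocation, content (code), data.
//
//   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
//   CodeBlobLayout layout(start, size, sizeof(BufferBlob), cb);
//   assert(layout.code_begin() >= start + layout.header_size(), "code follows header");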


class RuntimeBlob : public CodeBlob {
  friend class VMStructs;
 public:

  // Creation
  // a) simple CodeBlob
  // frame_complete is the offset from the beginning of the instructions
  // to where the frame setup (from stackwalk viewpoint) is complete.
  RuntimeBlob(const char* name, int header_size, int size, int frame_complete, int locs_size);

  // b) full CodeBlob
  RuntimeBlob(
    const char* name,
    CodeBuffer* cb,
    int         header_size,
    int         size,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments = false
  );

  // GC support
  virtual bool is_alive() const                  = 0;

  void verify();

  // OopMap for frame
  virtual void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { ShouldNotReachHere(); }

  // Debugging
  virtual void print_on(outputStream* st) const { CodeBlob::print_on(st); }
  virtual void print_value_on(outputStream* st) const { CodeBlob::print_value_on(st); }

  // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
  static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");
};

class WhiteBox;
//----------------------------------------------------------------------------------------------------
// BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.

class BufferBlob: public RuntimeBlob {
  friend class VMStructs;
  friend class AdapterBlob;
  friend class VtableBlob;
  friend class MethodHandlesAdapterBlob;
  friend class BufferedInlineTypeBlob;
  friend class OptimizedEntryBlob;
  friend class WhiteBox;

 private:
  // Creation support
  BufferBlob(const char* name, int size);
  BufferBlob(const char* name, int header_size, int size, CodeBuffer* cb);
  BufferBlob(const char* name, int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments = false);

  // This ordinary operator delete is needed even though not used, so the
  // below two-argument operator delete will be treated as a placement
  // delete rather than an ordinary sized delete; see C++14 3.7.4.2/p2.
  void operator delete(void* p);
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static BufferBlob* create(const char* name, int buffer_size);
  static BufferBlob* create(const char* name, CodeBuffer* cb);

  static void free(BufferBlob* buf);

  // Typing
  virtual bool is_buffer_blob() const            { return true; }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }
  bool is_alive() const                          { return true; }

  void verify();
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};
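
// Illustrative sketch (not part of the VM sources): allocating and releasing a
// BufferBlob through the factories above. Note that CodeBlob keeps only the
// name pointer, so the string must outlive the blob.
//
//   BufferBlob* scratch = BufferBlob::create("scratch buffer", 4096);
//   if (scratch != NULL) {
//     // ... emit non-relocatable code starting at scratch->code_begin() ...
//     BufferBlob::free(scratch);
//   }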


//----------------------------------------------------------------------------------------------------
// AdapterBlob: used to hold C2I/I2C adapters

class AdapterBlob: public BufferBlob {
private:
  AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments = false);

public:
  // Creation
  static AdapterBlob* create(CodeBuffer* cb,
                             int frame_complete,
                             int frame_size,
                             OopMapSet* oop_maps,
                             bool caller_must_gc_arguments = false);

  // Typing
  virtual bool is_adapter_blob() const { return true; }

  bool caller_must_gc_arguments(JavaThread* thread) const { return true; }
};

//---------------------------------------------------------------------------------------------------
class VtableBlob: public BufferBlob {
private:
  VtableBlob(const char*, int);

  void* operator new(size_t s, unsigned size) throw();

public:
  // Creation
  static VtableBlob* create(const char* name, int buffer_size);

  // Typing
  virtual bool is_vtable_blob() const { return true; }
};

//----------------------------------------------------------------------------------------------------
// MethodHandlesAdapterBlob: used to hold MethodHandles adapters

class MethodHandlesAdapterBlob: public BufferBlob {
private:
  MethodHandlesAdapterBlob(int size)                 : BufferBlob("MethodHandles adapters", size) {}

public:
  // Creation
  static MethodHandlesAdapterBlob* create(int buffer_size);

  // Typing
  virtual bool is_method_handles_adapter_blob() const { return true; }
};

//----------------------------------------------------------------------------------------------------
// BufferedInlineTypeBlob : used for pack/unpack handlers

class BufferedInlineTypeBlob: public BufferBlob {
private:
  const int _pack_fields_off;
  const int _pack_fields_jobject_off;
  const int _unpack_fields_off;

  BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off);

public:
  // Creation
  static BufferedInlineTypeBlob* create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off);

  address pack_fields() const { return code_begin() + _pack_fields_off; }
  address pack_fields_jobject() const { return code_begin() + _pack_fields_jobject_off; }
  address unpack_fields() const { return code_begin() + _unpack_fields_off; }

  // Typing
  virtual bool is_buffered_inline_type_blob() const { return true; }
};
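
// Illustrative sketch (not part of the VM sources): the blob exposes its three
// handlers as plain code addresses; the offsets passed to create() are
// placeholders for values recorded by the generator.
//
//   BufferedInlineTypeBlob* blob =
//       BufferedInlineTypeBlob::create(cb, pack_off, pack_jobject_off, unpack_off);
//   address pack_handler   = blob->pack_fields();
//   address unpack_handler = blob->unpack_fields();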

//----------------------------------------------------------------------------------------------------
// RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine

class RuntimeStub: public RuntimeBlob {
  friend class VMStructs;
 private:
  // Creation support
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int         size,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // This ordinary operator delete is needed even though not used, so the
  // below two-argument operator delete will be treated as a placement
  // delete rather than an ordinary sized delete; see C++14 3.7.4.2/p2.
  void operator delete(void* p);
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // Typing
  bool is_runtime_stub() const                   { return true; }

  address entry_point() const                    { return code_begin(); }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }
  bool is_alive() const                          { return true; }

  void verify();
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};
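
// Illustrative sketch (not part of the VM sources): how a stub generator might
// hand a freshly assembled CodeBuffer to the factory above; all names are
// placeholders for values the generator has computed.
//
//   RuntimeStub* stub = RuntimeStub::new_runtime_stub("example_stub",
//                                                     &code_buffer,
//                                                     frame_complete_offset,
//                                                     frame_size_in_words,
//                                                     oop_maps,
//                                                     false /* caller_must_gc_arguments */);
//   address entry = stub->entry_point();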


//----------------------------------------------------------------------------------------------------
// Super-class for all blobs that exist in only one instance. Implements default behaviour.

class SingletonBlob: public RuntimeBlob {
  friend class VMStructs;

 protected:
  // This ordinary operator delete is needed even though not used, so the
  // below two-argument operator delete will be treated as a placement
  // delete rather than an ordinary sized delete; see C++14 3.7.4.2/p2.
  void operator delete(void* p);
  void* operator new(size_t s, unsigned size) throw();

 public:
   SingletonBlob(
     const char* name,
     CodeBuffer* cb,
     int         header_size,
     int         size,
     int         frame_size,
     OopMapSet*  oop_maps
   )
   : RuntimeBlob(name, cb, header_size, size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point()                          { return code_begin(); }

  bool is_alive() const                          { return true; }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }
  void verify(); // does nothing
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};


//----------------------------------------------------------------------------------------------------
// DeoptimizationBlob

class DeoptimizationBlob: public SingletonBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
 private:
  int _unpack_offset;
  int _unpack_with_exception;
  int _unpack_with_reexecution;

  int _unpack_with_exception_in_tls;

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  int _uncommon_trap_offset;
  int _implicit_exception_uncommon_trap_offset;
#endif

  // Creation support
  DeoptimizationBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

 public:
  // Creation
  static DeoptimizationBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

  // Typing
  bool is_deoptimization_stub() const { return true; }

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) { /* Nothing to do */ }

  // Printing
  void print_value_on(outputStream* st) const;

  address unpack() const                         { return code_begin() + _unpack_offset;           }
  address unpack_with_exception() const          { return code_begin() + _unpack_with_exception;   }
  address unpack_with_reexecution() const        { return code_begin() + _unpack_with_reexecution; }

  // Alternate entry point for C1 where the exception and issuing pc
  // are in JavaThread::_exception_oop and JavaThread::_exception_pc
  // instead of being in registers.  This is needed because C1 doesn't
  // model exception paths in a way that keeps these registers free so
  // there may be live values in those registers during deopt.
  void set_unpack_with_exception_in_tls_offset(int offset) {
    _unpack_with_exception_in_tls = offset;
    assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
  }
  address unpack_with_exception_in_tls() const   { return code_begin() + _unpack_with_exception_in_tls; }

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  void set_uncommon_trap_offset(int offset) {
    _uncommon_trap_offset = offset;
    assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
  }
  address uncommon_trap() const                  { return code_begin() + _uncommon_trap_offset; }

  void set_implicit_exception_uncommon_trap_offset(int offset) {
    _implicit_exception_uncommon_trap_offset = offset;
    assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
  }
  address implicit_exception_uncommon_trap() const { return code_begin() + _implicit_exception_uncommon_trap_offset; }
#endif // INCLUDE_JVMCI
};
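
// Illustrative sketch (not part of the VM sources): the deopt entry points are
// stored as offsets and handed out as addresses; 'deopt_blob' stands for the
// VM's single DeoptimizationBlob instance.
//
//   address unpack_entry           = deopt_blob->unpack();
//   address unpack_exception_entry = deopt_blob->unpack_with_exception();
//   address reexecute_entry        = deopt_blob->unpack_with_reexecution();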


//----------------------------------------------------------------------------------------------------
// UncommonTrapBlob (currently only used by Compiler 2)

#ifdef COMPILER2

class UncommonTrapBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support
  UncommonTrapBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_uncommon_trap_stub() const             { return true; }
};


//----------------------------------------------------------------------------------------------------
// ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)

class ExceptionBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support
  ExceptionBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_exception_stub() const                 { return true; }
};
#endif // COMPILER2


//----------------------------------------------------------------------------------------------------
// SafepointBlob: handles illegal_instruction exceptions during a safepoint

class SafepointBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support
  SafepointBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_safepoint_stub() const                 { return true; }
};

//----------------------------------------------------------------------------------------------------

class ProgrammableUpcallHandler;

class OptimizedEntryBlob: public BufferBlob {
  friend class ProgrammableUpcallHandler;
 private:
  intptr_t _exception_handler_offset;
  jobject _receiver;
  ByteSize _frame_data_offset;

  OptimizedEntryBlob(const char* name, int size, CodeBuffer* cb, intptr_t exception_handler_offset,
                     jobject receiver, ByteSize frame_data_offset);

  struct FrameData {
    JavaFrameAnchor jfa;
    JavaThread* thread;
    JNIHandleBlock* old_handles;
    JNIHandleBlock* new_handles;
    bool should_detach;
  };

  // defined in frame_ARCH.cpp
  FrameData* frame_data_for_frame(const frame& frame) const;
 public:
  // Creation
  static OptimizedEntryBlob* create(const char* name, CodeBuffer* cb,
                                    intptr_t exception_handler_offset, jobject receiver,
                                    ByteSize frame_data_offset);

  address exception_handler() { return code_begin() + _exception_handler_offset; }
  jobject receiver() { return _receiver; }

  JavaFrameAnchor* jfa_for_frame(const frame& frame) const;

  void oops_do(OopClosure* f, const frame& frame);

  // Typing
  virtual bool is_optimized_entry_blob() const override { return true; }
};
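
// Illustrative sketch (not part of the VM sources): an upcall stub generator
// wrapping its CodeBuffer into an OptimizedEntryBlob via the factory above;
// all non-CodeBuffer arguments are placeholders for values the generator has
// computed.
//
//   OptimizedEntryBlob* blob = OptimizedEntryBlob::create("upcall_stub",
//                                                         &code_buffer,
//                                                         exception_handler_offset,
//                                                         receiver_handle,
//                                                         frame_data_byte_offset);
//   address entry = blob->code_begin();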

#endif // SHARE_CODE_CODEBLOB_HPP