/*
 * Copyright (c) 2000, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_OUTPUT_HPP
#define SHARE_OPTO_OUTPUT_HPP

#include "code/debugInfo.hpp"
#include "code/exceptionHandlerTable.hpp"
#include "metaprogramming/enableIf.hpp"
#include "opto/ad.hpp"
#include "opto/constantTable.hpp"
#include "opto/phase.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"

class AbstractCompiler;
class Arena;
class Bundle;
class Block;
class Block_Array;
class C2_MacroAssembler;
class ciMethod;
class Compile;
class MachNode;
class MachSafePointNode;
class Node;
class PhaseCFG;
#ifndef PRODUCT
#define DEBUG_ARG(x) , x
#else
#define DEBUG_ARG(x)
#endif

// Define the initial sizes for allocation of the resizable code buffer
enum {
  initial_const_capacity =   4 * 1024
};

// Estimated sizes (in bytes) of the stub, code, constant and relocation sections,
// gathered while the code buffer is being sized and used when it is allocated.
class BufferSizingData {
public:
  int _stub;
  int _code;
  int _const;
  int _reloc;

  BufferSizingData() :
    _stub(0),
    _code(0),
    _const(0),
    _reloc(0)
  {}
};

class C2SafepointPollStubTable {
private:
  struct C2SafepointPollStub: public ArenaObj {
    uintptr_t _safepoint_offset;
    Label     _stub_label;
    Label     _trampoline_label;
    C2SafepointPollStub(uintptr_t safepoint_offset) :
      _safepoint_offset(safepoint_offset),
      _stub_label(),
      _trampoline_label() {}
  };

  GrowableArray<C2SafepointPollStub*> _safepoints;

  static volatile int _stub_size;

  void emit_stub_impl(MacroAssembler& masm, C2SafepointPollStub* entry) const;

  // The selection logic below avoids the need to add dummy files for unsupported platforms.
  template <bool enabled>
  typename EnableIf<enabled>::type
  select_emit_stub(MacroAssembler& masm, C2SafepointPollStub* entry) const {
    emit_stub_impl(masm, entry);
  }

  template <bool enabled>
  typename EnableIf<!enabled>::type
  select_emit_stub(MacroAssembler& masm, C2SafepointPollStub* entry) const {}

  void emit_stub(MacroAssembler& masm, C2SafepointPollStub* entry) const {
    select_emit_stub<VM_Version::supports_stack_watermark_barrier()>(masm, entry);
  }

  int stub_size_lazy() const;

public:
  Label& add_safepoint(uintptr_t safepoint_offset);
  int estimate_stub_size() const;
  void emit(CodeBuffer& cb);
};
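
// Usage sketch (illustrative only; the concrete emitters are platform code and
// may differ in detail): when a return poll is emitted, the table hands out a
// stub label that the poll branches to when a safepoint is pending, e.g.
//
//   Label& stub = C->output()->safepoint_poll_table()->add_safepoint(__ offset());
//   __ safepoint_poll(stub, ... /* platform-specific arguments */);
//
// estimate_stub_size() contributes to stub-section sizing, and emit(cb) generates
// the out-of-line poll stubs once the main code has been emitted.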

// We move non-hot code of the nmethod entry barrier to an out-of-line stub
class C2EntryBarrierStub: public ArenaObj {
  Label _slow_path;
  Label _continuation;
  Label _guard; // Used on AArch64 and RISCV

public:
  C2EntryBarrierStub() :
    _slow_path(),
    _continuation(),
    _guard() {}

  Label& slow_path() { return _slow_path; }
  Label& continuation() { return _continuation; }
  Label& guard() { return _guard; }
};

class C2EntryBarrierStubTable {
  C2EntryBarrierStub* _stub;

public:
  C2EntryBarrierStubTable() : _stub(NULL) {}
  C2EntryBarrierStub* add_entry_barrier();
  int estimate_stub_size() const;
  void emit(CodeBuffer& cb);
};
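
// Note: there is at most one entry barrier stub per compiled method. The entry
// code obtains it via add_entry_barrier() and branches to slow_path() when the
// nmethod guard check fails, resuming at continuation(); estimate_stub_size()
// feeds the stub-section sizing and emit(cb) materializes the stub after the
// main code. (Descriptive sketch; the exact emission sequence is platform code.)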

class PhaseOutput : public Phase {
private:
  // Instruction bits passed off to the VM
  int                    _method_size;           // Size of nmethod code segment in bytes
  CodeBuffer             _code_buffer;           // Where the code is assembled
  int                    _first_block_size;      // Size of unvalidated entry point code / OSR poison code
  ExceptionHandlerTable  _handler_table;         // Table of native-code exception handlers
  ImplicitExceptionTable _inc_table;             // Table of implicit null checks in native code
  C2SafepointPollStubTable _safepoint_poll_table;// Table for safepoint polls
  C2EntryBarrierStubTable _entry_barrier_table;  // Table for entry barrier stubs
  OopMapSet*             _oop_map_set;           // Table of oop maps (one for each safepoint location)
  BufferBlob*            _scratch_buffer_blob;   // For temporary code buffers.
  relocInfo*             _scratch_locs_memory;   // For temporary code buffers.
  int                    _scratch_const_size;    // For temporary code buffers.
  bool                   _in_scratch_emit_size;  // true when in scratch_emit_size.

  int                    _frame_slots;           // Size of total frame in stack slots
  CodeOffsets            _code_offsets;          // Offsets into the code for various interesting entries

  uint                   _node_bundling_limit;
  Bundle*                _node_bundling_base;    // Information for instruction bundling

  // For deopt
  int                    _orig_pc_slot;
  int                    _orig_pc_slot_offset_in_bytes;

  ConstantTable          _constant_table;        // The constant table for this compilation unit.

  BufferSizingData       _buf_sizes;
  Block*                 _block;
  uint                   _index;

  void perform_mach_node_analysis();
  void pd_perform_mach_node_analysis();

public:
  PhaseOutput();
  ~PhaseOutput();

  // Convert Nodes to instruction bits and pass off to the VM
  void Output();
  bool need_stack_bang(int frame_size_in_bytes) const;
  bool need_register_stack_bang() const;
  void compute_loop_first_inst_sizes();

  void install_code(ciMethod*         target,
                    int               entry_bci,
                    AbstractCompiler* compiler,
                    bool              has_unsafe_access,
                    bool              has_wide_vectors,
                    RTMState          rtm_state);

  void install_stub(const char* stub_name);

  // Constant table
  ConstantTable& constant_table() { return _constant_table; }

  // Safepoint poll table
  C2SafepointPollStubTable* safepoint_poll_table() { return &_safepoint_poll_table; }

  // Entry barrier table
  C2EntryBarrierStubTable* entry_barrier_table() { return &_entry_barrier_table; }

  // Code emission iterator
  Block* block()   { return _block; }
  int index()      { return _index; }

  // The architecture description provides short branch variants for some long
  // branch instructions. Replace eligible long branches with short branches.
  void shorten_branches(uint* blk_starts);
  // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
  static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
  static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs, ObjectValue* sv);
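  // Build a ScopeValue for 'local' (debug-info input 'idx' of the safepoint) and
  // append it to 'array'; scalar-replaced objects are collected as ObjectValues
  // in 'objs'.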
  void FillLocArray( int idx, MachSafePointNode* sfpt, Node *local,
                     GrowableArray<ScopeValue*> *array,
                     GrowableArray<ScopeValue*> *objs );

  void Process_OopMap_Node(MachNode *mach, int current_offset);

  // Initialize code buffer
  void estimate_buffer_size(int& const_req);
  CodeBuffer* init_buffer();

  // Write out basic block data to code buffer
  void fill_buffer(CodeBuffer* cb, uint* blk_starts);

  // Compute the information for the exception tables
  void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);

  // Perform instruction scheduling and bundling over the sequence of
  // instructions in backwards order.
  void ScheduleAndBundle();

  void install();

  // Instruction bits passed off to the VM
  int               code_size()                 { return _method_size; }
  CodeBuffer*       code_buffer()               { return &_code_buffer; }
  int               first_block_size()          { return _first_block_size; }
  void              set_frame_complete(int off) { if (!in_scratch_emit_size()) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); } }
  ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
  ImplicitExceptionTable* inc_table()           { return &_inc_table; }
  OopMapSet*        oop_map_set()               { return _oop_map_set; }

  // Scratch buffer
  BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
  void         init_scratch_buffer_blob(int const_size);
  void        clear_scratch_buffer_blob();
  void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
  relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
  void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }
  int               scratch_buffer_code_size()  { return (address)scratch_locs_memory() - _scratch_buffer_blob->content_begin(); }

  // Emit to the scratch blob and report the resulting size
  uint              scratch_emit_size(const Node* n);
  void       set_in_scratch_emit_size(bool x)   {        _in_scratch_emit_size = x; }
  bool           in_scratch_emit_size() const   { return _in_scratch_emit_size;     }
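
  // A node can be trial-emitted into the scratch blob to learn its encoded size
  // (for example when deciding whether a branch may use its short form). While
  // such a trial emission is in progress, in_scratch_emit_size() returns true so
  // that emitters can skip side effects such as recording code offsets or stubs.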

  enum ScratchBufferBlob {
    MAX_inst_size       = 2048,
    MAX_locs_size       = 128, // number of relocInfo elements
    MAX_const_size      = 128,
    MAX_stubs_size      = 128
  };

  int               frame_slots() const         { return _frame_slots; }
  int               frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
  int               frame_size_in_bytes() const { return _frame_slots << LogBytesPerInt; }

  int               bang_size_in_bytes() const;

  void          set_node_bundling_limit(uint n) { _node_bundling_limit = n; }
  void          set_node_bundling_base(Bundle* b) { _node_bundling_base = b; }

  Bundle* node_bundling(const Node *n);
  bool valid_bundle_info(const Node *n);

  bool starts_bundle(const Node *n) const;

  // Dump formatted assembly
#if defined(SUPPORT_OPTO_ASSEMBLY)
  void dump_asm_on(outputStream* ost, int* pcs, uint pc_limit);
#else
  void dump_asm_on(outputStream* ost, int* pcs, uint pc_limit) { return; }
#endif

  // Build OopMaps for each GC point
  void BuildOopMaps();

#ifndef PRODUCT
  void print_scheduling();
  static void print_statistics();
#endif
};
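
// Typical driver sequence (a sketch only; the real driver is Compile's code
// generation step and may differ in detail):
//
//   PhaseOutput output;
//   output.Output();    // schedule/bundle, size the buffer and emit the code
//   output.install();   // hand the result to the VM: install_code() for methods,
//                       // install_stub() for stub compilations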

#endif // SHARE_OPTO_OUTPUT_HPP