208 static uint _total_instructions_per_bundle[Pipeline::_max_instrs_per_cycle+1];
209
210 public:
211 static void print_statistics();
212
// Statistics: count one more bundle that packed exactly i instructions.
// No bounds check here -- assumes callers pass
// i <= Pipeline::_max_instrs_per_cycle (the array's declared extent);
// TODO confirm against call sites.
static void increment_instructions_per_bundle(uint i) {
  _total_instructions_per_bundle[i]++;
}
216
// Statistics: add s bytes to the running total of nop padding emitted.
static void increment_nop_size(uint s) {
  _total_nop_size += s;
}
220
// Statistics: add s bytes to the running total of generated method code.
static void increment_method_size(uint s) {
  _total_method_size += s;
}
224 #endif
225
226 };
227
// Lazily-computed code size (in bytes) of one safepoint-poll stub;
// 0 means "not yet computed". Read and written via Atomic in
// stub_size_lazy(), where racing writers store the same value.
volatile int C2SafepointPollStubTable::_stub_size = 0;
229
230 Label& C2SafepointPollStubTable::add_safepoint(uintptr_t safepoint_offset) {
231 C2SafepointPollStub* entry = new (Compile::current()->comp_arena()) C2SafepointPollStub(safepoint_offset);
232 _safepoints.append(entry);
233 return entry->_stub_label;
234 }
235
236 void C2SafepointPollStubTable::emit(CodeBuffer& cb) {
237 MacroAssembler masm(&cb);
238 for (int i = _safepoints.length() - 1; i >= 0; i--) {
239 // Make sure there is enough space in the code buffer
240 if (cb.insts()->maybe_expand_to_ensure_remaining(PhaseOutput::MAX_inst_size) && cb.blob() == NULL) {
241 ciEnv::current()->record_failure("CodeCache is full");
242 return;
243 }
244
245 C2SafepointPollStub* entry = _safepoints.at(i);
246 emit_stub(masm, entry);
247 }
248 }
249
// Return the code size of a single safepoint-poll stub, computing it on
// first use and caching it in _stub_size (0 == not yet computed).
// Racing compiler threads may both take the slow path, but they compute
// the same value, so the unordered Atomic::store publish is benign.
// Precondition: at least one safepoint has been registered
// (_safepoints.at(0) is read on the slow path).
int C2SafepointPollStubTable::stub_size_lazy() const {
  int size = Atomic::load(&_stub_size);

  // Fast path: already computed (by this or another thread).
  if (size != 0) {
    return size;
  }

  // Slow path: emit one representative stub into the scratch buffer and
  // measure how many instruction bytes it produced.
  Compile* const C = Compile::current();
  BufferBlob* const blob = C->output()->scratch_buffer_blob();
  CodeBuffer cb(blob->content_begin(), C->output()->scratch_buffer_code_size());
  MacroAssembler masm(&cb);
  C2SafepointPollStub* entry = _safepoints.at(0);
  emit_stub(masm, entry);
  size += cb.insts_size();

  // Publish the result for subsequent callers.
  Atomic::store(&_stub_size, size);

  return size;
}
269
270 int C2SafepointPollStubTable::estimate_stub_size() const {
271 if (_safepoints.length() == 0) {
272 return 0;
273 }
274
275 int result = stub_size_lazy() * _safepoints.length();
276
277 #ifdef ASSERT
278 Compile* const C = Compile::current();
279 BufferBlob* const blob = C->output()->scratch_buffer_blob();
280 int size = 0;
281
282 for (int i = _safepoints.length() - 1; i >= 0; i--) {
283 CodeBuffer cb(blob->content_begin(), C->output()->scratch_buffer_code_size());
284 MacroAssembler masm(&cb);
285 C2SafepointPollStub* entry = _safepoints.at(i);
286 emit_stub(masm, entry);
287 size += cb.insts_size();
288 }
289 assert(size == result, "stubs should not have variable size");
290 #endif
291
292 return result;
293 }
294
// Construct the output phase: zero/empty-initialize all bookkeeping
// state, register this object as the current compile's output, and, for
// Java methods (stub_name() == nullptr), record the stack slot that
// holds the original PC. Initializer order follows member declaration
// order -- do not reorder.
PhaseOutput::PhaseOutput()
  : Phase(Phase::Output),
    _code_buffer("Compile::Fill_buffer"),
    _first_block_size(0),
    _handler_table(),
    _inc_table(),
    _oop_map_set(nullptr),
    _scratch_buffer_blob(nullptr),
    _scratch_locs_memory(nullptr),
    _scratch_const_size(-1),       // -1: scratch constant size not yet computed
    _in_scratch_emit_size(false),
    _frame_slots(0),
    _code_offsets(),
    _node_bundling_limit(0),
    _node_bundling_base(nullptr),
    _orig_pc_slot(0),
    _orig_pc_slot_offset_in_bytes(0),
    _buf_sizes(),
    _block(nullptr),
    _index(0) {
  C->set_output(this);
  // Only Java methods (no stub name) reserve a fixed slot for the
  // original PC, sized as one address worth of stack slots.
  if (C->stub_name() == nullptr) {
    _orig_pc_slot = C->fixed_slots() - (sizeof(address) / VMRegImpl::stack_slot_size);
  }
}
320
1296 // Calculate the offsets of the constants and the size of the
1297 // constant table (including the padding to the next section).
1298 constant_table().calculate_offsets_and_size();
1299 const_req = constant_table().size() + add_size;
1300 }
1301
1302 // Initialize the space for the BufferBlob used to find and verify
1303 // instruction size in MachNode::emit_size()
1304 init_scratch_buffer_blob(const_req);
1305 }
1306
1307 CodeBuffer* PhaseOutput::init_buffer() {
1308 int stub_req = _buf_sizes._stub;
1309 int code_req = _buf_sizes._code;
1310 int const_req = _buf_sizes._const;
1311
1312 int pad_req = NativeCall::instruction_size;
1313
1314 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
1315 stub_req += bs->estimate_stub_size();
1316 stub_req += safepoint_poll_table()->estimate_stub_size();
1317
1318 // nmethod and CodeBuffer count stubs & constants as part of method's code.
1319 // class HandlerImpl is platform-specific and defined in the *.ad files.
1320 int exception_handler_req = HandlerImpl::size_exception_handler() + MAX_stubs_size; // add marginal slop for handler
1321 int deopt_handler_req = HandlerImpl::size_deopt_handler() + MAX_stubs_size; // add marginal slop for handler
1322 stub_req += MAX_stubs_size; // ensure per-stub margin
1323 code_req += MAX_inst_size; // ensure per-instruction margin
1324
1325 if (StressCodeBuffers)
1326 code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10; // force expansion
1327
1328 int total_req =
1329 const_req +
1330 code_req +
1331 pad_req +
1332 stub_req +
1333 exception_handler_req +
1334 deopt_handler_req; // deopt handler
1335
1336 if (C->has_method_handle_invokes())
1803 // Compute the size of the first block
1804 _first_block_size = blk_labels[1].loc_pos() - blk_labels[0].loc_pos();
1805
1806 #ifdef ASSERT
1807 for (uint i = 0; i < nblocks; i++) { // For all blocks
1808 if (jmp_target[i] != 0) {
1809 int br_size = jmp_size[i];
1810 int offset = blk_starts[jmp_target[i]]-(blk_starts[i] + jmp_offset[i]);
1811 if (!C->matcher()->is_short_branch_offset(jmp_rule[i], br_size, offset)) {
1812 tty->print_cr("target (%d) - jmp_offset(%d) = offset (%d), jump_size(%d), jmp_block B%d, target_block B%d", blk_starts[jmp_target[i]], blk_starts[i] + jmp_offset[i], offset, br_size, i, jmp_target[i]);
1813 assert(false, "Displacement too large for short jmp");
1814 }
1815 }
1816 }
1817 #endif
1818
1819 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
1820 bs->emit_stubs(*cb);
1821 if (C->failing()) return;
1822
1823 // Fill in stubs for calling the runtime from safepoint polls.
1824 safepoint_poll_table()->emit(*cb);
1825 if (C->failing()) return;
1826
1827 #ifndef PRODUCT
1828 // Information on the size of the method, without the extraneous code
1829 Scheduling::increment_method_size(cb->insts_size());
1830 #endif
1831
1832 // ------------------
1833 // Fill in exception table entries.
1834 FillExceptionTables(inct_cnt, call_returns, inct_starts, blk_labels);
1835
1836 // Only java methods have exception handlers and deopt handlers
1837 // class HandlerImpl is platform-specific and defined in the *.ad files.
1838 if (C->method()) {
1839 // Emit the exception handler code.
1840 _code_offsets.set_value(CodeOffsets::Exceptions, HandlerImpl::emit_exception_handler(*cb));
1841 if (C->failing()) {
1842 return; // CodeBuffer::expand failed
1843 }
1844 // Emit the deopt handler code.
|
208 static uint _total_instructions_per_bundle[Pipeline::_max_instrs_per_cycle+1];
209
210 public:
211 static void print_statistics();
212
// Statistics: count one more bundle that packed exactly i instructions.
// No bounds check here -- assumes callers pass
// i <= Pipeline::_max_instrs_per_cycle (the array's declared extent);
// TODO confirm against call sites.
static void increment_instructions_per_bundle(uint i) {
  _total_instructions_per_bundle[i]++;
}
216
// Statistics: add s bytes to the running total of nop padding emitted.
static void increment_nop_size(uint s) {
  _total_nop_size += s;
}
220
// Statistics: add s bytes to the running total of generated method code.
static void increment_method_size(uint s) {
  _total_method_size += s;
}
224 #endif
225
226 };
227
// Construct the output phase: zero/empty-initialize all bookkeeping
// state (including the stub list), register this object as the current
// compile's output, and, for Java methods (stub_name() == nullptr),
// record the stack slot that holds the original PC. Initializer order
// follows member declaration order -- do not reorder.
PhaseOutput::PhaseOutput()
  : Phase(Phase::Output),
    _code_buffer("Compile::Fill_buffer"),
    _first_block_size(0),
    _handler_table(),
    _inc_table(),
    _stub_list(),
    _oop_map_set(nullptr),
    _scratch_buffer_blob(nullptr),
    _scratch_locs_memory(nullptr),
    _scratch_const_size(-1),       // -1: scratch constant size not yet computed
    _in_scratch_emit_size(false),
    _frame_slots(0),
    _code_offsets(),
    _node_bundling_limit(0),
    _node_bundling_base(nullptr),
    _orig_pc_slot(0),
    _orig_pc_slot_offset_in_bytes(0),
    _buf_sizes(),
    _block(nullptr),
    _index(0) {
  C->set_output(this);
  // Only Java methods (no stub name) reserve a fixed slot for the
  // original PC, sized as one address worth of stack slots.
  if (C->stub_name() == nullptr) {
    _orig_pc_slot = C->fixed_slots() - (sizeof(address) / VMRegImpl::stack_slot_size);
  }
}
254
1230 // Calculate the offsets of the constants and the size of the
1231 // constant table (including the padding to the next section).
1232 constant_table().calculate_offsets_and_size();
1233 const_req = constant_table().size() + add_size;
1234 }
1235
1236 // Initialize the space for the BufferBlob used to find and verify
1237 // instruction size in MachNode::emit_size()
1238 init_scratch_buffer_blob(const_req);
1239 }
1240
1241 CodeBuffer* PhaseOutput::init_buffer() {
1242 int stub_req = _buf_sizes._stub;
1243 int code_req = _buf_sizes._code;
1244 int const_req = _buf_sizes._const;
1245
1246 int pad_req = NativeCall::instruction_size;
1247
1248 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
1249 stub_req += bs->estimate_stub_size();
1250
1251 // nmethod and CodeBuffer count stubs & constants as part of method's code.
1252 // class HandlerImpl is platform-specific and defined in the *.ad files.
1253 int exception_handler_req = HandlerImpl::size_exception_handler() + MAX_stubs_size; // add marginal slop for handler
1254 int deopt_handler_req = HandlerImpl::size_deopt_handler() + MAX_stubs_size; // add marginal slop for handler
1255 stub_req += MAX_stubs_size; // ensure per-stub margin
1256 code_req += MAX_inst_size; // ensure per-instruction margin
1257
1258 if (StressCodeBuffers)
1259 code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10; // force expansion
1260
1261 int total_req =
1262 const_req +
1263 code_req +
1264 pad_req +
1265 stub_req +
1266 exception_handler_req +
1267 deopt_handler_req; // deopt handler
1268
1269 if (C->has_method_handle_invokes())
1736 // Compute the size of the first block
1737 _first_block_size = blk_labels[1].loc_pos() - blk_labels[0].loc_pos();
1738
1739 #ifdef ASSERT
1740 for (uint i = 0; i < nblocks; i++) { // For all blocks
1741 if (jmp_target[i] != 0) {
1742 int br_size = jmp_size[i];
1743 int offset = blk_starts[jmp_target[i]]-(blk_starts[i] + jmp_offset[i]);
1744 if (!C->matcher()->is_short_branch_offset(jmp_rule[i], br_size, offset)) {
1745 tty->print_cr("target (%d) - jmp_offset(%d) = offset (%d), jump_size(%d), jmp_block B%d, target_block B%d", blk_starts[jmp_target[i]], blk_starts[i] + jmp_offset[i], offset, br_size, i, jmp_target[i]);
1746 assert(false, "Displacement too large for short jmp");
1747 }
1748 }
1749 }
1750 #endif
1751
1752 BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
1753 bs->emit_stubs(*cb);
1754 if (C->failing()) return;
1755
1756 // Fill in stubs.
1757 _stub_list.emit(*cb);
1758 if (C->failing()) return;
1759
1760 #ifndef PRODUCT
1761 // Information on the size of the method, without the extraneous code
1762 Scheduling::increment_method_size(cb->insts_size());
1763 #endif
1764
1765 // ------------------
1766 // Fill in exception table entries.
1767 FillExceptionTables(inct_cnt, call_returns, inct_starts, blk_labels);
1768
1769 // Only java methods have exception handlers and deopt handlers
1770 // class HandlerImpl is platform-specific and defined in the *.ad files.
1771 if (C->method()) {
1772 // Emit the exception handler code.
1773 _code_offsets.set_value(CodeOffsets::Exceptions, HandlerImpl::emit_exception_handler(*cb));
1774 if (C->failing()) {
1775 return; // CodeBuffer::expand failed
1776 }
1777 // Emit the deopt handler code.
|