1 /*
2 * Copyright (c) 2019, 2024, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_OOPS_STACKCHUNKFRAMESTREAM_INLINE_HPP
26 #define SHARE_OOPS_STACKCHUNKFRAMESTREAM_INLINE_HPP
27
28 #include "runtime/stackChunkFrameStream.hpp"
29
30 #include "code/codeCache.inline.hpp"
31 #include "compiler/oopMap.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "logging/log.hpp"
34 #include "oops/instanceStackChunkKlass.inline.hpp"
35 #include "oops/method.hpp"
36 #include "oops/oop.hpp"
37 #include "oops/stackChunkOop.inline.hpp"
38 #include "runtime/frame.inline.hpp"
39 #include "utilities/debug.hpp"
40 #include "utilities/devirtualizer.inline.hpp"
41 #include "utilities/globalDefinitions.hpp"
42 #include "utilities/macros.hpp"
43 #include CPU_HEADER_INLINE(stackChunkFrameStream)
44
45 #ifdef ASSERT
46 extern "C" bool dbg_is_safe(const void* p, intptr_t errvalue);
47 #endif
48
// Constructs a stream positioned at the topmost (youngest) frame of the given
// chunk, i.e. at chunk->sp(). A CompiledOnly stream must not be used on a
// chunk that contains mixed (interpreted) frames.
template <ChunkFrames frame_kind>
StackChunkFrameStream<frame_kind>::StackChunkFrameStream(stackChunkOop chunk) DEBUG_ONLY(: _chunk(chunk)) {
  assert(chunk->is_stackChunk_noinline(), "");
  assert(frame_kind == ChunkFrames::Mixed || !chunk->has_mixed_frames(), "");

  DEBUG_ONLY(_index = 0;)
  _end = chunk->bottom_address();
  _sp = chunk->start_address() + chunk->sp();
  assert(_sp <= chunk->end_address() + frame::metadata_words, "");

  get_cb();

  if (frame_kind == ChunkFrames::Mixed) {
    // Interpreted frames may be extended, so the unextended sp is tracked
    // separately; for non-interpreted frames the two coincide.
    _unextended_sp = (!is_done() && is_interpreted()) ? unextended_sp_for_interpreter_frame() : _sp;
    assert(_unextended_sp >= _sp - frame::metadata_words, "");
  }
  DEBUG_ONLY(else _unextended_sp = nullptr;)

  if (is_stub()) {
    // A stub frame's oopmap is resolved eagerly at slot 0; see also next(),
    // which handles the oopmap of a stub's caller.
    get_oopmap(pc(), 0);
    DEBUG_ONLY(_has_stub = true);
  } DEBUG_ONLY(else _has_stub = false;)
}
72
// Constructs a stream positioned at the given frame f, which must lie within
// the chunk. Used to resume iteration from a known frame rather than from
// the chunk's top.
template <ChunkFrames frame_kind>
StackChunkFrameStream<frame_kind>::StackChunkFrameStream(stackChunkOop chunk, const frame& f)
  DEBUG_ONLY(: _chunk(chunk)) {
  assert(chunk->is_stackChunk_noinline(), "");
  assert(frame_kind == ChunkFrames::Mixed || !chunk->has_mixed_frames(), "");
  // assert(!is_empty(), ""); -- allowed to be empty

  DEBUG_ONLY(_index = 0;)

  _end = chunk->bottom_address();

  assert(chunk->is_in_chunk(f.sp()), "");
  _sp = f.sp();
  if (frame_kind == ChunkFrames::Mixed) {
    _unextended_sp = f.unextended_sp();
    assert(_unextended_sp >= _sp - frame::metadata_words, "");
  }
  DEBUG_ONLY(else _unextended_sp = nullptr;)
  assert(_sp >= chunk->start_address(), "");
  assert(_sp <= chunk->end_address() + frame::metadata_words, "");

  if (f.cb() != nullptr) {
    // Reuse the code blob the caller already resolved; the oopmap is left
    // unresolved here (see handle_deopted() for the lazy lookup).
    _oopmap = nullptr;
    _cb = f.cb();
  } else {
    get_cb();
  }

  if (is_stub()) {
    // Stub frames get their oopmap eagerly (slot 0).
    get_oopmap(pc(), 0);
    DEBUG_ONLY(_has_stub = true);
  } DEBUG_ONLY(else _has_stub = false;)
}
106
107 template <ChunkFrames frame_kind>
108 inline bool StackChunkFrameStream<frame_kind>::is_stub() const {
109 return cb() != nullptr && _cb->is_runtime_stub();
110 }
111
112 template <ChunkFrames frame_kind>
113 inline bool StackChunkFrameStream<frame_kind>::is_compiled() const {
114 return cb() != nullptr && _cb->is_nmethod();
115 }
116
117 template <>
118 inline bool StackChunkFrameStream<ChunkFrames::Mixed>::is_interpreted() const {
119 return !is_done() && Interpreter::contains(pc());
120 }
121
122 template <>
123 inline bool StackChunkFrameStream<ChunkFrames::CompiledOnly>::is_interpreted() const {
124 return false;
125 }
126
127 // StackChunkFrameStream<frame_kind>::frame_size() returns the words required to
128 // store the given frame as the only frame in a StackChunk. This is the size of the
129 // frame itself plus its stack arguments plus metadata at the caller's frame top (1)
130 //
131 // |====================| ---
132 // | F0's stackargs | ^
133 // | | |
134 // |--------------------| |
135 // | metadata@top | <- caller's sp
136 // |====================| |
137 // | metadata@bottom(2) | |
138 // |--------------------|
139 // | | size S0
140 // | Frame F0 | --- |====================| ---
141 // | | | ^ | F1's stackargs | ^
142 // | | | | | | |
143 // |--------------------| | overlap |--------------------| |
144 // | metadata@top(1) |<- sp v v | metadata@top | <- caller's sp
145 // |====================| --- --- |====================| |
146 // | metadata@bottom | |
147 // | |--------------------|
148 // | | Frame F1 | size S1
149 // Stack Growth | (F0's callee) |
150 // | | | |
151 // | | | |
152 // v |--------------------| |
153 // | metadata@top |<- sp v
154 // |====================| ---
155 //
156 // 2 frames of the same kind (interpreted or compiled) overlap. So the total
157 // size required in the StackChunk is S0 + S1 - overlap, where the overlap is
158 // the size of F1's stackargs plus frame::metadata_words_at_top.
159 //
// The callers of frame_size() are supposed to deduct the overlap. The bottom
// frame in the StackChunk obviously does not overlap with its caller, as it is
// in the parent chunk.
163 //
// There is no overlap if caller/callee are of different kinds. In that case the
// caller is extended to accommodate the callee's stack arguments. The extension
// is not counted though in the caller's size, so there is indeed no overlap.
167 //
168 // See ppc implementation of StackChunkFrameStream<frame_kind>::interpreter_frame_size()
169 // for more details.
170 //
171 // (1) Metadata at frame top (see frame::metadata_words_at_top)
172 // Part of the overlap. Used on ppc64, empty on x86_64, aarch64
173 // (2) Metadata at the frame bottom (see frame::metadata_words_at_bottom)
174 // Not part of the overlap.
175 // Used on x86_64 (saved rbp, ret. addr.), aarch64. Empty on ppc64.
176 //
177 template <ChunkFrames frame_kind>
178 inline int StackChunkFrameStream<frame_kind>::frame_size() const {
179 return is_interpreted() ? interpreter_frame_size()
180 : cb()->frame_size() + stack_argsize() + frame::metadata_words_at_top;
181 }
182
183 template <ChunkFrames frame_kind>
184 inline int StackChunkFrameStream<frame_kind>::stack_argsize() const {
185 if (is_interpreted()) {
186 return interpreter_frame_stack_argsize();
187 }
188 if (is_stub()) {
189 return 0;
190 }
191 assert(cb() != nullptr, "");
192 assert(cb()->is_nmethod(), "");
193 assert(cb()->as_nmethod()->method() != nullptr, "");
194 return (cb()->as_nmethod()->num_stack_arg_slots() * VMRegImpl::stack_slot_size) >> LogBytesPerWord;
195 }
196
197 template <ChunkFrames frame_kind>
198 inline int StackChunkFrameStream<frame_kind>::num_oops() const {
199 if (is_interpreted()) {
200 return interpreter_frame_num_oops();
201 } else if (is_compiled()) {
202 return oopmap()->num_oops();
203 } else {
204 assert(is_stub(), "invariant");
205 return 0;
206 }
207 }
208
// Performs platform-dependent initialization of the given register map
// (delegates to the CPU-specific update_reg_map_pd()).
template <ChunkFrames frame_kind>
inline void StackChunkFrameStream<frame_kind>::initialize_register_map(RegisterMap* map) {
  update_reg_map_pd(map);
}
213
// Advances the stream to the next (older, caller) frame, updating the given
// register map on the way. If 'stop' is true, only the stream position is
// advanced; the new frame's code blob and oopmap are not resolved.
template <ChunkFrames frame_kind>
template <typename RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::next(RegisterMapT* map, bool stop) {
  update_reg_map(map);
  bool is_runtime_stub = is_stub(); // remember before advancing past the stub
  if (frame_kind == ChunkFrames::Mixed) {
    if (is_interpreted()) {
      next_for_interpreter_frame();
    } else {
      _sp = _unextended_sp + cb()->frame_size();
      if (_sp >= _end - frame::metadata_words) {
        // Walked past the last frame; clamp to the end of the chunk.
        _sp = _end;
      }
      _unextended_sp = is_interpreted() ? unextended_sp_for_interpreter_frame() : _sp;
    }
    assert(_unextended_sp >= _sp - frame::metadata_words, "");
  } else {
    _sp += cb()->frame_size();
  }
  assert(!is_interpreted() || _unextended_sp == unextended_sp_for_interpreter_frame(), "");

  DEBUG_ONLY(_index++;)
  if (stop) {
    return;
  }

  get_cb();
  update_reg_map_pd(map);
  if (is_runtime_stub && cb() != nullptr) { // there's no post-call nop and no fast oopmap lookup
    // caller could have been deoptimized so use orig_pc()
    _oopmap = cb()->oop_map_for_return_address(orig_pc());
  }
}
247
// Resolves the code blob for the current pc and clears the cached oopmap.
// The blob is null when the stream is done or the frame is interpreted.
template <ChunkFrames frame_kind>
inline void StackChunkFrameStream<frame_kind>::get_cb() {
  _oopmap = nullptr;
  if (is_done() || is_interpreted()) {
    _cb = nullptr;
    return;
  }

  assert(pc() != nullptr, "");
  assert(dbg_is_safe(pc(), -1), "");

  _cb = CodeCache::find_blob_fast(pc());

  assert(_cb != nullptr, "");
  assert(is_interpreted() || ((is_stub() || is_compiled()) && _cb->frame_size() > 0), "");
}
264
265 template <ChunkFrames frame_kind>
266 inline void StackChunkFrameStream<frame_kind>::get_oopmap() const {
267 if (is_interpreted()) {
268 return;
269 }
270 assert(is_compiled(), "");
271 get_oopmap(pc(), CodeCache::find_oopmap_slot_fast(pc()));
272 }
273
274 template <ChunkFrames frame_kind>
275 inline void StackChunkFrameStream<frame_kind>::get_oopmap(address pc, int oopmap_slot) const {
276 assert(cb() != nullptr, "");
277 assert(!is_compiled() || !cb()->as_nmethod()->is_deopt_pc(pc), "");
278 if (oopmap_slot >= 0) {
279 assert(oopmap_slot >= 0, "");
280 assert(cb()->oop_map_for_slot(oopmap_slot, pc) != nullptr, "");
281 assert(cb()->oop_map_for_slot(oopmap_slot, pc) == cb()->oop_map_for_return_address(pc), "");
282
283 _oopmap = cb()->oop_map_for_slot(oopmap_slot, pc);
284 } else {
285 _oopmap = cb()->oop_map_for_return_address(pc);
286 }
287 assert(_oopmap != nullptr, "");
288 }
289
290 template <ChunkFrames frame_kind>
291 template <typename RegisterMapT>
292 inline void* StackChunkFrameStream<frame_kind>::reg_to_loc(VMReg reg, const RegisterMapT* map) const {
293 assert(!is_done(), "");
294 return reg->is_reg() ? (void*)map->location(reg, sp()) // see frame::update_map_with_saved_link(&map, link_addr);
295 : (void*)((address)unextended_sp() + (reg->reg2stack() * VMRegImpl::stack_slot_size));
296 }
297
298 template<>
299 template<>
300 inline void StackChunkFrameStream<ChunkFrames::Mixed>::update_reg_map(RegisterMap* map) {
301 assert(!map->in_cont() || map->stack_chunk() == _chunk, "");
302 if (map->update_map() && is_stub()) {
303 frame f = to_frame();
304 oopmap()->update_register_map(&f, map); // we have callee-save registers in this case
305 }
306 }
307
308 template<>
309 template<>
310 inline void StackChunkFrameStream<ChunkFrames::CompiledOnly>::update_reg_map(RegisterMap* map) {
311 assert(!map->in_cont() || map->stack_chunk() == _chunk, "");
312 if (map->update_map() && is_stub()) {
313 frame f = to_frame();
314 oopmap()->update_register_map(&f, map); // we have callee-save registers in this case
315 }
316 }
317
// Generic fallback for register-map types other than RegisterMap: nothing
// to update (the RegisterMap case is handled by the full specializations
// above).
template <ChunkFrames frame_kind>
template <typename RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::update_reg_map(RegisterMapT* map) {}
321
// Returns the frame's original pc. If the frame's nmethod was marked for
// deoptimization (the stored pc points at the deopt handler), the original
// pc is recovered from the frame at the nmethod's orig_pc_offset().
template <ChunkFrames frame_kind>
inline address StackChunkFrameStream<frame_kind>::orig_pc() const {
  address pc1 = pc();
  if (is_interpreted() || is_stub()) {
    // Only compiled (nmethod) frames can carry a deopt pc.
    return pc1;
  }
  nmethod* nm = cb()->as_nmethod();
  if (nm->is_deopt_pc(pc1)) {
    // Load the saved original pc from its fixed offset in the frame.
    pc1 = *(address*)((address)unextended_sp() + nm->orig_pc_offset());
  }

  assert(pc1 != nullptr, "");
  assert(!nm->is_deopt_pc(pc1), "");
  assert(_cb == CodeCache::find_blob_fast(pc1), "");

  return pc1;
}
339
// Ensures the current compiled frame's oopmap is resolved, handling the case
// where the frame was marked for deoptimization (its pc then points into the
// deopt handler, for which the fast oopmap lookup fails).
template<ChunkFrames frame_kind>
void StackChunkFrameStream<frame_kind>::handle_deopted() const {
  assert(!is_done(), "");

  if (_oopmap != nullptr) {
    // Already resolved.
    return;
  }
  if (is_interpreted()) {
    // Interpreted frames have no oopmap.
    return;
  }
  assert(is_compiled(), "");

  address pc1 = pc();
  int oopmap_slot = CodeCache::find_oopmap_slot_fast(pc1);
  if (oopmap_slot < 0) { // UNLIKELY; we could have marked frames for deoptimization in thaw_chunk
    if (cb()->as_nmethod()->is_deopt_pc(pc1)) {
      // Retry the lookup with the original (pre-deoptimization) pc.
      pc1 = orig_pc();
      oopmap_slot = CodeCache::find_oopmap_slot_fast(pc1);
    }
  }
  get_oopmap(pc1, oopmap_slot);
}
362
// Applies 'closure' to every oop location (wide or narrow) in the current
// frame. Interpreted frames are walked via frame::oops_interpreted_do();
// compiled/stub frames are walked via their oopmap, with each oopmap entry
// translated to a chunk address through 'map'.
template <ChunkFrames frame_kind>
template <class OopClosureType, class RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::iterate_oops(OopClosureType* closure, const RegisterMapT* map) const {
  if (is_interpreted()) {
    frame f = to_frame();
    f.oops_interpreted_do(closure, nullptr, true);
  } else {
    DEBUG_ONLY(int oops = 0;)
    for (OopMapStream oms(oopmap()); !oms.is_done(); oms.next()) {
      OopMapValue omv = oms.current();
      if (omv.type() != OopMapValue::oop_value && omv.type() != OopMapValue::narrowoop_value) {
        // Skip entries that are not oop locations (e.g. derived pointers).
        continue;
      }

      // Narrow oops can only appear with compressed oops enabled.
      assert(UseCompressedOops || omv.type() == OopMapValue::oop_value, "");
      DEBUG_ONLY(oops++;)

      void* p = reg_to_loc(omv.reg(), map);
      assert(p != nullptr, "");
      assert((_has_stub && _index == 1) || is_in_frame(p), "");

      log_develop_trace(continuations)("StackChunkFrameStream::iterate_oops narrow: %d reg: %s p: " INTPTR_FORMAT " sp offset: " INTPTR_FORMAT,
          omv.type() == OopMapValue::narrowoop_value, omv.reg()->name(), p2i(p), (intptr_t*)p - sp());
      omv.type() == OopMapValue::narrowoop_value ? Devirtualizer::do_oop(closure, (narrowOop*)p) : Devirtualizer::do_oop(closure, (oop*)p);
    }
    // Verify we visited exactly as many oops as the oopmap declares.
    assert(oops == oopmap()->num_oops(), "oops: %d oopmap->num_oops(): %d", oops, oopmap()->num_oops());
  }
}
391
// Applies 'closure' to every (base, derived) pointer pair recorded in the
// current frame's oopmap. Only compiled frames can contain derived pointers.
template <ChunkFrames frame_kind>
template <class DerivedOopClosureType, class RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::iterate_derived_pointers(DerivedOopClosureType* closure, const RegisterMapT* map) const {
  if (!is_compiled()) {
    // Only compiled frames have derived pointers
    return;
  }

  assert(oopmap()->has_derived_oops() == oopmap()->has_any(OopMapValue::derived_oop_value), "");
  if (!oopmap()->has_derived_oops()) {
    // Fast exit: nothing to do for this frame.
    return;
  }

  for (OopMapStream oms(oopmap()); !oms.is_done(); oms.next()) {
    OopMapValue omv = oms.current();
    if (omv.type() != OopMapValue::derived_oop_value) {
      continue;
    }

    // see OopMapDo<OopMapFnT, DerivedOopFnT, ValueFilterT>::walk_derived_pointers1
    intptr_t* derived_loc = (intptr_t*)reg_to_loc(omv.reg(), map);
    intptr_t* base_loc = (intptr_t*)reg_to_loc(omv.content_reg(), map);

    assert((_has_stub && _index == 1) || is_in_frame(base_loc), "");
    assert((_has_stub && _index == 1) || is_in_frame(derived_loc), "");
    assert(derived_loc != base_loc, "Base and derived in same location");
    // The base must be a recorded oop location; the derived pointer must not be.
    assert(is_in_oops(base_loc, map), "not found: " INTPTR_FORMAT, p2i(base_loc));
    assert(!is_in_oops(derived_loc, map), "found: " INTPTR_FORMAT, p2i(derived_loc));

    Devirtualizer::do_derived_oop(closure, (derived_base*)base_loc, (derived_pointer*)derived_loc);
  }
}
424
425 #ifdef ASSERT
426
427 template <ChunkFrames frame_kind>
428 template <typename RegisterMapT>
429 bool StackChunkFrameStream<frame_kind>::is_in_oops(void* p, const RegisterMapT* map) const {
430 for (OopMapStream oms(oopmap()); !oms.is_done(); oms.next()) {
431 if (oms.current().type() != OopMapValue::oop_value) {
432 continue;
433 }
434 if (reg_to_loc(oms.current().reg(), map) == p) {
435 return true;
436 }
437 }
438 return false;
439 }
440
441 template <ChunkFrames frame_kind>
442 void StackChunkFrameStream<frame_kind>::assert_is_interpreted_and_frame_type_mixed() const {
443 assert(is_interpreted(), "");
444 assert(frame_kind == ChunkFrames::Mixed, "");
445 }
446
447 #endif
448
449 #endif // SHARE_OOPS_STACKCHUNKFRAMESTREAM_INLINE_HPP