
src/hotspot/share/runtime/stackChunkFrameStream.inline.hpp

Old version:

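// frame_size(): total size of the current frame in words. For compiled
// frames this is the code blob's fixed frame size plus any stack-passed
// arguments plus the metadata words at the top of the frame.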
template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::frame_size() const {
  return is_interpreted() ? interpreter_frame_size()
                          : cb()->frame_size() + stack_argsize() + frame::metadata_words_at_top;
}

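// stack_argsize(): size of the stack-passed arguments in words. Slots are
// converted to bytes (VMRegImpl::stack_slot_size) and then to words; e.g.
// on LP64, 6 slots * 4 bytes = 24 bytes >> LogBytesPerWord(3) = 3 words.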
template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::stack_argsize() const {
  if (is_interpreted()) {
    return interpreter_frame_stack_argsize();
  }
  if (is_stub()) {
    return 0;
  }
  assert(cb() != nullptr, "");
  assert(cb()->is_nmethod(), "");
  assert(cb()->as_nmethod()->method() != nullptr, "");
  return (cb()->as_nmethod()->num_stack_arg_slots() * VMRegImpl::stack_slot_size) >> LogBytesPerWord;
}

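// num_oops(): number of oops in the current frame -- counted from the
// interpreter frame layout for interpreted frames, taken from the oopmap
// for compiled frames, and zero for stub frames.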
template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::num_oops() const {
  if (is_interpreted()) {
    return interpreter_frame_num_oops();
  } else if (is_compiled()) {
    return oopmap()->num_oops();
  } else {
    assert(is_stub(), "invariant");
    return 0;
  }
}

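// Delegates to the platform-dependent helper to seed the register map.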
template <ChunkFrames frame_kind>
inline void StackChunkFrameStream<frame_kind>::initialize_register_map(RegisterMap* map) {
  update_reg_map_pd(map);
}

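// next(): advance the stream to the next (older) frame, updating the
// register map on the way; most of its body falls in the elided region
// below.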
template <ChunkFrames frame_kind>
template <typename RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::next(RegisterMapT* map, bool stop) {
  update_reg_map(map);
  bool is_runtime_stub = is_stub();
  if (frame_kind == ChunkFrames::Mixed) {
    if (is_interpreted()) {
...
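// Tail of handle_deopted() (its opening lines fall in the elided region
// above): a pc patched for deoptimization has no oopmap slot, so the
// original pc is recovered before the lookup.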
    return;
  }
  assert(is_compiled(), "");

  address pc1 = pc();
  int oopmap_slot = CodeCache::find_oopmap_slot_fast(pc1);
  if (oopmap_slot < 0) { // UNLIKELY; we could have marked frames for deoptimization in thaw_chunk
    if (cb()->as_nmethod()->is_deopt_pc(pc1)) {
      pc1 = orig_pc();
      oopmap_slot = CodeCache::find_oopmap_slot_fast(pc1);
    }
  }
  get_oopmap(pc1, oopmap_slot);
}

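// iterate_oops(): apply the closure to every oop in the current frame --
// interpreted frames via frame::oops_interpreted_do, compiled frames via
// their oopmap, dispatching on narrow vs. wide oop entries.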
template <ChunkFrames frame_kind>
template <class OopClosureType, class RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::iterate_oops(OopClosureType* closure, const RegisterMapT* map) const {
  if (is_interpreted()) {
    frame f = to_frame();
    f.oops_interpreted_do(closure, nullptr, true);
  } else {
    DEBUG_ONLY(int oops = 0;)
    for (OopMapStream oms(oopmap()); !oms.is_done(); oms.next()) {
      OopMapValue omv = oms.current();
      if (omv.type() != OopMapValue::oop_value && omv.type() != OopMapValue::narrowoop_value) {
        continue;
      }

      assert(UseCompressedOops || omv.type() == OopMapValue::oop_value, "");
      DEBUG_ONLY(oops++;)

      void* p = reg_to_loc(omv.reg(), map);
      assert(p != nullptr, "");
      assert((_has_stub && _index == 1) || is_in_frame(p), "");

      log_develop_trace(continuations)("StackChunkFrameStream::iterate_oops narrow: %d reg: %s p: " INTPTR_FORMAT " sp offset: " INTPTR_FORMAT,
          omv.type() == OopMapValue::narrowoop_value, omv.reg()->name(), p2i(p), (intptr_t*)p - sp());
      omv.type() == OopMapValue::narrowoop_value ? Devirtualizer::do_oop(closure, (narrowOop*)p) : Devirtualizer::do_oop(closure, (oop*)p);
    }
    assert(oops == oopmap()->num_oops(), "oops: %d oopmap->num_oops(): %d", oops, oopmap()->num_oops());
  }
}

New version:

template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::frame_size() const {
  return is_interpreted() ? interpreter_frame_size()
                          : cb()->frame_size() + stack_argsize() + frame::metadata_words_at_top;
}

template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::stack_argsize() const {
  if (is_interpreted()) {
    return interpreter_frame_stack_argsize();
  }
  if (is_stub()) {
    return 0;
  }
  assert(cb() != nullptr, "");
  assert(cb()->is_nmethod(), "");
  assert(cb()->as_nmethod()->method() != nullptr, "");
  return (cb()->as_nmethod()->num_stack_arg_slots() * VMRegImpl::stack_slot_size) >> LogBytesPerWord;
}

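// Changed in this revision: num_oops() is now templated on the register map
// type and takes the map, so the interpreted-frame count can make use of it.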
template <ChunkFrames frame_kind>
template <typename RegisterMapT>
inline int StackChunkFrameStream<frame_kind>::num_oops(RegisterMapT* map) const {
  if (is_interpreted()) {
    return interpreter_frame_num_oops(map);
  } else if (is_compiled()) {
    return oopmap()->num_oops();
  } else {
    assert(is_stub(), "invariant");
    return 0;
  }
}

template <ChunkFrames frame_kind>
inline void StackChunkFrameStream<frame_kind>::initialize_register_map(RegisterMap* map) {
  update_reg_map_pd(map);
}

template <ChunkFrames frame_kind>
template <typename RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::next(RegisterMapT* map, bool stop) {
  update_reg_map(map);
  bool is_runtime_stub = is_stub();
  if (frame_kind == ChunkFrames::Mixed) {
    if (is_interpreted()) {
...
    return;
  }
  assert(is_compiled(), "");

  address pc1 = pc();
  int oopmap_slot = CodeCache::find_oopmap_slot_fast(pc1);
  if (oopmap_slot < 0) { // UNLIKELY; we could have marked frames for deoptimization in thaw_chunk
    if (cb()->as_nmethod()->is_deopt_pc(pc1)) {
      pc1 = orig_pc();
      oopmap_slot = CodeCache::find_oopmap_slot_fast(pc1);
    }
  }
  get_oopmap(pc1, oopmap_slot);
}

template <ChunkFrames frame_kind>
template <class OopClosureType, class RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::iterate_oops(OopClosureType* closure, const RegisterMapT* map) const {
  if (is_interpreted()) {
    frame f = to_frame();
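    // Changed in this revision: pass the caller's RegisterMap through to the
    // interpreted-frame walk instead of nullptr.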
    f.oops_interpreted_do(closure, map, true);
  } else {
    DEBUG_ONLY(int oops = 0;)
    for (OopMapStream oms(oopmap()); !oms.is_done(); oms.next()) {
      OopMapValue omv = oms.current();
      if (omv.type() != OopMapValue::oop_value && omv.type() != OopMapValue::narrowoop_value) {
        continue;
      }

      assert(UseCompressedOops || omv.type() == OopMapValue::oop_value, "");
      DEBUG_ONLY(oops++;)

      void* p = reg_to_loc(omv.reg(), map);
      assert(p != nullptr, "");
      assert((_has_stub && _index == 1) || is_in_frame(p), "");

      log_develop_trace(continuations)("StackChunkFrameStream::iterate_oops narrow: %d reg: %s p: " INTPTR_FORMAT " sp offset: " INTPTR_FORMAT,
          omv.type() == OopMapValue::narrowoop_value, omv.reg()->name(), p2i(p), (intptr_t*)p - sp());
      omv.type() == OopMapValue::narrowoop_value ? Devirtualizer::do_oop(closure, (narrowOop*)p) : Devirtualizer::do_oop(closure, (oop*)p);
    }
    assert(oops == oopmap()->num_oops(), "oops: %d oopmap->num_oops(): %d", oops, oopmap()->num_oops());
  }
}
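
For orientation, a minimal sketch (not part of this webrev) of how such a
stream is typically driven over a chunk. walk_chunk_oops is a hypothetical
helper name, the RegisterMap is assumed to be constructed elsewhere (its
constructor arguments vary by release), and the call order around
handle_deopted() is an assumption:

template <class OopClosureType>
void walk_chunk_oops(stackChunkOop chunk, OopClosureType* closure, RegisterMap* map) {
  StackChunkFrameStream<ChunkFrames::Mixed> fs(chunk); // frames may be interpreted or compiled
  fs.initialize_register_map(map);
  for (; !fs.is_done(); fs.next(map, false)) {
    fs.handle_deopted();           // assumed: ensure the oopmap matches a deopted pc
    fs.iterate_oops(closure, map); // visits every oop in the current frame
  }
}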