/*
 * Copyright (c) 2019, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_X86_STACKCHUNKFRAMESTREAM_X86_INLINE_HPP
#define CPU_X86_STACKCHUNKFRAMESTREAM_X86_INLINE_HPP

#include "interpreter/oopMapCache.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/registerMap.hpp"

#ifdef ASSERT
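// Debug-only sanity check used by asserts in the shared stack-chunk code: returns
// true if p0 lies within the current physical frame. For compiled frames the size
// reported by the code blob may not cover the stack-passed arguments, so the range
// is widened accordingly.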
template <ChunkFrames frame_kind>
inline bool StackChunkFrameStream<frame_kind>::is_in_frame(void* p0) const {
  assert(!is_done(), "");
  intptr_t* p = (intptr_t*)p0;
  int frame_size = _cb->frame_size();
  if (is_compiled()) {
    nmethod* nm = _cb->as_nmethod_or_null();
    if (nm->needs_stack_repair() && nm->is_compiled_by_c2()) {
      frame f = to_frame();
      bool augmented = f.was_augmented_on_entry(frame_size);
      if (!augmented) {
        // The caller is C2-compiled, so the frame was not extended on entry and
        // the size read from the frame does not include the arguments. Ideally
        // we would count the argument size for the scalarized calling convention;
        // for now we add the size of the caller frame, which is at least as large.
        RegisterMap map(nullptr,
                        RegisterMap::UpdateMap::skip,
                        RegisterMap::ProcessFrames::skip,
                        RegisterMap::WalkContinuation::skip);
        frame caller = to_frame().sender(&map);
        assert(caller.is_compiled_frame() && caller.cb()->as_nmethod()->is_compiled_by_c2(),
               "frame needs stack repair but was not extended despite a C1/interpreter caller");
        frame_size += (caller.real_fp() - caller.sp());
      }
    } else {
      frame_size += (_cb->as_nmethod()->num_stack_arg_slots() * VMRegImpl::stack_slot_size) >> LogBytesPerWord;
    }
  }
  return p == sp() - frame::sender_sp_offset || ((p - unextended_sp()) >= 0 && (p - unextended_sp()) < frame_size);
}
#endif

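// Materializes a frame object for the current position of the stream. When the
// stream is exhausted, an empty frame anchored at the current sp is returned.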
template <ChunkFrames frame_kind>
inline frame StackChunkFrameStream<frame_kind>::to_frame() const {
  if (is_done()) {
    return frame(_sp, _sp, nullptr, nullptr, nullptr, nullptr, true);
  } else {
    return frame(sp(), unextended_sp(), fp(), pc(), cb(), _oopmap, true);
  }
}

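// The return address of the current frame sits one word below its sp; when the
// callee augmented the frame on entry (_callee_augmented), it is found below the
// unextended sp instead.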
template <ChunkFrames frame_kind>
inline address StackChunkFrameStream<frame_kind>::get_pc() const {
  assert(!is_done(), "");
  return *(address*)((_callee_augmented ? _unextended_sp : _sp) - 1);
}

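// Frame pointer of the current frame. Interpreted frames in a mixed chunk store
// the saved fp as an offset relative to its own location (the chunk lives in the
// heap and may move), so it is derelativized; compiled frames store an absolute
// pointer.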
template <ChunkFrames frame_kind>
inline intptr_t* StackChunkFrameStream<frame_kind>::fp() const {
  intptr_t* fp_addr = _sp - frame::sender_sp_offset;
  return (frame_kind == ChunkFrames::Mixed && is_interpreted())
    ? fp_addr + *fp_addr // derelativize
    : *(intptr_t**)fp_addr;
}

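// Turns a relativized slot of the current frame (stored as an offset from fp)
// back into an absolute address.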
template <ChunkFrames frame_kind>
inline intptr_t* StackChunkFrameStream<frame_kind>::derelativize(int offset) const {
  intptr_t* fp = this->fp();
  assert(fp != nullptr, "");
  return fp + fp[offset];
}

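// The unextended sp of an interpreted frame is its saved (relativized) last sp.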
template <ChunkFrames frame_kind>
inline intptr_t* StackChunkFrameStream<frame_kind>::unextended_sp_for_interpreter_frame() const {
  assert_is_interpreted_and_frame_type_mixed();
  return derelativize(frame::interpreter_frame_last_sp_offset);
}

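// Advances the stream past the current interpreted frame. If the frame's locals
// reach the end of the chunk, this was the bottom frame and both pointers are set
// to _end; otherwise sp and unextended sp are recomputed from the saved sender sp.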
template <ChunkFrames frame_kind>
inline void StackChunkFrameStream<frame_kind>::next_for_interpreter_frame() {
  assert_is_interpreted_and_frame_type_mixed();
  if (derelativize(frame::interpreter_frame_locals_offset) + 1 >= _end) {
    _unextended_sp = _end;
    _sp = _end;
  } else {
    intptr_t* fp = this->fp();
    _unextended_sp = fp + fp[frame::interpreter_frame_sender_sp_offset];
    _sp = fp + frame::sender_sp_offset;
  }
}

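// Size of the interpreted frame in words, from its unextended sp up to one past
// its locals (which is the sender's unextended sp).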
template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::interpreter_frame_size() const {
  assert_is_interpreted_and_frame_type_mixed();

  intptr_t* top = unextended_sp(); // later subtract argsize if callee is interpreted
  intptr_t* bottom = derelativize(frame::interpreter_frame_locals_offset) + 1; // the sender's unextended sp: derelativize(frame::interpreter_frame_sender_sp_offset);
  return (int)(bottom - top);
}

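// Number of stack argument words of the interpreted frame: the distance from the
// saved sender sp to one past the frame's locals.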
template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::interpreter_frame_stack_argsize() const {
  assert_is_interpreted_and_frame_type_mixed();
  int diff = (int)(derelativize(frame::interpreter_frame_locals_offset) - derelativize(frame::interpreter_frame_sender_sp_offset) + 1);
  return diff;
}

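// Number of oops in the interpreted frame: the oops from the interpreter oop map,
// the mirror, a temporary oop slot for native methods, and one per monitor.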
template <ChunkFrames frame_kind>
inline int StackChunkFrameStream<frame_kind>::interpreter_frame_num_oops() const {
  assert_is_interpreted_and_frame_type_mixed();
  ResourceMark rm;
  InterpreterOopMap mask;
  frame f = to_frame();
  f.interpreted_frame_oop_map(&mask);
  return  mask.num_oops()
        + 1 // for the mirror oop
        + (f.interpreter_frame_method()->is_native() ? 1 : 0) // temp oop slot
        + pointer_delta_as_int((intptr_t*)f.interpreter_frame_monitor_begin(),
              (intptr_t*)f.interpreter_frame_monitor_end())/BasicObjectLock::size();
}

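// Record in the register map where the current frame saved the caller's rbp (the
// link). When walking a continuation the location is stored as an offset, since
// the stack chunk lives in the heap and may move; otherwise the absolute slot at
// sp - sender_sp_offset is recorded.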
template<>
template<>
inline void StackChunkFrameStream<ChunkFrames::Mixed>::update_reg_map_pd(RegisterMap* map) {
  if (map->update_map()) {
    frame::update_map_with_saved_link(map, map->in_cont() ? (intptr_t**)(intptr_t)frame::sender_sp_offset
                                                          : (intptr_t**)(_sp - frame::sender_sp_offset));
  }
}

template<>
template<>
inline void StackChunkFrameStream<ChunkFrames::CompiledOnly>::update_reg_map_pd(RegisterMap* map) {
  if (map->update_map()) {
    frame::update_map_with_saved_link(map, map->in_cont() ? (intptr_t**)(intptr_t)frame::sender_sp_offset
                                                          : (intptr_t**)(_sp - frame::sender_sp_offset));
  }
}

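// Other register map types used with the stream (such as SmallRegisterMap) do not
// track register locations, so there is nothing to record.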
template <ChunkFrames frame_kind>
template <typename RegisterMapT>
inline void StackChunkFrameStream<frame_kind>::update_reg_map_pd(RegisterMapT* map) {}

#endif // CPU_X86_STACKCHUNKFRAMESTREAM_X86_INLINE_HPP