
src/hotspot/share/oops/stackChunkOop.inline.hpp

Old version:

135   HeapWord* end = start + stack_size();
136   return (HeapWord*)p >= start && (HeapWord*)p < end;
137 }
138 
139 bool stackChunkOopDesc::is_usable_in_chunk(void* p) const {
140   HeapWord* start = (HeapWord*)start_address() + sp() - frame::metadata_words_at_bottom;
141   HeapWord* end = start + stack_size();
142   return (HeapWord*)p >= start && (HeapWord*)p < end;
143 }
144 
145 inline bool stackChunkOopDesc::is_flag(uint8_t flag) const {
146   return (flags() & flag) != 0;
147 }
148 inline bool stackChunkOopDesc::is_flag_acquire(uint8_t flag) const {
149   return (flags_acquire() & flag) != 0;
150 }
151 inline void stackChunkOopDesc::set_flag(uint8_t flag, bool value) {
152   uint32_t flags = this->flags();
153   set_flags((uint8_t)(value ? flags |= flag : flags &= ~flag));
154 }
155 inline void stackChunkOopDesc::clear_flags() {
156   set_flags(0);
157 }
158 
159 inline bool stackChunkOopDesc::has_mixed_frames() const { return is_flag(FLAG_HAS_INTERPRETED_FRAMES); }
160 inline void stackChunkOopDesc::set_has_mixed_frames(bool value) {
161   assert((flags() & ~(FLAG_HAS_INTERPRETED_FRAMES | FLAG_PREEMPTED)) == 0, "other flags should not be set");
162   set_flag(FLAG_HAS_INTERPRETED_FRAMES, value);
163 }
164 
165 inline bool stackChunkOopDesc::preempted() const { return is_flag(FLAG_PREEMPTED); }
166 inline void stackChunkOopDesc::set_preempted(bool value) {
167   assert(preempted() != value, "");
168   set_flag(FLAG_PREEMPTED, value);
169 }
170 
171 inline bool stackChunkOopDesc::has_lockstack() const         { return is_flag(FLAG_HAS_LOCKSTACK); }
172 inline void stackChunkOopDesc::set_has_lockstack(bool value) { set_flag(FLAG_HAS_LOCKSTACK, value); }
173 
174 inline bool stackChunkOopDesc::is_gc_mode() const                  { return is_flag(FLAG_GC_MODE); }
175 inline bool stackChunkOopDesc::is_gc_mode_acquire() const          { return is_flag_acquire(FLAG_GC_MODE); }
176 inline void stackChunkOopDesc::set_gc_mode(bool value)             { set_flag(FLAG_GC_MODE, value); }
177 
178 inline bool stackChunkOopDesc::has_bitmap() const                  { return is_flag(FLAG_HAS_BITMAP); }
179 inline void stackChunkOopDesc::set_has_bitmap(bool value)          { set_flag(FLAG_HAS_BITMAP, value); }
180 
181 inline bool stackChunkOopDesc::has_thaw_slowpath_condition() const { return flags() != 0; }
182 
183 inline bool stackChunkOopDesc::requires_barriers() {
184   return Universe::heap()->requires_barriers(this);
185 }
186 
187 template <stackChunkOopDesc::BarrierType barrier, ChunkFrames frame_kind, typename RegisterMapT>
188 void stackChunkOopDesc::do_barriers(const StackChunkFrameStream<frame_kind>& f, const RegisterMapT* map) {
189   if (frame_kind == ChunkFrames::Mixed) {
190     // we could freeze deopted frames in slow mode.

194 }
195 
196 template <typename OopT, class StackChunkLockStackClosureType>
197 inline void stackChunkOopDesc::iterate_lockstack(StackChunkLockStackClosureType* closure) {
198   assert(LockingMode == LM_LIGHTWEIGHT, "");
199   int cnt = lockstack_size();
200   intptr_t* lockstart_addr = start_address();
201   for (int i = 0; i < cnt; i++) {
202     closure->do_oop((OopT*)&lockstart_addr[i]);
203   }
204 }
205 
206 template <class StackChunkFrameClosureType>
207 inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
208   has_mixed_frames() ? iterate_stack<ChunkFrames::Mixed>(closure)
209                      : iterate_stack<ChunkFrames::CompiledOnly>(closure);
210 }
211 
212 template <ChunkFrames frame_kind, class StackChunkFrameClosureType>
213 inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
214   const SmallRegisterMap* map = SmallRegisterMap::instance();
215   assert(!map->in_cont(), "");
216 
217   StackChunkFrameStream<frame_kind> f(this);
218   bool should_continue = true;
219 
220   if (f.is_stub()) {
221     RegisterMap full_map(nullptr,
222                          RegisterMap::UpdateMap::include,
223                          RegisterMap::ProcessFrames::skip,
224                          RegisterMap::WalkContinuation::include);
225     full_map.set_include_argument_oops(false);
226     closure->do_frame(f, map);
227 
228     f.next(&full_map);
229     assert(!f.is_done(), "");
230     assert(f.is_compiled(), "");
231 
232     should_continue = closure->do_frame(f, &full_map);
233     f.next(map);
234   }
235   assert(!f.is_stub(), "");
236 
237   for (; should_continue && !f.is_done(); f.next(map)) {
238     if (frame_kind == ChunkFrames::Mixed) {
239       // in slow mode we might freeze deoptimized frames
240       f.handle_deopted();
241     }
242     should_continue = closure->do_frame(f, map);
243   }
244 }
245 
246 inline frame stackChunkOopDesc::relativize(frame fr)   const { relativize_frame(fr);   return fr; }
247 inline frame stackChunkOopDesc::derelativize(frame fr) const { derelativize_frame(fr); return fr; }
248 
249 inline void* stackChunkOopDesc::gc_data() const {
250   int stack_sz = stack_size();
251   assert(stack_sz != 0, "stack should not be empty");
252 
253   // The gc data is located after the stack.

New version:

135   HeapWord* end = start + stack_size();
136   return (HeapWord*)p >= start && (HeapWord*)p < end;
137 }
138 
139 bool stackChunkOopDesc::is_usable_in_chunk(void* p) const {
140   HeapWord* start = (HeapWord*)start_address() + sp() - frame::metadata_words_at_bottom;
141   HeapWord* end = start + stack_size();
142   return (HeapWord*)p >= start && (HeapWord*)p < end;
143 }
144 
145 inline bool stackChunkOopDesc::is_flag(uint8_t flag) const {
146   return (flags() & flag) != 0;
147 }
148 inline bool stackChunkOopDesc::is_flag_acquire(uint8_t flag) const {
149   return (flags_acquire() & flag) != 0;
150 }
151 inline void stackChunkOopDesc::set_flag(uint8_t flag, bool value) {
152   uint32_t flags = this->flags();
153   set_flags((uint8_t)(value ? flags |= flag : flags &= ~flag));
154 }
155 
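
For reference, the bit manipulation in set_flag above can be sketched as a standalone helper (apply_flag is a hypothetical name, not part of this file): OR the bit in when value is true, AND it out otherwise.

    #include <cstdint>

    // Hypothetical standalone equivalent of set_flag's update step.
    static uint8_t apply_flag(uint8_t flags, uint8_t flag, bool value) {
      return value ? (uint8_t)(flags | flag)    // set the bit
                   : (uint8_t)(flags & ~flag);  // clear the bit
    }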
156 inline bool stackChunkOopDesc::has_mixed_frames() const { return is_flag(FLAG_HAS_INTERPRETED_FRAMES); }
157 inline void stackChunkOopDesc::set_has_mixed_frames(bool value) {
158   assert((flags() & ~(FLAG_HAS_INTERPRETED_FRAMES | FLAG_PREEMPTED)) == 0, "other flags should not be set");
159   set_flag(FLAG_HAS_INTERPRETED_FRAMES, value);
160 }
161 
162 inline bool stackChunkOopDesc::preempted() const { return is_flag(FLAG_PREEMPTED); }
163 inline void stackChunkOopDesc::set_preempted(bool value) {
164   assert(preempted() != value, "");
165   set_flag(FLAG_PREEMPTED, value);
166 }
167 
168 inline bool stackChunkOopDesc::at_klass_init() const { return jdk_internal_vm_StackChunk::atKlassInit(as_oop()); }
169 inline void stackChunkOopDesc::set_at_klass_init(bool value) {
170   assert(at_klass_init() != value, "");
171   jdk_internal_vm_StackChunk::set_atKlassInit(this, value);
172 }
173 
174 inline bool stackChunkOopDesc::has_args_at_top() const { return jdk_internal_vm_StackChunk::hasArgsAtTop(as_oop()); }
175 inline void stackChunkOopDesc::set_has_args_at_top(bool value) {
176   assert(has_args_at_top() != value, "");
177   jdk_internal_vm_StackChunk::set_hasArgsAtTop(this, value);
178 }
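// Note: unlike the flag accessors above, at_klass_init and has_args_at_top
// are backed by fields of jdk.internal.vm.StackChunk and read through the
// javaClasses accessors rather than the chunk's flags byte.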
179 
180 inline bool stackChunkOopDesc::has_lockstack() const         { return is_flag(FLAG_HAS_LOCKSTACK); }
181 inline void stackChunkOopDesc::set_has_lockstack(bool value) { set_flag(FLAG_HAS_LOCKSTACK, value); }
182 
183 inline bool stackChunkOopDesc::is_gc_mode() const                  { return is_flag(FLAG_GC_MODE); }
184 inline bool stackChunkOopDesc::is_gc_mode_acquire() const          { return is_flag_acquire(FLAG_GC_MODE); }
185 inline void stackChunkOopDesc::set_gc_mode(bool value)             { set_flag(FLAG_GC_MODE, value); }
186 
187 inline bool stackChunkOopDesc::has_bitmap() const                  { return is_flag(FLAG_HAS_BITMAP); }
188 inline void stackChunkOopDesc::set_has_bitmap(bool value)          { set_flag(FLAG_HAS_BITMAP, value); }
189 
190 inline bool stackChunkOopDesc::has_thaw_slowpath_condition() const { return flags() != 0; }
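// Note: any nonzero flags byte makes has_thaw_slowpath_condition() answer
// true, so each flag above also routes thawing through the slow path.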
191 
192 inline bool stackChunkOopDesc::requires_barriers() {
193   return Universe::heap()->requires_barriers(this);
194 }
195 
196 template <stackChunkOopDesc::BarrierType barrier, ChunkFrames frame_kind, typename RegisterMapT>
197 void stackChunkOopDesc::do_barriers(const StackChunkFrameStream<frame_kind>& f, const RegisterMapT* map) {
198   if (frame_kind == ChunkFrames::Mixed) {
199     // we could freeze deopted frames in slow mode.

203 }
204 
205 template <typename OopT, class StackChunkLockStackClosureType>
206 inline void stackChunkOopDesc::iterate_lockstack(StackChunkLockStackClosureType* closure) {
207   assert(LockingMode == LM_LIGHTWEIGHT, "");
208   int cnt = lockstack_size();
209   intptr_t* lockstart_addr = start_address();
210   for (int i = 0; i < cnt; i++) {
211     closure->do_oop((OopT*)&lockstart_addr[i]);
212   }
213 }
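
A closure passed to iterate_lockstack only needs a do_oop member matching OopT. A minimal hypothetical sketch (the class name and counting logic are illustrative only):

    // Counts the oops held in the chunk's lock stack. OopT would be oop or
    // narrowOop depending on whether compressed oops are in use.
    template <typename OopT>
    class CountLockStackOops {
     public:
      int _count = 0;
      void do_oop(OopT*) { _count++; }
    };

    // Possible use (uncompressed-oops build assumed):
    //   CountLockStackOops<oop> cl;
    //   chunk->iterate_lockstack<oop>(&cl);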
214 
215 template <class StackChunkFrameClosureType>
216 inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
217   has_mixed_frames() ? iterate_stack<ChunkFrames::Mixed>(closure)
218                      : iterate_stack<ChunkFrames::CompiledOnly>(closure);
219 }
220 
221 template <ChunkFrames frame_kind, class StackChunkFrameClosureType>
222 inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
223   const auto* map = SmallRegisterMap::instance_no_args();
224   assert(!map->in_cont(), "");
225 
226   StackChunkFrameStream<frame_kind> f(this);
227   bool should_continue = true;
228 
229   if (f.is_stub()) {
230     RegisterMap full_map(nullptr,
231                          RegisterMap::UpdateMap::include,
232                          RegisterMap::ProcessFrames::skip,
233                          RegisterMap::WalkContinuation::include);
234     full_map.set_include_argument_oops(false);
235     closure->do_frame(f, map);
236 
237     f.next(&full_map);
238     assert(!f.is_done(), "");
239     assert(f.is_compiled(), "");
240 
241     should_continue = closure->do_frame(f, &full_map);
242     f.next(map);
243   } else if (frame_kind == ChunkFrames::Mixed && f.is_interpreted() && has_args_at_top()) {
244     should_continue = closure->do_frame(f, SmallRegisterMap::instance_with_args());
245     f.next(map);
246   }
247   assert(!f.is_stub(), "");
248 
249   for (; should_continue && !f.is_done(); f.next(map)) {
250     if (frame_kind == ChunkFrames::Mixed) {
251       // in slow mode we might freeze deoptimized frames
252       f.handle_deopted();
253     }
254     should_continue = closure->do_frame(f, map);
255   }
256 }
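
As the loop above shows, do_frame's return value decides whether the walk continues. A hypothetical closure shape (names illustrative; the templated do_frame lets one closure accept both the SmallRegisterMap and the full RegisterMap passed above):

    // Counts the frames in a chunk; returning false from do_frame would
    // stop the iteration early.
    class CountChunkFrames {
     public:
      int _count = 0;
      template <ChunkFrames frame_kind, typename RegisterMapT>
      bool do_frame(const StackChunkFrameStream<frame_kind>&, const RegisterMapT*) {
        _count++;
        return true;
      }
    };

    // Possible use:
    //   CountChunkFrames cl;
    //   chunk->iterate_stack(&cl);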
257 
258 inline frame stackChunkOopDesc::relativize(frame fr)   const { relativize_frame(fr);   return fr; }
259 inline frame stackChunkOopDesc::derelativize(frame fr) const { derelativize_frame(fr); return fr; }
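// Note: relativize and derelativize take the frame by value, adjust the
// copy via relativize_frame/derelativize_frame, and return the adjusted
// copy; the caller's frame argument is left untouched.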
260 
261 inline void* stackChunkOopDesc::gc_data() const {
262   int stack_sz = stack_size();
263   assert(stack_sz != 0, "stack should not be empty");
264 
265   // The gc data is located after the stack.