135 HeapWord* end = start + stack_size();
136 return (HeapWord*)p >= start && (HeapWord*)p < end;
137 }
138
139 bool stackChunkOopDesc::is_usable_in_chunk(void* p) const {
140 HeapWord* start = (HeapWord*)start_address() + sp() - frame::metadata_words_at_bottom;
141 HeapWord* end = start + stack_size();
142 return (HeapWord*)p >= start && (HeapWord*)p < end;
143 }
144
145 inline bool stackChunkOopDesc::is_flag(uint8_t flag) const {
146 return (flags() & flag) != 0;
147 }
148 inline bool stackChunkOopDesc::is_flag_acquire(uint8_t flag) const {
149 return (flags_acquire() & flag) != 0;
150 }
151 inline void stackChunkOopDesc::set_flag(uint8_t flag, bool value) {
152 uint32_t flags = this->flags();
153 set_flags((uint8_t)(value ? flags |= flag : flags &= ~flag));
154 }
// Clears every FLAG_* bit at once.
inline void stackChunkOopDesc::clear_flags() {
  set_flags(0);
}
158
// A chunk has "mixed" frames when it contains interpreted frames
// (FLAG_HAS_INTERPRETED_FRAMES); the fast paths handle compiled frames only.
inline bool stackChunkOopDesc::has_mixed_frames() const { return is_flag(FLAG_HAS_INTERPRETED_FRAMES); }
inline void stackChunkOopDesc::set_has_mixed_frames(bool value) {
  // Only legal while at most the interpreted/preempted flags are set.
  assert((flags() & ~(FLAG_HAS_INTERPRETED_FRAMES | FLAG_PREEMPTED)) == 0, "other flags should not be set");
  set_flag(FLAG_HAS_INTERPRETED_FRAMES, value);
}
164
// FLAG_PREEMPTED accessors. The setter asserts the value actually changes,
// i.e. the flag is never redundantly set or cleared.
inline bool stackChunkOopDesc::preempted() const { return is_flag(FLAG_PREEMPTED); }
inline void stackChunkOopDesc::set_preempted(bool value) {
  assert(preempted() != value, "");
  set_flag(FLAG_PREEMPTED, value);
}
170
// FLAG_HAS_LOCKSTACK: the chunk carries a copy of the thread's lock stack.
inline bool stackChunkOopDesc::has_lockstack() const { return is_flag(FLAG_HAS_LOCKSTACK); }
inline void stackChunkOopDesc::set_has_lockstack(bool value) { set_flag(FLAG_HAS_LOCKSTACK, value); }

// FLAG_GC_MODE accessors; the acquire variant pairs with a releasing store
// elsewhere (it reads via flags_acquire()).
inline bool stackChunkOopDesc::is_gc_mode() const { return is_flag(FLAG_GC_MODE); }
inline bool stackChunkOopDesc::is_gc_mode_acquire() const { return is_flag_acquire(FLAG_GC_MODE); }
inline void stackChunkOopDesc::set_gc_mode(bool value) { set_flag(FLAG_GC_MODE, value); }

// FLAG_HAS_BITMAP: an oop bitmap has been built for this chunk.
inline bool stackChunkOopDesc::has_bitmap() const { return is_flag(FLAG_HAS_BITMAP); }
inline void stackChunkOopDesc::set_has_bitmap(bool value) { set_flag(FLAG_HAS_BITMAP, value); }

// Any flag being set forces the thaw slow path.
inline bool stackChunkOopDesc::has_thaw_slowpath_condition() const { return flags() != 0; }
182
// Asks the heap whether accesses to this chunk require GC barriers.
inline bool stackChunkOopDesc::requires_barriers() {
  return Universe::heap()->requires_barriers(this);
}
186
187 template <stackChunkOopDesc::BarrierType barrier, ChunkFrames frame_kind, typename RegisterMapT>
188 void stackChunkOopDesc::do_barriers(const StackChunkFrameStream<frame_kind>& f, const RegisterMapT* map) {
189 if (frame_kind == ChunkFrames::Mixed) {
190 // we could freeze deopted frames in slow mode.
193 do_barriers0<barrier>(f, map);
194 }
195
196 template <typename OopT, class StackChunkLockStackClosureType>
197 inline void stackChunkOopDesc::iterate_lockstack(StackChunkLockStackClosureType* closure) {
198 int cnt = lockstack_size();
199 intptr_t* lockstart_addr = start_address();
200 for (int i = 0; i < cnt; i++) {
201 closure->do_oop((OopT*)&lockstart_addr[i]);
202 }
203 }
204
205 template <class StackChunkFrameClosureType>
206 inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
207 has_mixed_frames() ? iterate_stack<ChunkFrames::Mixed>(closure)
208 : iterate_stack<ChunkFrames::CompiledOnly>(closure);
209 }
210
// Walks the chunk's frames from the top (youngest) downward, calling
// closure->do_frame() for each frame until the closure returns false or the
// stream is exhausted. frame_kind selects the compiled-only fast walk or the
// Mixed walk that also copes with interpreted and deoptimized frames.
template <ChunkFrames frame_kind, class StackChunkFrameClosureType>
inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
  const SmallRegisterMap* map = SmallRegisterMap::instance();
  assert(!map->in_cont(), "");

  StackChunkFrameStream<frame_kind> f(this);
  bool should_continue = true;

  if (f.is_stub()) {
    // Top frame is a stub: visit it with the small map, then step to its
    // caller — asserted to be a compiled frame — and visit that one with a
    // full RegisterMap, updated while stepping, so the register state
    // recorded in the stub frame is available to the closure.
    RegisterMap full_map(nullptr,
                         RegisterMap::UpdateMap::include,
                         RegisterMap::ProcessFrames::skip,
                         RegisterMap::WalkContinuation::include);
    full_map.set_include_argument_oops(false);
    closure->do_frame(f, map);

    f.next(&full_map);
    assert(!f.is_done(), "");
    assert(f.is_compiled(), "");

    should_continue = closure->do_frame(f, &full_map);
    f.next(map);
  }
  // A stub can only appear at the very top; the remaining walk must not see one.
  assert(!f.is_stub(), "");

  for(; should_continue && !f.is_done(); f.next(map)) {
    if (frame_kind == ChunkFrames::Mixed) {
      // in slow mode we might freeze deoptimized frames
      f.handle_deopted();
    }
    should_continue = closure->do_frame(f, map);
  }
}
244
// Return a copy of fr with its internal pointers converted to chunk-relative
// form (relativize) or back to absolute form (derelativize); the by-value
// parameter is modified in place and returned.
inline frame stackChunkOopDesc::relativize(frame fr) const { relativize_frame(fr); return fr; }
inline frame stackChunkOopDesc::derelativize(frame fr) const { derelativize_frame(fr); return fr; }
247
248 inline void* stackChunkOopDesc::gc_data() const {
249 int stack_sz = stack_size();
250 assert(stack_sz != 0, "stack should not be empty");
251
252 // The gc data is located after the stack.
|
135 HeapWord* end = start + stack_size();
136 return (HeapWord*)p >= start && (HeapWord*)p < end;
137 }
138
139 bool stackChunkOopDesc::is_usable_in_chunk(void* p) const {
140 HeapWord* start = (HeapWord*)start_address() + sp() - frame::metadata_words_at_bottom;
141 HeapWord* end = start + stack_size();
142 return (HeapWord*)p >= start && (HeapWord*)p < end;
143 }
144
145 inline bool stackChunkOopDesc::is_flag(uint8_t flag) const {
146 return (flags() & flag) != 0;
147 }
148 inline bool stackChunkOopDesc::is_flag_acquire(uint8_t flag) const {
149 return (flags_acquire() & flag) != 0;
150 }
151 inline void stackChunkOopDesc::set_flag(uint8_t flag, bool value) {
152 uint32_t flags = this->flags();
153 set_flags((uint8_t)(value ? flags |= flag : flags &= ~flag));
154 }
155
// A chunk has "mixed" frames when it contains interpreted frames
// (FLAG_HAS_INTERPRETED_FRAMES); the fast paths handle compiled frames only.
inline bool stackChunkOopDesc::has_mixed_frames() const { return is_flag(FLAG_HAS_INTERPRETED_FRAMES); }
inline void stackChunkOopDesc::set_has_mixed_frames(bool value) {
  // Only legal while at most the interpreted/preempted flags are set.
  assert((flags() & ~(FLAG_HAS_INTERPRETED_FRAMES | FLAG_PREEMPTED)) == 0, "other flags should not be set");
  set_flag(FLAG_HAS_INTERPRETED_FRAMES, value);
}
161
// FLAG_PREEMPTED accessors. The setter asserts the value actually changes,
// i.e. the flag is never redundantly set or cleared.
inline bool stackChunkOopDesc::preempted() const { return is_flag(FLAG_PREEMPTED); }
inline void stackChunkOopDesc::set_preempted(bool value) {
  assert(preempted() != value, "");
  set_flag(FLAG_PREEMPTED, value);
}
167
// Accessors backed by the Java-side jdk.internal.vm.StackChunk fields
// (not the FLAG_* bits). Both setters assert the value actually changes.
inline bool stackChunkOopDesc::at_klass_init() const { return jdk_internal_vm_StackChunk::atKlassInit(as_oop()); }
inline void stackChunkOopDesc::set_at_klass_init(bool value) {
  assert(at_klass_init() != value, "");
  jdk_internal_vm_StackChunk::set_atKlassInit(this, value);
}

// Whether the top frame has stack arguments at the top of the chunk; read by
// iterate_stack() below to pick the args-including register map variant.
inline bool stackChunkOopDesc::has_args_at_top() const { return jdk_internal_vm_StackChunk::hasArgsAtTop(as_oop()); }
inline void stackChunkOopDesc::set_has_args_at_top(bool value) {
  assert(has_args_at_top() != value, "");
  jdk_internal_vm_StackChunk::set_hasArgsAtTop(this, value);
}
179
// FLAG_HAS_LOCKSTACK: the chunk carries a copy of the thread's lock stack.
inline bool stackChunkOopDesc::has_lockstack() const { return is_flag(FLAG_HAS_LOCKSTACK); }
inline void stackChunkOopDesc::set_has_lockstack(bool value) { set_flag(FLAG_HAS_LOCKSTACK, value); }

// FLAG_GC_MODE accessors; the acquire variant pairs with a releasing store
// elsewhere (it reads via flags_acquire()).
inline bool stackChunkOopDesc::is_gc_mode() const { return is_flag(FLAG_GC_MODE); }
inline bool stackChunkOopDesc::is_gc_mode_acquire() const { return is_flag_acquire(FLAG_GC_MODE); }
inline void stackChunkOopDesc::set_gc_mode(bool value) { set_flag(FLAG_GC_MODE, value); }

// FLAG_HAS_BITMAP: an oop bitmap has been built for this chunk.
inline bool stackChunkOopDesc::has_bitmap() const { return is_flag(FLAG_HAS_BITMAP); }
inline void stackChunkOopDesc::set_has_bitmap(bool value) { set_flag(FLAG_HAS_BITMAP, value); }

// Any flag being set forces the thaw slow path.
inline bool stackChunkOopDesc::has_thaw_slowpath_condition() const { return flags() != 0; }
191
// Asks the heap whether accesses to this chunk require GC barriers.
inline bool stackChunkOopDesc::requires_barriers() {
  return Universe::heap()->requires_barriers(this);
}
195
196 template <stackChunkOopDesc::BarrierType barrier, ChunkFrames frame_kind, typename RegisterMapT>
197 void stackChunkOopDesc::do_barriers(const StackChunkFrameStream<frame_kind>& f, const RegisterMapT* map) {
198 if (frame_kind == ChunkFrames::Mixed) {
199 // we could freeze deopted frames in slow mode.
202 do_barriers0<barrier>(f, map);
203 }
204
205 template <typename OopT, class StackChunkLockStackClosureType>
206 inline void stackChunkOopDesc::iterate_lockstack(StackChunkLockStackClosureType* closure) {
207 int cnt = lockstack_size();
208 intptr_t* lockstart_addr = start_address();
209 for (int i = 0; i < cnt; i++) {
210 closure->do_oop((OopT*)&lockstart_addr[i]);
211 }
212 }
213
214 template <class StackChunkFrameClosureType>
215 inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
216 has_mixed_frames() ? iterate_stack<ChunkFrames::Mixed>(closure)
217 : iterate_stack<ChunkFrames::CompiledOnly>(closure);
218 }
219
// Walks the chunk's frames from the top (youngest) downward, calling
// closure->do_frame() for each frame until the closure returns false or the
// stream is exhausted. frame_kind selects the compiled-only fast walk or the
// Mixed walk that also copes with interpreted and deoptimized frames.
template <ChunkFrames frame_kind, class StackChunkFrameClosureType>
inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
  const auto* map = SmallRegisterMap::instance_no_args();
  assert(!map->in_cont(), "");

  StackChunkFrameStream<frame_kind> f(this);
  bool should_continue = true;

  if (f.is_stub()) {
    // Top frame is a stub: visit it with the small map, then step to its
    // caller — asserted to be a compiled frame — and visit that one with a
    // full RegisterMap, updated while stepping, so the register state
    // recorded in the stub frame is available to the closure.
    RegisterMap full_map(nullptr,
                         RegisterMap::UpdateMap::include,
                         RegisterMap::ProcessFrames::skip,
                         RegisterMap::WalkContinuation::include);
    full_map.set_include_argument_oops(false);
    closure->do_frame(f, map);

    f.next(&full_map);
    assert(!f.is_done(), "");
    assert(f.is_compiled(), "");

    should_continue = closure->do_frame(f, &full_map);
    f.next(map);
  } else if (frame_kind == ChunkFrames::Mixed && f.is_interpreted() && has_args_at_top()) {
    // Top interpreted frame has stack arguments at the top of the chunk:
    // visit it with the register map variant that includes those arguments.
    should_continue = closure->do_frame(f, SmallRegisterMap::instance_with_args());
    f.next(map);
  }
  // A stub can only appear at the very top; the remaining walk must not see one.
  assert(!f.is_stub(), "");

  for(; should_continue && !f.is_done(); f.next(map)) {
    if (frame_kind == ChunkFrames::Mixed) {
      // in slow mode we might freeze deoptimized frames
      f.handle_deopted();
    }
    should_continue = closure->do_frame(f, map);
  }
}
256
// Return a copy of fr with its internal pointers converted to chunk-relative
// form (relativize) or back to absolute form (derelativize); the by-value
// parameter is modified in place and returned.
inline frame stackChunkOopDesc::relativize(frame fr) const { relativize_frame(fr); return fr; }
inline frame stackChunkOopDesc::derelativize(frame fr) const { derelativize_frame(fr); return fr; }
259
260 inline void* stackChunkOopDesc::gc_data() const {
261 int stack_sz = stack_size();
262 assert(stack_sz != 0, "stack should not be empty");
263
264 // The gc data is located after the stack.
|