< prev index next > src/hotspot/share/oops/stackChunkOop.cpp
Print this page
visitor.oops_do(&_f, _map, _f.oop_map());
}
}
};
+ // Presents the oops held in a stack chunk's lock stack through the
+ // OopIterator interface, so the same barrier-set encode/decode machinery
+ // used for frame oops (see FrameOopIterator uses below) can be applied to
+ // the lock-stack slots as well.
+ class LockStackOopIterator : public OopIterator {
+ private:
+ const stackChunkOop _chunk;
+ public:
+ LockStackOopIterator(const stackChunkOop chunk) : _chunk(chunk) {}
+
+ // Applies cl to each of the lockStackSize() oop slots located at the
+ // start of the chunk's stack area (start_address()).
+ virtual void oops_do(OopClosure* cl) override {
+ int cnt = _chunk->lockStackSize();
+ oop* lockstack_start = (oop*)_chunk->start_address();
+ for (int i = 0; i < cnt; i++) {
+ cl->do_oop(&lockstack_start[i]);
+ }
+ }
+ };
+
frame stackChunkOopDesc::top_frame(RegisterMap* map) {
assert(!is_empty(), "");
StackChunkFrameStream<ChunkFrames::Mixed> fs(this);
map->set_stack_chunk(this);
FrameOopIterator<RegisterMapT> iterator(fr, map);
bs_chunk->encode_gc_mode(_chunk, &iterator);
return true;
}
+
+ // Runs the barrier set's GC-mode encoding over the oops in the chunk's
+ // lock stack, mirroring what the closure's do_frame() does for the oops of
+ // each stack frame. Returns true unconditionally so it can be used like
+ // the other iteration callbacks in this file.
+ bool do_lockstack() {
+ BarrierSetStackChunk* bs_chunk = BarrierSet::barrier_set()->barrier_set_stack_chunk();
+ LockStackOopIterator iterator(_chunk);
+ bs_chunk->encode_gc_mode(_chunk, &iterator);
+
+ return true;
+ }
};
bool stackChunkOopDesc::try_acquire_relativization() {
for (;;) {
// We use an acquiring load when reading the flags to ensure that if we leave this
}
DerivedPointersSupport::RelativizeClosure derived_cl;
EncodeGCModeConcurrentFrameClosure<decltype(derived_cl)> frame_cl(this, &derived_cl);
iterate_stack(&frame_cl);
+ frame_cl.do_lockstack();
release_relativization();
}
class TransformStackChunkClosure {
FrameOopIterator<RegisterMapT> iterator(fr, map);
bs_chunk->encode_gc_mode(_chunk, &iterator);
return true;
}
+
+ // Encodes the lock-stack oops into GC-mode form, so that transforming a
+ // chunk covers the lock stack in addition to the frames walked by
+ // iterate_stack(). Returns true unconditionally, matching the convention
+ // of the other closure callbacks.
+ bool do_lockstack() {
+ BarrierSetStackChunk* bs_chunk = BarrierSet::barrier_set()->barrier_set_stack_chunk();
+ LockStackOopIterator iterator(_chunk);
+ bs_chunk->encode_gc_mode(_chunk, &iterator);
+
+ return true;
+ }
};
// Switches this chunk into GC mode: sets the gc-mode and has-bitmap flags,
// clears the oop bitmap, and then walks every frame so the closure can
// encode their oops. Asserts that it runs at most once per chunk.
void stackChunkOopDesc::transform() {
assert(!is_gc_mode(), "Should only be called once per chunk");
set_gc_mode(true);
set_has_bitmap(true);
bitmap().clear();
TransformStackChunkClosure closure(this);
iterate_stack(&closure);
+ // The lock stack holds oops too; encode them just like the frame oops.
+ closure.do_lockstack();
}
template <stackChunkOopDesc::BarrierType barrier, bool compressedOopsWithBitmap>
class BarrierClosure: public OopClosure {
NOT_PRODUCT(intptr_t* _sp;)
}
template void stackChunkOopDesc::fix_thawed_frame(const frame& f, const RegisterMap* map);
template void stackChunkOopDesc::fix_thawed_frame(const frame& f, const SmallRegisterMap* map);
+ // Copies the lockStackSize() oops of this chunk's lock stack into dst.
+ // When the chunk is neither in GC mode nor requires barriers the slots
+ // hold plain oops and are copied directly; otherwise each slot is decoded
+ // via HeapAccess, in narrowOop form when the chunk has a bitmap and
+ // compressed oops are in use, and in full oop form otherwise.
+ void stackChunkOopDesc::copy_lockstack(oop* dst) {
+ int cnt = lockStackSize();
+
+ // Fast path: mutator representation — slots are raw oops.
+ if (!(is_gc_mode() || requires_barriers())) {
+ oop* lockstack_start = (oop*)start_address();
+ for (int i = 0; i < cnt; i++) {
+ dst[i] = lockstack_start[i];
+ assert(oopDesc::is_oop(dst[i]), "not an oop");
+ }
+ return;
+ }
+
+ // GC-mode path: load through the heap access barriers so any GC-specific
+ // decoding (e.g. compressed-oop expansion) is applied.
+ if (has_bitmap() && UseCompressedOops) {
+ intptr_t* lockstack_start = start_address();
+ for (int i = 0; i < cnt; i++) {
+ oop mon_owner = HeapAccess<>::oop_load((narrowOop*)&lockstack_start[i]);
+ assert(oopDesc::is_oop(mon_owner), "not an oop");
+ dst[i] = mon_owner;
+ }
+ } else {
+ intptr_t* lockstack_start = start_address();
+ for (int i = 0; i < cnt; i++) {
+ oop mon_owner = HeapAccess<>::oop_load((oop*)&lockstack_start[i]);
+ assert(oopDesc::is_oop(mon_owner), "not an oop");
+ dst[i] = mon_owner;
+ }
+ }
+ }
+
void stackChunkOopDesc::print_on(bool verbose, outputStream* st) const {
if (*((juint*)this) == badHeapWordVal) {
st->print_cr("BAD WORD");
} else {
InstanceStackChunkKlass::print_chunk(const_cast<stackChunkOopDesc*>(this), verbose, st);
int _num_oops;
int _num_frames;
int _num_interpreted_frames;
int _num_i2c;
- VerifyStackChunkFrameClosure(stackChunkOop chunk, int num_frames, int size)
+ VerifyStackChunkFrameClosure(stackChunkOop chunk)
: _chunk(chunk), _sp(nullptr), _cb(nullptr), _callee_interpreted(false),
- _size(size), _argsize(0), _num_oops(0), _num_frames(num_frames), _num_interpreted_frames(0), _num_i2c(0) {}
+ _size(0), _argsize(0), _num_oops(0), _num_frames(0), _num_interpreted_frames(0), _num_i2c(0) {}
template <ChunkFrames frame_kind, typename RegisterMapT>
bool do_frame(const StackChunkFrameStream<frame_kind>& f, const RegisterMapT* map) {
_sp = f.sp();
_cb = f.cb();
int size = stack_size() - argsize() - sp();
assert(size >= 0, "");
assert((size == 0) == is_empty(), "");
const StackChunkFrameStream<ChunkFrames::Mixed> first(this);
- const bool has_safepoint_stub_frame = first.is_stub();
- VerifyStackChunkFrameClosure closure(this,
- has_safepoint_stub_frame ? 1 : 0, // Iterate_stack skips the safepoint stub
- has_safepoint_stub_frame ? first.frame_size() : 0);
+ VerifyStackChunkFrameClosure closure(this);
iterate_stack(&closure);
assert(!is_empty() || closure._cb == nullptr, "");
if (closure._cb != nullptr && closure._cb->is_nmethod()) {
assert(argsize() ==
< prev index next >