      oops.push(addr);
    }
  }
}

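// Builds the GC-side metadata for an nmethod: scans its oop relocations,
// records the locations of immediate oops, and notes whether any
// non-immediate oops were found.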
ShenandoahNMethod* ShenandoahNMethod::for_nmethod(nmethod* nm) {
  ResourceMark rm;
  bool non_immediate_oops = false;
  GrowableArray<oop*> oops;

  detect_reloc_oops(nm, oops, non_immediate_oops);
  return new ShenandoahNMethod(nm, oops, non_immediate_oops);
}

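// Fixes up the oops embedded in an armed nmethod. The caller must hold the
// per-nmethod lock. During concurrent mark the embedded oops are kept alive;
// during concurrent root processing they are healed through the evacuation path.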
void ShenandoahNMethod::heal_nmethod(nmethod* nm) {
  ShenandoahNMethod* data = gc_data(nm);
  assert(data != nullptr, "Sanity");
  assert(data->lock()->owned_by_self(), "Must hold the lock");

  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  if (heap->is_concurrent_mark_in_progress()) {
    ShenandoahKeepAliveClosure cl;
    data->oops_do(&cl);
  } else if (heap->is_concurrent_weak_root_in_progress() ||
             heap->is_concurrent_strong_root_in_progress()) {
    ShenandoahEvacOOMScope evac_scope;
    heal_nmethod_metadata(data);
  } else {
    // There is a possibility that the GC is cancelled when it arrives at final mark.
    // In this case, the concurrent root phase is skipped and a degenerated GC
    // follows, in which nmethods are disarmed.
  }
}

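// Variant of heal_nmethod with the checks reordered: the concurrent weak/strong
// root (evacuation) case is tested before the concurrent mark case, so healing
// through evacuation takes precedence if both conditions are observed.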
void ShenandoahNMethod::heal_nmethod(nmethod* nm) {
  ShenandoahNMethod* data = gc_data(nm);
  assert(data != nullptr, "Sanity");
  assert(data->lock()->owned_by_self(), "Must hold the lock");

  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  if (heap->is_concurrent_weak_root_in_progress() ||
      heap->is_concurrent_strong_root_in_progress()) {
    ShenandoahEvacOOMScope evac_scope;
    heal_nmethod_metadata(data);
  } else if (heap->is_concurrent_mark_in_progress()) {
    ShenandoahKeepAliveClosure cl;
    data->oops_do(&cl);
  } else {
    // There is a possibility that the GC is cancelled when it arrives at final mark.
    // In this case, the concurrent root phase is skipped and a degenerated GC
    // follows, in which nmethods are disarmed.
  }
}

#ifdef ASSERT
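// Debug-only verification: each recorded oop location must lie within the
// nmethod, and the oop it holds must pass Shenandoah's correctness asserts
// (null and full-GC moves are tolerated). The nmethod's regular oop section
// is walked as well.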
void ShenandoahNMethod::assert_correct() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  for (int c = 0; c < _oops_count; c++) {
    oop* loc = _oops[c];
    assert(_nm->code_contains((address) loc) || _nm->oops_contains(loc), "nmethod should contain the oop*");
    oop o = RawAccess<>::oop_load(loc);
    shenandoah_assert_correct_except(loc, o, o == nullptr || heap->is_full_gc_move_in_progress());
  }

  oop* const begin = _nm->oops_begin();
  oop* const end = _nm->oops_end();
  for (oop* p = begin; p < end; p++) {