      oops.push(addr);
    }
  }
}

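// Builds the per-nmethod GC data: detect_reloc_oops() scans the nmethod's
// relocations for embedded oops, recording the locations of the immediate
// ones and flagging whether any non-immediate oops were seen. The
// ResourceMark bounds the temporary GrowableArray used during the scan.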
ShenandoahNMethod* ShenandoahNMethod::for_nmethod(nmethod* nm) {
  ResourceMark rm;
  bool non_immediate_oops = false;
  GrowableArray<oop*> oops;

  detect_reloc_oops(nm, oops, non_immediate_oops);
  return new ShenandoahNMethod(nm, oops, non_immediate_oops);
}

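// Heals the nmethod's embedded oops according to the current GC phase: during
// concurrent mark the oops are kept alive; during concurrent root processing
// the oop metadata is fixed up under an evacuation OOM scope. The caller must
// hold the per-nmethod lock. (Typically reached when an armed nmethod is about
// to be used; the exact call sites are not shown here.)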
void ShenandoahNMethod::heal_nmethod(nmethod* nm) {
  ShenandoahNMethod* data = gc_data(nm);
  assert(data != nullptr, "Sanity");
  assert(data->lock()->owned_by_self(), "Must hold the lock");

  ShenandoahHeap* const heap = ShenandoahHeap::heap();
  if (heap->is_concurrent_mark_in_progress()) {
    ShenandoahKeepAliveClosure cl;
    data->oops_do(&cl);
  } else if (heap->is_concurrent_weak_root_in_progress() ||
             heap->is_concurrent_strong_root_in_progress()) {
    ShenandoahEvacOOMScope evac_scope;
    heal_nmethod_metadata(data);
  } else {
    // It is possible that the GC is cancelled when it reaches final mark.
    // In that case the concurrent root phase is skipped and a degenerated GC
    // follows, during which nmethods are disarmed.
  }
}

#ifdef ASSERT
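// Debug-only consistency check: every recorded oop* must lie within the
// nmethod (in its code or in its oop table), and the oop it holds must pass
// Shenandoah's correctness asserts, except for nullptr entries or while a
// full-GC move is in progress.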
void ShenandoahNMethod::assert_correct() {
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  for (int c = 0; c < _oops_count; c++) {
    oop* loc = _oops[c];
    assert(_nm->code_contains((address) loc) || _nm->oops_contains(loc), "nmethod should contain the oop*");
    oop o = RawAccess<>::oop_load(loc);
    shenandoah_assert_correct_except(loc, o, o == nullptr || heap->is_full_gc_move_in_progress());
  }

  oop* const begin = _nm->oops_begin();
  oop* const end = _nm->oops_end();
  for (oop* p = begin; p < end; p++) {