src/hotspot/share/runtime/continuationFreezeThaw.cpp (old version)

1724   int remove_top_compiled_frame_from_chunk(stackChunkOop chunk, int &argsize);
1725   void copy_from_chunk(intptr_t* from, intptr_t* to, int size);
1726 
1727   // fast path
1728   inline void prefetch_chunk_pd(void* start, int size_words);
1729   void patch_return(intptr_t* sp, bool is_last);
1730 
1731   // slow path
1732   NOINLINE intptr_t* thaw_slow(stackChunkOop chunk, bool return_barrier);
1733 
1734 private:
1735   void recurse_thaw(const frame& heap_frame, frame& caller, int num_frames, bool top);
1736   template<typename FKind> bool recurse_thaw_java_frame(frame& caller, int num_frames);
1737   void finalize_thaw(frame& entry, int argsize);
1738 
1739   inline bool seen_by_gc();
1740 
1741   inline void before_thaw_java_frame(const frame& hf, const frame& caller, bool bottom, int num_frames);
1742   inline void after_thaw_java_frame(const frame& f, bool bottom);
1743   inline void patch(frame& f, const frame& caller, bool bottom);
1744   void clear_bitmap_bits(intptr_t* start, int range);
1745 
1746   NOINLINE void recurse_thaw_interpreted_frame(const frame& hf, frame& caller, int num_frames);
1747   void recurse_thaw_compiled_frame(const frame& hf, frame& caller, int num_frames, bool stub_caller);
1748   void recurse_thaw_stub_frame(const frame& hf, frame& caller, int num_frames);
1749   void finish_thaw(frame& f);
1750 
1751   void push_return_frame(frame& f);
1752   inline frame new_entry_frame();
1753   template<typename FKind> frame new_stack_frame(const frame& hf, frame& caller, bool bottom);
1754   inline void patch_pd(frame& f, const frame& sender);
1755   inline intptr_t* align(const frame& hf, intptr_t* frame_sp, frame& caller, bool bottom);
1756 
1757   void maybe_set_fastpath(intptr_t* sp) { if (sp > _fastpath) _fastpath = sp; }
1758 
1759   static inline void derelativize_interpreted_frame_metadata(const frame& hf, const frame& f);
1760 
1761  public:
1762   CONT_JFR_ONLY(FreezeThawJfrInfo& jfr_info() { return _jfr_info; })
1763 };
1764 
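A side note on maybe_set_fastpath in the declaration above: it is a monotonic watermark update. A minimal sketch of just that update follows; the struct and field names are stand-ins, and the meaning of _fastpath (which thawed frames keep the fast path viable) is established elsewhere in this file.

#include <cstdint>

// Stand-in for the ThawBase member; not the VM's own type.
struct WatermarkSketch {
  intptr_t* _fastpath = nullptr;

  // Mirrors maybe_set_fastpath above: the pointer only ever moves up
  // (toward higher stack addresses), never back down.
  void maybe_set_fastpath(intptr_t* sp) {
    if (sp > _fastpath) { _fastpath = sp; }
  }
};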

2105   assert(!bottom || caller.fp() == _cont.entryFP(), "");
2106   if (bottom) {
2107     ContinuationHelper::Frame::patch_pc(caller, _cont.is_empty() ? caller.pc()
2108                                                                  : StubRoutines::cont_returnBarrier());
2109   } else {
2110     // The caller might have been deoptimized during thaw, but we've overwritten the return address when copying f from the heap.
2111     // If the caller is not deoptimized, the pc is unchanged.
2112     ContinuationHelper::Frame::patch_pc(caller, caller.raw_pc());
2113   }
2114 
2115   patch_pd(f, caller);
2116 
2117   if (f.is_interpreted_frame()) {
2118     ContinuationHelper::InterpretedFrame::patch_sender_sp(f, caller);
2119   }
2120 
2121   assert(!bottom || !_cont.is_empty() || Continuation::is_continuation_entry_frame(f, nullptr), "");
2122   assert(!bottom || (_cont.is_empty() != Continuation::is_cont_barrier_frame(f)), "");
2123 }
2124 
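In patch() above, the bottom frame's caller either keeps its pc (the continuation is now empty) or is redirected to the return-barrier stub, so that returning past the thawed frames re-enters the VM to thaw the remainder. A minimal sketch of that decision; MockFrame and return_barrier_entry are hypothetical stand-ins for frame and StubRoutines::cont_returnBarrier().

using address = unsigned char*;

struct MockFrame { address pc; };       // stand-in for HotSpot's frame
extern address return_barrier_entry(); // stand-in for the barrier stub

static void patch_bottom_caller_pc(MockFrame& caller, bool cont_is_empty) {
  // Frames still frozen in the chunk? Route the eventual return through
  // the barrier so they are thawed on demand; otherwise leave the pc.
  caller.pc = cont_is_empty ? caller.pc : return_barrier_entry();
}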
2125 void ThawBase::clear_bitmap_bits(intptr_t* start, int range) {
2126   // we need to clear the bits that correspond to arguments as they reside in the caller frame
2127   // or they will keep objects that are otherwise unreachable alive
2128   log_develop_trace(continuations)("clearing bitmap for " INTPTR_FORMAT " - " INTPTR_FORMAT, p2i(start), p2i(start+range));
2129   stackChunkOop chunk = _cont.tail();
2130   chunk->bitmap().clear_range(chunk->bit_index_for(start),
2131                               chunk->bit_index_for(start+range));
2132 }
2133 
2134 NOINLINE void ThawBase::recurse_thaw_interpreted_frame(const frame& hf, frame& caller, int num_frames) {
2135   assert(hf.is_interpreted_frame(), "");
2136 
2137   if (UNLIKELY(seen_by_gc())) {
2138     _cont.tail()->do_barriers<stackChunkOopDesc::BarrierType::Store>(_stream, SmallRegisterMap::instance);
2139   }
2140 
2141   const bool is_bottom_frame = recurse_thaw_java_frame<ContinuationHelper::InterpretedFrame>(caller, num_frames);
2142 
2143   DEBUG_ONLY(before_thaw_java_frame(hf, caller, is_bottom_frame, num_frames);)
2144 
2145   _align_size += frame::align_wiggle; // possible added alignment for internal interpreted frame alignment on AArch64
2146 
2147   frame f = new_stack_frame<ContinuationHelper::InterpretedFrame>(hf, caller, is_bottom_frame);
2148 
2149   intptr_t* const stack_frame_top = f.sp() + frame::metadata_words_at_top;
2150   intptr_t* const stack_frame_bottom = ContinuationHelper::InterpretedFrame::frame_bottom(f);
2151   intptr_t* const heap_frame_top = hf.unextended_sp() + frame::metadata_words_at_top;

2164   // Make sure the relativized locals are already set.
2165   assert(f.interpreter_frame_local_at(0) == stack_frame_bottom - 1, "invalid frame bottom");
2166 
2167   derelativize_interpreted_frame_metadata(hf, f);
2168   patch(f, caller, is_bottom_frame);
2169 
2170   assert(f.is_interpreted_frame_valid(_cont.thread()), "invalid thawed frame");
2171   assert(stack_frame_bottom <= ContinuationHelper::Frame::frame_top(caller), "");
2172 
2173   CONT_JFR_ONLY(_jfr_info.record_interpreted_frame();)
2174 
2175   maybe_set_fastpath(f.sp());
2176 
2177   const int locals = hf.interpreter_frame_method()->max_locals();
2178 
2179   if (!is_bottom_frame) {
2180     // can only fix caller once this frame is thawed (due to callee saved regs)
2181     _cont.tail()->fix_thawed_frame(caller, SmallRegisterMap::instance);
2182   } else if (_cont.tail()->has_bitmap() && locals > 0) {
2183     assert(hf.is_heap_frame(), "should be");
2184     clear_bitmap_bits(heap_frame_bottom - locals, locals);
2185   }
2186 
2187   DEBUG_ONLY(after_thaw_java_frame(f, is_bottom_frame);)
2188   caller = f;
2189 }
2190 
2191 void ThawBase::recurse_thaw_compiled_frame(const frame& hf, frame& caller, int num_frames, bool stub_caller) {
2192   assert(!hf.is_interpreted_frame(), "");
2193   assert(_cont.is_preempted() || !stub_caller, "stub caller not at preemption");
2194 
2195   if (!stub_caller && UNLIKELY(seen_by_gc())) { // recurse_thaw_stub_frame already invoked our barriers with a full regmap
2196     _cont.tail()->do_barriers<stackChunkOopDesc::BarrierType::Store>(_stream, SmallRegisterMap::instance);
2197   }
2198 
2199   const bool is_bottom_frame = recurse_thaw_java_frame<ContinuationHelper::CompiledFrame>(caller, num_frames);
2200 
2201   DEBUG_ONLY(before_thaw_java_frame(hf, caller, is_bottom_frame, num_frames);)
2202 
2203   assert(caller.sp() == caller.unextended_sp(), "");
2204 

2237     maybe_set_fastpath(f.sp());
2238   } else if (_thread->is_interp_only_mode()
2239               || (_cont.is_preempted() && f.cb()->as_compiled_method()->is_marked_for_deoptimization())) {
2240     // The caller of the safepoint stub when the continuation is preempted is not at a call instruction, and so
2241     // cannot rely on nmethod patching for deopt.
2242     assert(_thread->is_interp_only_mode() || stub_caller, "expected a stub-caller");
2243 
2244     log_develop_trace(continuations)("Deoptimizing thawed frame");
2245     DEBUG_ONLY(ContinuationHelper::Frame::patch_pc(f, nullptr));
2246 
2247     f.deoptimize(nullptr); // the null thread simply avoids the assertion in deoptimize which we're not set up for
2248     assert(f.is_deoptimized_frame(), "");
2249     assert(ContinuationHelper::Frame::is_deopt_return(f.raw_pc(), f), "");
2250     maybe_set_fastpath(f.sp());
2251   }
2252 
2253   if (!is_bottom_frame) {
2254     // can only fix caller once this frame is thawed (due to callee saved regs); this happens on the stack
2255     _cont.tail()->fix_thawed_frame(caller, SmallRegisterMap::instance);
2256   } else if (_cont.tail()->has_bitmap() && added_argsize > 0) {
2257     clear_bitmap_bits(heap_frame_top + ContinuationHelper::CompiledFrame::size(hf) + frame::metadata_words_at_top, added_argsize);
2258   }
2259 
2260   DEBUG_ONLY(after_thaw_java_frame(f, is_bottom_frame);)
2261   caller = f;
2262 }
2263 
2264 void ThawBase::recurse_thaw_stub_frame(const frame& hf, frame& caller, int num_frames) {
2265   DEBUG_ONLY(_frames++;)
2266 
2267   {
2268     RegisterMap map(nullptr,
2269                     RegisterMap::UpdateMap::include,
2270                     RegisterMap::ProcessFrames::skip,
2271                     RegisterMap::WalkContinuation::skip);
2272     map.set_include_argument_oops(false);
2273     _stream.next(&map);
2274     assert(!_stream.is_done(), "");
2275     if (UNLIKELY(seen_by_gc())) { // we're now doing this on the stub's caller
2276       _cont.tail()->do_barriers<stackChunkOopDesc::BarrierType::Store>(_stream, &map);
2277     }

src/hotspot/share/runtime/continuationFreezeThaw.cpp (new version)

1724   int remove_top_compiled_frame_from_chunk(stackChunkOop chunk, int &argsize);
1725   void copy_from_chunk(intptr_t* from, intptr_t* to, int size);
1726 
1727   // fast path
1728   inline void prefetch_chunk_pd(void* start, int size_words);
1729   void patch_return(intptr_t* sp, bool is_last);
1730 
1731   // slow path
1732   NOINLINE intptr_t* thaw_slow(stackChunkOop chunk, bool return_barrier);
1733 
1734 private:
1735   void recurse_thaw(const frame& heap_frame, frame& caller, int num_frames, bool top);
1736   template<typename FKind> bool recurse_thaw_java_frame(frame& caller, int num_frames);
1737   void finalize_thaw(frame& entry, int argsize);
1738 
1739   inline bool seen_by_gc();
1740 
1741   inline void before_thaw_java_frame(const frame& hf, const frame& caller, bool bottom, int num_frames);
1742   inline void after_thaw_java_frame(const frame& f, bool bottom);
1743   inline void patch(frame& f, const frame& caller, bool bottom);
1744   void clear_bitmap_bits(address start, address end);
1745 
1746   NOINLINE void recurse_thaw_interpreted_frame(const frame& hf, frame& caller, int num_frames);
1747   void recurse_thaw_compiled_frame(const frame& hf, frame& caller, int num_frames, bool stub_caller);
1748   void recurse_thaw_stub_frame(const frame& hf, frame& caller, int num_frames);
1749   void finish_thaw(frame& f);
1750 
1751   void push_return_frame(frame& f);
1752   inline frame new_entry_frame();
1753   template<typename FKind> frame new_stack_frame(const frame& hf, frame& caller, bool bottom);
1754   inline void patch_pd(frame& f, const frame& sender);
1755   inline intptr_t* align(const frame& hf, intptr_t* frame_sp, frame& caller, bool bottom);
1756 
1757   void maybe_set_fastpath(intptr_t* sp) { if (sp > _fastpath) _fastpath = sp; }
1758 
1759   static inline void derelativize_interpreted_frame_metadata(const frame& hf, const frame& f);
1760 
1761  public:
1762   CONT_JFR_ONLY(FreezeThawJfrInfo& jfr_info() { return _jfr_info; })
1763 };
1764 

2105   assert(!bottom || caller.fp() == _cont.entryFP(), "");
2106   if (bottom) {
2107     ContinuationHelper::Frame::patch_pc(caller, _cont.is_empty() ? caller.pc()
2108                                                                  : StubRoutines::cont_returnBarrier());
2109   } else {
2110     // The caller might have been deoptimized during thaw, but we've overwritten the return address when copying f from the heap.
2111     // If the caller is not deoptimized, the pc is unchanged.
2112     ContinuationHelper::Frame::patch_pc(caller, caller.raw_pc());
2113   }
2114 
2115   patch_pd(f, caller);
2116 
2117   if (f.is_interpreted_frame()) {
2118     ContinuationHelper::InterpretedFrame::patch_sender_sp(f, caller);
2119   }
2120 
2121   assert(!bottom || !_cont.is_empty() || Continuation::is_continuation_entry_frame(f, nullptr), "");
2122   assert(!bottom || (_cont.is_empty() != Continuation::is_cont_barrier_frame(f)), "");
2123 }
2124 
2125 void ThawBase::clear_bitmap_bits(address start, address end) {
2126   assert(is_aligned(start, wordSize), "should be aligned: " PTR_FORMAT, p2i(start));
2127   assert(is_aligned(end, VMRegImpl::stack_slot_size), "should be aligned: " PTR_FORMAT, p2i(end));
2128 
2129   // we need to clear the bits that correspond to arguments as they reside in the caller frame
2130   // or they will keep objects that are otherwise unreachable alive.
2131 
2132   // Align `end` if UseCompressedOops is not set, to avoid UB when calculating the bit index, since
2133   // `end` could be at an odd number of stack slots from `start`, i.e. it might not be oop-aligned.
2134   // In that case the bit range corresponding to the last stack slot should not have bits set
2135   // anyway, and we assert that before returning.
2136   address effective_end = UseCompressedOops ? end : align_down(end, wordSize);
2137   log_develop_trace(continuations)("clearing bitmap for " INTPTR_FORMAT " - " INTPTR_FORMAT, p2i(start), p2i(effective_end));
2138   stackChunkOop chunk = _cont.tail();
2139   chunk->bitmap().clear_range(chunk->bit_index_for(start), chunk->bit_index_for(effective_end));
2140   assert(effective_end == end || !chunk->bitmap().at(chunk->bit_index_for(effective_end)), "bit should not be set");
2141 }
2142 
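The effective_end guard above is the heart of this change: with UseCompressedOops off, the chunk bitmap keeps one bit per 64-bit word, so converting a merely slot-aligned end into a bit index would be undefined. The following self-contained model of that arithmetic uses simplified assumptions (8-byte words, 4-byte stack slots, bit indices measured from a chunk base); every name is a stand-in for the chunk's real helpers.

#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr uintptr_t word_size       = 8;  // 64-bit VM word
constexpr uintptr_t stack_slot_size = 4;  // mirrors VMRegImpl::stack_slot_size

static uintptr_t align_down(uintptr_t p, uintptr_t alignment) {
  return p & ~(alignment - 1);
}

// Stand-in for stackChunkOopDesc::bit_index_for with full-width oops:
// one bitmap bit per word, measured from the chunk's base address.
static size_t bit_index_for(uintptr_t chunk_base, uintptr_t addr) {
  assert((addr - chunk_base) % word_size == 0 && "needs a word-aligned address");
  return (addr - chunk_base) / word_size;
}

static void clear_bitmap_bits_model(uintptr_t chunk_base,
                                    uintptr_t start, uintptr_t end) {
  assert(start % word_size == 0);
  assert(end % stack_slot_size == 0);
  // end may sit an odd number of slots past start, i.e. mid-word; align it
  // down so the bit-index conversion stays well-defined (effective_end).
  uintptr_t effective_end = align_down(end, word_size);
  size_t from = bit_index_for(chunk_base, start);
  size_t to   = bit_index_for(chunk_base, effective_end);
  // ... a real implementation would now clear bitmap bits [from, to) ...
  (void)from; (void)to;
}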
2143 NOINLINE void ThawBase::recurse_thaw_interpreted_frame(const frame& hf, frame& caller, int num_frames) {
2144   assert(hf.is_interpreted_frame(), "");
2145 
2146   if (UNLIKELY(seen_by_gc())) {
2147     _cont.tail()->do_barriers<stackChunkOopDesc::BarrierType::Store>(_stream, SmallRegisterMap::instance);
2148   }
2149 
2150   const bool is_bottom_frame = recurse_thaw_java_frame<ContinuationHelper::InterpretedFrame>(caller, num_frames);
2151 
2152   DEBUG_ONLY(before_thaw_java_frame(hf, caller, is_bottom_frame, num_frames);)
2153 
2154   _align_size += frame::align_wiggle; // possible added alignment for internal interpreted frame alignment on AArch64
2155 
2156   frame f = new_stack_frame<ContinuationHelper::InterpretedFrame>(hf, caller, is_bottom_frame);
2157 
2158   intptr_t* const stack_frame_top = f.sp() + frame::metadata_words_at_top;
2159   intptr_t* const stack_frame_bottom = ContinuationHelper::InterpretedFrame::frame_bottom(f);
2160   intptr_t* const heap_frame_top = hf.unextended_sp() + frame::metadata_words_at_top;

2173   // Make sure the relativized locals are already set.
2174   assert(f.interpreter_frame_local_at(0) == stack_frame_bottom - 1, "invalid frame bottom");
2175 
2176   derelativize_interpreted_frame_metadata(hf, f);
2177   patch(f, caller, is_bottom_frame);
2178 
2179   assert(f.is_interpreted_frame_valid(_cont.thread()), "invalid thawed frame");
2180   assert(stack_frame_bottom <= ContinuationHelper::Frame::frame_top(caller), "");
2181 
2182   CONT_JFR_ONLY(_jfr_info.record_interpreted_frame();)
2183 
2184   maybe_set_fastpath(f.sp());
2185 
2186   const int locals = hf.interpreter_frame_method()->max_locals();
2187 
2188   if (!is_bottom_frame) {
2189     // can only fix caller once this frame is thawed (due to callee saved regs)
2190     _cont.tail()->fix_thawed_frame(caller, SmallRegisterMap::instance);
2191   } else if (_cont.tail()->has_bitmap() && locals > 0) {
2192     assert(hf.is_heap_frame(), "should be");
2193     address start = (address)(heap_frame_bottom - locals);
2194     address end = (address)heap_frame_bottom;
2195     clear_bitmap_bits(start, end);
2196   }
2197 
2198   DEBUG_ONLY(after_thaw_java_frame(f, is_bottom_frame);)
2199   caller = f;
2200 }
2201 
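In the bottom-frame branch above, the cleared range covers the interpreted frame's locals: the max_locals words immediately below the frame bottom. Since heap_frame_bottom is an intptr_t*, the subtraction steps back whole words before the casts to address. A tiny sketch with a hypothetical Range type:

#include <cstdint>

using address = unsigned char*;

struct Range { address start; address end; };  // hypothetical helper

static Range interpreted_locals_range(intptr_t* heap_frame_bottom,
                                      int max_locals) {
  // Pointer arithmetic on intptr_t*: "- max_locals" moves back
  // max_locals words (not bytes), landing on the first local slot.
  return Range{ (address)(heap_frame_bottom - max_locals),
                (address)heap_frame_bottom };
}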
2202 void ThawBase::recurse_thaw_compiled_frame(const frame& hf, frame& caller, int num_frames, bool stub_caller) {
2203   assert(!hf.is_interpreted_frame(), "");
2204   assert(_cont.is_preempted() || !stub_caller, "stub caller not at preemption");
2205 
2206   if (!stub_caller && UNLIKELY(seen_by_gc())) { // recurse_thaw_stub_frame already invoked our barriers with a full regmap
2207     _cont.tail()->do_barriers<stackChunkOopDesc::BarrierType::Store>(_stream, SmallRegisterMap::instance);
2208   }
2209 
2210   const bool is_bottom_frame = recurse_thaw_java_frame<ContinuationHelper::CompiledFrame>(caller, num_frames);
2211 
2212   DEBUG_ONLY(before_thaw_java_frame(hf, caller, is_bottom_frame, num_frames);)
2213 
2214   assert(caller.sp() == caller.unextended_sp(), "");
2215 

2248     maybe_set_fastpath(f.sp());
2249   } else if (_thread->is_interp_only_mode()
2250               || (_cont.is_preempted() && f.cb()->as_compiled_method()->is_marked_for_deoptimization())) {
2251     // The caller of the safepoint stub when the continuation is preempted is not at a call instruction, and so
2252     // cannot rely on nmethod patching for deopt.
2253     assert(_thread->is_interp_only_mode() || stub_caller, "expected a stub-caller");
2254 
2255     log_develop_trace(continuations)("Deoptimizing thawed frame");
2256     DEBUG_ONLY(ContinuationHelper::Frame::patch_pc(f, nullptr));
2257 
2258     f.deoptimize(nullptr); // the null thread simply avoids the assertion in deoptimize which we're not set up for
2259     assert(f.is_deoptimized_frame(), "");
2260     assert(ContinuationHelper::Frame::is_deopt_return(f.raw_pc(), f), "");
2261     maybe_set_fastpath(f.sp());
2262   }
2263 
2264   if (!is_bottom_frame) {
2265     // can only fix caller once this frame is thawed (due to callee saved regs); this happens on the stack
2266     _cont.tail()->fix_thawed_frame(caller, SmallRegisterMap::instance);
2267   } else if (_cont.tail()->has_bitmap() && added_argsize > 0) {
2268     address start = (address)(heap_frame_top + ContinuationHelper::CompiledFrame::size(hf) + frame::metadata_words_at_top);
2269     int stack_args_slots = f.cb()->as_compiled_method()->method()->num_stack_arg_slots(false /* rounded */);
2270     int argsize_in_bytes = stack_args_slots * VMRegImpl::stack_slot_size;
2271     clear_bitmap_bits(start, start + argsize_in_bytes);
2272   }
2273 
2274   DEBUG_ONLY(after_thaw_java_frame(f, is_bottom_frame);)
2275   caller = f;
2276 }
2277 
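The bottom-frame branch above sizes the cleared range from the method's stack-passed argument slots: the unrounded count of 4-byte VM stack slots is converted to bytes, so alignment padding past the last real argument is excluded. A short sketch; all parameters stand in for values the real code reads from the frame and method:

#include <cstdint>

using address = unsigned char*;

constexpr int stack_slot_size = 4;  // mirrors VMRegImpl::stack_slot_size

// Hypothetical sketch: the cleared range begins just past the callee's
// fixed frame in the chunk and spans only the stack-passed arguments.
static void stack_arg_range(intptr_t* heap_frame_top, int frame_size_words,
                            int metadata_words, int unrounded_arg_slots,
                            address& start, address& end) {
  start = (address)(heap_frame_top + frame_size_words + metadata_words);
  end   = start + unrounded_arg_slots * stack_slot_size;
}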
2278 void ThawBase::recurse_thaw_stub_frame(const frame& hf, frame& caller, int num_frames) {
2279   DEBUG_ONLY(_frames++;)
2280 
2281   {
2282     RegisterMap map(nullptr,
2283                     RegisterMap::UpdateMap::include,
2284                     RegisterMap::ProcessFrames::skip,
2285                     RegisterMap::WalkContinuation::skip);
2286     map.set_include_argument_oops(false);
2287     _stream.next(&map);
2288     assert(!_stream.is_done(), "");
2289     if (UNLIKELY(seen_by_gc())) { // we're now doing this on the stub's caller
2290       _cont.tail()->do_barriers<stackChunkOopDesc::BarrierType::Store>(_stream, &map);
2291     }