1 /*
2 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24 #include "asm/codeBuffer.hpp"
25 #include "interpreter/interpreter.hpp"
26 #include "jfr/periodic/sampling/jfrSampleRequest.hpp"
27 #include "jfr/utilities/jfrTime.hpp"
28 #include "runtime/continuationEntry.hpp"
29 #include "runtime/frame.inline.hpp"
30 #include "runtime/javaThread.inline.hpp"
31 #include "runtime/os.hpp"
32 #include "runtime/safepointMechanism.inline.hpp"
33 #include "runtime/stubRoutines.hpp"
34
35 static inline bool is_entry_frame(address pc) {
36 return StubRoutines::returns_to_call_stub(pc);
37 }
38
39 static inline bool is_entry_frame(const JfrSampleRequest& request) {
40 return is_entry_frame(static_cast<address>(request._sample_pc));
41 }
42
43 static inline bool is_interpreter(address pc) {
44 return Interpreter::contains(pc);
45 }
46
47 static inline bool is_interpreter(const JfrSampleRequest& request) {
48 return is_interpreter(static_cast<address>(request._sample_pc));
49 }
50
// Read the bytecode pointer (bcp) out of the interpreter frame.
// NOTE(review): at this point request._sample_bcp holds the frame pointer
// (fp), not a bcp — the field is reused while the request is being built
// (see build(), which stashes fp there before validation).
static inline address interpreter_frame_bcp(const JfrSampleRequest& request) {
  assert(is_interpreter(request), "invariant");
  return frame::interpreter_bcp(static_cast<intptr_t*>(request._sample_bcp));
}
55
56 static inline bool in_stack(intptr_t* ptr, JavaThread* jt) {
57 assert(jt != nullptr, "invariant");
58 return jt->is_in_full_stack_checked(reinterpret_cast<address>(ptr));
59 }
60
#ifdef ASSERT
// Debug-only: is the sampled sp within the thread's stack bounds?
static inline bool sp_in_stack(const JfrSampleRequest& request, JavaThread* jt) {
  return in_stack(static_cast<intptr_t*>(request._sample_sp), jt);
}
#endif // ASSERT
66
// Is the sampled fp within the thread's stack bounds? The fp is carried in
// the _sample_bcp field during request construction — reading _sample_bcp
// here is intentional, not a typo.
static inline bool fp_in_stack(const JfrSampleRequest& request, JavaThread* jt) {
  return in_stack(static_cast<intptr_t*>(request._sample_bcp), jt);
}
70
// Replace the sampled pc with the return address stored in the interpreter
// frame, i.e. the pc by which the caller will resume.
// _sample_bcp holds the interpreter frame's fp at this point.
static inline void update_interpreter_frame_pc(JfrSampleRequest& request, JavaThread* jt) {
  assert(fp_in_stack(request, jt), "invariant");
  assert(is_interpreter(request), "invariant");
  request._sample_pc = frame::interpreter_return_address(static_cast<intptr_t*>(request._sample_bcp));
}
76
// Derive the sender's sp from the sampled frame's fp (held in _sample_bcp).
static inline intptr_t* frame_sender_sp(const JfrSampleRequest& request, JavaThread* jt) {
  assert(fp_in_stack(request, jt), "invariant");
  return frame::sender_sp(static_cast<intptr_t*>(request._sample_bcp));
}
81
82 static inline void update_frame_sender_sp(JfrSampleRequest& request, JavaThread* jt) {
83 request._sample_sp = frame_sender_sp(request, jt);
84 }
85
// Read the saved link (the sender's fp) from the sampled frame's fp,
// which is carried in _sample_bcp during request construction.
static inline intptr_t* frame_link(const JfrSampleRequest& request) {
  return frame::link(static_cast<intptr_t*>(request._sample_bcp));
}
89
// Less extensive sanity checks for an interpreter frame.
// Verifies the frame at fp (_sample_bcp) / sp (_sample_sp) looks like a
// fully set up interpreter frame before any of its fields are trusted.
static bool is_valid_interpreter_frame(const JfrSampleRequest& request, JavaThread* jt) {
  assert(sp_in_stack(request, jt), "invariant");
  assert(fp_in_stack(request, jt), "invariant");
  return frame::is_interpreter_frame_setup_at(static_cast<intptr_t*>(request._sample_bcp), request._sample_sp);
}
96
97 static inline bool is_continuation_frame(address pc) {
98 return ContinuationEntry::return_pc() == pc;
99 }
100
101 static inline bool is_continuation_frame(const JfrSampleRequest& request) {
102 return is_continuation_frame(static_cast<address>(request._sample_pc));
103 }
104
// Unwind one interpreter frame: rewrite the request's pc/sp to describe the
// sender frame. Returns the sender's fp if the sender is itself an
// interpreter frame, nullptr otherwise. On return, _sample_bcp is cleared
// (it held the unwound frame's fp). If the sender is a continuation or
// entry frame, _sample_pc is also cleared to signal "no walkable sender".
static intptr_t* sender_for_interpreter_frame(JfrSampleRequest& request, JavaThread* jt) {
  update_interpreter_frame_pc(request, jt); // pick up return address
  if (is_continuation_frame(request) || is_entry_frame(request)) {
    request._sample_pc = nullptr;
    return nullptr;
  }
  update_frame_sender_sp(request, jt);
  intptr_t* fp = nullptr;
  if (is_interpreter(request)) {
    // Only an interpreter sender has a trustworthy saved link to read.
    fp = frame_link(request);
  }
  request._sample_bcp = nullptr;
  return fp;
}
119
120 static bool build(JfrSampleRequest& request, intptr_t* fp, JavaThread* jt);
121
122 static bool build_for_interpreter(JfrSampleRequest& request, JavaThread* jt) {
123 assert(is_interpreter(request), "invariant");
124 assert(jt != nullptr, "invariant");
125 if (!fp_in_stack(request, jt)) {
126 return false;
127 }
128 if (is_valid_interpreter_frame(request, jt)) {
129 // Set fp as sp for interpreter frames.
130 request._sample_sp = request._sample_bcp;
131 // Get real bcp.
132 void* const bcp = interpreter_frame_bcp(request);
133 // Setting bcp = 1 marks the sample request to represent a native method.
134 request._sample_bcp = bcp != nullptr ? bcp : reinterpret_cast<address>(1);
135 return true;
136 }
137 intptr_t* fp = sender_for_interpreter_frame(request, jt);
138 if (request._sample_pc == nullptr || request._sample_sp == nullptr) {
139 return false;
140 }
141 return build(request, fp, jt);
142 }
143
// Attempt to build a Jfr sample request.
// Returns true iff the request ends up describing a walkable Java frame:
// either a validated interpreter frame (via build_for_interpreter) or a
// compiled nmethod frame. Other blobs/stubs are rejected for now.
static bool build(JfrSampleRequest& request, intptr_t* fp, JavaThread* jt) {
  assert(request._sample_sp != nullptr, "invariant");
  assert(request._sample_pc != nullptr, "invariant");
  assert(jt != nullptr, "invariant");
  assert(jt->thread_state() == _thread_in_Java || jt->thread_state() == _thread_in_native, "invariant");

  // 1. Interpreter frame?
  if (is_interpreter(request)) {
    // Stash fp in the bcp field; build_for_interpreter rewrites it to the
    // real bcp once the frame has been validated.
    request._sample_bcp = fp;
    return build_for_interpreter(request, jt);
  }
  const CodeBlob* const cb = CodeCache::find_blob(request._sample_pc);
  if (cb != nullptr) {
    // 2. Is nmethod?
    return cb->is_nmethod();
    // 3. What kind of CodeBlob or Stub?
    // Longer plan is to make stubs and blobs parsable,
    // and we will have a list of cases here for each blob type
    // describing how to locate the sender. We can't get to the
    // sender of a blob or stub until they have a standardized
    // layout and proper metadata descriptions.
  }
  return false;
}
169
// Attempt to build a sample request starting from the thread's last Java
// frame (ljf) anchor. request._sample_sp already holds the anchored sp.
// The anchor may lack a pc (frame not walkable); in that case the return
// address is recovered directly from the stack.
static bool build_from_ljf(JfrSampleRequest& request,
                           const JfrThreadLocal* tl,
                           JavaThread* jt) {
  assert(tl != nullptr, "invariant");
  assert(jt != nullptr, "invariant");
  assert(jt->jfr_thread_local() == tl, "invariant");
  assert(sp_in_stack(request, jt), "invariant");
  // Last Java frame is available, but might not be walkable, fix it.
  address last_pc = jt->last_Java_pc();
  if (last_pc == nullptr) {
    last_pc = frame::return_address(static_cast<intptr_t*>(request._sample_sp));
    if (last_pc == nullptr) {
      return false;
    }
  }
  assert(last_pc != nullptr, "invariant");
  if (is_interpreter(last_pc)) {
    // NOTE(review): presumably guards windows where interpreter frame state
    // is inconsistent — confirm against JfrThreadLocal.
    if (tl->in_sampling_critical_section()) {
      return false;
    }
    request._sample_pc = last_pc;
    // _sample_bcp carries the fp until the frame is validated.
    request._sample_bcp = jt->frame_anchor()->last_Java_fp();
    return build_for_interpreter(request, jt);
  }
  request._sample_pc = last_pc;
  return build(request, nullptr, jt);
}
197
// Attempt to build a sample request from the signal/interrupt ucontext,
// used when no last Java frame anchor exists. Fetches pc/sp/fp from the
// context and mirrors the interpreter handling of build_for_interpreter,
// except the bcp can be read straight from the context.
static bool build_from_context(JfrSampleRequest& request,
                               const void* ucontext,
                               const JfrThreadLocal* tl,
                               JavaThread* jt) {
  assert(ucontext != nullptr, "invariant");
  assert(tl != nullptr, "invariant");
  assert(jt != nullptr, "invariant");
  assert(jt->jfr_thread_local() == tl, "invariant");
  assert(!jt->has_last_Java_frame(), "invariant");
  intptr_t* fp;
  request._sample_pc = os::fetch_frame_from_context(ucontext, reinterpret_cast<intptr_t**>(&request._sample_sp), &fp);
  assert(sp_in_stack(request, jt), "invariant");
  if (is_interpreter(request)) {
    // Reject samples taken while interpreter frame state may be inconsistent,
    // or when the context fp is not even within the thread's stack.
    if (tl->in_sampling_critical_section() || !in_stack(fp, jt)) {
      return false;
    }
    if (frame::is_interpreter_frame_setup_at(fp, request._sample_sp)) {
      // Set fp as sp for interpreter frames.
      request._sample_sp = fp;
      void* bcp = os::fetch_bcp_from_context(ucontext);
      // Setting bcp = 1 marks the sample request to represent a native method.
      request._sample_bcp = bcp != nullptr ? bcp : reinterpret_cast<void*>(1);
      return true;
    }
    // Frame not fully set up: unwind to the sender and build from there.
    request._sample_bcp = fp;
    fp = sender_for_interpreter_frame(request, jt);
    if (request._sample_pc == nullptr || request._sample_sp == nullptr) {
      return false;
    }
  }
  return build(request, fp, jt);
}
230
// Publish the sample request to the thread-local state and arm the thread's
// local safepoint poll so the sampled thread will process the request.
// The state is set before the poll is armed (release semantics).
static inline JfrSampleResult set_request_and_arm_local_poll(JfrSampleRequest& request, JfrThreadLocal* tl, JavaThread* jt) {
  assert(tl != nullptr, "invariant");
  assert(jt->jfr_thread_local() == tl, "invariant");
  tl->set_sample_state(JAVA_SAMPLE);
  SafepointMechanism::arm_local_poll_release(jt);
  // For a Java sample, request._sample_ticks is also the start time for the SafepointLatency event.
  request._sample_ticks = JfrTicks::now();
  tl->set_sample_request(request);
  return SAMPLE_JAVA;
}
241
242 // A biased sample request is denoted by an empty bcp and an empty pc.
243 static inline JfrSampleResult set_biased_java_sample(JfrSampleRequest& request, JfrThreadLocal* tl, JavaThread* jt) {
244 if (request._sample_bcp != nullptr) {
245 request._sample_bcp = nullptr;
246 }
247 assert(request._sample_bcp == nullptr, "invariant");
248 request._sample_pc = nullptr;
249 return set_request_and_arm_local_poll(request, tl, jt);
250 }
251
// An unbiased sample request carries a fully built frame description
// (sp, pc, and — for interpreter frames — bcp) captured at sample time.
static inline JfrSampleResult set_unbiased_java_sample(JfrSampleRequest& request, JfrThreadLocal* tl, JavaThread* jt) {
  assert(request._sample_sp != nullptr, "invariant");
  assert(sp_in_stack(request, jt), "invariant");
  assert(request._sample_bcp != nullptr || !is_interpreter(request), "invariant");
  return set_request_and_arm_local_poll(request, tl, jt);
}
258
// Build and publish a Java sample request for jt, preferring an unbiased
// sample (exact frame at sample time). Falls back to a biased sample —
// empty pc/bcp, resolved later by the sampled thread — when no walkable
// frame can be built. Always arms the thread's local poll.
JfrSampleResult JfrSampleRequestBuilder::build_java_sample_request(const void* ucontext,
                                                                   JfrThreadLocal* tl,
                                                                   JavaThread* jt) {
  assert(ucontext != nullptr, "invariant");
  assert(tl != nullptr, "invariant");
  assert(tl->sample_state() == NO_SAMPLE, "invariant");
  assert(jt != nullptr, "invariant");
  assert(jt->thread_state() == _thread_in_Java, "invariant");

  JfrSampleRequest request;

  // Prioritize the ljf, if one exists.
  request._sample_sp = jt->last_Java_sp();
  if (request._sample_sp != nullptr) {
    if (build_from_ljf(request, tl, jt)) {
      return set_unbiased_java_sample(request, tl, jt);
    }
  } else if (build_from_context(request, ucontext, tl, jt)) {
    return set_unbiased_java_sample(request, tl, jt);
  }
  // Note: if an ljf existed but could not be walked, we do NOT retry from
  // the context; the sample degrades to a biased one.
  return set_biased_java_sample(request, tl, jt);
}
281
282
283 // A biased sample request is denoted by an empty bcp and an empty pc.
284 static inline void set_cpu_time_biased_sample(JfrSampleRequest& request, JavaThread* jt) {
285 if (request._sample_bcp != nullptr) {
286 request._sample_bcp = nullptr;
287 }
288 assert(request._sample_bcp == nullptr, "invariant");
289 request._sample_pc = nullptr;
290 }
291
// Build a CPU-time sample request for jt at time 'now'. Tries the last Java
// frame first, then the ucontext; if neither yields a walkable frame, the
// request is downgraded to a biased sample (empty pc/bcp). Unlike the Java
// sample path, this never arms a safepoint poll.
void JfrSampleRequestBuilder::build_cpu_time_sample_request(JfrSampleRequest& request,
                                                            void* ucontext,
                                                            JavaThread* jt,
                                                            JfrThreadLocal* tl,
                                                            JfrTicks& now) {
  assert(jt != nullptr, "invariant");
  request._sample_ticks = now;

  // Prioritize the ljf, if one exists.
  request._sample_sp = jt->last_Java_sp();
  if (request._sample_sp == nullptr || !build_from_ljf(request, tl, jt)) {
    // No (walkable) ljf: fall back to the frame captured in the ucontext.
    intptr_t* fp;
    request._sample_pc = os::fetch_frame_from_context(ucontext, reinterpret_cast<intptr_t**>(&request._sample_sp), &fp);
    assert(sp_in_stack(request, jt), "invariant");
    if (!build(request, fp, jt)) {
      set_cpu_time_biased_sample(request, jt);
    }
  }
}