1 /*
  2  * Copyright (c) 2012, 2023, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "jfr/jfrEvents.hpp"
 27 #include "jfr/jni/jfrJavaSupport.hpp"
 28 #include "jfr/leakprofiler/checkpoint/objectSampleCheckpoint.hpp"
 29 #include "jfr/periodic/jfrThreadCPULoadEvent.hpp"
 30 #include "jfr/recorder/checkpoint/jfrCheckpointManager.hpp"
 31 #include "jfr/recorder/checkpoint/types/traceid/jfrOopTraceId.inline.hpp"
 32 #include "jfr/recorder/jfrRecorder.hpp"
 33 #include "jfr/recorder/service/jfrOptionSet.hpp"
 34 #include "jfr/recorder/stacktrace/jfrStackTraceRepository.hpp"
 35 #include "jfr/recorder/storage/jfrStorage.hpp"
 36 #include "jfr/support/jfrThreadId.inline.hpp"
 37 #include "jfr/support/jfrThreadLocal.hpp"
 38 #include "jfr/utilities/jfrSpinlockHelper.hpp"
 39 #include "jfr/writers/jfrJavaEventWriter.hpp"
 40 #include "logging/log.hpp"
 41 #include "memory/allocation.inline.hpp"
 42 #include "memory/arena.hpp"
 43 #include "runtime/atomic.hpp"
 44 #include "runtime/javaThread.hpp"
 45 #include "runtime/os.hpp"
 46 #include "runtime/threadIdentifier.hpp"
 47 #include "utilities/sizes.hpp"
 48 
// Initializes all thread-local JFR state to empty/zero defaults.
// Note: members are initialized in declaration order, regardless of the
// order written here. _wallclock_time is sampled eagerly at construction.
JfrThreadLocal::JfrThreadLocal() :
  _java_event_writer(nullptr),
  _java_buffer(nullptr),
  _native_buffer(nullptr),
  _shelved_buffer(nullptr),
  _load_barrier_buffer_epoch_0(nullptr),
  _load_barrier_buffer_epoch_1(nullptr),
  _checkpoint_buffer_epoch_0(nullptr),
  _checkpoint_buffer_epoch_1(nullptr),
  _stackframes(nullptr),
  _dcmd_arena(nullptr),
  _thread(),
  _vthread_id(0),
  _jvm_thread_id(0),
  _thread_id_alias(max_julong),   // max_julong == "no alias active"
  _data_lost(0),
  _stack_trace_id(max_julong),    // max_julong == "no cached stack trace"
  _parent_trace_id(0),
  _last_allocated_bytes(0),
  _user_time(0),
  _cpu_time(0),
  _wallclock_time(os::javaTimeNanos()),
  _stack_trace_hash(0),
  _stackdepth(0),
  _entering_suspend_flag(0),
  _critical_section(0),
  _vthread_epoch(0),
  _vthread_excluded(false),
  _jvm_thread_excluded(false),
  _vthread(false),
  _notified(false),
  _dead(false) {
  // The creating ("parent") thread, if any, supplies the parent trace id.
  // During early VM startup there may be no current thread yet.
  Thread* thread = Thread::current_or_null();
  _parent_trace_id = thread != nullptr ? jvm_thread_id(thread) : (traceid)0;
}
 84 
 85 u8 JfrThreadLocal::add_data_lost(u8 value) {
 86   _data_lost += value;
 87   return _data_lost;
 88 }
 89 
// Returns true if a serialized thread blob has been installed for this thread.
bool JfrThreadLocal::has_thread_blob() const {
  return _thread.valid();
}
 93 
 94 void JfrThreadLocal::set_thread_blob(const JfrBlobHandle& ref) {
 95   assert(!_thread.valid(), "invariant");
 96   _thread = ref;
 97 }
 98 
// Returns the installed thread blob handle (may be invalid if never set).
const JfrBlobHandle& JfrThreadLocal::thread_blob() const {
  return _thread;
}
102 
103 static void send_java_thread_start_event(JavaThread* jt) {
104   assert(jt != nullptr, "invariant");
105   assert(Thread::current() == jt, "invariant");
106   if (!JfrJavaSupport::on_thread_start(jt)) {
107     // thread is excluded
108     return;
109   }
110   EventThreadStart event;
111   traceid thread_id = JfrThreadLocal::jvm_thread_id(jt);
112   assert(thread_id != 0, "invariant");
113   event.set_thread(thread_id);
114   event.set_parentThread(jt->jfr_thread_local()->parent_thread_id());
115   event.commit();
116 }
117 
118 void JfrThreadLocal::on_start(Thread* t) {
119   assign_thread_id(t, t->jfr_thread_local());
120   if (JfrRecorder::is_recording()) {
121     JfrCheckpointManager::write_checkpoint(t);
122     if (t->is_Java_thread()) {
123       send_java_thread_start_event(JavaThread::cast(t));
124     }
125   }
126   if (t->jfr_thread_local()->has_cached_stack_trace()) {
127     t->jfr_thread_local()->clear_cached_stack_trace();
128   }
129 }
130 
// The starter thread ensures that the startee has a valid _jvm_thread_id and _vthread_id.
// This is to avoid recursion in thread assignment since accessing the java threadObj can lead
// to events being fired, a situation the starter thread can handle but not the startee.
134 void JfrThreadLocal::on_java_thread_start(JavaThread* starter, JavaThread* startee) {
135   assert(starter != nullptr, "invariant");
136   assert(startee != nullptr, "invariant");
137   JfrThreadLocal* const tl = startee->jfr_thread_local();
138   assign_thread_id(startee, tl);
139   assert(vthread_id(startee) != 0, "invariant");
140   assert(jvm_thread_id(startee) == vthread_id(startee), "invariant");
141   if (JfrRecorder::is_recording() && EventThreadStart::is_enabled() && EventThreadStart::is_stacktrace_enabled()) {
142     // skip level 2 to skip frames Thread.start() and Thread.start0()
143     startee->jfr_thread_local()->set_cached_stack_trace_id(JfrStackTraceRepository::record(starter, 2));
144   }
145 }
146 
147 void JfrThreadLocal::release(Thread* t) {
148   if (has_java_event_writer()) {
149     assert(t->is_Java_thread(), "invariant");
150     JfrJavaSupport::destroy_global_jni_handle(java_event_writer());
151     _java_event_writer = nullptr;
152   }
153   if (has_native_buffer()) {
154     JfrStorage::release_thread_local(native_buffer(), t);
155     _native_buffer = nullptr;
156   }
157   if (has_java_buffer()) {
158     JfrStorage::release_thread_local(java_buffer(), t);
159     _java_buffer = nullptr;
160   }
161   if (_stackframes != nullptr) {
162     FREE_C_HEAP_ARRAY(JfrStackFrame, _stackframes);
163     _stackframes = nullptr;
164   }
165   if (_load_barrier_buffer_epoch_0 != nullptr) {
166     _load_barrier_buffer_epoch_0->set_retired();
167     _load_barrier_buffer_epoch_0 = nullptr;
168   }
169   if (_load_barrier_buffer_epoch_1 != nullptr) {
170     _load_barrier_buffer_epoch_1->set_retired();
171     _load_barrier_buffer_epoch_1 = nullptr;
172   }
173   if (_checkpoint_buffer_epoch_0 != nullptr) {
174     _checkpoint_buffer_epoch_0->set_retired();
175     _checkpoint_buffer_epoch_0 = nullptr;
176   }
177   if (_checkpoint_buffer_epoch_1 != nullptr) {
178     _checkpoint_buffer_epoch_1->set_retired();
179     _checkpoint_buffer_epoch_1 = nullptr;
180   }
181   if (_dcmd_arena != nullptr) {
182     delete _dcmd_arena;
183     _dcmd_arena = nullptr;
184   }
185 }
186 
// Marks tl dead and releases its resources. t is the current thread,
// which may differ from the owner of tl (e.g. during VM thread cleanup
// of an exited thread).
void JfrThreadLocal::release(JfrThreadLocal* tl, Thread* t) {
  assert(tl != nullptr, "invariant");
  assert(t != nullptr, "invariant");
  assert(Thread::current() == t, "invariant");
  assert(!tl->is_dead(), "invariant");
  assert(tl->shelved_buffer() == nullptr, "invariant");
  tl->_dead = true;
  tl->release(t);
}
196 
197 static void send_java_thread_end_event(JavaThread* jt, traceid tid) {
198   assert(jt != nullptr, "invariant");
199   assert(Thread::current() == jt, "invariant");
200   assert(tid != 0, "invariant");
201   if (JfrRecorder::is_recording()) {
202     EventThreadEnd event;
203     event.set_thread(tid);
204     event.commit();
205     ObjectSampleCheckpoint::on_thread_exit(tid);
206   }
207 }
208 
209 void JfrThreadLocal::on_exit(Thread* t) {
210   assert(t != nullptr, "invariant");
211   JfrThreadLocal * const tl = t->jfr_thread_local();
212   assert(!tl->is_dead(), "invariant");
213   if (JfrRecorder::is_recording()) {
214     JfrCheckpointManager::write_checkpoint(t);
215   }
216   if (t->is_Java_thread()) {
217     JavaThread* const jt = JavaThread::cast(t);
218     send_java_thread_end_event(jt, JfrThreadLocal::jvm_thread_id(jt));
219     JfrThreadCPULoadEvent::send_event_for_thread(jt);
220   }
221   release(tl, Thread::current()); // because it could be that Thread::current() != t
222 }
223 
224 static JfrBuffer* acquire_buffer() {
225   return JfrStorage::acquire_thread_local(Thread::current());
226 }
227 
228 JfrBuffer* JfrThreadLocal::install_native_buffer() const {
229   assert(!has_native_buffer(), "invariant");
230   _native_buffer = acquire_buffer();
231   return _native_buffer;
232 }
233 
234 JfrBuffer* JfrThreadLocal::install_java_buffer() const {
235   assert(!has_java_buffer(), "invariant");
236   assert(!has_java_event_writer(), "invariant");
237   _java_buffer = acquire_buffer();
238   return _java_buffer;
239 }
240 
241 JfrStackFrame* JfrThreadLocal::install_stackframes() const {
242   assert(_stackframes == nullptr, "invariant");
243   _stackframes = NEW_C_HEAP_ARRAY(JfrStackFrame, stackdepth(), mtTracing);
244   return _stackframes;
245 }
246 
// The following offsets expose selected JfrThreadLocal fields for direct
// access via byte offsets, e.g. from generated code that reads or updates
// these fields without going through the C++ accessors.
ByteSize JfrThreadLocal::java_event_writer_offset() {
  return byte_offset_of(JfrThreadLocal, _java_event_writer);
}

ByteSize JfrThreadLocal::java_buffer_offset() {
  return byte_offset_of(JfrThreadLocal, _java_buffer);
}

ByteSize JfrThreadLocal::vthread_id_offset() {
  return byte_offset_of(JfrThreadLocal, _vthread_id);
}

ByteSize JfrThreadLocal::vthread_offset() {
  return byte_offset_of(JfrThreadLocal, _vthread);
}

ByteSize JfrThreadLocal::vthread_epoch_offset() {
  return byte_offset_of(JfrThreadLocal, _vthread_epoch);
}

ByteSize JfrThreadLocal::vthread_excluded_offset() {
  return byte_offset_of(JfrThreadLocal, _vthread_excluded);
}

ByteSize JfrThreadLocal::notified_offset() {
  return byte_offset_of(JfrThreadLocal, _notified);
}
274 
// Generic setter for the exclusion flags.
// NOTE(review): this is a plain store, while readers such as
// is_vthread_excluded() use Atomic::load and on_set_current_thread()
// uses Atomic::store on the same field — presumably callers guarantee
// the store is safe (same thread or safepoint); confirm.
void JfrThreadLocal::set(bool* exclusion_field, bool state) {
  assert(exclusion_field != nullptr, "invariant");
  *exclusion_field = state;
}
279 
// Returns true if the currently mounted virtual thread is excluded from JFR.
bool JfrThreadLocal::is_vthread_excluded() const {
  return Atomic::load(&_vthread_excluded);
}
283 
284 bool JfrThreadLocal::is_jvm_thread_excluded(const Thread* t) {
285   assert(t != nullptr, "invariant");
286   return t->jfr_thread_local()->_jvm_thread_excluded;
287 }
288 
289 void JfrThreadLocal::exclude_vthread(const JavaThread* jt) {
290   set(&jt->jfr_thread_local()->_vthread_excluded, true);
291   JfrJavaEventWriter::exclude(vthread_id(jt), jt);
292 }
293 
294 void JfrThreadLocal::include_vthread(const JavaThread* jt) {
295   set(&jt->jfr_thread_local()->_vthread_excluded, false);
296   JfrJavaEventWriter::include(vthread_id(jt), jt);
297 }
298 
299 void JfrThreadLocal::exclude_jvm_thread(const Thread* t) {
300   set(&t->jfr_thread_local()->_jvm_thread_excluded, true);
301   if (t->is_Java_thread()) {
302     JfrJavaEventWriter::exclude(t->jfr_thread_local()->_jvm_thread_id, JavaThread::cast(t));
303   }
304 }
305 
306 void JfrThreadLocal::include_jvm_thread(const Thread* t) {
307   set(&t->jfr_thread_local()->_jvm_thread_excluded, false);
308   if (t->is_Java_thread()) {
309     JfrJavaEventWriter::include(t->jfr_thread_local()->_jvm_thread_id, JavaThread::cast(t));
310   }
311 }
312 
313 bool JfrThreadLocal::is_excluded() const {
314   return Atomic::load_acquire(&_vthread) ? is_vthread_excluded(): _jvm_thread_excluded;
315 }
316 
// Convenience negation of is_excluded().
bool JfrThreadLocal::is_included() const {
  return !is_excluded();
}

// Static convenience overload: exclusion state for thread t.
bool JfrThreadLocal::is_excluded(const Thread* t) {
  assert(t != nullptr, "invariant");
  return t->jfr_thread_local()->is_excluded();
}

// Static convenience overload: inclusion state for thread t.
bool JfrThreadLocal::is_included(const Thread* t) {
  assert(t != nullptr, "invariant");
  return t->jfr_thread_local()->is_included();
}
330 
331 u4 JfrThreadLocal::stackdepth() const {
332   return _stackdepth != 0 ? _stackdepth : (u4)JfrOptionSet::stackdepth();
333 }
334 
// Returns true if t currently reports events under an aliased thread id
// (max_julong is the sentinel for "no alias active").
bool JfrThreadLocal::is_impersonating(const Thread* t) {
  return t->jfr_thread_local()->_thread_id_alias != max_julong;
}
338 
339 void JfrThreadLocal::impersonate(const Thread* t, traceid other_thread_id) {
340   assert(t != nullptr, "invariant");
341   assert(other_thread_id != 0, "invariant");
342   JfrThreadLocal* const tl = t->jfr_thread_local();
343   tl->_thread_id_alias = other_thread_id;
344 }
345 
346 void JfrThreadLocal::stop_impersonating(const Thread* t) {
347   assert(t != nullptr, "invariant");
348   JfrThreadLocal* const tl = t->jfr_thread_local();
349   if (is_impersonating(t)) {
350     tl->_thread_id_alias = max_julong;
351   }
352   assert(!is_impersonating(t), "invariant");
353 }
354 
355 typedef JfrOopTraceId<ThreadIdAccess> AccessThreadTraceId;
356 
// Brings the mounted vthread's epoch up to date on both the native and the
// Java side, and writes a thread checkpoint for tid. The store ordering
// (native first) is deliberate; see the comments below.
void JfrThreadLocal::set_vthread_epoch(const JavaThread* jt, traceid tid, u2 epoch) {
  assert(jt != nullptr, "invariant");
  assert(is_vthread(jt), "invariant");
  // To support event recursion, we update the native side first,
  // this provides the terminating case.
  Atomic::store(&jt->jfr_thread_local()->_vthread_epoch, epoch);
  /*
  * The java side, i.e. the vthread object, can now be updated.
  * Accessing the vthread object itself is a recursive case,
  * because it can trigger additional events, e.g.
  * loading the oop through load barriers.
  * Note there is a potential problem with this solution:
  * The recursive write hitting the terminating case will
  * use the thread id _before_ the checkpoint is committed.
  * Hence, the periodic thread can possibly flush that event
  * to a segment that does not include an associated checkpoint.
  * Considered rare and quite benign for now. The worst case is
  * that thread information for that event is not resolvable, i.e. null.
  */
  oop vthread = jt->vthread();
  assert(vthread != nullptr, "invariant");
  AccessThreadTraceId::set_epoch(vthread, epoch);
  JfrCheckpointManager::write_checkpoint(const_cast<JavaThread*>(jt), tid, vthread);
}
381 
// Returns the trace id of the currently mounted vthread (atomic read;
// the field is updated on mount/unmount in on_set_current_thread()).
traceid JfrThreadLocal::vthread_id(const Thread* t) {
  assert(t != nullptr, "invariant");
  return Atomic::load(&t->jfr_thread_local()->_vthread_id);
}

// Returns the native-side epoch of the currently mounted vthread.
u2 JfrThreadLocal::vthread_epoch(const JavaThread* jt) {
  assert(jt != nullptr, "invariant");
  return Atomic::load(&jt->jfr_thread_local()->_vthread_epoch);
}
391 
// Returns the effective trace id for t: an active alias takes precedence;
// non-Java threads and unmounted carriers report the jvm thread id; a
// mounted vthread reports its own id, refreshing its epoch as a side
// effect when the global epoch has advanced.
traceid JfrThreadLocal::thread_id(const Thread* t) {
  assert(t != nullptr, "invariant");
  if (is_impersonating(t)) {
    return t->jfr_thread_local()->_thread_id_alias;
  }
  JfrThreadLocal* const tl = t->jfr_thread_local();
  if (!t->is_Java_thread() || !Atomic::load_acquire(&tl->_vthread)) {
    return jvm_thread_id(t, tl);
  }
  // virtual thread
  const JavaThread* jt = JavaThread::cast(t);
  const traceid tid = vthread_id(jt);
  assert(tid != 0, "invariant");
  if (!tl->is_vthread_excluded()) {
    // Re-publish thread info when the global epoch has shifted since the
    // vthread's last checkpoint.
    const u2 current_epoch = AccessThreadTraceId::current_epoch();
    if (vthread_epoch(jt) != current_epoch) {
      set_vthread_epoch(jt, tid, current_epoch);
    }
  }
  return tid;
}
413 
414 // When not recording, there is no checkpoint system
415 // in place for writing vthread information.
416 traceid JfrThreadLocal::external_thread_id(const Thread* t) {
417   assert(t != nullptr, "invariant");
418   return JfrRecorder::is_recording() ? thread_id(t) : jvm_thread_id(t);
419 }
420 
421 inline traceid load_java_thread_id(const Thread* t) {
422   assert(t != nullptr, "invariant");
423   assert(t->is_Java_thread(), "invariant");
424   oop threadObj = JavaThread::cast(t)->threadObj();
425   return threadObj != nullptr ? AccessThreadTraceId::id(threadObj) : 0;
426 }
427 
// Assigns the jvm thread id once, under the thread-local spinlock.
// Java threads take the id from their threadObj (and mirror it into
// _vthread_id); non-Java threads get a fresh id from ThreadIdentifier.
// Returns the existing id if already assigned.
traceid JfrThreadLocal::assign_thread_id(const Thread* t, JfrThreadLocal* tl) {
  assert(t != nullptr, "invariant");
  assert(tl != nullptr, "invariant");
  JfrSpinlockHelper spinlock(&tl->_critical_section);
  traceid tid = tl->_jvm_thread_id;
  if (tid == 0) {
    if (t->is_Java_thread()) {
      // NOTE(review): load_java_thread_id() returns 0 when threadObj is
      // not yet set — presumably callers only reach here after it is
      // installed; confirm.
      tid = load_java_thread_id(t);
      tl->_jvm_thread_id = tid;
      Atomic::store(&tl->_vthread_id, tid);
      return tid;
    }
    tid = static_cast<traceid>(ThreadIdentifier::next());
    tl->_jvm_thread_id = tid;
  }
  return tid;
}
445 
446 traceid JfrThreadLocal::jvm_thread_id(const Thread* t, JfrThreadLocal* tl) {
447   assert(t != nullptr, "invariant");
448   assert(tl != nullptr, "invariant");
449   return tl->_jvm_thread_id != 0 ? tl->_jvm_thread_id : JfrThreadLocal::assign_thread_id(t, tl);
450 }
451 
// Convenience overload using t's own JfrThreadLocal.
traceid JfrThreadLocal::jvm_thread_id(const Thread* t) {
  assert(t != nullptr, "invariant");
  return jvm_thread_id(t, t->jfr_thread_local());
}
456 
// Returns true if jt currently has a virtual thread mounted
// (acquire pairs with the release stores in on_set_current_thread()).
bool JfrThreadLocal::is_vthread(const JavaThread* jt) {
  assert(jt != nullptr, "invariant");
  return Atomic::load_acquire(&jt->jfr_thread_local()->_vthread);
}
461 
// A thread oop different from jt's own threadObj identifies a mounted
// virtual thread.
inline bool is_virtual(const JavaThread* jt, oop thread) {
  assert(jt != nullptr, "invariant");
  return thread != jt->threadObj();
}
466 
// Mount/unmount hook: called when jt's current thread object changes.
// Publishes the vthread id/epoch/exclusion before flipping _vthread with a
// release store, so readers that observe _vthread == true (via acquire)
// also observe consistent vthread state.
void JfrThreadLocal::on_set_current_thread(JavaThread* jt, oop thread) {
  assert(jt != nullptr, "invariant");
  assert(thread != nullptr, "invariant");
  JfrThreadLocal* const tl = jt->jfr_thread_local();
  if (!is_virtual(jt, thread)) {
    // Back on the carrier/platform identity.
    Atomic::release_store(&tl->_vthread, false);
    return;
  }
  Atomic::store(&tl->_vthread_id, AccessThreadTraceId::id(thread));
  // The raw epoch carries the exclusion state in a dedicated bit.
  const u2 epoch_raw = AccessThreadTraceId::epoch(thread);
  const bool excluded = epoch_raw & excluded_bit;
  Atomic::store(&tl->_vthread_excluded, excluded);
  if (!excluded) {
    Atomic::store(&tl->_vthread_epoch, static_cast<u2>(epoch_raw & epoch_mask));
  }
  Atomic::release_store(&tl->_vthread, true);
}
484 
485 Arena* JfrThreadLocal::dcmd_arena(JavaThread* jt) {
486   assert(jt != nullptr, "invariant");
487   JfrThreadLocal* tl = jt->jfr_thread_local();
488   Arena* arena = tl->_dcmd_arena;
489   if (arena != nullptr) {
490     return arena;
491   }
492   arena = new (mtTracing) Arena(mtTracing);
493   tl->_dcmd_arena = arena;
494   return arena;
495 }