
src/hotspot/share/runtime/thread.hpp

Old version (before the change):

 44 #include "jfr/support/jfrThreadExtension.hpp"
 45 #endif
 46 
 47 class CompilerThread;
 48 class HandleArea;
 49 class HandleMark;
 50 class JvmtiRawMonitor;
 51 class NMethodClosure;
 52 class Metadata;
 53 class OopClosure;
 54 class OSThread;
 55 class ParkEvent;
 56 class ResourceArea;
 57 class SafeThreadsListPtr;
 58 class ThreadClosure;
 59 class ThreadsList;
 60 class ThreadsSMRSupport;
 61 class VMErrorCallback;
 62 
 63 
 64 DEBUG_ONLY(class ResourceMark;)
 65 
 66 class WorkerThread;
 67 
 68 class JavaThread;
 69 
 70 // Class hierarchy
 71 // - Thread
 72 //   - JavaThread
 73 //     - various subclasses eg CompilerThread, ServiceThread
 74 //   - NonJavaThread
 75 //     - NamedThread
 76 //       - VMThread
 77 //       - ConcurrentGCThread
 78 //       - WorkerThread
 79 //     - WatcherThread
 80 //     - JfrThreadSampler
 81 //     - JfrCPUSamplerThread
 82 //     - LogAsyncWriter
 83 //

618   void init_wx();
619   WXMode enable_wx(WXMode new_state);
620 
621   void assert_wx_state(WXMode expected) {
622     assert(_wx_state == expected, "wrong state");
623   }
624 #endif // __APPLE__ && AARCH64
625 
626  private:
627   bool _in_asgct = false;
628  public:
629   bool in_asgct() const { return _in_asgct; }
630   void set_in_asgct(bool value) { _in_asgct = value; }
631   static bool current_in_asgct() {
632     Thread *cur = Thread::current_or_null_safe();
633     return cur != nullptr && cur->in_asgct();
634   }
635 
636  private:
637   VMErrorCallback* _vm_error_callbacks;
638 };
639 
640 class ThreadInAsgct {
641  private:
642   Thread* _thread;
643   bool _saved_in_asgct;
644  public:
645   ThreadInAsgct(Thread* thread) : _thread(thread) {
646     assert(thread != nullptr, "invariant");
647     // Allow AsyncGetCallTrace to be reentrant - save the previous state.
648     _saved_in_asgct = thread->in_asgct();
649     thread->set_in_asgct(true);
650   }
651   ~ThreadInAsgct() {
652     assert(_thread->in_asgct(), "invariant");
653     _thread->set_in_asgct(_saved_in_asgct);
654   }
655 };
656 
657 // Inline implementation of Thread::current()

New version (after the change):

 44 #include "jfr/support/jfrThreadExtension.hpp"
 45 #endif
 46 
 47 class CompilerThread;
 48 class HandleArea;
 49 class HandleMark;
 50 class JvmtiRawMonitor;
 51 class NMethodClosure;
 52 class Metadata;
 53 class OopClosure;
 54 class OSThread;
 55 class ParkEvent;
 56 class ResourceArea;
 57 class SafeThreadsListPtr;
 58 class ThreadClosure;
 59 class ThreadsList;
 60 class ThreadsSMRSupport;
 61 class VMErrorCallback;
 62 
 63 
 64 class PerfTraceTime;
 65 
 66 DEBUG_ONLY(class ResourceMark;)
 67 
 68 class WorkerThread;
 69 
 70 class JavaThread;
 71 
 72 // Class hierarchy
 73 // - Thread
 74 //   - JavaThread
 75 //     - various subclasses eg CompilerThread, ServiceThread
 76 //   - NonJavaThread
 77 //     - NamedThread
 78 //       - VMThread
 79 //       - ConcurrentGCThread
 80 //       - WorkerThread
 81 //     - WatcherThread
 82 //     - JfrThreadSampler
 83 //     - JfrCPUSamplerThread
 84 //     - LogAsyncWriter
 85 //
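
As a reading aid for the hierarchy comment above, a minimal hedged sketch of how call sites typically branch on it; the surrounding function is illustrative only, and is_Java_thread()/JavaThread::cast() are assumed to be the existing dispatch helpers rather than part of this change.

    // Hedged sketch (not part of this file): typical dispatch on the hierarchy.
    void visit(Thread* t) {
      if (t->is_Java_thread()) {
        JavaThread* jt = JavaThread::cast(t);   // JavaThread and its subclasses
        // ... JavaThread-specific handling ...
      } else {
        // NonJavaThread: VMThread, GC workers, WatcherThread, etc.
      }
    }
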

620   void init_wx();
621   WXMode enable_wx(WXMode new_state);
622 
623   void assert_wx_state(WXMode expected) {
624     assert(_wx_state == expected, "wrong state");
625   }
626 #endif // __APPLE__ && AARCH64
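
For orientation, a hedged sketch of the save/restore discipline implied by the W^X interface declared above (macOS/AArch64 only); the surrounding function is an assumption, and the sketch relies on enable_wx() returning the previously active mode, as its signature suggests.

    // Hedged sketch: switch the thread to writable mode, do the write,
    // then restore whatever mode was active before.
    void write_to_code_cache(Thread* thread) {
      WXMode old_mode = thread->enable_wx(WXWrite);  // returns the previous mode
      // ... patch code here ...
      thread->enable_wx(old_mode);                   // restore the caller's mode
    }
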
627 
628  private:
629   bool _in_asgct = false;
630  public:
631   bool in_asgct() const { return _in_asgct; }
632   void set_in_asgct(bool value) { _in_asgct = value; }
633   static bool current_in_asgct() {
634     Thread *cur = Thread::current_or_null_safe();
635     return cur != nullptr && cur->in_asgct();
636   }
637 
638  private:
639   VMErrorCallback* _vm_error_callbacks;
640 
641   bool  _profile_vm_locks;
642   bool  _profile_vm_calls;
643   bool  _profile_vm_ops;
644   bool  _profile_rt_calls;
645   bool  _profile_upcalls;
646 
647   jlong    _all_bc_counter_value;
648   jlong _clinit_bc_counter_value;
649 
650   PerfTraceTime* _current_rt_call_timer;
651  public:
652   bool     profile_vm_locks() const { return _profile_vm_locks; }
653   void set_profile_vm_locks(bool v) { _profile_vm_locks = v; }
654 
655   bool     profile_vm_calls() const { return _profile_vm_calls; }
656   void set_profile_vm_calls(bool v) { _profile_vm_calls = v; }
657 
658   bool     profile_vm_ops() const { return _profile_vm_ops; }
659   void set_profile_vm_ops(bool v) { _profile_vm_ops = v; }
660 
661   bool     profile_rt_calls() const { return _profile_rt_calls; }
662   void set_profile_rt_calls(bool v) { _profile_rt_calls = v; }
663 
664   bool     profile_upcalls() const { return _profile_upcalls; }
665   void set_profile_upcalls(bool v) { _profile_upcalls = v; }
666 
667   PerfTraceTime*     current_rt_call_timer() const           { return _current_rt_call_timer;            }
668   void           set_current_rt_call_timer(PerfTraceTime* c) {        _current_rt_call_timer = c;        }
669   bool           has_current_rt_call_timer() const           { return _current_rt_call_timer != nullptr; }
670 
671   bool do_profile_rt_call() const {
672     return ProfileRuntimeCalls && profile_rt_calls() && !has_current_rt_call_timer();
673   }
674 
675   jlong        bc_counter_value() const { return    _all_bc_counter_value; }
676 
677   jlong clinit_bc_counter_value() const { return _clinit_bc_counter_value; }
678 
679   void inc_clinit_bc_counter_value(jlong l) { _clinit_bc_counter_value += l; }
680 
681   static ByteSize bc_counter_offset() { return byte_offset_of(Thread, _all_bc_counter_value); }
682 };
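
A hedged sketch of how the new per-thread profiling state composes: do_profile_rt_call() answers true only when the global ProfileRuntimeCalls flag is on, this thread has opted in via set_profile_rt_calls(), and no runtime-call timer is already active, so only the outermost runtime call on a thread gets timed. Variable names below are illustrative.

    // Hedged sketch, not part of the webrev:
    Thread* current = Thread::current();
    current->set_profile_rt_calls(true);       // opt this thread in
    if (current->do_profile_rt_call()) {
      // reached only for the outermost runtime call while ProfileRuntimeCalls is set;
      // a nested call sees has_current_rt_call_timer() == true and is skipped
    }
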
683 
684 class ProfileVMCallContext : StackObj {
685  private:
686   Thread* _thread;
687   bool _enabled;
688   PerfTraceTime* _timer;
689 
690   static int _perf_nested_runtime_calls_count;
691 
692   static const char* name(PerfTraceTime* t);
693   static void notify_nested_rt_call(PerfTraceTime* current, PerfTraceTime* inner_timer);
694  public:
695   inline ProfileVMCallContext(Thread* current, PerfTraceTime* timer, bool is_on)
696   : _thread(current), _enabled(is_on), _timer(timer) {
697     if (_enabled) {
698       assert(timer != nullptr, "");
699       assert(_thread->current_rt_call_timer() == nullptr, "%s", name(_thread->current_rt_call_timer()));
700       _thread->set_current_rt_call_timer(timer);
701     } else if (current->profile_rt_calls()) {
702       notify_nested_rt_call(current->current_rt_call_timer(), timer);
703     }
704   }
705 
706   inline ~ProfileVMCallContext() {
707     if (_enabled) {
708       assert(_timer == _thread->current_rt_call_timer(),
709              "%s vs %s", name(_timer), name(_thread->current_rt_call_timer()));
710       _thread->set_current_rt_call_timer(nullptr);
711     }
712   }
713 
714   static int nested_runtime_calls_count() { return _perf_nested_runtime_calls_count; };
715 };
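
A hedged usage sketch for ProfileVMCallContext; the entry point and timer names are assumptions. When is_on is true the context installs the timer as the thread's current runtime-call timer and clears it on scope exit; when it is false but the thread profiles runtime calls, the nested call is only counted via notify_nested_rt_call().

    // Hedged sketch (entry-point and timer names are illustrative):
    void some_runtime_entry(JavaThread* current, PerfTraceTime* entry_timer) {
      ProfileVMCallContext ctx(current, entry_timer, current->do_profile_rt_call());
      // ... actual runtime work; a nested runtime call on this thread is detected
      //     (current_rt_call_timer() is already set) and only counted, not timed ...
    }   // ~ProfileVMCallContext clears the thread's current rt-call timer
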
716 
717 class PauseRuntimeCallProfiling : public StackObj {
718  protected:
719   Thread* _thread;
720   bool _enabled;
721   PerfTraceTime* _timer;
722 
723  public:
724   inline PauseRuntimeCallProfiling(Thread* current, bool is_on)
725   : _thread(current), _enabled(is_on), _timer(nullptr) {
726     if (_enabled) {
727       _timer = _thread->current_rt_call_timer();
728       _thread->set_current_rt_call_timer(nullptr);
729     }
730   }
731 
732   inline ~PauseRuntimeCallProfiling () {
733     if (_enabled) {
734       guarantee(_thread->current_rt_call_timer() == nullptr, "");
735       _thread->set_current_rt_call_timer(_timer); // restore
736     }
737   }
738 };
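
A hedged sketch of the pause guard: it detaches the active timer for the enclosed scope and reinstalls it on exit, so the work inside the braces is not charged to the current runtime-call timer. The scope contents and the condition passed as is_on are illustrative.

    // Hedged sketch (illustrative scope):
    {
      PauseRuntimeCallProfiling pause(current, current->profile_rt_calls());
      // ... work that should not be attributed to the active rt-call timer ...
    }   // destructor restores the saved timer
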
739 
740 class ThreadInAsgct {
741  private:
742   Thread* _thread;
743   bool _saved_in_asgct;
744  public:
745   ThreadInAsgct(Thread* thread) : _thread(thread) {
746     assert(thread != nullptr, "invariant");
747     // Allow AsyncGetCallTrace to be reentrant - save the previous state.
748     _saved_in_asgct = thread->in_asgct();
749     thread->set_in_asgct(true);
750   }
751   ~ThreadInAsgct() {
752     assert(_thread->in_asgct(), "invariant");
753     _thread->set_in_asgct(_saved_in_asgct);
754   }
755 };
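
A hedged sketch of how an AsyncGetCallTrace-style sampler is expected to use the guard; the function name is an assumption. Because the previous flag value is saved, reentrant use is safe.

    // Hedged sketch (function name is illustrative):
    void async_collect_trace(Thread* thread) {
      ThreadInAsgct guard(thread);        // sets the flag, remembering the old value
      // ... walk the stack; Thread::current_in_asgct() reports true here,
      //     so e.g. error reporting can tell it is inside an async sample ...
    }                                      // old value restored, even when nested
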
756 
757 // Inline implementation of Thread::current()