src/hotspot/share/runtime/thread.hpp

 44 #include "jfr/support/jfrThreadExtension.hpp"
 45 #endif
 46 
 47 class CompilerThread;
 48 class HandleArea;
 49 class HandleMark;
 50 class JvmtiRawMonitor;
 51 class NMethodClosure;
 52 class Metadata;
 53 class OopClosure;
 54 class OSThread;
 55 class ParkEvent;
 56 class ResourceArea;
 57 class SafeThreadsListPtr;
 58 class ThreadClosure;
 59 class ThreadsList;
 60 class ThreadsSMRSupport;
 61 class VMErrorCallback;
 62 
 63 
 64 class PerfTraceTime;
 65 
 66 DEBUG_ONLY(class ResourceMark;)
 67 
 68 class WorkerThread;
 69 
 70 class JavaThread;
 71 
 72 // Class hierarchy
 73 // - Thread
 74 //   - JavaThread
 75 //     - various subclasses eg CompilerThread, ServiceThread
 76 //   - NonJavaThread
 77 //     - NamedThread
 78 //       - VMThread
 79 //       - ConcurrentGCThread
 80 //       - WorkerThread
 81 //     - WatcherThread
 82 //     - JfrThreadSampler
 83 //     - JfrCPUSamplerThread
 84 //     - LogAsyncWriter
 85 //
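
Code that walks this hierarchy normally dispatches on the Thread::is_*() predicates rather than C++ RTTI. A minimal sketch, assuming the existing is_Java_thread()/JavaThread::cast() helpers and the virtual name() accessor, shown here only for illustration:

    // Illustrative only: report which side of the hierarchy a thread is on.
    static void describe_thread(Thread* t, outputStream* st) {
      if (t->is_Java_thread()) {
        JavaThread* jt = JavaThread::cast(t);  // checked downcast after the predicate
        st->print_cr("JavaThread: %s", jt->name());
      } else {
        st->print_cr("NonJavaThread: %s", t->name());
      }
    }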

617   void init_wx();
618   WXMode enable_wx(WXMode new_state);
619 
620   void assert_wx_state(WXMode expected) {
621     assert(_wx_state == expected, "wrong state");
622   }
623 #endif // __APPLE__ && AARCH64
624 
625  private:
626   bool _in_asgct = false;
627  public:
628   bool in_asgct() const { return _in_asgct; }
629   void set_in_asgct(bool value) { _in_asgct = value; }
630   static bool current_in_asgct() {
631     Thread *cur = Thread::current_or_null_safe();
632     return cur != nullptr && cur->in_asgct();
633   }
634 
635  private:
636   VMErrorCallback* _vm_error_callbacks;
637 
638   bool  _profile_vm_locks;
639   bool  _profile_vm_calls;
640   bool  _profile_vm_ops;
641   bool  _profile_rt_calls;
642   bool  _profile_upcalls;
643 
644   jlong    _all_bc_counter_value;
645   jlong _clinit_bc_counter_value;
646 
647   PerfTraceTime* _current_rt_call_timer;
648  public:
649   bool     profile_vm_locks() const { return _profile_vm_locks; }
650   void set_profile_vm_locks(bool v) { _profile_vm_locks = v; }
651 
652   bool     profile_vm_calls() const { return _profile_vm_calls; }
653   void set_profile_vm_calls(bool v) { _profile_vm_calls = v; }
654 
655   bool     profile_vm_ops() const { return _profile_vm_ops; }
656   void set_profile_vm_ops(bool v) { _profile_vm_ops = v; }
657 
658   bool     profile_rt_calls() const { return _profile_rt_calls; }
659   void set_profile_rt_calls(bool v) { _profile_rt_calls = v; }
660 
661   bool     profile_upcalls() const { return _profile_upcalls; }
662   void set_profile_upcalls(bool v) { _profile_upcalls = v; }
663 
664   PerfTraceTime*     current_rt_call_timer() const           { return _current_rt_call_timer;            }
665   void           set_current_rt_call_timer(PerfTraceTime* c) {        _current_rt_call_timer = c;        }
666   bool           has_current_rt_call_timer() const           { return _current_rt_call_timer != nullptr; }
667 
668   bool do_profile_rt_call() const {
669     return ProfileRuntimeCalls && profile_rt_calls() && !has_current_rt_call_timer();
670   }
671 
672   jlong        bc_counter_value() const { return    _all_bc_counter_value; }
673 
674   jlong clinit_bc_counter_value() const { return _clinit_bc_counter_value; }
675 
676   void inc_clinit_bc_counter_value(jlong l) { _clinit_bc_counter_value += l; }
677 
678   static ByteSize bc_counter_offset() { return byte_offset_of(Thread, _all_bc_counter_value); }
679 };
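
For the W^X members declared above (init_wx()/enable_wx()/assert_wx_state(), only under __APPLE__ && AARCH64): enable_wx() returns the previous mode so a caller can restore it when leaving a scope. A minimal RAII sketch, assuming the WXWrite/WXExec WXMode values used by the macOS/AArch64 port; HotSpot's existing ThreadWXEnable helper fills this role in practice:

    // Sketch of a scope that makes code memory writable for the current
    // thread and restores the prior mode on exit (assumes WXMode has WXWrite).
    class WXWriteScope {
      Thread* _thread;
      WXMode  _old_mode;
     public:
      WXWriteScope(Thread* t) : _thread(t), _old_mode(t->enable_wx(WXWrite)) {}
      ~WXWriteScope() { _thread->enable_wx(_old_mode); }
    };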
680 
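The bytecode-counter members above expose bc_counter_offset() so generated code can increment _all_bc_counter_value directly, while the <clinit> share is maintained from C++. A hypothetical caller (the function name is illustrative) could look like:

    // Charge bytecodes executed inside a class initializer to the per-thread
    // <clinit> counter and print the running totals.
    static void account_clinit_bytecodes(Thread* current, jlong executed) {
      current->inc_clinit_bc_counter_value(executed);
      tty->print_cr("bc total=" JLONG_FORMAT " clinit=" JLONG_FORMAT,
                    current->bc_counter_value(),
                    current->clinit_bc_counter_value());
    }
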
681 class ProfileVMCallContext : StackObj {
682  private:
683   Thread* _thread;
684   bool _enabled;
685   PerfTraceTime* _timer;
686 
687   static int _perf_nested_runtime_calls_count;
688 
689   static const char* name(PerfTraceTime* t);
690   static void notify_nested_rt_call(PerfTraceTime* current, PerfTraceTime* inner_timer);
691  public:
692   inline ProfileVMCallContext(Thread* current, PerfTraceTime* timer, bool is_on)
693   : _thread(current), _enabled(is_on), _timer(timer) {
694     if (_enabled) {
695       assert(timer != nullptr, "");
696       assert(_thread->current_rt_call_timer() == nullptr, "%s", name(_thread->current_rt_call_timer()));
697       _thread->set_current_rt_call_timer(timer);
698     } else if (current->profile_rt_calls()) {
699       notify_nested_rt_call(current->current_rt_call_timer(), timer);
700     }
701   }
702 
703   inline ~ProfileVMCallContext() {
704     if (_enabled) {
705       assert(_timer == _thread->current_rt_call_timer(),
706              "%s vs %s", name(_timer), name(_thread->current_rt_call_timer()));
707       _thread->set_current_rt_call_timer(nullptr);
708     }
709   }
710 
711   static int nested_runtime_calls_count() { return _perf_nested_runtime_calls_count; };
712 };
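
ProfileVMCallContext ties a PerfTraceTime to the owning thread through the set_current_rt_call_timer()/do_profile_rt_call() accessors declared in Thread above; nested runtime calls are reported via notify_nested_rt_call() instead of being timed twice. A hypothetical call site (timer construction elided, names illustrative):

    // Sketch of a runtime entry that attributes its time to 'timer' unless an
    // outer runtime call is already being profiled on this thread.
    void some_runtime_entry(JavaThread* current, PerfTraceTime* timer) {
      ProfileVMCallContext ctx(current, timer, current->do_profile_rt_call());
      // ... body of the runtime call ...
    }   // destructor clears current_rt_call_timer() when profiling was enabled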
713 
714 class PauseRuntimeCallProfiling : public StackObj {
715  protected:
716   Thread* _thread;
717   bool _enabled;
718   PerfTraceTime* _timer;
719 
720  public:
721   inline PauseRuntimeCallProfiling(Thread* current, bool is_on)
722   : _thread(current), _enabled(is_on), _timer(nullptr) {
723     if (_enabled) {
724       _timer = _thread->current_rt_call_timer();
725       _thread->set_current_rt_call_timer(nullptr);
726     }
727   }
728 
729   inline ~PauseRuntimeCallProfiling () {
730     if (_enabled) {
731       guarantee(_thread->current_rt_call_timer() == nullptr, "");
732       _thread->set_current_rt_call_timer(_timer); // restore
733     }
734   }
735 };
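
PauseRuntimeCallProfiling temporarily detaches the thread's current runtime-call timer so that a nested section is not charged to the outer call; the destructor puts the saved timer back. A minimal usage sketch (function name illustrative):

    // Run work that should not be attributed to the runtime call currently
    // being profiled on this thread (e.g. an upcall or class initialization).
    void run_unattributed(JavaThread* current) {
      PauseRuntimeCallProfiling pause(current, current->profile_rt_calls());
      // ... work here is excluded from the outer call's timer ...
    }   // destructor restores the saved timer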
736 
737 class ThreadInAsgct {
738  private:
739   Thread* _thread;
740   bool _saved_in_asgct;
741  public:
742   ThreadInAsgct(Thread* thread) : _thread(thread) {
743     assert(thread != nullptr, "invariant");
744     // Allow AsyncGetCallTrace to be reentrant - save the previous state.
745     _saved_in_asgct = thread->in_asgct();
746     thread->set_in_asgct(true);
747   }
748   ~ThreadInAsgct() {
749     assert(_thread->in_asgct(), "invariant");
750     _thread->set_in_asgct(_saved_in_asgct);
751   }
752 };
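
ThreadInAsgct brackets an AsyncGetCallTrace-style stack walk so that error handling (via Thread::current_in_asgct()) can tell it is running inside the async sampling path; saving the previous flag keeps the guard reentrant. A usage sketch (routine name and signature are illustrative):

    // Hypothetical sampling routine: the guard sets in_asgct() for the
    // duration of the signal-safe stack walk and restores the previous value.
    void sample_thread_stack(JavaThread* thread /*, output trace elided */) {
      ThreadInAsgct tia(thread);
      // ... walk the sampled thread's stack here ...
    }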
753 
754 // Inline implementation of Thread::current()