
src/hotspot/share/runtime/thread.hpp

 45 #include "jfr/support/jfrThreadExtension.hpp"
 46 #endif
 47 
 48 class CompilerThread;
 49 class HandleArea;
 50 class HandleMark;
 51 class JvmtiRawMonitor;
 52 class NMethodClosure;
 53 class Metadata;
 54 class OopClosure;
 55 class OSThread;
 56 class ParkEvent;
 57 class ResourceArea;
 58 class SafeThreadsListPtr;
 59 class ThreadClosure;
 60 class ThreadsList;
 61 class ThreadsSMRSupport;
 62 class VMErrorCallback;
 63 
 64 
 65 DEBUG_ONLY(class ResourceMark;)
 66 
 67 class WorkerThread;
 68 
 69 class JavaThread;
 70 
 71 // Class hierarchy
 72 // - Thread
 73 //   - JavaThread
 74 //     - various subclasses eg CompilerThread, ServiceThread
 75 //   - NonJavaThread
 76 //     - NamedThread
 77 //       - VMThread
 78 //       - ConcurrentGCThread
 79 //       - WorkerThread
 80 //     - WatcherThread
 81 //     - JfrThreadSampler
 82 //     - JfrCPUSamplerThread
 83 //     - LogAsyncWriter
 84 //

611   void init_wx();
612   WXMode enable_wx(WXMode new_state);
613 
614   void assert_wx_state(WXMode expected) {
615     assert(_wx_state == expected, "wrong state");
616   }
617 #endif // __APPLE__ && AARCH64
618 
619  private:
620   bool _in_asgct = false;
621  public:
622   bool in_asgct() const { return _in_asgct; }
623   void set_in_asgct(bool value) { _in_asgct = value; }
624   static bool current_in_asgct() {
625     Thread *cur = Thread::current_or_null_safe();
626     return cur != nullptr && cur->in_asgct();
627   }
628 
629  private:
630   VMErrorCallback* _vm_error_callbacks;
631 };
632 
633 class ThreadInAsgct {
634  private:
635   Thread* _thread;
636   bool _saved_in_asgct;
637  public:
638   ThreadInAsgct(Thread* thread) : _thread(thread) {
639     assert(thread != nullptr, "invariant");
640     // Allow AsyncGetCallTrace to be reentrant - save the previous state.
641     _saved_in_asgct = thread->in_asgct();
642     thread->set_in_asgct(true);
643   }
644   ~ThreadInAsgct() {
645     assert(_thread->in_asgct(), "invariant");
646     _thread->set_in_asgct(_saved_in_asgct);
647   }
648 };
649 
650 // Inline implementation of Thread::current()

 45 #include "jfr/support/jfrThreadExtension.hpp"
 46 #endif
 47 
 48 class CompilerThread;
 49 class HandleArea;
 50 class HandleMark;
 51 class JvmtiRawMonitor;
 52 class NMethodClosure;
 53 class Metadata;
 54 class OopClosure;
 55 class OSThread;
 56 class ParkEvent;
 57 class ResourceArea;
 58 class SafeThreadsListPtr;
 59 class ThreadClosure;
 60 class ThreadsList;
 61 class ThreadsSMRSupport;
 62 class VMErrorCallback;
 63 
 64 
 65 class PerfTraceTime;
 66 
 67 DEBUG_ONLY(class ResourceMark;)
 68 
 69 class WorkerThread;
 70 
 71 class JavaThread;
 72 
 73 // Class hierarchy
 74 // - Thread
 75 //   - JavaThread
 76 //     - various subclasses eg CompilerThread, ServiceThread
 77 //   - NonJavaThread
 78 //     - NamedThread
 79 //       - VMThread
 80 //       - ConcurrentGCThread
 81 //       - WorkerThread
 82 //     - WatcherThread
 83 //     - JfrThreadSampler
 84 //     - JfrCPUSamplerThread
 85 //     - LogAsyncWriter
 86 //

613   void init_wx();
614   WXMode enable_wx(WXMode new_state);
615 
616   void assert_wx_state(WXMode expected) {
617     assert(_wx_state == expected, "wrong state");
618   }
619 #endif // __APPLE__ && AARCH64
620 
621  private:
622   bool _in_asgct = false;
623  public:
624   bool in_asgct() const { return _in_asgct; }
625   void set_in_asgct(bool value) { _in_asgct = value; }
626   static bool current_in_asgct() {
627     Thread *cur = Thread::current_or_null_safe();
628     return cur != nullptr && cur->in_asgct();
629   }
630 
631  private:
632   VMErrorCallback* _vm_error_callbacks;
633 
634   bool  _profile_vm_locks;
635   bool  _profile_vm_calls;
636   bool  _profile_vm_ops;
637   bool  _profile_rt_calls;
638   bool  _profile_upcalls;
639 
640   jlong    _all_bc_counter_value;
641   jlong _clinit_bc_counter_value;
642 
643   PerfTraceTime* _current_rt_call_timer;
644  public:
645   bool     profile_vm_locks() const { return _profile_vm_locks; }
646   void set_profile_vm_locks(bool v) { _profile_vm_locks = v; }
647 
648   bool     profile_vm_calls() const { return _profile_vm_calls; }
649   void set_profile_vm_calls(bool v) { _profile_vm_calls = v; }
650 
651   bool     profile_vm_ops() const { return _profile_vm_ops; }
652   void set_profile_vm_ops(bool v) { _profile_vm_ops = v; }
653 
654   bool     profile_rt_calls() const { return _profile_rt_calls; }
655   void set_profile_rt_calls(bool v) { _profile_rt_calls = v; }
656 
657   bool     profile_upcalls() const { return _profile_upcalls; }
658   void set_profile_upcalls(bool v) { _profile_upcalls = v; }
659 
660   PerfTraceTime*     current_rt_call_timer() const           { return _current_rt_call_timer;            }
661   void           set_current_rt_call_timer(PerfTraceTime* c) {        _current_rt_call_timer = c;        }
662   bool           has_current_rt_call_timer() const           { return _current_rt_call_timer != nullptr; }
663 
664   bool do_profile_rt_call() const {
665     return ProfileRuntimeCalls && profile_rt_calls() && !has_current_rt_call_timer();
666   }
667 
668   jlong        bc_counter_value() const { return    _all_bc_counter_value; }
669 
670   jlong clinit_bc_counter_value() const { return _clinit_bc_counter_value; }
671 
672   void inc_clinit_bc_counter_value(jlong l) { _clinit_bc_counter_value += l; }
673 
674   static ByteSize bc_counter_offset() { return byte_offset_of(Thread, _all_bc_counter_value); }
675 };
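
The counters and flags above are plain per-thread fields with simple accessors; a minimal sketch of reading them (the printing helper below is hypothetical and not part of this change):

  // Illustrative only: dump the per-thread bytecode counters added above.
  static void print_bc_counters(const Thread* t) {
    tty->print_cr("bytecodes executed: all=" JLONG_FORMAT ", in <clinit>=" JLONG_FORMAT,
                  t->bc_counter_value(), t->clinit_bc_counter_value());
  }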
676 
677 class ProfileVMCallContext : StackObj {
678  private:
679   Thread* _thread;
680   bool _enabled;
681   PerfTraceTime* _timer;
682 
683   static int _perf_nested_runtime_calls_count;
684 
685   static const char* name(PerfTraceTime* t);
686   static void notify_nested_rt_call(PerfTraceTime* current, PerfTraceTime* inner_timer);
687  public:
688   inline ProfileVMCallContext(Thread* current, PerfTraceTime* timer, bool is_on)
689   : _thread(current), _enabled(is_on), _timer(timer) {
690     if (_enabled) {
691       assert(timer != nullptr, "");
692       assert(_thread->current_rt_call_timer() == nullptr, "%s", name(_thread->current_rt_call_timer()));
693       _thread->set_current_rt_call_timer(timer);
694     } else if (current->profile_rt_calls()) {
695       notify_nested_rt_call(current->current_rt_call_timer(), timer);
696     }
697   }
698 
699   inline ~ProfileVMCallContext() {
700     if (_enabled) {
701       assert(_timer == _thread->current_rt_call_timer(),
702              "%s vs %s", name(_timer), name(_thread->current_rt_call_timer()));
703       _thread->set_current_rt_call_timer(nullptr);
704     }
705   }
706 
707   static int nested_runtime_calls_count() { return _perf_nested_runtime_calls_count; };
708 };
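
A minimal usage sketch for ProfileVMCallContext, assuming the caller owns a suitable PerfTraceTime (the entry-point name is illustrative, not part of this change):

  // Illustrative only: when profiling is on, the outermost runtime call on a
  // thread installs its timer; nested calls are only reported, not re-timed.
  void example_runtime_entry(Thread* current, PerfTraceTime* timer) {
    ProfileVMCallContext ctx(current, timer, current->do_profile_rt_call());
    // ... body of the runtime call executes with the timer installed ...
  } // ~ProfileVMCallContext clears current_rt_call_timer() if it installed the timer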
709 
710 class PauseRuntimeCallProfiling : public StackObj {
711  protected:
712   Thread* _thread;
713   bool _enabled;
714   PerfTraceTime* _timer;
715 
716  public:
717   inline PauseRuntimeCallProfiling(Thread* current, bool is_on)
718   : _thread(current), _enabled(is_on), _timer(nullptr) {
719     if (_enabled) {
720       _timer = _thread->current_rt_call_timer();
721       _thread->set_current_rt_call_timer(nullptr);
722     }
723   }
724 
725   inline ~PauseRuntimeCallProfiling () {
726     if (_enabled) {
727       guarantee(_thread->current_rt_call_timer() == nullptr, "");
728       _thread->set_current_rt_call_timer(_timer); // restore
729     }
730   }
731 };
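
A corresponding sketch for PauseRuntimeCallProfiling, pausing attribution around a nested region (the surrounding function is hypothetical):

  // Illustrative only: detach the active timer so work in this scope is not
  // charged to it; the destructor restores the saved timer.
  void example_untimed_region(Thread* current) {
    PauseRuntimeCallProfiling pause(current, ProfileRuntimeCalls);
    // ... work here is not attributed to the caller's runtime-call timer ...
  }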
732 
733 class ThreadInAsgct {
734  private:
735   Thread* _thread;
736   bool _saved_in_asgct;
737  public:
738   ThreadInAsgct(Thread* thread) : _thread(thread) {
739     assert(thread != nullptr, "invariant");
740     // Allow AsyncGetCallTrace to be reentrant - save the previous state.
741     _saved_in_asgct = thread->in_asgct();
742     thread->set_in_asgct(true);
743   }
744   ~ThreadInAsgct() {
745     assert(_thread->in_asgct(), "invariant");
746     _thread->set_in_asgct(_saved_in_asgct);
747   }
748 };
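
ThreadInAsgct is a small RAII guard; a hedged sketch of typical use (the sampling function is illustrative, not part of this change):

  // Illustrative only: mark the thread as inside AsyncGetCallTrace for the
  // duration of the scope; reentrant use saves and restores the prior state.
  void example_asgct_sample(Thread* raw_thread) {
    ThreadInAsgct tia(raw_thread);
    // ... walk the stack; Thread::current_in_asgct() returns true here ...
  } // destructor restores the saved in_asgct() value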
749 
750 // Inline implementation of Thread::current()