src/hotspot/share/oops/stackChunkOop.inline.hpp

@@ -86,10 +86,16 @@
  inline void stackChunkOopDesc::set_max_thawing_size(int value)  {
    assert(value >= 0, "size must be >= 0");
    jdk_internal_vm_StackChunk::set_maxThawingSize(this, (jint)value);
  }
  
+ inline uint8_t stackChunkOopDesc::lockStackSize() const         { return jdk_internal_vm_StackChunk::lockStackSize(as_oop()); }
+ inline void stackChunkOopDesc::set_lockStackSize(uint8_t value) { jdk_internal_vm_StackChunk::set_lockStackSize(this, value); }
+ 
+ inline ObjectMonitor* stackChunkOopDesc::objectMonitor() const       { return (ObjectMonitor*)jdk_internal_vm_StackChunk::objectMonitor(as_oop()); }
+ inline void stackChunkOopDesc::set_objectMonitor(ObjectMonitor* mon) { jdk_internal_vm_StackChunk::set_objectMonitor(this, (address)mon); }
+ 
  inline oop stackChunkOopDesc::cont() const                {
    if (UseZGC && !ZGenerational) {
      assert(!UseCompressedOops, "Non-generational ZGC does not support compressed oops");
      // The state of the cont oop is used by XCollectedHeap::requires_barriers,
      // to determine the age of the stackChunkOopDesc. For that to work, it is

@@ -164,14 +170,21 @@
    set_flags(0);
  }
  
  inline bool stackChunkOopDesc::has_mixed_frames() const { return is_flag(FLAG_HAS_INTERPRETED_FRAMES); }
  inline void stackChunkOopDesc::set_has_mixed_frames(bool value) {
-   assert((flags() & ~FLAG_HAS_INTERPRETED_FRAMES) == 0, "other flags should not be set");
+   assert((flags() & ~(FLAG_HAS_INTERPRETED_FRAMES | FLAG_PREEMPTED)) == 0, "other flags should not be set");
    set_flag(FLAG_HAS_INTERPRETED_FRAMES, value);
  }
  
+ inline bool stackChunkOopDesc::is_preempted() const         { return is_flag(FLAG_PREEMPTED); }
+ inline void stackChunkOopDesc::set_is_preempted(bool value) { set_flag(FLAG_PREEMPTED, value); }
+ inline bool stackChunkOopDesc::preempted_on_monitorenter() const   { return objectMonitor() != nullptr; }
+ 
+ inline bool stackChunkOopDesc::has_lockStack() const         { return is_flag(FLAG_HAS_LOCKSTACK); }
+ inline void stackChunkOopDesc::set_has_lockStack(bool value) { set_flag(FLAG_HAS_LOCKSTACK, value); }
+ 
  inline bool stackChunkOopDesc::is_gc_mode() const                  { return is_flag(FLAG_GC_MODE); }
  inline bool stackChunkOopDesc::is_gc_mode_acquire() const          { return is_flag_acquire(FLAG_GC_MODE); }
  inline void stackChunkOopDesc::set_gc_mode(bool value)             { set_flag(FLAG_GC_MODE, value); }
  
  inline bool stackChunkOopDesc::has_bitmap() const                  { return is_flag(FLAG_HAS_BITMAP); }

@@ -190,10 +203,22 @@
      f.handle_deopted();
    }
    do_barriers0<barrier>(f, map);
  }
  
+ template <typename OopT, class StackChunkLockStackClosureType>
+ inline void stackChunkOopDesc::iterate_lockstack(StackChunkLockStackClosureType* closure) {
+   if (LockingMode != LM_LIGHTWEIGHT) {
+     return;
+   }
+   int cnt = lockStackSize();
+   intptr_t* lockstart_addr = start_address();
+   for (int i = 0; i < cnt; i++) {
+     closure->do_oop((OopT*)&lockstart_addr[i]);
+   }
+ }
+ 
  template <class StackChunkFrameClosureType>
  inline void stackChunkOopDesc::iterate_stack(StackChunkFrameClosureType* closure) {
    has_mixed_frames() ? iterate_stack<ChunkFrames::Mixed>(closure)
                       : iterate_stack<ChunkFrames::CompiledOnly>(closure);
  }

@@ -210,19 +235,17 @@
      RegisterMap full_map(nullptr,
                           RegisterMap::UpdateMap::include,
                           RegisterMap::ProcessFrames::skip,
                           RegisterMap::WalkContinuation::include);
      full_map.set_include_argument_oops(false);
+     closure->do_frame(f, map);
  
      f.next(&full_map);
- 
-     assert(!f.is_done(), "");
-     assert(f.is_compiled(), "");
+     if (f.is_done()) return;
  
      should_continue = closure->do_frame(f, &full_map);
-     f.next(map);
-     f.handle_deopted(); // the stub caller might be deoptimized (as it's not at a call)
+     f.next(map);
    }
    assert(!f.is_stub(), "");
  
    for(; should_continue && !f.is_done(); f.next(map)) {
      if (frame_kind == ChunkFrames::Mixed) {

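The stub-frame handling in iterate_stack() changes in three ways: the stub frame itself is now passed to the closure with the small map, its caller frame (when present) is visited with the full map, and the hard asserts are replaced by an early return when no frame follows the stub. Below is a minimal standalone sketch of that revised control flow, with mock frame/closure types standing in for StackChunkFrameStream and the real register maps.

    // Standalone sketch (not HotSpot code): revised stub-frame walk order.
    #include <cstdio>
    #include <string>
    #include <vector>

    struct MockFrame { std::string kind; };  // "stub", "compiled", ...

    struct MockFrameStream {
      std::vector<MockFrame> frames;
      unsigned pos = 0;
      bool is_done() const { return pos >= frames.size(); }
      bool is_stub() const { return !is_done() && frames[pos].kind == "stub"; }
      const MockFrame& current() const { return frames[pos]; }
      void next() { pos++; }
    };

    struct MockClosure {
      // 'full_map' mimics visiting a frame with &full_map vs. the small map.
      bool do_frame(const MockFrame& fr, bool full_map) {
        std::printf("frame %-8s (full_map=%d)\n", fr.kind.c_str(), full_map);
        return true;  // keep walking
      }
    };

    static void iterate_stack_sketch(MockFrameStream f, MockClosure* closure) {
      bool should_continue = true;
      if (f.is_stub()) {
        closure->do_frame(f.current(), /*full_map*/ false);  // visit the stub itself
        f.next();
        if (f.is_done()) return;                             // early return replaces asserts
        should_continue = closure->do_frame(f.current(), /*full_map*/ true);
        f.next();
      }
      for (; should_continue && !f.is_done(); f.next()) {
        should_continue = closure->do_frame(f.current(), /*full_map*/ false);
      }
    }

    int main() {
      MockFrameStream fs;
      fs.frames = { {"stub"}, {"compiled"}, {"compiled"} };
      MockClosure cl;
      iterate_stack_sketch(fs, &cl);
      return 0;
    }
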
@@ -279,11 +302,11 @@
  inline MemRegion stackChunkOopDesc::range() {
    return MemRegion((HeapWord*)this, size());
  }
  
  inline int stackChunkOopDesc::relativize_usp_offset(const frame& fr, const int usp_offset_in_bytes) const {
-   assert(fr.is_compiled_frame() || fr.cb()->is_safepoint_stub(), "");
+   assert(fr.is_compiled_frame() || fr.cb()->is_runtime_stub(), "");
    assert(is_in_chunk(fr.unextended_sp()), "");
  
    intptr_t* base = fr.real_fp(); // equal to the caller's sp
    intptr_t* loc = (intptr_t*)((address)fr.unextended_sp() + usp_offset_in_bytes);
    assert(base > loc, "");