
src/hotspot/share/oops/methodData.cpp

*** 21,10 ***
--- 21,11 ---
   * questions.
   *
   */
  
  #include "precompiled.hpp"
+ #include "cds/cdsConfig.hpp"
  #include "ci/ciMethodData.hpp"
  #include "classfile/vmSymbols.hpp"
  #include "compiler/compilationPolicy.hpp"
  #include "compiler/compilerDefinitions.inline.hpp"
  #include "compiler/compilerOracle.hpp"

*** 316,24 ***
  
  void TypeStackSlotEntries::clean_weak_klass_links(bool always_clean) {
    for (int i = 0; i < _number_of_entries; i++) {
      intptr_t p = type(i);
      Klass* k = (Klass*)klass_part(p);
!     if (k != nullptr && (always_clean || !k->is_loader_alive())) {
!       set_type(i, with_status((Klass*)nullptr, p));
      }
    }
  }
  
  void ReturnTypeEntry::clean_weak_klass_links(bool always_clean) {
    intptr_t p = type();
    Klass* k = (Klass*)klass_part(p);
!   if (k != nullptr && (always_clean || !k->is_loader_alive())) {
!     set_type(with_status((Klass*)nullptr, p));
    }
  }
  
  bool TypeEntriesAtCall::return_profiling_enabled() {
    return MethodData::profile_return();
  }
  
  bool TypeEntriesAtCall::arguments_profiling_enabled() {
--- 317,50 ---
  
  void TypeStackSlotEntries::clean_weak_klass_links(bool always_clean) {
    for (int i = 0; i < _number_of_entries; i++) {
      intptr_t p = type(i);
      Klass* k = (Klass*)klass_part(p);
!     if (k != nullptr) {
!       if (!always_clean && k->is_instance_klass() && InstanceKlass::cast(k)->is_not_initialized()) {
+         continue; // Skip not-yet-initialized classes. TODO: maybe clear the slot instead?
+       }
+       if (always_clean || !k->is_loader_alive()) {
+         set_type(i, with_status((Klass*)nullptr, p));
+       }
      }
    }
  }
  
+ void TypeStackSlotEntries::metaspace_pointers_do(MetaspaceClosure* it) {
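+   // Visit the Klass* stored in each profiled slot. The cell holds a
+   // tagged pointer, so strip the tag before pushing the bare Klass**
+   // (see the FIXME below about handling tagged pointers properly).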
+   for (int i = 0; i < _number_of_entries; i++) {
+     set_type(i, klass_part(type(i))); // reset tag; FIXME: properly handle tagged pointers
+     Klass** k = (Klass**)type_adr(i);
+     it->push(k);
+ //    it->push_tagged(k);
+   }
+ }
+ 
  void ReturnTypeEntry::clean_weak_klass_links(bool always_clean) {
    intptr_t p = type();
    Klass* k = (Klass*)klass_part(p);
!   if (k != nullptr) {
!     if (!always_clean && k->is_instance_klass() && InstanceKlass::cast(k)->is_not_initialized()) {
+       return; // Skip not-yet-initialized classes. TODO: maybe clear the slot instead?
+     }
+     if (always_clean || !k->is_loader_alive()) {
+       set_type(with_status((Klass*)nullptr, p));
+     }
    }
  }
  
+ void ReturnTypeEntry::metaspace_pointers_do(MetaspaceClosure* it) {
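+   // Same scheme as TypeStackSlotEntries: strip the tag from the single
+   // return-type cell before pushing the Klass* location.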
+   Klass** k = (Klass**)type_adr(); // tagged
+   set_type(klass_part(type())); // reset tag; FIXME: properly handle tagged pointers
+   it->push(k);
+ //  it->push_tagged(k);
+ }
+ 
  bool TypeEntriesAtCall::return_profiling_enabled() {
    return MethodData::profile_return();
  }
  
  bool TypeEntriesAtCall::arguments_profiling_enabled() {

*** 405,16 ***
  // which are used to store a type profile for the receiver of the check.
  
  void ReceiverTypeData::clean_weak_klass_links(bool always_clean) {
      for (uint row = 0; row < row_limit(); row++) {
      Klass* p = receiver(row);
!     if (p != nullptr && (always_clean || !p->is_loader_alive())) {
!       clear_row(row);
      }
    }
  }
  
  void ReceiverTypeData::print_receiver_data_on(outputStream* st) const {
    uint row;
    int entries = 0;
    for (row = 0; row < row_limit(); row++) {
      if (receiver(row) != nullptr)  entries++;
--- 432,28 ---
  // which are used to store a type profile for the receiver of the check.
  
  void ReceiverTypeData::clean_weak_klass_links(bool always_clean) {
      for (uint row = 0; row < row_limit(); row++) {
      Klass* p = receiver(row);
!     if (p != nullptr) {
!       if (!always_clean && p->is_instance_klass() && InstanceKlass::cast(p)->is_not_initialized()) {
+         continue; // Skip not-yet-initialized classes. TODO: maybe clear the slot instead?
+       }
+       if (always_clean || !p->is_loader_alive()) {
+         clear_row(row);
+       }
      }
    }
  }
  
+ void ReceiverTypeData::metaspace_pointers_do(MetaspaceClosure *it) {
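+   // Receiver rows store untagged Klass* values, so each row's cell
+   // can be pushed to the closure directly.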
+   for (uint row = 0; row < row_limit(); row++) {
+     Klass** recv = (Klass**)intptr_at_adr(receiver_cell_index(row));
+     it->push(recv);
+   }
+ }
+ 
  void ReceiverTypeData::print_receiver_data_on(outputStream* st) const {
    uint row;
    int entries = 0;
    for (row = 0; row < row_limit(); row++) {
      if (receiver(row) != nullptr)  entries++;

*** 639,10 ***
--- 678,15 ---
    tab(st);
    _parameters.print_data_on(st);
    st->cr();
  }
  
+ void SpeculativeTrapData::metaspace_pointers_do(MetaspaceClosure* it) {
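+   // Push the Method* recorded in the trap entry so the closure can
+   // visit (and, for CDS, relocate) it.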
+   Method** m = (Method**)intptr_at_adr(speculative_trap_method);
+   it->push(m);
+ }
+ 
  void SpeculativeTrapData::print_data_on(outputStream* st, const char* extra) const {
    print_shared(st, "SpeculativeTrapData", extra);
    tab(st);
    method()->print_short_name(st);
    st->cr();

*** 1216,14 ***
  
  // Initialize the MethodData* corresponding to a given method.
  MethodData::MethodData(const methodHandle& method)
    : _method(method()),
      // Holds Compile_lock
-     _extra_data_lock(Mutex::nosafepoint, "MDOExtraData_lock"),
      _compiler_counters(),
      _parameters_type_data_di(parameters_uninitialized) {
!   initialize();
  }
  
  void MethodData::initialize() {
    Thread* thread = Thread::current();
    NoSafepointVerifier no_safepoint;  // init function atomic wrt GC
--- 1260,18 ---
  
  // Initialize the MethodData* corresponding to a given method.
  MethodData::MethodData(const methodHandle& method)
    : _method(method()),
      // Holds Compile_lock
      _compiler_counters(),
      _parameters_type_data_di(parameters_uninitialized) {
!   _extra_data_lock = nullptr;
+   initialize();
+ }
+ 
+ MethodData::MethodData() {
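+   // Default constructor, used only when writing or mapping the CDS
+   // archive, as the assert below enforces.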
+   assert(CDSConfig::is_dumping_static_archive() || UseSharedSpaces, "only for CDS");
  }
  
  void MethodData::initialize() {
    Thread* thread = Thread::current();
    NoSafepointVerifier no_safepoint;  // init function atomic wrt GC

*** 1341,11 ***
  int MethodData::mileage_of(Method* method) {
    return MAX2(method->invocation_count(), method->backedge_count());
  }
  
  bool MethodData::is_mature() const {
!   return CompilationPolicy::is_mature(_method);
  }
  
  // Translate a bci to its corresponding data index (di).
  address MethodData::bci_to_dp(int bci) {
    ResourceMark rm;
--- 1389,11 ---
  int MethodData::mileage_of(Method* method) {
    return MAX2(method->invocation_count(), method->backedge_count());
  }
  
  bool MethodData::is_mature() const {
!   return CompilationPolicy::is_mature((MethodData*)this);
  }
  
  // Translate a bci to its corresponding data index (di).
  address MethodData::bci_to_dp(int bci) {
    ResourceMark rm;

*** 1699,12 ***
    assert(profile_parameters_jsr292_only(), "inconsistent");
    return m->is_compiled_lambda_form();
  }
  
  void MethodData::metaspace_pointers_do(MetaspaceClosure* it) {
!   log_trace(cds)("Iter(MethodData): %p", this);
    it->push(&_method);
  }
  
  void MethodData::clean_extra_data_helper(DataLayout* dp, int shift, bool reset) {
    check_extra_data_locked();
  
--- 1747,30 ---
    assert(profile_parameters_jsr292_only(), "inconsistent");
    return m->is_compiled_lambda_form();
  }
  
  void MethodData::metaspace_pointers_do(MetaspaceClosure* it) {
!   log_trace(cds)("Iter(MethodData): %p for %p %s", this, _method, _method->name_and_sig_as_C_string());
    it->push(&_method);
+   if (_parameters_type_data_di != no_parameters) {
+     parameters_type_data()->metaspace_pointers_do(it);
+   }
+   for (ProfileData* data = first_data(); is_valid(data); data = next_data(data)) {
+     data->metaspace_pointers_do(it);
+   }
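+   // The extra-data section may hold SpeculativeTrapData entries, whose
+   // Method* references must be visited as well.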
+   for (DataLayout* dp = extra_data_base();
+                    dp < extra_data_limit();
+                    dp = MethodData::next_extra(dp)) {
+     if (dp->tag() == DataLayout::speculative_trap_data_tag) {
+       ResourceMark rm;
+       SpeculativeTrapData* data = new SpeculativeTrapData(dp);
+       data->metaspace_pointers_do(it);
+     } else if (dp->tag() == DataLayout::no_tag ||
+                dp->tag() == DataLayout::arg_info_data_tag) {
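+       // An unused slot or the trailing ArgInfoData entry marks the end
+       // of the extra data.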
+       break;
+     }
+   }
  }
  
  void MethodData::clean_extra_data_helper(DataLayout* dp, int shift, bool reset) {
    check_extra_data_locked();
  

*** 1732,10 ***
--- 1798,13 ---
  class CleanExtraDataKlassClosure : public CleanExtraDataClosure {
    bool _always_clean;
  public:
    CleanExtraDataKlassClosure(bool always_clean) : _always_clean(always_clean) {}
    bool is_live(Method* m) {
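+     // Mirror of clean_weak_klass_links above: treat methods of
+     // not-yet-initialized classes as live for now.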
+     if (!_always_clean && m->method_holder()->is_instance_klass() &&
+         InstanceKlass::cast(m->method_holder())->is_not_initialized()) {
+       return true; // TODO: treat as unloaded instead?
+     }
      return !(_always_clean) && m->method_holder()->is_loader_alive();
    }
  };
  
  // Check for entries that reference a redefined method

*** 1743,10 ***
--- 1812,23 ---
  public:
    CleanExtraDataMethodClosure() {}
    bool is_live(Method* m) { return !m->is_old(); }
  };
  
+ Mutex* MethodData::extra_data_lock() {
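+   // The lock is created lazily: a Mutex cannot be stored in the CDS
+   // archive, so an archived MethodData starts with a null lock and the
+   // first caller allocates it, using a CAS to resolve races.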
+   Mutex* lock = Atomic::load(&_extra_data_lock);
+   if (lock == nullptr) {
+     lock = new Mutex(Mutex::nosafepoint, "MDOExtraData_lock");
+     Mutex* old = Atomic::cmpxchg(&_extra_data_lock, (Mutex*)nullptr, lock);
+     if (old != nullptr) {
+       // Another thread created the lock before us. Use that lock instead.
+       delete lock;
+       return old;
+     }
+   }
+   return lock;
+ }
  
  // Remove SpeculativeTrapData entries that reference an unloaded or
  // redefined method
  void MethodData::clean_extra_data(CleanExtraDataClosure* cl) {
    check_extra_data_locked();

*** 1863,17 ***
  #if INCLUDE_JVMCI
    FailedSpeculation::free_failed_speculations(get_failed_speculations_address());
  #endif
  }
  
  #ifdef ASSERT
  void MethodData::check_extra_data_locked() const {
      // Cast const away, just to be able to verify the lock
      // Usually we only want non-const accesses on the lock,
      // so this here is an exception.
      MethodData* self = (MethodData*)this;
!     assert(self->extra_data_lock()->owned_by_self(), "must have lock");
      assert(!Thread::current()->is_Java_thread() ||
             JavaThread::current()->is_in_no_safepoint_scope(),
             "JavaThread must have NoSafepointVerifier inside lock scope");
  }
  #endif
--- 1945,27 ---
  #if INCLUDE_JVMCI
    FailedSpeculation::free_failed_speculations(get_failed_speculations_address());
  #endif
  }
  
+ #if INCLUDE_CDS
+ void MethodData::remove_unshareable_info() {
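+   // Clear the lock before archiving; extra_data_lock() will recreate
+   // it lazily on first use after the archive is loaded.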
+   _extra_data_lock = nullptr;
+ }
+ 
+ void MethodData::restore_unshareable_info(TRAPS) {
+   //_extra_data_lock = new Mutex(Mutex::nosafepoint, "MDOExtraData_lock");
+ }
+ #endif // INCLUDE_CDS
+ 
  #ifdef ASSERT
  void MethodData::check_extra_data_locked() const {
      // Cast const away, just to be able to verify the lock
      // Usually we only want non-const accesses on the lock,
      // so this here is an exception.
      MethodData* self = (MethodData*)this;
!     assert(self->extra_data_lock()->owned_by_self() || CDSConfig::is_dumping_archive(), "must have lock");
      assert(!Thread::current()->is_Java_thread() ||
             JavaThread::current()->is_in_no_safepoint_scope(),
             "JavaThread must have NoSafepointVerifier inside lock scope");
  }
  #endif