< prev index next >

src/share/vm/runtime/jniHandles.cpp

Print this page




  99       MutexLocker ml(JNIGlobalHandle_lock);
 100       assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
 101       res = _weak_global_handles->allocate_handle(obj());
 102     }
 103     // Add weak tag.
 104     assert(is_ptr_aligned(res, weak_tag_alignment), "invariant");
 105     char* tptr = reinterpret_cast<char*>(res) + weak_tag_value;
 106     res = reinterpret_cast<jobject>(tptr);
 107   } else {
 108     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
 109   }
 110   return res;
 111 }
 112 
 113 // Resolve a weak JNI handle to its referent.
 114 // Precondition: 'handle' carries the weak tag (checked via is_jweak).
 115 // The raw referent is loaded through jweak_ref and then filtered by
 116 // guard_value<external_guard>; the result may be NULL (e.g. when the
 117 // referent has been collected).
 118 template<bool external_guard>
 114 oop JNIHandles::resolve_jweak(jweak handle) {
 115   assert(is_jweak(handle), "precondition");
 116   oop result = jweak_ref(handle);
 117   result = guard_value<external_guard>(result);
#if INCLUDE_ALL_GCS
 119   if (result != NULL && UseG1GC) {
 120     // Under G1, publish the just-resolved referent to the SATB queue
 121     // so concurrent marking treats it as live.  NOTE(review): presumably
 122     // this prevents a referent observed through a jweak during marking
 123     // from being reclaimed — confirm against the G1 SATB barrier contract.
 120     G1SATBCardTableModRefBS::enqueue(result);
 121   }
#endif // INCLUDE_ALL_GCS
 123   return result;
 124 }
 125 
 126 // Explicit instantiations for both values of external_guard.
 126 template oop JNIHandles::resolve_jweak<true>(jweak);
 127 template oop JNIHandles::resolve_jweak<false>(jweak);
 128 
 129 // Destroy a global JNI handle.  The slot is not freed immediately; it is
 130 // overwritten with the distinguished deleted_handle() sentinel so that a
 131 // later free-list rebuild (see JNIHandleBlock::rebuild_free_list) can
 132 // recycle it.  Passing NULL is a no-op, per the JNI DeleteGlobalRef spec.
 129 void JNIHandles::destroy_global(jobject handle) {
 130   if (handle == NULL) {
 131     return;  // nothing to do for a NULL handle
 132   }
 133   assert(is_global_handle(handle), "Invalid delete of global JNI handle");
 134   jobject_ref(handle) = deleted_handle();
 135 }
 135 
 136 void JNIHandles::destroy_weak_global(jobject handle) {
 137   if (handle != NULL) {
 138     jweak_ref(handle) = deleted_handle();
 139   }


 486     // Append new block
 487     Thread* thread = Thread::current();
 488     Handle obj_handle(thread, obj);
 489     // This can block, so we need to preserve obj across call.
 490     _last->_next = JNIHandleBlock::allocate_block(thread);
 491     _last = _last->_next;
 492     _allocate_before_rebuild--;
 493     obj = obj_handle();
 494   }
 495   return allocate_handle(obj);  // retry
 496 }
 497 
 498 
 499 void JNIHandleBlock::rebuild_free_list() {
 500   assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
 501   int free = 0;
 502   int blocks = 0;
 503   for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
 504     for (int index = 0; index < current->_top; index++) {
 505       oop* handle = &(current->_handles)[index];
 506       if (*handle ==  JNIHandles::deleted_handle()) {
 507         // this handle was cleared out by a delete call, reuse it
 508         *handle = (oop) _free_list;
 509         _free_list = handle;
 510         free++;
 511       }
 512     }
 513     // we should not rebuild free list if there are unused handles at the end
 514     assert(current->_top == block_size_in_oops, "just checking");
 515     blocks++;
 516   }
 517   // Heuristic: if more than half of the handles are free we rebuild next time
 518   // as well, otherwise we append a corresponding number of new blocks before
 519   // attempting a free list rebuild again.
 520   int total = blocks * block_size_in_oops;
 521   int extra = total - 2*free;
 522   if (extra > 0) {
 523     // Not as many free handles as we would like - compute number of new blocks to append
 524     _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
 525   }
 526   if (TraceJNIHandleAllocation) {




  99       MutexLocker ml(JNIGlobalHandle_lock);
 100       assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
 101       res = _weak_global_handles->allocate_handle(obj());
 102     }
 103     // Add weak tag.
 104     assert(is_ptr_aligned(res, weak_tag_alignment), "invariant");
 105     char* tptr = reinterpret_cast<char*>(res) + weak_tag_value;
 106     res = reinterpret_cast<jobject>(tptr);
 107   } else {
 108     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
 109   }
 110   return res;
 111 }
 112 
 113 // Resolve a weak JNI handle to its referent.
 114 // Precondition: 'handle' carries the weak tag (checked via is_jweak).
 115 // The raw referent is loaded through jweak_ref and then filtered by
 116 // guard_value<external_guard>; the result may be NULL (e.g. when the
 117 // referent has been collected).
 118 template<bool external_guard>
 114 oop JNIHandles::resolve_jweak(jweak handle) {
 115   assert(is_jweak(handle), "precondition");
 116   oop result = jweak_ref(handle);
 117   result = guard_value<external_guard>(result);
#if INCLUDE_ALL_GCS
 119   // Apply the SATB pre-barrier for collectors that use snapshot-at-the-
 120   // beginning marking: G1 always, Shenandoah only when its SATB barrier
 121   // is enabled.  NOTE(review): presumably this keeps a referent observed
 122   // through a jweak during concurrent marking alive — confirm against the
 123   // SATB barrier contract.  Shenandoah reuses the G1 SATB enqueue here.
 119   if (result != NULL && (UseG1GC || (UseShenandoahGC && ShenandoahSATBBarrier))) {
 120     G1SATBCardTableModRefBS::enqueue(result);
 121   }
#endif // INCLUDE_ALL_GCS
 123   return result;
 124 }
 125 
 126 // Explicit instantiations for both values of external_guard.
 126 template oop JNIHandles::resolve_jweak<true>(jweak);
 127 template oop JNIHandles::resolve_jweak<false>(jweak);
 128 
 129 // Destroy a global JNI handle.  The slot is not freed immediately; it is
 130 // overwritten with the distinguished deleted_handle() sentinel so that a
 131 // later free-list rebuild (see JNIHandleBlock::rebuild_free_list) can
 132 // recycle it.  Passing NULL is a no-op, per the JNI DeleteGlobalRef spec.
 129 void JNIHandles::destroy_global(jobject handle) {
 130   if (handle == NULL) {
 131     return;  // nothing to do for a NULL handle
 132   }
 133   assert(is_global_handle(handle), "Invalid delete of global JNI handle");
 134   jobject_ref(handle) = deleted_handle();
 135 }
 135 
 136 void JNIHandles::destroy_weak_global(jobject handle) {
 137   if (handle != NULL) {
 138     jweak_ref(handle) = deleted_handle();
 139   }


 486     // Append new block
 487     Thread* thread = Thread::current();
 488     Handle obj_handle(thread, obj);
 489     // This can block, so we need to preserve obj across call.
 490     _last->_next = JNIHandleBlock::allocate_block(thread);
 491     _last = _last->_next;
 492     _allocate_before_rebuild--;
 493     obj = obj_handle();
 494   }
 495   return allocate_handle(obj);  // retry
 496 }
 497 
 498 
 499 void JNIHandleBlock::rebuild_free_list() {
 500   assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
 501   int free = 0;
 502   int blocks = 0;
 503   for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
 504     for (int index = 0; index < current->_top; index++) {
 505       oop* handle = &(current->_handles)[index];
 506       if (*handle == JNIHandles::deleted_handle()) {
 507         // this handle was cleared out by a delete call, reuse it
 508         *handle = (oop) _free_list;
 509         _free_list = handle;
 510         free++;
 511       }
 512     }
 513     // we should not rebuild free list if there are unused handles at the end
 514     assert(current->_top == block_size_in_oops, "just checking");
 515     blocks++;
 516   }
 517   // Heuristic: if more than half of the handles are free we rebuild next time
 518   // as well, otherwise we append a corresponding number of new blocks before
 519   // attempting a free list rebuild again.
 520   int total = blocks * block_size_in_oops;
 521   int extra = total - 2*free;
 522   if (extra > 0) {
 523     // Not as many free handles as we would like - compute number of new blocks to append
 524     _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
 525   }
 526   if (TraceJNIHandleAllocation) {


< prev index next >