1 /*
  2  * Copyright (c) 1998, 2021, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "classfile/vmSymbols.hpp"
 27 #include "gc/shared/collectedHeap.hpp"
 28 #include "gc/shared/oopStorage.inline.hpp"
 29 #include "gc/shared/oopStorageSet.hpp"
 30 #include "logging/log.hpp"
 31 #include "memory/iterator.hpp"
 32 #include "memory/universe.hpp"
 33 #include "oops/access.inline.hpp"
 34 #include "oops/oop.inline.hpp"
 35 #include "runtime/handles.inline.hpp"
 36 #include "runtime/javaCalls.hpp"
 37 #include "runtime/jniHandles.inline.hpp"
 38 #include "runtime/mutexLocker.hpp"
 39 #include "runtime/thread.inline.hpp"
 40 #include "utilities/align.hpp"
 41 #include "utilities/debug.hpp"
 42 
// Accessor for the OopStorage backing JNI global (strong) handles.
OopStorage* JNIHandles::global_handles() {
  return _global_handles;
}
 46 
// Accessor for the OopStorage backing JNI weak global handles.
OopStorage* JNIHandles::weak_global_handles() {
  return _weak_global_handles;
}
 50 
// Serviceability agent support.
// Set by jni_handles_init() below; NULL until then.
OopStorage* JNIHandles::_global_handles = NULL;
OopStorage* JNIHandles::_weak_global_handles = NULL;
 54 
// Create the OopStorage instances backing JNI global and weak global handles.
void jni_handles_init() {
  JNIHandles::_global_handles = OopStorageSet::create_strong("JNI Global", mtInternal);
  JNIHandles::_weak_global_handles = OopStorageSet::create_weak("JNI Weak", mtInternal);
}
 59 
// Convenience overload: make a local handle on the current thread.
jobject JNIHandles::make_local(oop obj) {
  return make_local(Thread::current(), obj);
}
 63 
 64 // Used by NewLocalRef which requires NULL on out-of-memory
 65 jobject JNIHandles::make_local(Thread* thread, oop obj, AllocFailType alloc_failmode) {
 66   if (obj == NULL) {
 67     return NULL;                // ignore null handles
 68   } else {
 69     assert(oopDesc::is_oop(obj), "not an oop");
 70     assert(thread->is_Java_thread(), "not a Java thread");
 71     assert(!current_thread_in_native(), "must not be in native");
 72     return thread->active_handles()->allocate_handle(obj, alloc_failmode);
 73   }
 74 }
 75 
 76 static void report_handle_allocation_failure(AllocFailType alloc_failmode,
 77                                              const char* handle_kind) {
 78   if (alloc_failmode == AllocFailStrategy::EXIT_OOM) {
 79     // Fake size value, since we don't know the min allocation size here.
 80     vm_exit_out_of_memory(sizeof(oop), OOM_MALLOC_ERROR,
 81                           "Cannot create %s JNI handle", handle_kind);
 82   } else {
 83     assert(alloc_failmode == AllocFailStrategy::RETURN_NULL, "invariant");
 84   }
 85 }
 86 
 87 jobject JNIHandles::make_global(Handle obj, AllocFailType alloc_failmode) {
 88   assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
 89   assert(!current_thread_in_native(), "must not be in native");
 90   jobject res = NULL;
 91   if (!obj.is_null()) {
 92     // ignore null handles
 93     assert(oopDesc::is_oop(obj()), "not an oop");
 94     oop* ptr = global_handles()->allocate();
 95     // Return NULL on allocation failure.
 96     if (ptr != NULL) {
 97       assert(*ptr == NULL, "invariant");
 98       NativeAccess<>::oop_store(ptr, obj());
 99       res = reinterpret_cast<jobject>(ptr);
100     } else {
101       report_handle_allocation_failure(alloc_failmode, "global");
102     }
103   }
104 
105   return res;
106 }
107 
108 jobject JNIHandles::make_weak_global(Handle obj, AllocFailType alloc_failmode) {
109   assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
110   assert(!current_thread_in_native(), "must not be in native");
111   jobject res = NULL;
112   if (!obj.is_null()) {
113     // ignore null handles
114     assert(oopDesc::is_oop(obj()), "not an oop");
115     oop* ptr = weak_global_handles()->allocate();
116     // Return NULL on allocation failure.
117     if (ptr != NULL) {
118       assert(*ptr == NULL, "invariant");
119       NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(ptr, obj());
120       char* tptr = reinterpret_cast<char*>(ptr) + weak_tag_value;
121       res = reinterpret_cast<jobject>(tptr);
122     } else {
123       report_handle_allocation_failure(alloc_failmode, "weak global");
124     }
125   }
126   return res;
127 }
128 
129 // Resolve some erroneous cases to NULL, rather than treating them as
130 // possibly unchecked errors.  In particular, deleted handles are
131 // treated as NULL (though a deleted and later reallocated handle
132 // isn't detected).
133 oop JNIHandles::resolve_external_guard(jobject handle) {
134   oop result = NULL;
135   if (handle != NULL) {
136     result = resolve_impl<DECORATORS_NONE, true /* external_guard */>(handle);
137   }
138   return result;
139 }
140 
141 bool JNIHandles::is_global_weak_cleared(jweak handle) {
142   assert(handle != NULL, "precondition");
143   assert(is_jweak(handle), "not a weak handle");
144   oop* oop_ptr = jweak_ptr(handle);
145   oop value = NativeAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(oop_ptr);
146   return value == NULL;
147 }
148 
149 void JNIHandles::destroy_global(jobject handle) {
150   if (handle != NULL) {
151     assert(!is_jweak(handle), "wrong method for detroying jweak");
152     oop* oop_ptr = jobject_ptr(handle);
153     NativeAccess<>::oop_store(oop_ptr, (oop)NULL);
154     global_handles()->release(oop_ptr);
155   }
156 }
157 
158 
159 void JNIHandles::destroy_weak_global(jobject handle) {
160   if (handle != NULL) {
161     assert(is_jweak(handle), "JNI handle not jweak");
162     oop* oop_ptr = jweak_ptr(handle);
163     NativeAccess<ON_PHANTOM_OOP_REF>::oop_store(oop_ptr, (oop)NULL);
164     weak_global_handles()->release(oop_ptr);
165   }
166 }
167 
168 
// Apply f to every oop in the global (strong) handle storage.
void JNIHandles::oops_do(OopClosure* f) {
  global_handles()->oops_do(f);
}
172 
173 
// Visit the weak global handles, filtered by the is_alive closure.
void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  weak_global_handles()->weak_oops_do(is_alive, f);
}
177 
178 
// Apply f to every weak global handle.
void JNIHandles::weak_oops_do(OopClosure* f) {
  weak_global_handles()->weak_oops_do(f);
}
182 
// True if storage is the JNI global (strong) handle storage.
bool JNIHandles::is_global_storage(const OopStorage* storage) {
  return _global_handles == storage;
}
186 
// A pointer is a valid handle in the given storage iff its entry is
// currently in the allocated state.
inline bool is_storage_handle(const OopStorage* storage, const oop* ptr) {
  return storage->allocation_status(ptr) == OopStorage::ALLOCATED_ENTRY;
}
190 
191 
// Classify a handle as local, global, weak global, or invalid.  Weak
// handles are identified by their tag bit and checked against the weak
// storage; untagged handles are checked against the global storage and,
// failing that, against the thread's local handle blocks and stack.
jobjectRefType JNIHandles::handle_type(Thread* thread, jobject handle) {
  assert(handle != NULL, "precondition");
  jobjectRefType result = JNIInvalidRefType;
  if (is_jweak(handle)) {
    if (is_storage_handle(weak_global_handles(), jweak_ptr(handle))) {
      result = JNIWeakGlobalRefType;
    }
  } else {
    switch (global_handles()->allocation_status(jobject_ptr(handle))) {
    case OopStorage::ALLOCATED_ENTRY:
      result = JNIGlobalRefType;
      break;

    case OopStorage::UNALLOCATED_ENTRY:
      break;                    // Invalid global handle

    case OopStorage::INVALID_ENTRY:
      // Not in global storage.  Might be a local handle.
      if (is_local_handle(thread, handle) ||
          (thread->is_Java_thread() &&
           is_frame_handle(JavaThread::cast(thread), handle))) {
        result = JNILocalRefType;
      }
      break;

    default:
      ShouldNotReachHere();
    }
  }
  return result;
}
223 
224 
225 bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
226   assert(handle != NULL, "precondition");
227   JNIHandleBlock* block = thread->active_handles();
228 
229   // Look back past possible native calls to jni_PushLocalFrame.
230   while (block != NULL) {
231     if (block->chain_contains(handle)) {
232       return true;
233     }
234     block = block->pop_frame_link();
235   }
236   return false;
237 }
238 
239 
// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject handle) {
  assert(handle != NULL, "precondition");
  // If there is no java frame, then this must be top level code, such
  // as the java command executable, in which case, this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
          thr->is_in_stack_range_incl((address)handle, (address)thr->last_Java_sp()));
}
252 
253 
// True if handle is an untagged pointer to an allocated slot in the
// global handle storage.
bool JNIHandles::is_global_handle(jobject handle) {
  assert(handle != NULL, "precondition");
  return !is_jweak(handle) && is_storage_handle(global_handles(), jobject_ptr(handle));
}
258 
259 
// True if handle is a weak-tagged pointer to an allocated slot in the
// weak global handle storage.
bool JNIHandles::is_weak_global_handle(jobject handle) {
  assert(handle != NULL, "precondition");
  return is_jweak(handle) && is_storage_handle(weak_global_handles(), jweak_ptr(handle));
}
264 
// Total memory footprint of the global handle storage, in bytes.
size_t JNIHandles::global_handle_memory_usage() {
  return global_handles()->total_memory_usage();
}
268 
// Total memory footprint of the weak global handle storage, in bytes.
size_t JNIHandles::weak_global_handle_memory_usage() {
  return weak_global_handles()->total_memory_usage();
}
272 
273 
// We assume this is called at a safepoint: no lock is needed.
// Print the global and weak global handle counts to st.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");

  st->print_cr("JNI global refs: " SIZE_FORMAT ", weak refs: " SIZE_FORMAT,
               global_handles()->allocation_count(),
               weak_global_handles()->allocation_count());
  st->cr();
  st->flush();
}
284 
// Print handle counts to the default tty stream.
void JNIHandles::print() { print_on(tty); }
286 
// Closure that checks each visited root holds a valid oop (or NULL).
class VerifyJNIHandles: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    guarantee(oopDesc::is_oop_or_null(RawAccess<>::oop_load(root)), "Invalid oop");
  }
  // Narrow oop roots are not expected in JNI handle storage.
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};
294 
// Verify that every global and weak global handle contains a valid oop.
void JNIHandles::verify() {
  VerifyJNIHandles verify_handle;

  oops_do(&verify_handle);
  weak_oops_do(&verify_handle);
}
301 
302 // This method is implemented here to avoid circular includes between
303 // jniHandles.hpp and thread.hpp.
304 bool JNIHandles::current_thread_in_native() {
305   Thread* thread = Thread::current();
306   return (thread->is_Java_thread() &&
307           JavaThread::cast(thread)->thread_state() == _thread_in_native);
308 }
309 
// Implements JNI IsSameObject semantics.  With Valhalla (EnableValhalla),
// two distinct references to inline-type instances of the same class may
// still be "the same"; that case requires a full substitutability test.
bool JNIHandles::is_same_object(jobject handle1, jobject handle2) {
  oop obj1 = resolve_no_keepalive(handle1);
  oop obj2 = resolve_no_keepalive(handle2);

  bool ret = obj1 == obj2;

  if (EnableValhalla) {
    if (!ret && obj1 != NULL && obj2 != NULL && obj1->klass() == obj2->klass() && obj1->klass()->is_inline_klass()) {
      // The two references are different, they are not null and they are both inline types,
      // a full substitutability test is required, calling PrimitiveObjectMethods.isSubstitutable()
      // (similarly to InterpreterRuntime::is_substitutable)
      JavaThread* THREAD = JavaThread::current();
      Handle ha(THREAD, obj1);
      Handle hb(THREAD, obj2);
      JavaValue result(T_BOOLEAN);
      JavaCallArguments args;
      args.push_oop(ha);
      args.push_oop(hb);
      methodHandle method(THREAD, Universe::is_substitutable_method());
      JavaCalls::call(&result, method, &args, THREAD);
      if (HAS_PENDING_EXCEPTION) {
        // Something really bad happened because isSubstitutable() should not throw exceptions
        // If it is an error, just let it propagate
        // If it is an exception, wrap it into an InternalError
        if (!PENDING_EXCEPTION->is_a(vmClasses::Error_klass())) {
          Handle e(THREAD, PENDING_EXCEPTION);
          CLEAR_PENDING_EXCEPTION;
          THROW_MSG_CAUSE_(vmSymbols::java_lang_InternalError(), "Internal error in substitutability test", e, false);
        }
      }
      ret = result.get_jboolean();
    }
  }

  return ret;
}
346 
347 
// JNIHandleBlock bookkeeping.  _block_free_list is guarded by
// JNIHandleBlockFreeList_lock (see allocate_block / release_block below).
int             JNIHandleBlock::_blocks_allocated     = 0;     // total blocks ever heap-allocated
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;  // global free list of blocks
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;  // all allocated blocks, for debugging
#endif
353 
// True if value is a tagged free-list link rather than an oop;
// the tag is the low bit of the word.
static inline bool is_tagged_free_list(uintptr_t value) {
  const uintptr_t tag_mask = 1u;
  return (value & tag_mask) == tag_mask;
}
357 
// Mark value as a free-list link by setting the low tag bit.
static inline uintptr_t tag_free_list(uintptr_t value) {
  const uintptr_t tag_mask = 1u;
  return value | tag_mask;
}
361 
// Recover the raw link pointer by clearing the low tag bit.
static inline uintptr_t untag_free_list(uintptr_t value) {
  const uintptr_t tag_mask = 1u;
  return value & ~tag_mask;
}
365 
// There is a freelist of handles running through the JNIHandleBlock
// with a tagged next pointer, distinguishing these next pointers from
// oops. The freelist handling currently relies on the size of oops
// being the same as a native pointer. If this ever changes, then
// this freelist handling must change too.
STATIC_ASSERT(sizeof(oop) == sizeof(uintptr_t));
372 
#ifdef ASSERT
// Debug-only: reset the in-use count and null every slot so stale
// contents can't be mistaken for valid oops.
void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    // NOT using Access here; just bare clobbering to NULL, since the
    // block no longer contains valid oops.
    _handles[index] = 0;
  }
}
#endif // ASSERT
384 
// Allocate a JNIHandleBlock, preferring in order: the thread-local free
// list, the global free list, and finally a fresh heap allocation.  With
// AllocFailStrategy::RETURN_NULL, a failed heap allocation returns NULL
// instead of aborting the VM.
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread, AllocFailType alloc_failmode)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLocker ml(JNIHandleBlockFreeList_lock,
                   Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      if (alloc_failmode == AllocFailStrategy::RETURN_NULL) {
        block = new (std::nothrow) JNIHandleBlock();
        if (block == NULL) {
          return NULL;
        }
      } else {
        block = new JNIHandleBlock();
      }
      _blocks_allocated++;
      block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  // Reset the block to a pristine, empty state before handing it out.
  block->_top = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  block->_planned_capacity = block_size_in_oops;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
434 
435 
// Return a chain of blocks either to the owning thread's local free list
// (thread != NULL) or to the global free list.  Blocks reachable through
// the pop_frame_link are released recursively as a sanity measure.
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLocker ml(JNIHandleBlockFreeList_lock,
                   Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}
479 
480 
// Apply f to every live handle in this chain of blocks, and in any chains
// reachable through pop frame links.  Free-list entries (tagged) and
// deleted handles (zero) are skipped.
void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        uintptr_t* addr = &(current->_handles)[index];
        uintptr_t value = *addr;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != 0 && !is_tagged_free_list(value)) {
          oop* root = (oop*)addr;
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}
508 
509 
// Allocate a handle slot for obj in this block chain, trying in order:
// the last partially-filled block, the free list, any unused trailing
// block, and finally either a free-list rebuild or appending a new block,
// then retrying.  Returns NULL only when alloc_failmode is RETURN_NULL
// and a new block cannot be allocated.
jobject JNIHandleBlock::allocate_handle(oop obj, AllocFailType alloc_failmode) {
  assert(Universe::heap()->is_in(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      if (current->_top == 0) {
        // All blocks after the first clear trailing block are already cleared.
#ifdef ASSERT
        for (current = current->_next; current != NULL; current = current->_next) {
          assert(current->_top == 0, "trailing blocks must already be cleared");
        }
#endif
        break;
      }
      current->_top = 0;
      current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = (oop*)&(_last->_handles)[_last->_top++];
    NativeAccess<IS_DEST_UNINITIALIZED>::oop_store(handle, obj);
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = (oop*)_free_list;
    _free_list = (uintptr_t*) untag_free_list(*_free_list);
    NativeAccess<IS_DEST_UNINITIALIZED>::oop_store(handle, obj);
    return (jobject) handle;
  }
  // Check if unused block follow last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj, alloc_failmode);
  }

  // No space available, we have to rebuild free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across call.
    _last->_next = JNIHandleBlock::allocate_block(thread, alloc_failmode);
    if (_last->_next == NULL) {
      return NULL;
    }
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj, alloc_failmode);  // retry
}
579 
// Thread all cleared (deleted) handle slots in the chain into the free
// list.  If fewer than half of the handles are free, schedule appending a
// corresponding number of new blocks before the next rebuild.
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      uintptr_t* handle = &(current->_handles)[index];
      if (*handle == 0) {
        // this handle was cleared out by a delete call, reuse it
        *handle = _free_list == NULL ? 0 : tag_free_list((uintptr_t)_free_list);
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
}
608 
609 
610 bool JNIHandleBlock::contains(jobject handle) const {
611   return ((jobject)&_handles[0] <= handle && handle<(jobject)&_handles[_top]);
612 }
613 
614 
615 bool JNIHandleBlock::chain_contains(jobject handle) const {
616   for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
617     if (current->contains(handle)) {
618       return true;
619     }
620   }
621   return false;
622 }
623 
624 
625 size_t JNIHandleBlock::length() const {
626   size_t result = 1;
627   for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
628     result++;
629   }
630   return result;
631 }
632 
// Closure that counts the handles it is applied to.
class CountJNIHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountJNIHandleClosure(): _count(0) {}
  virtual void do_oop(oop* ooph) { _count++; }
  // Narrow oop roots are not expected in JNI handle blocks.
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};
642 
// Count live handles in this chain by running a counting closure over
// oops_do (which skips free-list and deleted entries).
const size_t JNIHandleBlock::get_number_of_live_handles() {
  CountJNIHandleClosure counter;
  oops_do(&counter);
  return counter.count();
}
648 
// This method is not thread-safe, i.e., must be called while holding a lock on the
// structure.
// Footprint of the chain: number of blocks times the block size.
size_t JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}
654 
655 
656 #ifndef PRODUCT
657 
// Debug-only: true if handle lies in any allocated JNIHandleBlock anywhere.
bool JNIHandles::is_local_handle(jobject handle) {
  return JNIHandleBlock::any_contains(handle);
}
661 
662 bool JNIHandleBlock::any_contains(jobject handle) {
663   assert(handle != NULL, "precondition");
664   for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
665     if (current->contains(handle)) {
666       return true;
667     }
668   }
669   return false;
670 }
671 
// Debug-only: print block and handle usage statistics to tty.  A block is
// counted as "in use" if it holds at least one handle (_top > 0).
void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}
695 
696 #endif