1 /*
   2  * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "gc_implementation/shared/markSweep.inline.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "memory/genOopClosures.inline.hpp"
  31 #include "memory/iterator.inline.hpp"
  32 #include "memory/oopFactory.hpp"
  33 #include "oops/instanceKlass.hpp"
  34 #include "oops/instanceMirrorKlass.hpp"
  35 #include "oops/instanceOop.hpp"
  36 #include "oops/oop.inline.hpp"
  37 #include "oops/symbol.hpp"
  38 #include "runtime/handles.inline.hpp"
  39 #include "utilities/macros.hpp"
  40 #if INCLUDE_ALL_GCS
  41 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
  42 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
  43 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
  44 #include "gc_implementation/g1/g1RemSet.inline.hpp"
  45 #include "gc_implementation/g1/heapRegionManager.inline.hpp"
  46 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
  47 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
  48 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
  49 #include "gc_implementation/shenandoah/shenandoahOopClosures.inline.hpp"
  50 #include "oops/oop.pcgc.inline.hpp"
  51 #endif // INCLUDE_ALL_GCS
  52 
// Byte offset, within a java.lang.Class instance, at which the embedded
// static fields of the represented class begin.  Set during VM startup
// (outside this file); consumed here via start_of_static_fields().
int InstanceMirrorKlass::_offset_of_static_fields = 0;
  54 
#ifdef ASSERT
// Debug-only helpers passed as the assert_fn argument of the iteration
// macros below.  Each loads the (possibly compressed) oop at *p and, if
// it is non-NULL, decodes it and checks it against a heap-membership
// predicate of increasing laxity: is_in < is_in_closed_subset (strictest)
// vs. is_in_reserved (weakest).  assert_nothing performs no check.
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
template <class T> void assert_nothing(T *p) {}

#else
// Product builds: all checks compile to nothing.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
  85 
// Core iteration macro: walk 'count' oop slots of type T (oop or narrowOop)
// starting at start_p, running assert_fn and then do_oop on each slot.
// do_oop is a statement fragment that may refer to the loop variable 'p'.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                         \
  assert_fn)                                         \
{                                                    \
  T* p         = (T*)(start_p);                      \
  T* const end = p + (count);                        \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}

// Bounded variant: same walk, but clipped to the [low, high) region (used
// for MemRegion-bounded iteration).  The bounds must be T-aligned.
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                              \
  do_oop, assert_fn)                                         \
{                                                            \
  T* const l = (T*)(low);                                    \
  T* const h = (T*)(high);                                   \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&         \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,           \
         "bounded region must be properly aligned");         \
  T* p       = (T*)(start_p);                                \
  T* end     = p + (count);                                  \
  if (p < l) p = l;                                          \
  if (end > h) end = h;                                      \
  while (p < end) {                                          \
    (assert_fn)(p);                                          \
    do_oop;                                                  \
    ++p;                                                     \
  }                                                          \
}


// Dispatcher: selects the narrowOop or oop specialization based on the
// UseCompressedOops flag at runtime.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                  do_oop, assert_fn)       \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}

// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                          do_oop, assert_fn)               \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}
 152 
 153 
// Serial MarkSweep marking: follow everything reachable from the mirror
// 'obj' -- its ordinary instance fields (handled by the superclass), the
// Klass the mirror represents, and the static oop fields embedded in the
// mirror object itself.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      MarkSweep::follow_class_loader(klass->class_loader_data());
    } else {
      MarkSweep::follow_klass(klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push the mirror's embedded static oop fields.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::mark_and_push(p),                                                      \
    assert_is_in_closed_subset)
}
 184 
#if INCLUDE_ALL_GCS
// Parallel-compact (PSParallelCompact) marking analogue of the serial
// oop_follow_contents above; the per-worker ParCompactionManager 'cm'
// is threaded through all marking calls.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());
    } else {
      PSParallelCompact::follow_klass(cm, klass);
    }
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push the mirror's embedded static oop fields.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    PSParallelCompact::mark_and_push(cm, p),                                          \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS
 218 
// MarkSweep compaction: update every oop pointer in the mirror to its
// referent's new location.  The size is captured before adjustment and
// returned (in words), since the pointers are about to change.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  // Adjust the mirror's embedded static oop fields as well.
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    MarkSweep::adjust_pointer(p),                                                     \
    assert_nothing)
  return size;
}
 229 
// Body fragment for the oop_oop_iterate definitions below: apply the
// closure to the mirror's static oop fields and return the object size.
// Expects 'obj' and 'closure' to be in scope at the expansion site.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)                \
  InstanceMirrorKlass_OOP_ITERATE(                                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \

// Bounded (MemRegion-clipped) variant of the fragment above; additionally
// expects 'mr' in scope at the expansion site.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr)         \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),        \
    mr.start(), mr.end(),                                                             \
      (closure)->do_oop##nv_suffix(p),                                                \
    assert_is_in_closed_subset)                                                       \
  return oop_size(obj);                                                               \
 245 
// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.
// Each generated method iterates the instance fields (via the superclass),
// optionally visits the mirrored Klass as metadata, then iterates the
// embedded static oop fields and returns the object size in words.

#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)           \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                        \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                            \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    Klass* klass = java_lang_Class::as_Klass(obj);                                    \
    /* We'll get NULL for primitive mirrors. */                                       \
    if (klass != NULL) {                                                              \
      closure->do_klass##nv_suffix(klass);                                            \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
 272 
#if INCLUDE_ALL_GCS
// Backwards-iteration variant; note it does NOT visit the mirrored Klass
// (no if_do_metadata_checked block), only the fields.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {              \
  /* Get size before changing pointers */                                             \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);                  \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);           \
  } else {                                                                            \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);                 \
  }                                                                                   \
}
#endif // INCLUDE_ALL_GCS
 290 
 291 
// MemRegion-bounded variant: field iteration is clipped to 'mr', and the
// mirrored Klass is only visited if the mirror itself lies within 'mr'.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)         \
                                                                                      \
int InstanceMirrorKlass::                                                             \
oop_oop_iterate##nv_suffix##_m(oop obj,                                               \
                               OopClosureType* closure,                               \
                               MemRegion mr) {                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);      \
                                                                                      \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);                    \
                                                                                      \
  if_do_metadata_checked(closure, nv_suffix) {                                        \
    if (mr.contains(obj)) {                                                           \
      Klass* klass = java_lang_Class::as_Klass(obj);                                  \
      /* We'll get NULL for primitive mirrors. */                                     \
      if (klass != NULL) {                                                            \
        closure->do_klass##nv_suffix(klass);                                          \
      }                                                                               \
    }                                                                                 \
  }                                                                                   \
                                                                                      \
  if (UseCompressedOops) {                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr);    \
  } else {                                                                            \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);          \
  }                                                                                   \
}
 318 
// Instantiate the oop_oop_iterate method families above for every closure
// type listed in the ALL_OOP_OOP_ITERATE_CLOSURES_* macros.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
 327 
#if INCLUDE_ALL_GCS
// Parallel scavenge: push scavengeable references from the mirror onto the
// promotion manager's depth-first work queue.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  // Push any static oop fields whose referents are in the young generation.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}
 342 
// Parallel compact: update all oop pointers in the mirror to their
// referents' post-compaction addresses.  Size (in words) is captured
// before the pointers are adjusted, and returned.
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  // Adjust the mirror's embedded static oop fields as well.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
#endif // INCLUDE_ALL_GCS
 354 
 355 int InstanceMirrorKlass::instance_size(KlassHandle k) {
 356   if (k() != NULL && k->oop_is_instance()) {
 357     return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
 358   }
 359   return size_helper();
 360 }
 361 
 362 instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
 363   // Query before forming handle.
 364   int size = instance_size(k);
 365   KlassHandle h_k(THREAD, this);
 366   instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
 367 
 368   // Since mirrors can be variable sized because of the static fields, store
 369   // the size in the mirror itself.
 370   java_lang_Class::set_oop_size(i, size);
 371 
 372   return i;
 373 }
 374 
 375 int InstanceMirrorKlass::oop_size(oop obj) const {
 376   return java_lang_Class::oop_size(obj);
 377 }
 378 
 379 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
 380   Klass* k = java_lang_Class::as_Klass(obj);
 381   if (k != NULL && k->oop_is_instance()) {
 382     return InstanceKlass::cast(k)->static_oop_field_count();
 383   }
 384   return 0;
 385 }