1 /*
   2  * Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "cds/metaspaceShared.hpp"
  27 #include "classfile/classFileStream.hpp"
  28 #include "classfile/classLoaderDataGraph.hpp"
  29 #include "classfile/classLoadInfo.hpp"
  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "classfile/metadataOnStackMark.hpp"
  32 #include "classfile/symbolTable.hpp"
  33 #include "classfile/klassFactory.hpp"
  34 #include "classfile/verifier.hpp"
  35 #include "classfile/vmClasses.hpp"
  36 #include "classfile/vmSymbols.hpp"
  37 #include "code/codeCache.hpp"
  38 #include "compiler/compileBroker.hpp"
  39 #include "interpreter/oopMapCache.hpp"
  40 #include "interpreter/rewriter.hpp"
  41 #include "jfr/jfrEvents.hpp"
  42 #include "logging/logStream.hpp"
  43 #include "memory/metadataFactory.hpp"
  44 #include "memory/resourceArea.hpp"
  45 #include "memory/universe.hpp"
  46 #include "oops/annotations.hpp"
  47 #include "oops/constantPool.hpp"
  48 #include "oops/fieldStreams.inline.hpp"
  49 #include "oops/klass.inline.hpp"
  50 #include "oops/klassVtable.hpp"
  51 #include "oops/oop.inline.hpp"
  52 #include "oops/recordComponent.hpp"
  53 #include "prims/jvmtiImpl.hpp"
  54 #include "prims/jvmtiRedefineClasses.hpp"
  55 #include "prims/jvmtiThreadState.inline.hpp"
  56 #include "prims/resolvedMethodTable.hpp"
  57 #include "prims/methodComparator.hpp"
  58 #include "runtime/atomic.hpp"
  59 #include "runtime/deoptimization.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/jniHandles.inline.hpp"
  62 #include "runtime/relocator.hpp"
  63 #include "runtime/safepointVerifiers.hpp"
  64 #include "utilities/bitMap.inline.hpp"
  65 #include "utilities/checkedCast.hpp"
  66 #include "utilities/events.hpp"
  67 
// Scratch state for matching the old methods of a class being redefined
// against the methods of its new version; populated elsewhere in this file
// while a redefinition operation is in progress.
Array<Method*>* VM_RedefineClasses::_old_methods = nullptr;
Array<Method*>* VM_RedefineClasses::_new_methods = nullptr;
Method**  VM_RedefineClasses::_matching_old_methods = nullptr;
Method**  VM_RedefineClasses::_matching_new_methods = nullptr;
Method**  VM_RedefineClasses::_deleted_methods      = nullptr;
Method**  VM_RedefineClasses::_added_methods        = nullptr;
int       VM_RedefineClasses::_matching_methods_length = 0;
int       VM_RedefineClasses::_deleted_methods_length  = 0;
int       VM_RedefineClasses::_added_methods_length    = 0;

// This flag is global as the constructor does not reset it:
// once java.lang.Object has been redefined it stays redefined for the
// lifetime of the VM.
bool      VM_RedefineClasses::_has_redefined_Object = false;
// Monotonically increasing id handed out by next_id() to each operation.
u8        VM_RedefineClasses::_id_counter = 0;
  81 
  82 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
  83                                        const jvmtiClassDefinition *class_defs,
  84                                        JvmtiClassLoadKind class_load_kind) {
  85   _class_count = class_count;
  86   _class_defs = class_defs;
  87   _class_load_kind = class_load_kind;
  88   _any_class_has_resolved_methods = false;
  89   _res = JVMTI_ERROR_NONE;
  90   _the_class = nullptr;
  91   _id = next_id();
  92 }
  93 
  94 static inline InstanceKlass* get_ik(jclass def) {
  95   oop mirror = JNIHandles::resolve_non_null(def);
  96   return InstanceKlass::cast(java_lang_Class::as_Klass(mirror));
  97 }
  98 
// If any of the classes are being redefined, wait
// Parallel constant pool merging leads to indeterminate constant pools.
//
// Marks every class in _class_defs as "being redefined", waiting on
// RedefineClasses_lock for any class currently being redefined by another
// thread. Classes this thread is already redefining (recursive redefinition
// via the class file load hook) are skipped rather than waited on, which
// avoids self-deadlock. Paired with unlock_classes().
void VM_RedefineClasses::lock_classes() {
  // Per-thread list of classes currently being redefined by this thread.
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();

  MonitorLocker ml(RedefineClasses_lock);

  if (redef_classes == nullptr) {
    // First redefinition on this thread: lazily allocate the list.
    redef_classes = new (mtClass) GrowableArray<Klass*>(1, mtClass);
    state->set_classes_being_redefined(redef_classes);
  }

  bool has_redefined;
  do {
    has_redefined = false;
    // Go through classes each time until none are being redefined. Skip
    // the ones that are being redefined by this thread currently. Class file
    // load hook event may trigger new class redefine when we are redefining
    // a class (after lock_classes()).
    for (int i = 0; i < _class_count; i++) {
      InstanceKlass* ik = get_ik(_class_defs[i].klass);
      // Check if we are currently redefining the class in this thread already.
      if (redef_classes->contains(ik)) {
        assert(ik->is_being_redefined(), "sanity");
      } else {
        if (ik->is_being_redefined()) {
          // Another thread owns this class: wait for its unlock_classes()
          // notification, then rescan the whole set from the start.
          ml.wait();
          has_redefined = true;
          break;  // for loop
        }
      }
    }
  } while (has_redefined);

  // All classes are free; claim them for this thread.
  for (int i = 0; i < _class_count; i++) {
    InstanceKlass* ik = get_ik(_class_defs[i].klass);
    redef_classes->push(ik); // Add to the _classes_being_redefined list
    ik->set_is_being_redefined(true);
  }
  ml.notify_all();
}
 141 
// Releases the "being redefined" marks taken by lock_classes() and wakes
// any threads waiting on RedefineClasses_lock. Pops this thread's
// _classes_being_redefined list in reverse push order so recursive
// redefinitions unwind correctly.
void VM_RedefineClasses::unlock_classes() {
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();
  assert(redef_classes != nullptr, "_classes_being_redefined is not allocated");

  MonitorLocker ml(RedefineClasses_lock);

  for (int i = _class_count - 1; i >= 0; i--) {
    InstanceKlass* def_ik = get_ik(_class_defs[i].klass);
    if (redef_classes->length() > 0) {
      // Remove the class from _classes_being_redefined list
      Klass* k = redef_classes->pop();
      assert(def_ik == k, "unlocking wrong class");
    }
    assert(def_ik->is_being_redefined(),
           "should be being redefined to get here");

    // Unlock after we finish all redefines for this class within
    // the thread. Same class can be pushed to the list multiple
    // times (not more than once by each recursive redefinition).
    if (!redef_classes->contains(def_ik)) {
      def_ik->set_is_being_redefined(false);
    }
  }
  ml.notify_all();
}
 168 
// Runs in the requesting JavaThread before the safepoint. Validates the
// class definitions, locks the classes against concurrent redefinition and
// loads the new class versions. Returns false (with _res holding the JVMTI
// error code) when the operation must not proceed to doit().
bool VM_RedefineClasses::doit_prologue() {
  if (_class_count == 0) {
    // Nothing to do; trivially successful.
    _res = JVMTI_ERROR_NONE;
    return false;
  }
  if (_class_defs == nullptr) {
    _res = JVMTI_ERROR_NULL_POINTER;
    return false;
  }

  // Per-definition sanity checks mandated by the JVMTI spec.
  for (int i = 0; i < _class_count; i++) {
    if (_class_defs[i].klass == nullptr) {
      _res = JVMTI_ERROR_INVALID_CLASS;
      return false;
    }
    if (_class_defs[i].class_byte_count == 0) {
      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
      return false;
    }
    if (_class_defs[i].class_bytes == nullptr) {
      _res = JVMTI_ERROR_NULL_POINTER;
      return false;
    }

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives, arrays, and hidden classes
    // cannot be redefined.
    if (!is_modifiable_class(mirror)) {
      _res = JVMTI_ERROR_UNMODIFIABLE_CLASS;
      return false;
    }
  }

  // Start timer after all the sanity checks; not quite accurate, but
  // better than adding a bunch of stop() calls.
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_vm_op_prologue.start();
  }

  lock_classes();
  // We first load new class versions in the prologue, because somewhere down the
  // call chain it is required that the current thread is a Java thread.
  _res = load_new_class_versions();
  if (_res != JVMTI_ERROR_NONE) {
    // free any successfully created classes, since none are redefined
    for (int i = 0; i < _class_count; i++) {
      if (_scratch_classes[i] != nullptr) {
        ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
        // Free the memory for this class at class unloading time.  Not before
        // because CMS might think this is still live.
        InstanceKlass* ik = get_ik(_class_defs[i].klass);
        if (ik->get_cached_class_file() == _scratch_classes[i]->get_cached_class_file()) {
          // Don't double-free cached_class_file copied from the original class if error.
          _scratch_classes[i]->set_cached_class_file(nullptr);
        }
        cld->add_to_deallocate_list(InstanceKlass::cast(_scratch_classes[i]));
      }
    }
    // Free os::malloc allocated memory in load_new_class_version.
    os::free(_scratch_classes);
    _timer_vm_op_prologue.stop();
    unlock_classes();
    return false;
  }

  _timer_vm_op_prologue.stop();
  return true;
}
 237 
// The safepoint phase of the operation: installs the scratch classes
// loaded in doit_prologue(), flushes dependent compiled code and fixes up
// all metadata that references the evolved classes.
void VM_RedefineClasses::doit() {
  Thread* current = Thread::current();

  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_vm_op_doit.start();
  }

#if INCLUDE_CDS
  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      log_info(redefine, class, load)("failed to remap shared readonly space to readwrite, private");
      _res = JVMTI_ERROR_INTERNAL;
      _timer_vm_op_doit.stop();
      return;
    }
  }
#endif

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack(/*walk_all_metadata*/true, /*redefinition_walk*/true);
  HandleMark hm(current);   // make sure any handles created are deleted
                            // before the stack walk again.

  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(current, _class_defs[i].klass, _scratch_classes[i]);
  }

  // Flush all compiled code that depends on the classes redefined.
  flush_dependent_code();

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved classes.
  // Have to do this after all classes are redefined and all methods that
  // are redefined are marked as old.
  AdjustAndCleanMetadata adjust_and_clean_metadata(current);
  ClassLoaderDataGraph::classes_do(&adjust_and_clean_metadata);

  // JSR-292 support
  if (_any_class_has_resolved_methods) {
    bool trace_name_printed = false;
    ResolvedMethodTable::adjust_method_entries(&trace_name_printed);
  }

  // Increment flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::increment_redefinition_count();

  // check_class() is optionally called for product bits, but is
  // always called for non-product bits.
#ifdef PRODUCT
  if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
#endif
    log_trace(redefine, class, obsolete, metadata)("calling check_class");
    CheckClass check_class(current);
    ClassLoaderDataGraph::classes_do(&check_class);
#ifdef PRODUCT
  }
#endif

  // Clean up any metadata now unreferenced while MetadataOnStackMark is set.
  ClassLoaderDataGraph::clean_deallocate_lists(false);

  _timer_vm_op_doit.stop();
}
 307 
// Runs in the requesting JavaThread after the safepoint: releases the
// class locks taken in doit_prologue(), frees the scratch-class array and
// reports timing if enabled.
void VM_RedefineClasses::doit_epilogue() {
  unlock_classes();

  // Free os::malloc allocated memory.
  os::free(_scratch_classes);

  // Reset the_class to null for error printing.
  _the_class = nullptr;

  if (log_is_enabled(Info, redefine, class, timer)) {
    // Used to have separate timers for "doit" and "all", but the timer
    // overhead skewed the measurements.
    julong doit_time = _timer_vm_op_doit.milliseconds();
    julong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;

    log_info(redefine, class, timer)
      ("vm_op: all=" JULONG_FORMAT "  prologue=" JULONG_FORMAT "  doit=" JULONG_FORMAT,
       all_time, (julong)_timer_vm_op_prologue.milliseconds(), doit_time);
    log_info(redefine, class, timer)
      ("redefine_single_class: phase1=" JULONG_FORMAT "  phase2=" JULONG_FORMAT,
       (julong)_timer_rsc_phase1.milliseconds(), (julong)_timer_rsc_phase2.milliseconds());
  }
}
 331 
 332 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
 333   // classes for primitives cannot be redefined
 334   if (java_lang_Class::is_primitive(klass_mirror)) {
 335     return false;
 336   }
 337   Klass* k = java_lang_Class::as_Klass(klass_mirror);
 338   // classes for arrays cannot be redefined
 339   if (k == nullptr || !k->is_instance_klass()) {
 340     return false;
 341   }
 342 
 343   // Cannot redefine or retransform a hidden class.
 344   if (InstanceKlass::cast(k)->is_hidden()) {
 345     return false;
 346   }
 347   if (InstanceKlass::cast(k) == vmClasses::Continuation_klass()) {
 348     // Don't redefine Continuation class. See 8302779.
 349     return false;
 350   }
 351   return true;
 352 }
 353 
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
void VM_RedefineClasses::append_entry(const constantPoolHandle& scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    //
    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Class:
    case JVM_CONSTANT_UnresolvedClass:
    {
      // Append (or find) the referenced name Utf8 first, then the entry.
      int name_i = scratch_cp->klass_name_index_at(scratch_i);
      int new_name_i = find_or_append_indirect_entry(scratch_cp, name_i, merge_cp_p,
                                                     merge_cp_length_p);

      if (new_name_i != name_i) {
        log_trace(redefine, class, constantpool)
          ("Class entry@%d name_index change: %d to %d",
           *merge_cp_length_p, name_i, new_name_i);
      }

      (*merge_cp_p)->temp_unresolved_klass_at_put(*merge_cp_length_p, new_name_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // Double and Long occupy two slots (JVMS 4.4.5).
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // Append (or find) both referenced Utf8 entries before this entry.
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p);

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d name_ref_index change: %d to %d",
           *merge_cp_length_p, name_ref_i, new_name_ref_i);
      }
      if (new_signature_ref_i != signature_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
           *merge_cp_length_p, signature_ref_i, new_signature_ref_i);
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Append (or find) the Class and NameAndType entries this member
      // reference points at before appending the reference itself.
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                          merge_cp_p, merge_cp_length_p);

      const char *entry_name = nullptr;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d class_index changed: %d to %d", entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i);
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d name_and_type_index changed: %d to %d",
           entry_name, *merge_cp_length_p, name_and_type_ref_i, new_name_and_type_ref_i);
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodType entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      // ref_kind is a direct value (JVM_REF_*); only ref_i is indirect.
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodHandle entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Dynamic:  // fall through
    case JVM_CONSTANT_InvokeDynamic:
    {
      // Index of the bootstrap specifier in the operands array
      int old_bs_i = scratch_cp->bootstrap_methods_attribute_index(scratch_i);
      int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
                                            merge_cp_length_p);
      // The bootstrap method NameAndType_info index
      int old_ref_i = scratch_cp->bootstrap_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                    merge_cp_length_p);
      if (new_bs_i != old_bs_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d bootstrap_method_attr_index change: %d to %d",
           *merge_cp_length_p, old_bs_i, new_bs_i);
      }
      if (new_ref_i != old_ref_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d name_and_type_index change: %d to %d", *merge_cp_length_p, old_ref_i, new_ref_i);
      }

      if (scratch_cp->tag_at(scratch_i).is_dynamic_constant())
        (*merge_cp_p)->dynamic_constant_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      else
        (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
 623 
 624 
 625 u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
 626       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {
 627 
 628   int new_ref_i = ref_i;
 629   bool match = (ref_i < *merge_cp_length_p) &&
 630                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);
 631 
 632   if (!match) {
 633     // forward reference in *merge_cp_p or not a direct match
 634     int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p);
 635     if (found_i != 0) {
 636       guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
 637       // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
 638       new_ref_i = found_i;
 639       map_index(scratch_cp, ref_i, found_i);
 640     } else {
 641       // no match found so we have to append this entry to *merge_cp_p
 642       append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p);
 643       // The above call to append_entry() can only append one entry
 644       // so the post call query of *merge_cp_length_p is only for
 645       // the sake of consistency.
 646       new_ref_i = *merge_cp_length_p - 1;
 647     }
 648   }
 649 
 650   // constant pool indices are u2, unless the merged constant pool overflows which
 651   // we don't check for.
 652   return checked_cast<u2>(new_ref_i);
 653 } // end find_or_append_indirect_entry()
 654 
 655 
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
void VM_RedefineClasses::append_operand(const constantPoolHandle& scratch_cp, int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // The MethodHandle entry of the bootstrap method itself must exist in
  // the merged pool before we can record its index in the operand.
  u2 old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
  u2 new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                               merge_cp_length_p);
  if (new_ref_i != old_ref_i) {
    log_trace(redefine, class, constantpool)
      ("operands entry@%d bootstrap method ref_index change: %d to %d", _operands_cur_length, old_ref_i, new_ref_i);
  }

  Array<u2>* merge_ops = (*merge_cp_p)->operands();
  int new_bs_i = _operands_cur_length;
  // We have _operands_cur_length == 0 when the merge_cp operands is empty yet.
  // However, the operand_offset_at(0) was set in the extend_operands() call.
  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
  u2 argc      = scratch_cp->operand_argument_count_at(old_bs_i);

  // Record where the new specifier starts, then lay it out:
  // bootstrap method ref index, argument count, then the arguments.
  ConstantPool::operand_offset_at_put(merge_ops, _operands_cur_length, new_base);
  merge_ops->at_put(new_base++, new_ref_i);
  merge_ops->at_put(new_base++, argc);

  for (int i = 0; i < argc; i++) {
    // Each static argument is itself a CP entry that must exist in the
    // merged pool; append it first if needed.
    u2 old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
    u2 new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                     merge_cp_length_p);
    merge_ops->at_put(new_base++, new_arg_ref_i);
    if (new_arg_ref_i != old_arg_ref_i) {
      log_trace(redefine, class, constantpool)
        ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
         _operands_cur_length, old_arg_ref_i, new_arg_ref_i);
    }
  }
  if (old_bs_i != _operands_cur_length) {
    // The bootstrap specifier in *merge_cp_p is at a different index than
    // that in scratch_cp so we need to map the index values.
    map_operand_index(old_bs_i, new_bs_i);
  }
  _operands_cur_length++;
} // end append_operand()
 700 
 701 
// Find the index in the merged operands for the bootstrap specifier at
// old_bs_i in scratch_cp, appending the specifier to *merge_cp_p when no
// semantically equal specifier has been merged yet. Returns the index of
// the specifier within the merged operands.
int VM_RedefineClasses::find_or_append_operand(const constantPoolHandle& scratch_cp,
      int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  int new_bs_i = old_bs_i; // bootstrap specifier index
  // A direct (same-index) match is only possible when old_bs_i falls
  // within the part of *merge_cp_p that has already been merged.
  bool match = (old_bs_i < _operands_cur_length) &&
               scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i);

  if (!match) {
    // forward reference in *merge_cp_p or not a direct match
    int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
                                                    _operands_cur_length);
    if (found_i != -1) {
      guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
      // found a matching operand somewhere else in *merge_cp_p so just need a mapping
      new_bs_i = found_i;
      map_operand_index(old_bs_i, found_i);
    } else {
      // no match found so we have to append this bootstrap specifier to *merge_cp_p
      append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p);
      // append_operand() advanced _operands_cur_length past the new entry
      new_bs_i = _operands_cur_length - 1;
    }
  }
  return new_bs_i;
} // end find_or_append_operand()
 726 
 727 
// Shrink the merged operands to their final length, optionally trace the
// recorded operand index mappings, and reset the merge bookkeeping state.
void VM_RedefineClasses::finalize_operands_merge(const constantPoolHandle& merge_cp, TRAPS) {
  if (merge_cp->operands() == nullptr) {
    // no operands were merged so there is nothing to finalize
    return;
  }
  // Shrink the merge_cp operands
  merge_cp->shrink_operands(_operands_cur_length, CHECK);

  if (log_is_enabled(Trace, redefine, class, constantpool)) {
    // don't want to loop unless we are tracing
    int count = 0;
    for (int i = 1; i < _operands_index_map_p->length(); i++) {
      int value = _operands_index_map_p->at(i);
      if (value != -1) {
        // count is the ordinal of the mapping; i is the old operand index
        log_trace(redefine, class, constantpool)("operands_index_map[%d]: old=%d new=%d", count, i, value);
        count++;
      }
    }
  }
  // Clean-up
  _operands_index_map_p = nullptr;
  _operands_cur_length = 0;
  _operands_index_map_count = 0;
} // end finalize_operands_merge()
 751 
 752 // Symbol* comparator for qsort
 753 // The caller must have an active ResourceMark.
 754 static int symcmp(const void* a, const void* b) {
 755   char* astr = (*(Symbol**)a)->as_C_string();
 756   char* bstr = (*(Symbol**)b)->as_C_string();
 757   return strcmp(astr, bstr);
 758 }
 759 
// Compare two attribute arrays that hold constant pool indices of class
// names (e.g. NestMembers, PermittedSubclasses). The arrays match when
// they name the same set of classes, in any order. Returns
// JVMTI_ERROR_NONE on a match, JVMTI_ERROR_OUT_OF_MEMORY if the scratch
// symbol arrays cannot be allocated, and
// JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED otherwise.
// The caller must have an active ResourceMark.
static jvmtiError check_attribute_arrays(const char* attr_name,
           InstanceKlass* the_class, InstanceKlass* scratch_class,
           Array<u2>* the_array, Array<u2>* scr_array) {
  // An absent attribute is represented by the shared empty short array,
  // so length() below is always safe to call.
  bool the_array_exists = the_array != Universe::the_empty_short_array();
  bool scr_array_exists = scr_array != Universe::the_empty_short_array();

  int array_len = the_array->length();
  if (the_array_exists && scr_array_exists) {
    if (array_len != scr_array->length()) {
      log_trace(redefine, class)
        ("redefined class %s attribute change error: %s len=%d changed to len=%d",
         the_class->external_name(), attr_name, array_len, scr_array->length());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
    }

    // The order of entries in the attribute array is not specified so we
    // have to explicitly check for the same contents. We do this by copying
    // the referenced symbols into their own arrays, sorting them and then
    // comparing each element pair.

    Symbol** the_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);
    Symbol** scr_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);

    if (the_syms == nullptr || scr_syms == nullptr) {
      return JVMTI_ERROR_OUT_OF_MEMORY;
    }

    for (int i = 0; i < array_len; i++) {
      int the_cp_index = the_array->at(i);
      int scr_cp_index = scr_array->at(i);
      the_syms[i] = the_class->constants()->klass_name_at(the_cp_index);
      scr_syms[i] = scratch_class->constants()->klass_name_at(scr_cp_index);
    }

    qsort(the_syms, array_len, sizeof(Symbol*), symcmp);
    qsort(scr_syms, array_len, sizeof(Symbol*), symcmp);

    // After sorting, equal name sets yield pairwise-identical Symbol*.
    for (int i = 0; i < array_len; i++) {
      if (the_syms[i] != scr_syms[i]) {
        log_info(redefine, class)
          ("redefined class %s attribute change error: %s[%d]: %s changed to %s",
           the_class->external_name(), attr_name, i,
           the_syms[i]->as_C_string(), scr_syms[i]->as_C_string());
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
      }
    }
  } else if (the_array_exists ^ scr_array_exists) {
    // Exactly one of the two versions has the attribute.
    const char* action_str = (the_array_exists) ? "removed" : "added";
    log_info(redefine, class)
      ("redefined class %s attribute change error: %s attribute %s",
       the_class->external_name(), attr_name, action_str);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
  }
  return JVMTI_ERROR_NONE;
}
 816 
// Return an error status if the NestHost or NestMembers attribute changed
// between the old class and the new (scratch) class.
static jvmtiError check_nest_attributes(InstanceKlass* the_class,
                                        InstanceKlass* scratch_class) {
  // Check whether the class NestHost attribute has been changed.
  Thread* thread = Thread::current();
  ResourceMark rm(thread);
  // A nest_host_index of 0 means the class has no NestHost attribute.
  u2 the_nest_host_idx = the_class->nest_host_index();
  u2 scr_nest_host_idx = scratch_class->nest_host_index();

  if (the_nest_host_idx != 0 && scr_nest_host_idx != 0) {
    // Both versions have a NestHost: the named class must be the same.
    Symbol* the_sym = the_class->constants()->klass_name_at(the_nest_host_idx);
    Symbol* scr_sym = scratch_class->constants()->klass_name_at(scr_nest_host_idx);
    if (the_sym != scr_sym) {
      log_info(redefine, class, nestmates)
        ("redefined class %s attribute change error: NestHost class: %s replaced with: %s",
         the_class->external_name(), the_sym->as_C_string(), scr_sym->as_C_string());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
    }
  } else if ((the_nest_host_idx == 0) ^ (scr_nest_host_idx == 0)) {
    // Exactly one version has a NestHost attribute.
    const char* action_str = (the_nest_host_idx != 0) ? "removed" : "added";
    log_info(redefine, class, nestmates)
      ("redefined class %s attribute change error: NestHost attribute %s",
       the_class->external_name(), action_str);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
  }

  // Check whether the class NestMembers attribute has been changed.
  return check_attribute_arrays("NestMembers",
                                the_class, scratch_class,
                                the_class->nest_members(),
                                scratch_class->nest_members());
}
 848 
// Return an error status if the class Record attribute was changed.
static jvmtiError check_record_attribute(InstanceKlass* the_class, InstanceKlass* scratch_class) {
  // Get lists of record components.
  Array<RecordComponent*>* the_record = the_class->record_components();
  Array<RecordComponent*>* scr_record = scratch_class->record_components();
  // A null record_components() means the class has no Record attribute.
  bool the_record_exists = the_record != nullptr;
  bool scr_record_exists = scr_record != nullptr;

  if (the_record_exists && scr_record_exists) {
    int the_num_components = the_record->length();
    int scr_num_components = scr_record->length();
    if (the_num_components != scr_num_components) {
      log_info(redefine, class, record)
        ("redefined class %s attribute change error: Record num_components=%d changed to num_components=%d",
         the_class->external_name(), the_num_components, scr_num_components);
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
    }

    // Compare each field in each record component.
    ConstantPool* the_cp =  the_class->constants();
    ConstantPool* scr_cp =  scratch_class->constants();
    for (int x = 0; x < the_num_components; x++) {
      RecordComponent* the_component = the_record->at(x);
      RecordComponent* scr_component = scr_record->at(x);
      // Components are compared positionally: name and descriptor must
      // match at the same index x.
      const Symbol* const the_name = the_cp->symbol_at(the_component->name_index());
      const Symbol* const scr_name = scr_cp->symbol_at(scr_component->name_index());
      const Symbol* const the_descr = the_cp->symbol_at(the_component->descriptor_index());
      const Symbol* const scr_descr = scr_cp->symbol_at(scr_component->descriptor_index());
      if (the_name != scr_name || the_descr != scr_descr) {
        log_info(redefine, class, record)
          ("redefined class %s attribute change error: Record name_index, descriptor_index, and/or attributes_count changed",
           the_class->external_name());
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
      }

      // A generic_signature_index of 0 means the component has no
      // generic signature.
      int the_gen_sig = the_component->generic_signature_index();
      int scr_gen_sig = scr_component->generic_signature_index();
      const Symbol* const the_gen_sig_sym = (the_gen_sig == 0 ? nullptr :
        the_cp->symbol_at(the_component->generic_signature_index()));
      const Symbol* const scr_gen_sig_sym = (scr_gen_sig == 0 ? nullptr :
        scr_cp->symbol_at(scr_component->generic_signature_index()));
      if (the_gen_sig_sym != scr_gen_sig_sym) {
        log_info(redefine, class, record)
          ("redefined class %s attribute change error: Record generic_signature attribute changed",
           the_class->external_name());
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
      }

      // It's okay if a record component's annotations were changed.
    }

  } else if (the_record_exists ^ scr_record_exists) {
    // Exactly one version has a Record attribute.
    const char* action_str = (the_record_exists) ? "removed" : "added";
    log_info(redefine, class, record)
      ("redefined class %s attribute change error: Record attribute %s",
       the_class->external_name(), action_str);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
  }

  return JVMTI_ERROR_NONE;
}
 910 
 911 
 912 static jvmtiError check_permitted_subclasses_attribute(InstanceKlass* the_class,
 913                                                        InstanceKlass* scratch_class) {
 914   Thread* thread = Thread::current();
 915   ResourceMark rm(thread);
 916 
 917   // Check whether the class PermittedSubclasses attribute has been changed.
 918   return check_attribute_arrays("PermittedSubclasses",
 919                                 the_class, scratch_class,
 920                                 the_class->permitted_subclasses(),
 921                                 scratch_class->permitted_subclasses());
 922 }
 923 
 924 static jvmtiError check_preload_attribute(InstanceKlass* the_class,
 925                                           InstanceKlass* scratch_class) {
 926   Thread* thread = Thread::current();
 927   ResourceMark rm(thread);
 928 
 929   // Check whether the class Preload attribute has been changed.
 930   return check_attribute_arrays("Preload",
 931                                 the_class, scratch_class,
 932                                 the_class->preload_classes(),
 933                                 scratch_class->preload_classes());
 934 }
 935 
 936 static bool can_add_or_delete(Method* m) {
 937       // Compatibility mode
 938   return (AllowRedefinitionToAddDeleteMethods &&
 939           (m->is_private() && (m->is_static() || m->is_final())));
 940 }
 941 
// Compare the old version of a class (the_class) with the proposed new
// version (scratch_class) and decide whether the redefinition is
// supported. As a side effect, overloaded methods in scratch_class are
// reordered to match the order of their counterparts in the_class, and
// matching new methods take over the method idnums of the old methods.
// Returns JVMTI_ERROR_NONE when supported, otherwise a JVMTI error code
// naming the unsupported change.
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
             InstanceKlass* the_class,
             InstanceKlass* scratch_class) {
  int i;

  // Check superclasses, or rather their names, since superclasses themselves can be
  // requested to replace.
  // Check for null superclass first since this might be java.lang.Object
  if (the_class->super() != scratch_class->super() &&
      (the_class->super() == nullptr || scratch_class->super() == nullptr ||
       the_class->super()->name() !=
       scratch_class->super()->name())) {
    log_info(redefine, class, normalize)
      ("redefined class %s superclass change error: superclass changed from %s to %s.",
       the_class->external_name(),
       the_class->super() == nullptr ? "null" : the_class->super()->external_name(),
       scratch_class->super() == nullptr ? "null" : scratch_class->super()->external_name());
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }

  // Check if the number, names and order of directly implemented interfaces are the same.
  // I think in principle we should just check if the sets of names of directly implemented
  // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
  // .java file, also changes in .class file) should not matter. However, comparing sets is
  // technically a bit more difficult, and, more importantly, I am not sure at present that the
  // order of interfaces does not matter on the implementation level, i.e. that the VM does not
  // rely on it somewhere.
  Array<InstanceKlass*>* k_interfaces = the_class->local_interfaces();
  Array<InstanceKlass*>* k_new_interfaces = scratch_class->local_interfaces();
  int n_intfs = k_interfaces->length();
  if (n_intfs != k_new_interfaces->length()) {
    log_info(redefine, class, normalize)
      ("redefined class %s interfaces change error: number of implemented interfaces changed from %d to %d.",
       the_class->external_name(), n_intfs, k_new_interfaces->length());
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }
  for (i = 0; i < n_intfs; i++) {
    if (k_interfaces->at(i)->name() !=
        k_new_interfaces->at(i)->name()) {
      log_info(redefine, class, normalize)
          ("redefined class %s interfaces change error: interface changed from %s to %s.",
           the_class->external_name(),
           k_interfaces->at(i)->external_name(), k_new_interfaces->at(i)->external_name());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
    }
  }

  // Check whether class is in the error init state.
  if (the_class->is_in_error_state()) {
    log_info(redefine, class, normalize)
      ("redefined class %s is in error init state.", the_class->external_name());
    // TBD #5057930: special error code is needed in 1.6
    return JVMTI_ERROR_INVALID_CLASS;
  }

  // Check whether the nest-related attributes have been changed.
  jvmtiError err = check_nest_attributes(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the Record attribute has been changed.
  err = check_record_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the PermittedSubclasses attribute has been changed.
  err = check_permitted_subclasses_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the Preload attribute has been changed.
  err = check_preload_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether class modifiers are the same.
  jushort old_flags = (jushort) the_class->access_flags().get_flags();
  jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
  if (old_flags != new_flags) {
    log_info(redefine, class, normalize)
        ("redefined class %s modifiers change error: modifiers changed from %d to %d.",
         the_class->external_name(), old_flags, new_flags);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
  }

  // Check if the number, names, types and order of fields declared in these classes
  // are the same.
  JavaFieldStream old_fs(the_class);
  JavaFieldStream new_fs(scratch_class);
  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
    // name and signature
    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
    Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
    Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
      log_info(redefine, class, normalize)
          ("redefined class %s fields change error: field %s %s changed to %s %s.",
           the_class->external_name(),
           sig_sym1->as_C_string(), name_sym1->as_C_string(),
           sig_sym2->as_C_string(), name_sym2->as_C_string());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // offset
    if (old_fs.offset() != new_fs.offset()) {
      log_info(redefine, class, normalize)
          ("redefined class %s field %s change error: offset changed from %d to %d.",
           the_class->external_name(), name_sym2->as_C_string(), old_fs.offset(), new_fs.offset());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // access
    old_flags = old_fs.access_flags().as_short();
    new_flags = new_fs.access_flags().as_short();
    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
      log_info(redefine, class, normalize)
          ("redefined class %s field %s change error: modifiers changed from %d to %d.",
           the_class->external_name(), name_sym2->as_C_string(), old_flags, new_flags);
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
  }

  // If both streams aren't done then we have a differing number of
  // fields.
  if (!old_fs.done() || !new_fs.done()) {
    const char* action = old_fs.done() ? "added" : "deleted";
    log_info(redefine, class, normalize)
        ("redefined class %s fields change error: some fields were %s.",
         the_class->external_name(), action);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
  }

  // Do a parallel walk through the old and new methods. Detect
  // cases where they match (exist in both), have been added in
  // the new methods, or have been deleted (exist only in the
  // old methods).  The class file parser places methods in order
  // by method name, but does not order overloaded methods by
  // signature.  In order to determine what fate befell the methods,
  // this code places the overloaded new methods that have matching
  // old methods in the same order as the old methods and places
  // new overloaded methods at the end of overloaded methods of
  // that name. The code for this order normalization is adapted
  // from the algorithm used in InstanceKlass::find_method().
  // Since we are swapping out of order entries as we find them,
  // we only have to search forward through the overloaded methods.
  // Methods which are added and have the same name as an existing
  // method (but different signature) will be put at the end of
  // the methods with that name, and the name mismatch code will
  // handle them.
  Array<Method*>* k_old_methods(the_class->methods());
  Array<Method*>* k_new_methods(scratch_class->methods());
  int n_old_methods = k_old_methods->length();
  int n_new_methods = k_new_methods->length();
  Thread* thread = Thread::current();

  int ni = 0;
  int oi = 0;
  while (true) {
    Method* k_old_method;
    Method* k_new_method;
    enum { matched, added, deleted, undetermined } method_was = undetermined;

    if (oi >= n_old_methods) {
      if (ni >= n_new_methods) {
        break; // we've looked at everything, done
      }
      // New method at the end
      k_new_method = k_new_methods->at(ni);
      method_was = added;
    } else if (ni >= n_new_methods) {
      // Old method, at the end, is deleted
      k_old_method = k_old_methods->at(oi);
      method_was = deleted;
    } else {
      // There are more methods in both the old and new lists
      k_old_method = k_old_methods->at(oi);
      k_new_method = k_new_methods->at(ni);
      if (k_old_method->name() != k_new_method->name()) {
        // Methods are sorted by method name, so a mismatch means added
        // or deleted
        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
          method_was = added;
        } else {
          method_was = deleted;
        }
      } else if (k_old_method->signature() == k_new_method->signature()) {
        // Both the name and signature match
        method_was = matched;
      } else {
        // The name matches, but the signature doesn't, which means we have to
        // search forward through the new overloaded methods.
        int nj;  // outside the loop for post-loop check
        for (nj = ni + 1; nj < n_new_methods; nj++) {
          Method* m = k_new_methods->at(nj);
          if (k_old_method->name() != m->name()) {
            // reached another method name so no more overloaded methods
            method_was = deleted;
            break;
          }
          if (k_old_method->signature() == m->signature()) {
            // found a match so swap the methods
            k_new_methods->at_put(ni, m);
            k_new_methods->at_put(nj, k_new_method);
            k_new_method = m;
            method_was = matched;
            break;
          }
        }

        if (nj >= n_new_methods) {
          // reached the end without a match; so method was deleted
          method_was = deleted;
        }
      }
    }

    switch (method_was) {
    case matched:
      // methods match, be sure modifiers do too
      old_flags = (jushort) k_old_method->access_flags().get_flags();
      new_flags = (jushort) k_new_method->access_flags().get_flags();
      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
        log_info(redefine, class, normalize)
          ("redefined class %s  method %s modifiers error: modifiers changed from %d to %d",
           the_class->external_name(), k_old_method->name_and_sig_as_C_string(), old_flags, new_flags);
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
      }
      {
        // Transfer the old method's idnum to the matching new method,
        // swapping idnums with any new method that already holds it.
        u2 new_num = k_new_method->method_idnum();
        u2 old_num = k_old_method->method_idnum();
        if (new_num != old_num) {
          Method* idnum_owner = scratch_class->method_with_idnum(old_num);
          if (idnum_owner != nullptr) {
            // There is already a method assigned this idnum -- switch them
            // Take current and original idnum from the new_method
            idnum_owner->set_method_idnum(new_num);
            idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
          }
          // Take current and original idnum from the old_method
          k_new_method->set_method_idnum(old_num);
          k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum());
          if (thread->has_pending_exception()) {
            return JVMTI_ERROR_OUT_OF_MEMORY;
          }
        }
      }
      log_trace(redefine, class, normalize)
        ("Method matched: new: %s [%d] == old: %s [%d]",
         k_new_method->name_and_sig_as_C_string(), ni, k_old_method->name_and_sig_as_C_string(), oi);
      // advance to next pair of methods
      ++oi;
      ++ni;
      break;
    case added:
      // method added, see if it is OK
      if (!can_add_or_delete(k_new_method)) {
        log_info(redefine, class, normalize)
          ("redefined class %s methods error: added method: %s [%d]",
           the_class->external_name(), k_new_method->name_and_sig_as_C_string(), ni);
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
      }
      {
        // Assign a fresh idnum to the added method.
        u2 num = the_class->next_method_idnum();
        if (num == ConstMethod::UNSET_IDNUM) {
          // cannot add any more methods
          log_info(redefine, class, normalize)
            ("redefined class %s methods error: can't create ID for new method %s [%d]",
             the_class->external_name(), k_new_method->name_and_sig_as_C_string(), ni);
          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
        }
        u2 new_num = k_new_method->method_idnum();
        Method* idnum_owner = scratch_class->method_with_idnum(num);
        if (idnum_owner != nullptr) {
          // There is already a method assigned this idnum -- switch them
          // Take current and original idnum from the new_method
          idnum_owner->set_method_idnum(new_num);
          idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
        }
        k_new_method->set_method_idnum(num);
        k_new_method->set_orig_method_idnum(num);
        if (thread->has_pending_exception()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        }
      }
      log_trace(redefine, class, normalize)
        ("Method added: new: %s [%d]", k_new_method->name_and_sig_as_C_string(), ni);
      ++ni; // advance to next new method
      break;
    case deleted:
      // method deleted, see if it is OK
      if (!can_add_or_delete(k_old_method)) {
        log_info(redefine, class, normalize)
          ("redefined class %s methods error: deleted method %s [%d]",
           the_class->external_name(), k_old_method->name_and_sig_as_C_string(), oi);
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
      }
      log_trace(redefine, class, normalize)
        ("Method deleted: old: %s [%d]", k_old_method->name_and_sig_as_C_string(), oi);
      ++oi; // advance to next old method
      break;
    default:
      ShouldNotReachHere();
    }
  }

  return JVMTI_ERROR_NONE;
}
1252 
1253 
1254 // Find new constant pool index value for old constant pool index value
1255 // by searching the index map. Returns zero (0) if there is no mapped
1256 // value for the old constant pool index.
1257 u2 VM_RedefineClasses::find_new_index(int old_index) {
1258   if (_index_map_count == 0) {
1259     // map is empty so nothing can be found
1260     return 0;
1261   }
1262 
1263   if (old_index < 1 || old_index >= _index_map_p->length()) {
1264     // The old_index is out of range so it is not mapped. This should
1265     // not happen in regular constant pool merging use, but it can
1266     // happen if a corrupt annotation is processed.
1267     return 0;
1268   }
1269 
1270   int value = _index_map_p->at(old_index);
1271   if (value == -1) {
1272     // the old_index is not mapped
1273     return 0;
1274   }
1275 
1276   // constant pool indices are u2, unless the merged constant pool overflows which
1277   // we don't check for.
1278   return checked_cast<u2>(value);
1279 } // end find_new_index()
1280 
1281 
1282 // Find new bootstrap specifier index value for old bootstrap specifier index
1283 // value by searching the index map. Returns unused index (-1) if there is
1284 // no mapped value for the old bootstrap specifier index.
1285 int VM_RedefineClasses::find_new_operand_index(int old_index) {
1286   if (_operands_index_map_count == 0) {
1287     // map is empty so nothing can be found
1288     return -1;
1289   }
1290 
1291   if (old_index == -1 || old_index >= _operands_index_map_p->length()) {
1292     // The old_index is out of range so it is not mapped.
1293     // This should not happen in regular constant pool merging use.
1294     return -1;
1295   }
1296 
1297   int value = _operands_index_map_p->at(old_index);
1298   if (value == -1) {
1299     // the old_index is not mapped
1300     return -1;
1301   }
1302 
1303   return value;
1304 } // end find_new_operand_index()
1305 
1306 
1307 // Returns true if the current mismatch is due to a resolved/unresolved
1308 // class pair. Otherwise, returns false.
1309 bool VM_RedefineClasses::is_unresolved_class_mismatch(const constantPoolHandle& cp1,
1310        int index1, const constantPoolHandle& cp2, int index2) {
1311 
1312   jbyte t1 = cp1->tag_at(index1).value();
1313   if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
1314     return false;  // wrong entry type; not our special case
1315   }
1316 
1317   jbyte t2 = cp2->tag_at(index2).value();
1318   if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
1319     return false;  // wrong entry type; not our special case
1320   }
1321 
1322   if (t1 == t2) {
1323     return false;  // not a mismatch; not our special case
1324   }
1325 
1326   char *s1 = cp1->klass_name_at(index1)->as_C_string();
1327   char *s2 = cp2->klass_name_at(index2)->as_C_string();
1328   if (strcmp(s1, s2) != 0) {
1329     return false;  // strings don't match; not our special case
1330   }
1331 
1332   return true;  // made it through the gauntlet; this is our special case
1333 } // end is_unresolved_class_mismatch()
1334 
1335 
1336 // The bug 6214132 caused the verification to fail.
1337 // 1. What's done in RedefineClasses() before verification:
1338 //  a) A reference to the class being redefined (_the_class) and a
1339 //     reference to new version of the class (_scratch_class) are
1340 //     saved here for use during the bytecode verification phase of
1341 //     RedefineClasses.
1342 //  b) The _java_mirror field from _the_class is copied to the
1343 //     _java_mirror field in _scratch_class. This means that a jclass
1344 //     returned for _the_class or _scratch_class will refer to the
1345 //     same Java mirror. The verifier will see the "one true mirror"
1346 //     for the class being verified.
1347 // 2. See comments in JvmtiThreadState for what is done during verification.
1348 
// RAII helper for the verification phase of RedefineClasses: records the
// (the_class, scratch_class) pair in the JvmtiThreadState and swaps
// the_class's java mirror into scratch_class for the duration of the
// scope; the destructor undoes both steps. See the bug 6214132 notes
// preceding this class for the rationale.
class RedefineVerifyMark : public StackObj {
 private:
  JvmtiThreadState* _state;
  Klass*            _scratch_class;
  // After construction this holds scratch_class's original mirror,
  // kept so the destructor can swap it back.
  OopHandle         _scratch_mirror;

 public:

  RedefineVerifyMark(Klass* the_class, Klass* scratch_class,
                     JvmtiThreadState* state) : _state(state), _scratch_class(scratch_class)
  {
    _state->set_class_versions_map(the_class, scratch_class);
    _scratch_mirror = the_class->java_mirror_handle();  // this is a copy that is swapped
    _scratch_class->swap_java_mirror_handle(_scratch_mirror);
  }

  ~RedefineVerifyMark() {
    // Restore the scratch class's mirror, so when scratch_class is removed
    // the correct mirror pointing to it can be cleared.
    _scratch_class->swap_java_mirror_handle(_scratch_mirror);
    _state->clear_class_versions_map();
  }
};
1372 
1373 
// Parse, verify, and prepare the new class versions supplied in _class_defs.
// Each new version is created as a "scratch" InstanceKlass that is NOT added
// to the system dictionary; it is then compared against the live class,
// verified, has its constant pool merged with the old one, and is
// rewritten/linked. On any failure the pending exception is cleared and
// translated into the closest jvmtiError; the caller deallocates whatever
// scratch classes were created (they are recorded in _scratch_classes).
jvmtiError VM_RedefineClasses::load_new_class_versions() {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (InstanceKlass**)
    os::malloc(sizeof(InstanceKlass*) * _class_count, mtClass);
  if (_scratch_classes == nullptr) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = nullptr;
  }

  JavaThread* current = JavaThread::current();
  ResourceMark rm(current);

  JvmtiThreadState *state = JvmtiThreadState::state_for(current);
  // state can only be null if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != nullptr, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(current);
    InstanceKlass* the_class = get_ik(_class_defs[i].klass);

    log_debug(redefine, class, load)
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
       the_class->external_name(), _class_load_kind, os::available_memory() >> 10);

    // Wrap the agent-supplied class bytes in a stream for the parser.
    ClassFileStream st((u1*)_class_defs[i].class_bytes,
                       _class_defs[i].class_byte_count,
                       "__VM_RedefineClasses__",
                       ClassFileStream::verify);

    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(the_class, _class_load_kind);

    JavaThread* THREAD = current; // For exception macros.
    ExceptionMark em(THREAD);
    Handle protection_domain(THREAD, the_class->protection_domain());
    ClassLoadInfo cl_info(protection_domain);
    // Parse and create a class from the bytes, but this class isn't added
    // to the dictionary, so do not call resolve_from_stream.
    InstanceKlass* scratch_class = KlassFactory::create_from_stream(&st,
                                                      the_class->name(),
                                                      the_class->class_loader_data(),
                                                      cl_info,
                                                      THREAD);

    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = scratch_class;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("create_from_stream exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;

      // Translate the parse/creation exception to the closest jvmtiError.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        oop message = java_lang_Throwable::message(PENDING_EXCEPTION);
        // Include the exception message in the log when one is present.
        if (message != nullptr) {
          char* ex_msg = java_lang_String::as_utf8_string(message);
          log_info(redefine, class, load, exceptions)("link_class exception: '%s %s'",
                   ex_name->as_C_string(), ex_msg);
        } else {
          log_info(redefine, class, load, exceptions)("link_class exception: '%s'",
                   ex_name->as_C_string());
        }
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
          return JVMTI_ERROR_INVALID_CLASS;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(the_class, scratch_class, state);
      Verifier::verify(scratch_class, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("verify_byte_codes exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Merge the old and new constant pools and rewrite the scratch
    // class's constant pool references as needed.
    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

#ifdef ASSERT
    {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(the_class, scratch_class, state);
        Verifier::verify(scratch_class, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        log_info(redefine, class, load, exceptions)
          ("verify_byte_codes post merge-CP exception: '%s'", ex_name->as_C_string());
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }
#endif // ASSERT

    // Rewrite the scratch class's bytecodes (cp cache) and link its methods.
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)
        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    log_debug(redefine, class, load)
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)", the_class->external_name(), os::available_memory() >> 10);
  }

  return JVMTI_ERROR_NONE;
}
1575 
1576 
1577 // Map old_index to new_index as needed. scratch_cp is only needed
1578 // for log calls.
1579 void VM_RedefineClasses::map_index(const constantPoolHandle& scratch_cp,
1580        int old_index, int new_index) {
1581   if (find_new_index(old_index) != 0) {
1582     // old_index is already mapped
1583     return;
1584   }
1585 
1586   if (old_index == new_index) {
1587     // no mapping is needed
1588     return;
1589   }
1590 
1591   _index_map_p->at_put(old_index, new_index);
1592   _index_map_count++;
1593 
1594   log_trace(redefine, class, constantpool)
1595     ("mapped tag %d at index %d to %d", scratch_cp->tag_at(old_index).value(), old_index, new_index);
1596 } // end map_index()
1597 
1598 
1599 // Map old_index to new_index as needed.
1600 void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
1601   if (find_new_operand_index(old_index) != -1) {
1602     // old_index is already mapped
1603     return;
1604   }
1605 
1606   if (old_index == new_index) {
1607     // no mapping is needed
1608     return;
1609   }
1610 
1611   _operands_index_map_p->at_put(old_index, new_index);
1612   _operands_index_map_count++;
1613 
1614   log_trace(redefine, class, constantpool)("mapped bootstrap specifier at index %d to %d", old_index, new_index);
1615 } // end map_index()
1616 
1617 
1618 // Merge old_cp and scratch_cp and return the results of the merge via
1619 // merge_cp_p. The number of entries in *merge_cp_p is returned via
1620 // merge_cp_length_p. The entries in old_cp occupy the same locations
1621 // in *merge_cp_p. Also creates a map of indices from entries in
1622 // scratch_cp to the corresponding entry in *merge_cp_p. Index map
1623 // entries are only created for entries in scratch_cp that occupy a
// different location in *merge_cp_p.
bool VM_RedefineClasses::merge_constant_pools(const constantPoolHandle& old_cp,
       const constantPoolHandle& scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  // Robustness checks: the caller must supply a pre-allocated merge
  // constant pool (and its length cell) with room for at least old_cp.
  if (merge_cp_p == nullptr) {
    assert(false, "caller must provide scratch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == nullptr) {
    assert(false, "caller must provide scratch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  log_info(redefine, class, constantpool)("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(), scratch_cp->length());

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i;  // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        (*merge_cp_p)->temp_unresolved_klass_at_put(old_i,
          old_cp->klass_name_index_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i);
        old_i++;
        break;

      default:
        // just copy the entry to *merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i);
        break;
      }
    } // end for each old_cp entry

    // Copy old_cp's operand (bootstrap specifier) array and append
    // scratch_cp's operands; the operand merge is completed by
    // finalize_operands_merge() below.
    ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_false);
    (*merge_cp_p)->extend_operands(scratch_cp, CHECK_false);

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  log_debug(redefine, class, constantpool)("after pass 0: merge_cp_len=%d", *merge_cp_length_p);

  int scratch_i;  // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common index range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p, scratch_i);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
                                              *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p);
    }
  }

  log_debug(redefine, class, constantpool)
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
     *merge_cp_length_p, scratch_i, _index_map_count);

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p);
    }

    log_debug(redefine, class, constantpool)
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
       *merge_cp_length_p, scratch_i, _index_map_count);
  }
  // Complete the merge of the operand (bootstrap specifier) arrays that
  // were copied/appended in Pass 0.
  finalize_operands_merge(*merge_cp_p, CHECK_false);

  return true;
} // end merge_constant_pools()
1804 
1805 
1806 // Scoped object to clean up the constant pool(s) created for merging
1807 class MergeCPCleaner {
1808   ClassLoaderData*   _loader_data;
1809   ConstantPool*      _cp;
1810   ConstantPool*      _scratch_cp;
1811  public:
1812   MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1813                  _loader_data(loader_data), _cp(merge_cp), _scratch_cp(nullptr) {}
1814   ~MergeCPCleaner() {
1815     _loader_data->add_to_deallocate_list(_cp);
1816     if (_scratch_cp != nullptr) {
1817       _loader_data->add_to_deallocate_list(_scratch_cp);
1818     }
1819   }
1820   void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1821 };
1822 
1823 // Merge constant pools between the_class and scratch_class and
1824 // potentially rewrite bytecodes in scratch_class to use the merged
1825 // constant pool.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             InstanceKlass* the_class, InstanceKlass* scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing.  It
  // should be marked safe before discarding it. Even though
  // garbage,  if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  // From here on, merge_cp_oop (and later scratch_cp) is deallocated by
  // the cleaner on every exit path from this method.
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pools (may keep scratch_cp)
  merge_cp->increment_and_save_version(old_cp->version());
  scratch_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  // Reset the scratch->merge index map; -1 marks "not mapped" and
  // find_new_index() reports unmapped entries as 0.
  _index_map_count = 0;
  _index_map_p = new intArray(scratch_cp->length(), scratch_cp->length(), -1);

  // Same for the operand (bootstrap specifier) index map; here -1 is the
  // "not mapped" sentinel since operand index 0 is valid.
  _operands_cur_length = ConstantPool::operand_array_length(old_cp->operands());
  _operands_index_map_count = 0;
  int operands_index_map_len = ConstantPool::operand_array_length(scratch_cp->operands());
  _operands_index_map_p = new intArray(operands_index_map_len, operands_index_map_len, -1);

  // reference to the cp holder is needed for copy_operands()
  merge_cp->set_pool_holder(scratch_class);
  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  merge_cp->set_pool_holder(nullptr);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  // Set dynamic constants attribute from the original CP.
  if (old_cp->has_dynamic_constant()) {
    scratch_cp->set_has_dynamic_constant();
  }

  log_info(redefine, class, constantpool)("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count);

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    // Copy attributes from scratch_cp to merge_cp
    merge_cp->copy_fields(scratch_cp());

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (log_is_enabled(Trace, redefine, class, constantpool)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          log_trace(redefine, class, constantpool)("index_map[%d]: old=%d new=%d", count, i, value);
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Copy attributes from scratch_cp to merge_cp (should be done after rewrite_cp_refs())
    merge_cp->copy_fields(scratch_cp());

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1969 
1970 
1971 // Rewrite constant pool references in klass scratch_class.
1972 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {
1973 
1974   // rewrite constant pool references in the nest attributes:
1975   if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1976     // propagate failure back to caller
1977     return false;
1978   }
1979 
1980   // rewrite constant pool references in the Record attribute:
1981   if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
1982     // propagate failure back to caller
1983     return false;
1984   }
1985 
1986   // rewrite constant pool references in the PermittedSubclasses attribute:
1987   if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1988     // propagate failure back to caller
1989     return false;
1990   }
1991 
1992   // rewrite constant pool references in the Preload attribute:
1993   if (!rewrite_cp_refs_in_preload_attribute(scratch_class)) {
1994     // propagate failure back to caller
1995     return false;
1996   }
1997 
1998   // rewrite constant pool references in the methods:
1999   if (!rewrite_cp_refs_in_methods(scratch_class)) {
2000     // propagate failure back to caller
2001     return false;
2002   }
2003 
2004   // rewrite constant pool references in the class_annotations:
2005   if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
2006     // propagate failure back to caller
2007     return false;
2008   }
2009 
2010   // rewrite constant pool references in the fields_annotations:
2011   if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
2012     // propagate failure back to caller
2013     return false;
2014   }
2015 
2016   // rewrite constant pool references in the methods_annotations:
2017   if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {
2018     // propagate failure back to caller
2019     return false;
2020   }
2021 
2022   // rewrite constant pool references in the methods_parameter_annotations:
2023   if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class)) {
2024     // propagate failure back to caller
2025     return false;
2026   }
2027 
2028   // rewrite constant pool references in the methods_default_annotations:
2029   if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class)) {
2030     // propagate failure back to caller
2031     return false;
2032   }
2033 
2034   // rewrite constant pool references in the class_type_annotations:
2035   if (!rewrite_cp_refs_in_class_type_annotations(scratch_class)) {
2036     // propagate failure back to caller
2037     return false;
2038   }
2039 
2040   // rewrite constant pool references in the fields_type_annotations:
2041   if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class)) {
2042     // propagate failure back to caller
2043     return false;
2044   }
2045 
2046   // rewrite constant pool references in the methods_type_annotations:
2047   if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class)) {
2048     // propagate failure back to caller
2049     return false;
2050   }
2051 
2052   // There can be type annotations in the Code part of a method_info attribute.
2053   // These annotations are not accessible, even by reflection.
2054   // Currently they are not even parsed by the ClassFileParser.
2055   // If runtime access is added they will also need to be rewritten.
2056 
2057   // rewrite source file name index:
2058   u2 source_file_name_idx = scratch_class->source_file_name_index();
2059   if (source_file_name_idx != 0) {
2060     u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
2061     if (new_source_file_name_idx != 0) {
2062       scratch_class->set_source_file_name_index(new_source_file_name_idx);
2063     }
2064   }
2065 
2066   // rewrite class generic signature index:
2067   u2 generic_signature_index = scratch_class->generic_signature_index();
2068   if (generic_signature_index != 0) {
2069     u2 new_generic_signature_index = find_new_index(generic_signature_index);
2070     if (new_generic_signature_index != 0) {
2071       scratch_class->set_generic_signature_index(new_generic_signature_index);
2072     }
2073   }
2074 
2075   return true;
2076 } // end rewrite_cp_refs()
2077 
2078 // Rewrite constant pool references in the NestHost and NestMembers attributes.
2079 bool VM_RedefineClasses::rewrite_cp_refs_in_nest_attributes(
2080        InstanceKlass* scratch_class) {
2081 
2082   u2 cp_index = scratch_class->nest_host_index();
2083   if (cp_index != 0) {
2084     scratch_class->set_nest_host_index(find_new_index(cp_index));
2085   }
2086   Array<u2>* nest_members = scratch_class->nest_members();
2087   for (int i = 0; i < nest_members->length(); i++) {
2088     u2 cp_index = nest_members->at(i);
2089     nest_members->at_put(i, find_new_index(cp_index));
2090   }
2091   return true;
2092 }
2093 
2094 // Rewrite constant pool references in the Record attribute.
2095 bool VM_RedefineClasses::rewrite_cp_refs_in_record_attribute(InstanceKlass* scratch_class) {
2096   Array<RecordComponent*>* components = scratch_class->record_components();
2097   if (components != nullptr) {
2098     for (int i = 0; i < components->length(); i++) {
2099       RecordComponent* component = components->at(i);
2100       u2 cp_index = component->name_index();
2101       component->set_name_index(find_new_index(cp_index));
2102       cp_index = component->descriptor_index();
2103       component->set_descriptor_index(find_new_index(cp_index));
2104       cp_index = component->generic_signature_index();
2105       if (cp_index != 0) {
2106         component->set_generic_signature_index(find_new_index(cp_index));
2107       }
2108 
2109       AnnotationArray* annotations = component->annotations();
2110       if (annotations != nullptr && annotations->length() != 0) {
2111         int byte_i = 0;  // byte index into annotations
2112         if (!rewrite_cp_refs_in_annotations_typeArray(annotations, byte_i)) {
2113           log_debug(redefine, class, annotation)("bad record_component_annotations at %d", i);
2114           // propagate failure back to caller
2115           return false;
2116         }
2117       }
2118 
2119       AnnotationArray* type_annotations = component->type_annotations();
2120       if (type_annotations != nullptr && type_annotations->length() != 0) {
2121         int byte_i = 0;  // byte index into annotations
2122         if (!rewrite_cp_refs_in_annotations_typeArray(type_annotations, byte_i)) {
2123           log_debug(redefine, class, annotation)("bad record_component_type_annotations at %d", i);
2124           // propagate failure back to caller
2125           return false;
2126         }
2127       }
2128     }
2129   }
2130   return true;
2131 }
2132 
2133 // Rewrite constant pool references in the PermittedSubclasses attribute.
2134 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
2135        InstanceKlass* scratch_class) {
2136 
2137   Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
2138   assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
2139   for (int i = 0; i < permitted_subclasses->length(); i++) {
2140     u2 cp_index = permitted_subclasses->at(i);
2141     permitted_subclasses->at_put(i, find_new_index(cp_index));
2142   }
2143   return true;
2144 }
2145 
2146 // Rewrite constant pool references in the Preload attribute.
2147 bool VM_RedefineClasses::rewrite_cp_refs_in_preload_attribute(
2148        InstanceKlass* scratch_class) {
2149 
2150   Array<u2>* preload_classes = scratch_class->preload_classes();
2151   assert(preload_classes != nullptr, "unexpected null preload_classes");
2152   for (int i = 0; i < preload_classes->length(); i++) {
2153     u2 cp_index = preload_classes->at(i);
2154     preload_classes->at_put(i, find_new_index(cp_index));
2155   }
2156   return true;
2157 }
2158 
// Rewrite constant pool references in the methods.
// Iterates over all of scratch_class's methods and remaps their bytecode
// constant pool operands via rewrite_cp_refs_in_method(). Returns false
// (after clearing any pending exception) on failure; per the comment
// below, the caller maps a false return to JVMTI_ERROR_INTERNAL.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == nullptr || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  JavaThread* THREAD = JavaThread::current(); // For exception macros.
  ExceptionMark em(THREAD);

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    // May replace the method (ldc -> ldc_w growth) and may leave a
    // pending exception; both cases are handled below.
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
      // even in the case of an exception.  original method is on the
      // deallocation list.
      methods->at_put(i, new_method());
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string());
      // Need to clear pending exception here as the super caller sets
      // the JVMTI_ERROR_INTERNAL if the returned value is false.
      CLEAR_PENDING_EXCEPTION;
      return false;
    }
  }

  return true;
}
2195 
2196 
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// Walks the method's bytecodes and remaps every constant pool index
// operand through find_new_index(). An ldc whose mapped index no longer
// fits in one byte is grown into an ldc_w via the Relocator, which
// produces a replacement Method*; that replacement is handed back
// through *new_method_p (which otherwise stays a null handle).
// TRAPS: Relocator::insert_space_at() can post an exception (CHECK).
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a NoSafepointVerifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  NoSafepointVerifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc has a one-byte constant pool index operand.
        u1 cp_index = *(bcp + 1);
        u2 new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            log_trace(redefine, class, constantpool)
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            // We checked that new_index fits in a u1 so this cast is safe
            *(bcp + 1) = (u1)new_index;
          } else {
            log_trace(redefine, class, constantpool)
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, nullptr /* no RelocatorListener needed */);
            methodHandle m;
            {
              // insert_space_at() can allocate / reach a safepoint, so
              // the NoSafepointVerifier must be paused around it.
              PauseNoSafepointVerifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_aconst_init   : // fall through
      case Bytecodes::_withfield      : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokedynamic  : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        u2 new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          log_trace(redefine, class, constantpool)
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),p2i(bcp), cp_index, new_index);
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
      default:
        // all other bytecodes have no constant pool index operand
        break;
    }
  } // end for each bytecode
} // end rewrite_cp_refs_in_method()
2328 
2329 
2330 // Rewrite constant pool references in the class_annotations field.
2331 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(InstanceKlass* scratch_class) {
2332 
2333   AnnotationArray* class_annotations = scratch_class->class_annotations();
2334   if (class_annotations == nullptr || class_annotations->length() == 0) {
2335     // no class_annotations so nothing to do
2336     return true;
2337   }
2338 
2339   log_debug(redefine, class, annotation)("class_annotations length=%d", class_annotations->length());
2340 
2341   int byte_i = 0;  // byte index into class_annotations
2342   return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i);
2343 }
2344 
2345 
2346 // Rewrite constant pool references in an annotations typeArray. This
2347 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
2348 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
2349 //
2350 // annotations_typeArray {
2351 //   u2 num_annotations;
2352 //   annotation annotations[num_annotations];
2353 // }
2354 //
2355 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
2356        AnnotationArray* annotations_typeArray, int &byte_i_ref) {
2357 
2358   if ((byte_i_ref + 2) > annotations_typeArray->length()) {
2359     // not enough room for num_annotations field
2360     log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2361     return false;
2362   }
2363 
2364   u2 num_annotations = Bytes::get_Java_u2((address)
2365                          annotations_typeArray->adr_at(byte_i_ref));
2366   byte_i_ref += 2;
2367 
2368   log_debug(redefine, class, annotation)("num_annotations=%d", num_annotations);
2369 
2370   int calc_num_annotations = 0;
2371   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2372     if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray, byte_i_ref)) {
2373       log_debug(redefine, class, annotation)("bad annotation_struct at %d", calc_num_annotations);
2374       // propagate failure back to caller
2375       return false;
2376     }
2377   }
2378   assert(num_annotations == calc_num_annotations, "sanity check");
2379 
2380   return true;
2381 } // end rewrite_cp_refs_in_annotations_typeArray()
2382 
2383 
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// byte_i_ref is the cursor into the raw bytes and is advanced past
// everything consumed. Returns false on a malformed/truncated struct.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref) {
  // Bounds check covers only the fixed-size header (type_index +
  // num_element_value_pairs); each pair is checked inside the loop.
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    log_debug(redefine, class, annotation)("length() is too small for annotation_struct");
    return false;
  }

  // Remaps type_index in place (if mapped) and advances the cursor.
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "type_index");

  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  log_debug(redefine, class, annotation)
    ("type_index=%d  num_element_value_pairs=%d", type_index, num_element_value_pairs);

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      log_debug(redefine, class, annotation)("length() is too small for element_name_index");
      return false;
    }

    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "element_name_index");

    log_debug(redefine, class, annotation)("element_name_index=%d", element_name_index);

    // element_value does its own bounds checking and may recurse for
    // nested annotations and arrays.
    if (!rewrite_cp_refs_in_element_value(annotations_typeArray, byte_i_ref)) {
      log_debug(redefine, class, annotation)("bad element_value at %d", calc_num_element_value_pairs);
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
2442 
2443 
2444 // Rewrite a constant pool reference at the current position in
2445 // annotations_typeArray if needed. Returns the original constant
2446 // pool reference if a rewrite was not needed or the new constant
2447 // pool reference if a rewrite was needed.
2448 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
2449      AnnotationArray* annotations_typeArray, int &byte_i_ref,
2450      const char * trace_mesg) {
2451 
2452   address cp_index_addr = (address)
2453     annotations_typeArray->adr_at(byte_i_ref);
2454   u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
2455   u2 new_cp_index = find_new_index(old_cp_index);
2456   if (new_cp_index != 0) {
2457     log_debug(redefine, class, annotation)("mapped old %s=%d", trace_mesg, old_cp_index);
2458     Bytes::put_Java_u2(cp_index_addr, new_cp_index);
2459     old_cp_index = new_cp_index;
2460   }
2461   byte_i_ref += 2;
2462   return old_cp_index;
2463 }
2464 
2465 
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// The tag selects which union member follows; each case below bounds
// checks its own payload. byte_i_ref is advanced past everything
// consumed. Recurses for nested annotations ('@') and arrays ('[').
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    log_debug(redefine, class, annotation)("length() is too small for a tag");
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  log_debug(redefine, class, annotation)("tag='%c'", tag);

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case JVM_SIGNATURE_BYTE:
    case JVM_SIGNATURE_CHAR:
    case JVM_SIGNATURE_DOUBLE:
    case JVM_SIGNATURE_FLOAT:
    case JVM_SIGNATURE_INT:
    case JVM_SIGNATURE_LONG:
    case JVM_SIGNATURE_SHORT:
    case JVM_SIGNATURE_BOOLEAN:

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        log_debug(redefine, class, annotation)("length() is too small for a const_value_index");
        return false;
      }

      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "const_value_index");

      log_debug(redefine, class, annotation)("const_value_index=%d", const_value_index);
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        log_debug(redefine, class, annotation)("length() is too small for a enum_const_value");
        return false;
      }

      // enum_const_value carries two consecutive cp indices
      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "type_name_index");

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "const_name_index");

      log_debug(redefine, class, annotation)
        ("type_name_index=%d  const_name_index=%d", type_name_index, const_name_index);
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        log_debug(redefine, class, annotation)("length() is too small for a class_info_index");
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "class_info_index");

      log_debug(redefine, class, annotation)("class_info_index=%d", class_info_index);
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray, byte_i_ref)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case JVM_SIGNATURE_ARRAY:
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        log_debug(redefine, class, annotation)("length() is too small for a num_values field");
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      log_debug(redefine, class, annotation)("num_values=%d", num_values);

      // recursively rewrite each element of the array
      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        if (!rewrite_cp_refs_in_element_value(annotations_typeArray, byte_i_ref)) {
          log_debug(redefine, class, annotation)("bad nested element_value at %d", calc_num_values);
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      // unknown tag: treat the element_value (and the attribute) as bad
      log_debug(redefine, class, annotation)("bad tag=0x%x", tag);
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
2612 
2613 
2614 // Rewrite constant pool references in a fields_annotations field.
2615 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
2616        InstanceKlass* scratch_class) {
2617 
2618   Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
2619 
2620   if (fields_annotations == nullptr || fields_annotations->length() == 0) {
2621     // no fields_annotations so nothing to do
2622     return true;
2623   }
2624 
2625   log_debug(redefine, class, annotation)("fields_annotations length=%d", fields_annotations->length());
2626 
2627   for (int i = 0; i < fields_annotations->length(); i++) {
2628     AnnotationArray* field_annotations = fields_annotations->at(i);
2629     if (field_annotations == nullptr || field_annotations->length() == 0) {
2630       // this field does not have any annotations so skip it
2631       continue;
2632     }
2633 
2634     int byte_i = 0;  // byte index into field_annotations
2635     if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i)) {
2636       log_debug(redefine, class, annotation)("bad field_annotations at %d", i);
2637       // propagate failure back to caller
2638       return false;
2639     }
2640   }
2641 
2642   return true;
2643 } // end rewrite_cp_refs_in_fields_annotations()
2644 
2645 
2646 // Rewrite constant pool references in a methods_annotations field.
2647 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
2648        InstanceKlass* scratch_class) {
2649 
2650   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2651     Method* m = scratch_class->methods()->at(i);
2652     AnnotationArray* method_annotations = m->constMethod()->method_annotations();
2653 
2654     if (method_annotations == nullptr || method_annotations->length() == 0) {
2655       // this method does not have any annotations so skip it
2656       continue;
2657     }
2658 
2659     int byte_i = 0;  // byte index into method_annotations
2660     if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i)) {
2661       log_debug(redefine, class, annotation)("bad method_annotations at %d", i);
2662       // propagate failure back to caller
2663       return false;
2664     }
2665   }
2666 
2667   return true;
2668 } // end rewrite_cp_refs_in_methods_annotations()
2669 
2670 
2671 // Rewrite constant pool references in a methods_parameter_annotations
2672 // field. This "structure" is adapted from the
2673 // RuntimeVisibleParameterAnnotations_attribute described in section
2674 // 4.8.17 of the 2nd-edition of the VM spec:
2675 //
2676 // methods_parameter_annotations_typeArray {
2677 //   u1 num_parameters;
2678 //   {
2679 //     u2 num_annotations;
2680 //     annotation annotations[num_annotations];
2681 //   } parameter_annotations[num_parameters];
2682 // }
2683 //
2684 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
2685        InstanceKlass* scratch_class) {
2686 
2687   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2688     Method* m = scratch_class->methods()->at(i);
2689     AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
2690     if (method_parameter_annotations == nullptr
2691         || method_parameter_annotations->length() == 0) {
2692       // this method does not have any parameter annotations so skip it
2693       continue;
2694     }
2695 
2696     if (method_parameter_annotations->length() < 1) {
2697       // not enough room for a num_parameters field
2698       log_debug(redefine, class, annotation)("length() is too small for a num_parameters field at %d", i);
2699       return false;
2700     }
2701 
2702     int byte_i = 0;  // byte index into method_parameter_annotations
2703 
2704     u1 num_parameters = method_parameter_annotations->at(byte_i);
2705     byte_i++;
2706 
2707     log_debug(redefine, class, annotation)("num_parameters=%d", num_parameters);
2708 
2709     int calc_num_parameters = 0;
2710     for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
2711       if (!rewrite_cp_refs_in_annotations_typeArray(method_parameter_annotations, byte_i)) {
2712         log_debug(redefine, class, annotation)("bad method_parameter_annotations at %d", calc_num_parameters);
2713         // propagate failure back to caller
2714         return false;
2715       }
2716     }
2717     assert(num_parameters == calc_num_parameters, "sanity check");
2718   }
2719 
2720   return true;
2721 } // end rewrite_cp_refs_in_methods_parameter_annotations()
2722 
2723 
2724 // Rewrite constant pool references in a methods_default_annotations
2725 // field. This "structure" is adapted from the AnnotationDefault_attribute
2726 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2727 //
2728 // methods_default_annotations_typeArray {
2729 //   element_value default_value;
2730 // }
2731 //
2732 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2733        InstanceKlass* scratch_class) {
2734 
2735   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2736     Method* m = scratch_class->methods()->at(i);
2737     AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
2738     if (method_default_annotations == nullptr
2739         || method_default_annotations->length() == 0) {
2740       // this method does not have any default annotations so skip it
2741       continue;
2742     }
2743 
2744     int byte_i = 0;  // byte index into method_default_annotations
2745 
2746     if (!rewrite_cp_refs_in_element_value(
2747            method_default_annotations, byte_i)) {
2748       log_debug(redefine, class, annotation)("bad default element_value at %d", i);
2749       // propagate failure back to caller
2750       return false;
2751     }
2752   }
2753 
2754   return true;
2755 } // end rewrite_cp_refs_in_methods_default_annotations()
2756 
2757 
2758 // Rewrite constant pool references in a class_type_annotations field.
2759 bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
2760        InstanceKlass* scratch_class) {
2761 
2762   AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
2763   if (class_type_annotations == nullptr || class_type_annotations->length() == 0) {
2764     // no class_type_annotations so nothing to do
2765     return true;
2766   }
2767 
2768   log_debug(redefine, class, annotation)("class_type_annotations length=%d", class_type_annotations->length());
2769 
2770   int byte_i = 0;  // byte index into class_type_annotations
2771   return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
2772       byte_i, "ClassFile");
2773 } // end rewrite_cp_refs_in_class_type_annotations()
2774 
2775 
2776 // Rewrite constant pool references in a fields_type_annotations field.
2777 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(InstanceKlass* scratch_class) {
2778 
2779   Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
2780   if (fields_type_annotations == nullptr || fields_type_annotations->length() == 0) {
2781     // no fields_type_annotations so nothing to do
2782     return true;
2783   }
2784 
2785   log_debug(redefine, class, annotation)("fields_type_annotations length=%d", fields_type_annotations->length());
2786 
2787   for (int i = 0; i < fields_type_annotations->length(); i++) {
2788     AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
2789     if (field_type_annotations == nullptr || field_type_annotations->length() == 0) {
2790       // this field does not have any annotations so skip it
2791       continue;
2792     }
2793 
2794     int byte_i = 0;  // byte index into field_type_annotations
2795     if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
2796            byte_i, "field_info")) {
2797       log_debug(redefine, class, annotation)("bad field_type_annotations at %d", i);
2798       // propagate failure back to caller
2799       return false;
2800     }
2801   }
2802 
2803   return true;
2804 } // end rewrite_cp_refs_in_fields_type_annotations()
2805 
2806 
2807 // Rewrite constant pool references in a methods_type_annotations field.
2808 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
2809        InstanceKlass* scratch_class) {
2810 
2811   for (int i = 0; i < scratch_class->methods()->length(); i++) {
2812     Method* m = scratch_class->methods()->at(i);
2813     AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
2814 
2815     if (method_type_annotations == nullptr || method_type_annotations->length() == 0) {
2816       // this method does not have any annotations so skip it
2817       continue;
2818     }
2819 
2820     log_debug(redefine, class, annotation)("methods type_annotations length=%d", method_type_annotations->length());
2821 
2822     int byte_i = 0;  // byte index into method_type_annotations
2823     if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
2824            byte_i, "method_info")) {
2825       log_debug(redefine, class, annotation)("bad method_type_annotations at %d", i);
2826       // propagate failure back to caller
2827       return false;
2828     }
2829   }
2830 
2831   return true;
2832 } // end rewrite_cp_refs_in_methods_type_annotations()
2833 
2834 
2835 // Rewrite constant pool references in a type_annotations
2836 // field. This "structure" is adapted from the
2837 // RuntimeVisibleTypeAnnotations_attribute described in
2838 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2839 //
2840 // type_annotations_typeArray {
2841 //   u2              num_annotations;
2842 //   type_annotation annotations[num_annotations];
2843 // }
2844 //
2845 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
2846        AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2847        const char * location_mesg) {
2848 
2849   if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2850     // not enough room for num_annotations field
2851     log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2852     return false;
2853   }
2854 
2855   u2 num_annotations = Bytes::get_Java_u2((address)
2856                          type_annotations_typeArray->adr_at(byte_i_ref));
2857   byte_i_ref += 2;
2858 
2859   log_debug(redefine, class, annotation)("num_type_annotations=%d", num_annotations);
2860 
2861   int calc_num_annotations = 0;
2862   for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2863     if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
2864            byte_i_ref, location_mesg)) {
2865       log_debug(redefine, class, annotation)("bad type_annotation_struct at %d", calc_num_annotations);
2866       // propagate failure back to caller
2867       return false;
2868     }
2869   }
2870   assert(num_annotations == calc_num_annotations, "sanity check");
2871 
2872   if (byte_i_ref != type_annotations_typeArray->length()) {
2873     log_debug(redefine, class, annotation)
2874       ("read wrong amount of bytes at end of processing type_annotations_typeArray (%d of %d bytes were read)",
2875        byte_i_ref, type_annotations_typeArray->length());
2876     return false;
2877   }
2878 
2879   return true;
2880 } // end rewrite_cp_refs_in_type_annotations_typeArray()
2881 
2882 
2883 // Rewrite constant pool references in a type_annotation
2884 // field. This "structure" is adapted from the
2885 // RuntimeVisibleTypeAnnotations_attribute described in
2886 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2887 //
2888 // type_annotation {
2889 //   u1 target_type;
2890 //   union {
2891 //     type_parameter_target;
2892 //     supertype_target;
2893 //     type_parameter_bound_target;
2894 //     empty_target;
2895 //     method_formal_parameter_target;
2896 //     throws_target;
2897 //     localvar_target;
2898 //     catch_target;
2899 //     offset_target;
2900 //     type_argument_target;
2901 //   } target_info;
2902 //   type_path target_path;
2903 //   annotation anno;
2904 // }
2905 //
2906 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
2907        AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2908        const char * location_mesg) {
2909 
2910   if (!skip_type_annotation_target(type_annotations_typeArray,
2911          byte_i_ref, location_mesg)) {
2912     return false;
2913   }
2914 
2915   if (!skip_type_annotation_type_path(type_annotations_typeArray, byte_i_ref)) {
2916     return false;
2917   }
2918 
2919   if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray, byte_i_ref)) {
2920     return false;
2921   }
2922 
2923   return true;
2924 } // end rewrite_cp_refs_in_type_annotation_struct()
2925 
2926 
// Read, verify and skip over the target_type and target_info part
// so that rewriting can continue in the later parts of the struct.
//
// u1 target_type;
// union {
//   type_parameter_target;
//   supertype_target;
//   type_parameter_bound_target;
//   empty_target;
//   method_formal_parameter_target;
//   throws_target;
//   localvar_target;
//   catch_target;
//   offset_target;
//   type_argument_target;
// } target_info;
//
// On success, returns true with byte_i_ref advanced past the target_type
// byte and the full target_info variant. On a truncated array (or an
// unknown target_type) returns false so the caller can reject the
// annotation data. Note: this code only validates lengths and skips/logs
// the target_info fields; none of them are remapped as constant pool
// references.
bool VM_RedefineClasses::skip_type_annotation_target(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
       const char * location_mesg) {

  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a target_type let alone the rest of a type_annotation
    log_debug(redefine, class, annotation)("length() is too small for a target_type");
    return false;
  }

  u1 target_type = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;
  log_debug(redefine, class, annotation)("target_type=0x%.2x", target_type);
  log_debug(redefine, class, annotation)("location=%s", location_mesg);

  // Skip over target_info
  switch (target_type) {
    case 0x00:
    // kind: type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x01:
    // kind: type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_target {
      //   u1 type_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)("type_parameter_target: type_parameter_index=%d", type_parameter_index);
    } break;

    case 0x10:
    // kind: type in extends clause of class or interface declaration
    //       or in implements clause of interface declaration
    // location: ClassFile

    {
      // struct:
      // supertype_target {
      //   u2 supertype_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a supertype_target");
        return false;
      }

      u2 supertype_index = Bytes::get_Java_u2((address)
                             type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("supertype_target: supertype_index=%d", supertype_index);
    } break;

    case 0x11:
    // kind: type in bound of type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x12:
    // kind: type in bound of type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_bound_target {
      //   u1 type_parameter_index;
      //   u1 bound_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_bound_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;
      u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", type_parameter_index, bound_index);
    } break;

    case 0x13:
    // kind: type in field declaration
    // location: field_info
    case 0x14:
    // kind: return type of method, or type of newly constructed object
    // location: method_info
    case 0x15:
    // kind: receiver type of method or constructor
    // location: method_info

    {
      // struct:
      // empty_target {
      // }
      //
      // empty_target carries no bytes, so nothing to skip here.
      log_debug(redefine, class, annotation)("empty_target");
    } break;

    case 0x16:
    // kind: type in formal parameter declaration of method, constructor, or lambda expression
    // location: method_info

    {
      // struct:
      // formal_parameter_target {
      //   u1 formal_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a formal_parameter_target");
        return false;
      }

      u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("formal_parameter_target: formal_parameter_index=%d", formal_parameter_index);
    } break;

    case 0x17:
    // kind: type in throws clause of method or constructor
    // location: method_info

    {
      // struct:
      // throws_target {
      //   u2 throws_type_index
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a throws_target");
        return false;
      }

      u2 throws_type_index = Bytes::get_Java_u2((address)
                               type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("throws_target: throws_type_index=%d", throws_type_index);
    } break;

    case 0x40:
    // kind: type in local variable declaration
    // location: Code
    case 0x41:
    // kind: type in resource variable declaration
    // location: Code

    {
      // struct:
      // localvar_target {
      //   u2 table_length;
      //   struct {
      //     u2 start_pc;
      //     u2 length;
      //     u2 index;
      //   } table[table_length];
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        // not enough room for a table_length let alone the rest of a localvar_target
        log_debug(redefine, class, annotation)("length() is too small for a localvar_target table_length");
        return false;
      }

      u2 table_length = Bytes::get_Java_u2((address)
                          type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("localvar_target: table_length=%d", table_length);

      int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
      int table_size = table_length * table_struct_size;

      if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
        // not enough room for a table
        log_debug(redefine, class, annotation)("length() is too small for a table array of length %d", table_length);
        return false;
      }

      // Skip over table
      // (start_pc/length/index are bytecode offsets and lv indices,
      // not constant pool references, so no remapping is needed)
      byte_i_ref += table_size;
    } break;

    case 0x42:
    // kind: type in exception parameter declaration
    // location: Code

    {
      // struct:
      // catch_target {
      //   u2 exception_table_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a catch_target");
        return false;
      }

      u2 exception_table_index = Bytes::get_Java_u2((address)
                                   type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("catch_target: exception_table_index=%d", exception_table_index);
    } break;

    case 0x43:
    // kind: type in instanceof expression
    // location: Code
    case 0x44:
    // kind: type in new expression
    // location: Code
    case 0x45:
    // kind: type in method reference expression using ::new
    // location: Code
    case 0x46:
    // kind: type in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // offset_target {
      //   u2 offset;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a offset_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("offset_target: offset=%d", offset);
    } break;

    case 0x47:
    // kind: type in cast expression
    // location: Code
    case 0x48:
    // kind: type argument for generic constructor in new expression or
    //       explicit constructor invocation statement
    // location: Code
    case 0x49:
    // kind: type argument for generic method in method invocation expression
    // location: Code
    case 0x4A:
    // kind: type argument for generic constructor in method reference expression using ::new
    // location: Code
    case 0x4B:
    // kind: type argument for generic method in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // type_argument_target {
      //   u2 offset;
      //   u1 type_argument_index;
      // }
      //
      if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_argument_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_argument_target: offset=%d, type_argument_index=%d", offset, type_argument_index);
    } break;

    default:
      // Unknown target_type: a hard error in debug builds, a soft
      // parse failure (returned to the caller) in product builds.
      log_debug(redefine, class, annotation)("unknown target_type");
#ifdef ASSERT
      ShouldNotReachHere();
#endif
      return false;
  }

  return true;
} // end skip_type_annotation_target()
3243 
3244 
3245 // Read, verify and skip over the type_path part so that rewriting
3246 // can continue in the later parts of the struct.
3247 //
3248 // type_path {
3249 //   u1 path_length;
3250 //   {
3251 //     u1 type_path_kind;
3252 //     u1 type_argument_index;
3253 //   } path[path_length];
3254 // }
3255 //
3256 bool VM_RedefineClasses::skip_type_annotation_type_path(
3257        AnnotationArray* type_annotations_typeArray, int &byte_i_ref) {
3258 
3259   if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
3260     // not enough room for a path_length let alone the rest of the type_path
3261     log_debug(redefine, class, annotation)("length() is too small for a type_path");
3262     return false;
3263   }
3264 
3265   u1 path_length = type_annotations_typeArray->at(byte_i_ref);
3266   byte_i_ref += 1;
3267 
3268   log_debug(redefine, class, annotation)("type_path: path_length=%d", path_length);
3269 
3270   int calc_path_length = 0;
3271   for (; calc_path_length < path_length; calc_path_length++) {
3272     if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
3273       // not enough room for a path
3274       log_debug(redefine, class, annotation)
3275         ("length() is too small for path entry %d of %d", calc_path_length, path_length);
3276       return false;
3277     }
3278 
3279     u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
3280     byte_i_ref += 1;
3281     u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
3282     byte_i_ref += 1;
3283 
3284     log_debug(redefine, class, annotation)
3285       ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
3286        calc_path_length, type_path_kind, type_argument_index);
3287 
3288     if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
3289       // not enough room for a path
3290       log_debug(redefine, class, annotation)("inconsistent type_path values");
3291       return false;
3292     }
3293   }
3294   assert(path_length == calc_path_length, "sanity check");
3295 
3296   return true;
3297 } // end skip_type_annotation_type_path()
3298 
3299 
// Rewrite constant pool references in the method's stackmap table.
// These "structures" are adapted from the StackMapTable_attribute that
// is described in section 4.8.4 of the 6.0 version of the VM spec
// (dated 2005.10.26):
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
//
// stack_map {
//   u2 number_of_entries;
//   stack_map_frame entries[number_of_entries];
// }
//
// The stackmap_p cursor is passed by reference into
// rewrite_cp_refs_in_verification_type_info(), which advances it past
// each verification_type_info entry it processes.
void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
       const methodHandle& method) {

  if (!method->has_stackmap_table()) {
    return;
  }

  AnnotationArray* stackmap_data = method->stackmap_data();
  address stackmap_p = (address)stackmap_data->adr_at(0);
  address stackmap_end = stackmap_p + stackmap_data->length();

  assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
  u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
  stackmap_p += 2;

  log_debug(redefine, class, stackmap)("number_of_entries=%u", number_of_entries);

  // walk through each stack_map_frame
  u2 calc_number_of_entries = 0;
  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
    // The stack_map_frame structure is a u1 frame_type followed by
    // 0 or more bytes of data:
    //
    // union stack_map_frame {
    //   same_frame;
    //   same_locals_1_stack_item_frame;
    //   same_locals_1_stack_item_frame_extended;
    //   chop_frame;
    //   same_frame_extended;
    //   append_frame;
    //   full_frame;
    // }

    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
    u1 frame_type = *stackmap_p;
    stackmap_p++;

    // same_frame {
    //   u1 frame_type = SAME; /* 0-63 */
    // }
    if (frame_type <= 63) {
      // nothing more to do for same_frame
    }

    // same_locals_1_stack_item_frame {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
    //   verification_type_info stack[1];
    // }
    else if (frame_type >= 64 && frame_type <= 127) {
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type);
    }

    // reserved for future use
    else if (frame_type >= 128 && frame_type <= 246) {
      // nothing more to do for reserved frame_types
    }

    // same_locals_1_stack_item_frame_extended {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
    //   u2 offset_delta;
    //   verification_type_info stack[1];
    // }
    else if (frame_type == 247) {
      // NOTE(review): unlike the append_frame case below, there is no
      // bounds assert before skipping offset_delta here — presumably the
      // stackmap data was validated before redefinition; confirm.
      stackmap_p += 2;  // skip offset_delta
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type);
    }

    // chop_frame {
    //   u1 frame_type = CHOP; /* 248-250 */
    //   u2 offset_delta;
    // }
    else if (frame_type >= 248 && frame_type <= 250) {
      stackmap_p += 2;  // skip offset_delta; no cp refs in a chop_frame
    }

    // same_frame_extended {
    //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
    //   u2 offset_delta;
    // }
    else if (frame_type == 251) {
      stackmap_p += 2;  // skip offset_delta; no cp refs in a same_frame_extended
    }

    // append_frame {
    //   u1 frame_type = APPEND; /* 252-254 */
    //   u2 offset_delta;
    //   verification_type_info locals[frame_type - 251];
    // }
    else if (frame_type >= 252 && frame_type <= 254) {
      assert(stackmap_p + 2 <= stackmap_end,
        "no room for offset_delta");
      stackmap_p += 2;
      u1 len = frame_type - 251;  // number of appended locals is encoded in frame_type
      for (u1 i = 0; i < len; i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type);
      }
    }

    // full_frame {
    //   u1 frame_type = FULL_FRAME; /* 255 */
    //   u2 offset_delta;
    //   u2 number_of_locals;
    //   verification_type_info locals[number_of_locals];
    //   u2 number_of_stack_items;
    //   verification_type_info stack[number_of_stack_items];
    // }
    else if (frame_type == 255) {
      assert(stackmap_p + 2 + 2 <= stackmap_end,
        "no room for smallest full_frame");
      stackmap_p += 2;

      u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type);
      }

      // number_of_stack_items and its verification_type_info entries
      // are processed the same way as the locals above.
      u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type);
      }
    }
  } // end while there is a stack_map_frame
  assert(number_of_entries == calc_number_of_entries, "sanity check");
} // end rewrite_cp_refs_in_stack_map_table()
3446 
3447 
3448 // Rewrite constant pool references in the verification type info
3449 // portion of the method's stackmap table. These "structures" are
3450 // adapted from the StackMapTable_attribute that is described in
3451 // section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
3452 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
3453 //
3454 // The verification_type_info structure is a u1 tag followed by 0 or
3455 // more bytes of data:
3456 //
3457 // union verification_type_info {
3458 //   Top_variable_info;
3459 //   Integer_variable_info;
3460 //   Float_variable_info;
3461 //   Long_variable_info;
3462 //   Double_variable_info;
3463 //   Null_variable_info;
3464 //   UninitializedThis_variable_info;
3465 //   Object_variable_info;
3466 //   Uninitialized_variable_info;
3467 // }
3468 //
3469 void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
3470        address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3471        u1 frame_type) {
3472 
3473   assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
3474   u1 tag = *stackmap_p_ref;
3475   stackmap_p_ref++;
3476 
3477   switch (tag) {
3478   // Top_variable_info {
3479   //   u1 tag = ITEM_Top; /* 0 */
3480   // }
3481   // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
3482   case 0:  // fall through
3483 
3484   // Integer_variable_info {
3485   //   u1 tag = ITEM_Integer; /* 1 */
3486   // }
3487   case ITEM_Integer:  // fall through
3488 
3489   // Float_variable_info {
3490   //   u1 tag = ITEM_Float; /* 2 */
3491   // }
3492   case ITEM_Float:  // fall through
3493 
3494   // Double_variable_info {
3495   //   u1 tag = ITEM_Double; /* 3 */
3496   // }
3497   case ITEM_Double:  // fall through
3498 
3499   // Long_variable_info {
3500   //   u1 tag = ITEM_Long; /* 4 */
3501   // }
3502   case ITEM_Long:  // fall through
3503 
3504   // Null_variable_info {
3505   //   u1 tag = ITEM_Null; /* 5 */
3506   // }
3507   case ITEM_Null:  // fall through
3508 
3509   // UninitializedThis_variable_info {
3510   //   u1 tag = ITEM_UninitializedThis; /* 6 */
3511   // }
3512   case ITEM_UninitializedThis:
3513     // nothing more to do for the above tag types
3514     break;
3515 
3516   // Object_variable_info {
3517   //   u1 tag = ITEM_Object; /* 7 */
3518   //   u2 cpool_index;
3519   // }
3520   case ITEM_Object:
3521   {
3522     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
3523     u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
3524     u2 new_cp_index = find_new_index(cpool_index);
3525     if (new_cp_index != 0) {
3526       log_debug(redefine, class, stackmap)("mapped old cpool_index=%d", cpool_index);
3527       Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3528       cpool_index = new_cp_index;
3529     }
3530     stackmap_p_ref += 2;
3531 
3532     log_debug(redefine, class, stackmap)
3533       ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3534   } break;
3535 
3536   // Uninitialized_variable_info {
3537   //   u1 tag = ITEM_Uninitialized; /* 8 */
3538   //   u2 offset;
3539   // }
3540   case ITEM_Uninitialized:
3541     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3542     stackmap_p_ref += 2;
3543     break;
3544 
3545   default:
3546     log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3547     ShouldNotReachHere();
3548     break;
3549   } // end switch (tag)
3550 } // end rewrite_cp_refs_in_verification_type_info()
3551 
3552 
3553 // Change the constant pool associated with klass scratch_class to scratch_cp.
3554 // scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
3555 // and the smaller constant pool is associated with scratch_class.
3556 void VM_RedefineClasses::set_new_constant_pool(
3557        ClassLoaderData* loader_data,
3558        InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3559        int scratch_cp_length, TRAPS) {
3560   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3561 
3562   // scratch_cp is a merged constant pool and has enough space for a
3563   // worst case merge situation. We want to associate the minimum
3564   // sized constant pool with the klass to save space.
3565   ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3566   constantPoolHandle smaller_cp(THREAD, cp);
3567 
3568   // preserve version() value in the smaller copy
3569   int version = scratch_cp->version();
3570   assert(version != 0, "sanity check");
3571   smaller_cp->set_version(version);
3572 
3573   // attach klass to new constant pool
3574   // reference to the cp holder is needed for copy_operands()
3575   smaller_cp->set_pool_holder(scratch_class);
3576 
3577   smaller_cp->copy_fields(scratch_cp());
3578 
3579   scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
3580   if (HAS_PENDING_EXCEPTION) {
3581     // Exception is handled in the caller
3582     loader_data->add_to_deallocate_list(smaller_cp());
3583     return;
3584   }
3585   scratch_cp = smaller_cp;
3586 
3587   // attach new constant pool to klass
3588   scratch_class->set_constants(scratch_cp());
3589   scratch_cp->initialize_unresolved_klasses(loader_data, CHECK);
3590 
3591   int i;  // for portability
3592 
3593   // update each field in klass to use new constant pool indices as needed
3594   int java_fields;
3595   int injected_fields;
3596   bool update_required = false;
3597   GrowableArray<FieldInfo>* fields = FieldInfoStream::create_FieldInfoArray(scratch_class->fieldinfo_stream(), &java_fields, &injected_fields);
3598   for (int i = 0; i < java_fields; i++) {
3599     FieldInfo* fi = fields->adr_at(i);
3600     jshort cur_index = fi->name_index();
3601     jshort new_index = find_new_index(cur_index);
3602     if (new_index != 0) {
3603       log_trace(redefine, class, constantpool)("field-name_index change: %d to %d", cur_index, new_index);
3604       fi->set_name_index(new_index);
3605       update_required = true;
3606     }
3607     cur_index = fi->signature_index();
3608     new_index = find_new_index(cur_index);
3609     if (new_index != 0) {
3610       log_trace(redefine, class, constantpool)("field-signature_index change: %d to %d", cur_index, new_index);
3611       fi->set_signature_index(new_index);
3612       update_required = true;
3613     }
3614     cur_index = fi->initializer_index();
3615     new_index = find_new_index(cur_index);
3616     if (new_index != 0) {
3617       log_trace(redefine, class, constantpool)("field-initval_index change: %d to %d", cur_index, new_index);
3618       fi->set_initializer_index(new_index);
3619       update_required = true;
3620     }
3621     cur_index = fi->generic_signature_index();
3622     new_index = find_new_index(cur_index);
3623     if (new_index != 0) {
3624       log_trace(redefine, class, constantpool)("field-generic_signature change: %d to %d", cur_index, new_index);
3625       fi->set_generic_signature_index(new_index);
3626       update_required = true;
3627     }
3628   }
3629   if (update_required) {
3630     Array<u1>* old_stream = scratch_class->fieldinfo_stream();
3631     assert(fields->length() == (java_fields + injected_fields), "Must be");
3632     Array<u1>* new_fis = FieldInfoStream::create_FieldInfoStream(fields, java_fields, injected_fields, scratch_class->class_loader_data(), CHECK);
3633     scratch_class->set_fieldinfo_stream(new_fis);
3634     MetadataFactory::free_array<u1>(scratch_class->class_loader_data(), old_stream);
3635   }
3636 
3637   // Update constant pool indices in the inner classes info to use
3638   // new constant indices as needed. The inner classes info is a
3639   // quadruple:
3640   // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
3641   InnerClassesIterator iter(scratch_class);
3642   for (; !iter.done(); iter.next()) {
3643     int cur_index = iter.inner_class_info_index();
3644     if (cur_index == 0) {
3645       continue;  // JVM spec. allows null inner class refs so skip it
3646     }
3647     u2 new_index = find_new_index(cur_index);
3648     if (new_index != 0) {
3649       log_trace(redefine, class, constantpool)("inner_class_info change: %d to %d", cur_index, new_index);
3650       iter.set_inner_class_info_index(new_index);
3651     }
3652     cur_index = iter.outer_class_info_index();
3653     new_index = find_new_index(cur_index);
3654     if (new_index != 0) {
3655       log_trace(redefine, class, constantpool)("outer_class_info change: %d to %d", cur_index, new_index);
3656       iter.set_outer_class_info_index(new_index);
3657     }
3658     cur_index = iter.inner_name_index();
3659     new_index = find_new_index(cur_index);
3660     if (new_index != 0) {
3661       log_trace(redefine, class, constantpool)("inner_name change: %d to %d", cur_index, new_index);
3662       iter.set_inner_name_index(new_index);
3663     }
3664   } // end for each inner class
3665 
3666   // Attach each method in klass to the new constant pool and update
3667   // to use new constant pool indices as needed:
3668   Array<Method*>* methods = scratch_class->methods();
3669   for (i = methods->length() - 1; i >= 0; i--) {
3670     methodHandle method(THREAD, methods->at(i));
3671     method->set_constants(scratch_cp());
3672 
3673     u2 new_index = find_new_index(method->name_index());
3674     if (new_index != 0) {
3675       log_trace(redefine, class, constantpool)
3676         ("method-name_index change: %d to %d", method->name_index(), new_index);
3677       method->set_name_index(new_index);
3678     }
3679     new_index = find_new_index(method->signature_index());
3680     if (new_index != 0) {
3681       log_trace(redefine, class, constantpool)
3682         ("method-signature_index change: %d to %d", method->signature_index(), new_index);
3683       method->set_signature_index(new_index);
3684     }
3685     new_index = find_new_index(method->generic_signature_index());
3686     if (new_index != 0) {
3687       log_trace(redefine, class, constantpool)
3688         ("method-generic_signature_index change: %d to %d", method->generic_signature_index(), new_index);
3689       method->constMethod()->set_generic_signature_index(new_index);
3690     }
3691 
3692     // Update constant pool indices in the method's checked exception
3693     // table to use new constant indices as needed.
3694     int cext_length = method->checked_exceptions_length();
3695     if (cext_length > 0) {
3696       CheckedExceptionElement * cext_table =
3697         method->checked_exceptions_start();
3698       for (int j = 0; j < cext_length; j++) {
3699         int cur_index = cext_table[j].class_cp_index;
3700         int new_index = find_new_index(cur_index);
3701         if (new_index != 0) {
3702           log_trace(redefine, class, constantpool)("cext-class_cp_index change: %d to %d", cur_index, new_index);
3703           cext_table[j].class_cp_index = (u2)new_index;
3704         }
3705       } // end for each checked exception table entry
3706     } // end if there are checked exception table entries
3707 
3708     // Update each catch type index in the method's exception table
3709     // to use new constant pool indices as needed. The exception table
3710     // holds quadruple entries of the form:
3711     //   (beg_bci, end_bci, handler_bci, klass_index)
3712 
3713     ExceptionTable ex_table(method());
3714     int ext_length = ex_table.length();
3715 
3716     for (int j = 0; j < ext_length; j ++) {
3717       int cur_index = ex_table.catch_type_index(j);
3718       u2 new_index = find_new_index(cur_index);
3719       if (new_index != 0) {
3720         log_trace(redefine, class, constantpool)("ext-klass_index change: %d to %d", cur_index, new_index);
3721         ex_table.set_catch_type_index(j, new_index);
3722       }
3723     } // end for each exception table entry
3724 
3725     // Update constant pool indices in the method's local variable
3726     // table to use new constant indices as needed. The local variable
3727     // table hold sextuple entries of the form:
3728     // (start_pc, length, name_index, descriptor_index, signature_index, slot)
3729     int lvt_length = method->localvariable_table_length();
3730     if (lvt_length > 0) {
3731       LocalVariableTableElement * lv_table =
3732         method->localvariable_table_start();
3733       for (int j = 0; j < lvt_length; j++) {
3734         int cur_index = lv_table[j].name_cp_index;
3735         int new_index = find_new_index(cur_index);
3736         if (new_index != 0) {
3737           log_trace(redefine, class, constantpool)("lvt-name_cp_index change: %d to %d", cur_index, new_index);
3738           lv_table[j].name_cp_index = (u2)new_index;
3739         }
3740         cur_index = lv_table[j].descriptor_cp_index;
3741         new_index = find_new_index(cur_index);
3742         if (new_index != 0) {
3743           log_trace(redefine, class, constantpool)("lvt-descriptor_cp_index change: %d to %d", cur_index, new_index);
3744           lv_table[j].descriptor_cp_index = (u2)new_index;
3745         }
3746         cur_index = lv_table[j].signature_cp_index;
3747         new_index = find_new_index(cur_index);
3748         if (new_index != 0) {
3749           log_trace(redefine, class, constantpool)("lvt-signature_cp_index change: %d to %d", cur_index, new_index);
3750           lv_table[j].signature_cp_index = (u2)new_index;
3751         }
3752       } // end for each local variable table entry
3753     } // end if there are local variable table entries
3754 
3755     // Update constant pool indices in the method's method_parameters.
3756     int mp_length = method->method_parameters_length();
3757     if (mp_length > 0) {
3758       MethodParametersElement* elem = method->method_parameters_start();
3759       for (int j = 0; j < mp_length; j++) {
3760         const int cp_index = elem[j].name_cp_index;
3761         const int new_cp_index = find_new_index(cp_index);
3762         if (new_cp_index != 0) {
3763           elem[j].name_cp_index = (u2)new_cp_index;
3764         }
3765       }
3766     }
3767 
3768     rewrite_cp_refs_in_stack_map_table(method);
3769   } // end for each method
3770 } // end set_new_constant_pool()
3771 
3772 
3773 // Unevolving classes may point to methods of the_class directly
3774 // from their constant pool caches, itables, and/or vtables. We
3775 // use the ClassLoaderDataGraph::classes_do() facility and this helper
3776 // to fix up these pointers.  MethodData also points to old methods and
3777 // must be cleaned.
3778 
3779 // Adjust cpools and vtables closure
void VM_RedefineClasses::AdjustAndCleanMetadata::do_klass(Klass* k) {

  // This is a very busy routine. We don't want too much tracing
  // printed out.
  // The flag is passed by address to every adjust_method_entries() call
  // below so that the class name is traced at most once per klass.
  bool trace_name_printed = false;

  // If the class being redefined is java.lang.Object, we need to fix all
  // array class vtables also. The _has_redefined_Object flag is global.
  // Once the java.lang.Object has been redefined (by the current or one
  // of the previous VM_RedefineClasses operations) we have to always
  // adjust method entries for array classes.
  if (k->is_array_klass() && _has_redefined_Object) {
    k->vtable().adjust_method_entries(&trace_name_printed);

  } else if (k->is_instance_klass()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // Clean MethodData of this class's methods so they don't refer to
    // old methods that are no longer running.
    Array<Method*>* methods = ik->methods();
    int num_methods = methods->length();
    for (int index = 0; index < num_methods; ++index) {
      if (methods->at(index)->method_data() != nullptr) {
        methods->at(index)->method_data()->clean_weak_method_links();
      }
    }

    // Adjust all vtables, default methods and itables, to clean out old methods.
    ResourceMark rm(_thread);
    if (ik->vtable_length() > 0) {
      ik->vtable().adjust_method_entries(&trace_name_printed);
      ik->adjust_default_methods(&trace_name_printed);
    }

    if (ik->itable_length() > 0) {
      ik->itable().adjust_method_entries(&trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // old methods.  We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is being redefined by the current
    // VM_RedefineClasses operation, then new_cp has already been attached
    // to the_class and old_cp has already been added as a previous version.
    // The new_cp doesn't have any cached references to old methods so it
    // doesn't need to be updated and we could optimize by skipping it.
    // However, the current class can be marked as being redefined by another
    // VM_RedefineClasses operation which has already executed its doit_prologue
    // and needs cpcache method entries adjusted. For simplicity, the cpcache
    // update is done unconditionally. It should result in doing nothing for
    // classes being redefined by the current VM_RedefineClasses operation.
    // Method entries in the previous version(s) are adjusted as well.
    ConstantPoolCache* cp_cache;

    // this klass' constant pool cache may need adjustment
    ConstantPool* other_cp = ik->constants();
    cp_cache = other_cp->cache();
    if (cp_cache != nullptr) {
      cp_cache->adjust_method_entries(&trace_name_printed);
    }

    // the previous versions' constant pool caches may need adjustment
    for (InstanceKlass* pv_node = ik->previous_versions();
         pv_node != nullptr;
         pv_node = pv_node->previous_versions()) {
      cp_cache = pv_node->constants()->cache();
      if (cp_cache != nullptr) {
        cp_cache->adjust_method_entries(&trace_name_printed);
      }
    }
  }
}
3857 
3858 void VM_RedefineClasses::update_jmethod_ids() {
3859   for (int j = 0; j < _matching_methods_length; ++j) {
3860     Method* old_method = _matching_old_methods[j];
3861     jmethodID jmid = old_method->find_jmethod_id_or_null();
3862     if (jmid != nullptr) {
3863       // There is a jmethodID, change it to point to the new method
3864       Method* new_method = _matching_new_methods[j];
3865       Method::change_method_associated_with_jmethod_id(jmid, new_method);
3866       assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
3867              "should be replaced");
3868     }
3869   }
3870 }
3871 
3872 int VM_RedefineClasses::check_methods_and_mark_as_obsolete() {
3873   int emcp_method_count = 0;
3874   int obsolete_count = 0;
3875   int old_index = 0;
3876   for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
3877     Method* old_method = _matching_old_methods[j];
3878     Method* new_method = _matching_new_methods[j];
3879     Method* old_array_method;
3880 
3881     // Maintain an old_index into the _old_methods array by skipping
3882     // deleted methods
3883     while ((old_array_method = _old_methods->at(old_index)) != old_method) {
3884       ++old_index;
3885     }
3886 
3887     if (MethodComparator::methods_EMCP(old_method, new_method)) {
3888       // The EMCP definition from JSR-163 requires the bytecodes to be
3889       // the same with the exception of constant pool indices which may
3890       // differ. However, the constants referred to by those indices
3891       // must be the same.
3892       //
3893       // We use methods_EMCP() for comparison since constant pool
3894       // merging can remove duplicate constant pool entries that were
3895       // present in the old method and removed from the rewritten new
3896       // method. A faster binary comparison function would consider the
3897       // old and new methods to be different when they are actually
3898       // EMCP.
3899       //
3900       // The old and new methods are EMCP and you would think that we
3901       // could get rid of one of them here and now and save some space.
3902       // However, the concept of EMCP only considers the bytecodes and
3903       // the constant pool entries in the comparison. Other things,
3904       // e.g., the line number table (LNT) or the local variable table
3905       // (LVT) don't count in the comparison. So the new (and EMCP)
3906       // method can have a new LNT that we need so we can't just
3907       // overwrite the new method with the old method.
3908       //
3909       // When this routine is called, we have already attached the new
3910       // methods to the_class so the old methods are effectively
3911       // overwritten. However, if an old method is still executing,
3912       // then the old method cannot be collected until sometime after
3913       // the old method call has returned. So the overwriting of old
3914       // methods by new methods will save us space except for those
3915       // (hopefully few) old methods that are still executing.
3916       //
3917       // A method refers to a ConstMethod* and this presents another
3918       // possible avenue to space savings. The ConstMethod* in the
3919       // new method contains possibly new attributes (LNT, LVT, etc).
3920       // At first glance, it seems possible to save space by replacing
3921       // the ConstMethod* in the old method with the ConstMethod*
3922       // from the new method. The old and new methods would share the
3923       // same ConstMethod* and we would save the space occupied by
3924       // the old ConstMethod*. However, the ConstMethod* contains
3925       // a back reference to the containing method. Sharing the
3926       // ConstMethod* between two methods could lead to confusion in
3927       // the code that uses the back reference. This would lead to
3928       // brittle code that could be broken in non-obvious ways now or
3929       // in the future.
3930       //
3931       // Another possibility is to copy the ConstMethod* from the new
3932       // method to the old method and then overwrite the new method with
3933       // the old method. Since the ConstMethod* contains the bytecodes
3934       // for the method embedded in the oop, this option would change
3935       // the bytecodes out from under any threads executing the old
3936       // method and make the thread's bcp invalid. Since EMCP requires
3937       // that the bytecodes be the same modulo constant pool indices, it
3938       // is straight forward to compute the correct new bcp in the new
3939       // ConstMethod* from the old bcp in the old ConstMethod*. The
3940       // time consuming part would be searching all the frames in all
3941       // of the threads to find all of the calls to the old method.
3942       //
3943       // It looks like we will have to live with the limited savings
3944       // that we get from effectively overwriting the old methods
3945       // when the new methods are attached to the_class.
3946 
3947       // Count number of methods that are EMCP.  The method will be marked
3948       // old but not obsolete if it is EMCP.
3949       emcp_method_count++;
3950 
3951       // An EMCP method is _not_ obsolete. An obsolete method has a
3952       // different jmethodID than the current method. An EMCP method
3953       // has the same jmethodID as the current method. Having the
3954       // same jmethodID for all EMCP versions of a method allows for
3955       // a consistent view of the EMCP methods regardless of which
3956       // EMCP method you happen to have in hand. For example, a
3957       // breakpoint set in one EMCP method will work for all EMCP
3958       // versions of the method including the current one.
3959     } else {
3960       // mark obsolete methods as such
3961       old_method->set_is_obsolete();
3962       obsolete_count++;
3963 
3964       // obsolete methods need a unique idnum so they become new entries in
3965       // the jmethodID cache in InstanceKlass
3966       assert(old_method->method_idnum() == new_method->method_idnum(), "must match");
3967       u2 num = InstanceKlass::cast(_the_class)->next_method_idnum();
3968       if (num != ConstMethod::UNSET_IDNUM) {
3969         old_method->set_method_idnum(num);
3970       }
3971 
3972       // With tracing we try not to "yack" too much. The position of
3973       // this trace assumes there are fewer obsolete methods than
3974       // EMCP methods.
3975       if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3976         ResourceMark rm;
3977         log_trace(redefine, class, obsolete, mark)
3978           ("mark %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3979       }
3980     }
3981     old_method->set_is_old();
3982   }
3983   for (int i = 0; i < _deleted_methods_length; ++i) {
3984     Method* old_method = _deleted_methods[i];
3985 
3986     assert(!old_method->has_vtable_index(),
3987            "cannot delete methods with vtable entries");;
3988 
3989     // Mark all deleted methods as old, obsolete and deleted
3990     old_method->set_is_deleted();
3991     old_method->set_is_old();
3992     old_method->set_is_obsolete();
3993     ++obsolete_count;
3994     // With tracing we try not to "yack" too much. The position of
3995     // this trace assumes there are fewer obsolete methods than
3996     // EMCP methods.
3997     if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3998       ResourceMark rm;
3999       log_trace(redefine, class, obsolete, mark)
4000         ("mark deleted %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
4001     }
4002   }
4003   assert((emcp_method_count + obsolete_count) == _old_methods->length(),
4004     "sanity check");
4005   log_trace(redefine, class, obsolete, mark)("EMCP_cnt=%d, obsolete_cnt=%d", emcp_method_count, obsolete_count);
4006   return emcp_method_count;
4007 }
4008 
4009 // This internal class transfers the native function registration from old methods
4010 // to new methods.  It is designed to handle both the simple case of unchanged
4011 // native methods and the complex cases of native method prefixes being added and/or
4012 // removed.
4013 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
4014 //
4015 // This class is used after the new methods have been installed in "the_class".
4016 //
4017 // So, for example, the following must be handled.  Where 'm' is a method and
4018 // a number followed by an underscore is a prefix.
4019 //
4020 //                                      Old Name    New Name
4021 // Simple transfer to new method        m       ->  m
4022 // Add prefix                           m       ->  1_m
4023 // Remove prefix                        1_m     ->  m
4024 // Simultaneous add of prefixes         m       ->  3_2_1_m
4025 // Simultaneous removal of prefixes     3_2_1_m ->  m
4026 // Simultaneous add and remove          1_m     ->  2_m
4027 // Same, caused by prefix removal only  3_2_1_m ->  3_2_m
4028 //
class TransferNativeFunctionRegistration {
 private:
  InstanceKlass* the_class;  // class whose (new) methods are searched
  int prefix_count;          // number of agent-registered native method prefixes
  char** prefixes;           // the prefix strings, as returned by JvmtiExport

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilent to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //    (1) without the prefix.
  //    (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the matching native Method*, or nullptr if this branch has none.
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                     Symbol* signature) {
    // probe() only finds an existing Symbol; if the name was never interned,
    // no method with that name can exist, so the branch is dead.
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != nullptr) {
      Method* method = the_class->lookup_method(name_symbol, signature);
      if (method != nullptr) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != nullptr) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // Resource-allocated scratch buffer: prefix + name + NUL.
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != nullptr) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return nullptr;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  // Note: returns a pointer into the Symbol's resource-allocated UTF-8
  // buffer; callers must hold a ResourceMark.
  char* method_name_without_prefixes(Method* method) {
    Symbol* name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    // Prefixes are stripped in reverse registration order by advancing
    // the pointer past each matching leading prefix.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        name_str += prefix_len;
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  Method* strip_and_search_for_new_native(Method* method) {
    ResourceMark rm;
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  TransferNativeFunctionRegistration(InstanceKlass* _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(Method** old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      Method* old_method = old_methods[j];

      // Only methods that actually have a registered native function
      // need their registration carried over.
      if (old_method->is_native() && old_method->has_native_function()) {
        Method* new_method = strip_and_search_for_new_native(old_method);
        if (new_method != nullptr) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
                              !Method::native_bind_event_is_interesting);
        }
      }
    }
  }
};
4134 
4135 // Don't lose the association between a native method and its JNI function.
4136 void VM_RedefineClasses::transfer_old_native_function_registrations(InstanceKlass* the_class) {
4137   TransferNativeFunctionRegistration transfer(the_class);
4138   transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
4139   transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
4140 }
4141 
4142 // Deoptimize all compiled code that depends on the classes redefined.
4143 //
4144 // If the can_redefine_classes capability is obtained in the onload
4145 // phase then the compiler has recorded all dependencies from startup.
4146 // In that case we need only deoptimize and throw away all compiled code
4147 // that depends on the class.
4148 //
4149 // If can_redefine_classes is obtained sometime after the onload
4150 // phase then the dependency information may be incomplete. In that case
4151 // the first call to RedefineClasses causes all compiled code to be
4152 // thrown away. As can_redefine_classes has been obtained then
4153 // all future compilations will record dependencies so second and
4154 // subsequent calls to RedefineClasses need only throw away code
4155 // that depends on the class.
4156 //
4157 
4158 void VM_RedefineClasses::flush_dependent_code() {
4159   assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
4160 
4161   DeoptimizationScope deopt_scope;
4162 
4163   // This is the first redefinition, mark all the nmethods for deoptimization
4164   if (!JvmtiExport::all_dependencies_are_recorded()) {
4165     CodeCache::mark_all_nmethods_for_evol_deoptimization(&deopt_scope);
4166     log_debug(redefine, class, nmethod)("Marked all nmethods for deopt");
4167   } else {
4168     CodeCache::mark_dependents_for_evol_deoptimization(&deopt_scope);
4169     log_debug(redefine, class, nmethod)("Marked dependent nmethods for deopt");
4170   }
4171 
4172   deopt_scope.deoptimize_marked();
4173 
4174   // From now on we know that the dependency information is complete
4175   JvmtiExport::set_all_dependencies_are_recorded(true);
4176 }
4177 
4178 void VM_RedefineClasses::compute_added_deleted_matching_methods() {
4179   Method* old_method;
4180   Method* new_method;
4181 
4182   _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
4183   _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
4184   _added_methods        = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
4185   _deleted_methods      = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
4186 
4187   _matching_methods_length = 0;
4188   _deleted_methods_length  = 0;
4189   _added_methods_length    = 0;
4190 
4191   int nj = 0;
4192   int oj = 0;
4193   while (true) {
4194     if (oj >= _old_methods->length()) {
4195       if (nj >= _new_methods->length()) {
4196         break; // we've looked at everything, done
4197       }
4198       // New method at the end
4199       new_method = _new_methods->at(nj);
4200       _added_methods[_added_methods_length++] = new_method;
4201       ++nj;
4202     } else if (nj >= _new_methods->length()) {
4203       // Old method, at the end, is deleted
4204       old_method = _old_methods->at(oj);
4205       _deleted_methods[_deleted_methods_length++] = old_method;
4206       ++oj;
4207     } else {
4208       old_method = _old_methods->at(oj);
4209       new_method = _new_methods->at(nj);
4210       if (old_method->name() == new_method->name()) {
4211         if (old_method->signature() == new_method->signature()) {
4212           _matching_old_methods[_matching_methods_length  ] = old_method;
4213           _matching_new_methods[_matching_methods_length++] = new_method;
4214           ++nj;
4215           ++oj;
4216         } else {
4217           // added overloaded have already been moved to the end,
4218           // so this is a deleted overloaded method
4219           _deleted_methods[_deleted_methods_length++] = old_method;
4220           ++oj;
4221         }
4222       } else { // names don't match
4223         if (old_method->name()->fast_compare(new_method->name()) > 0) {
4224           // new method
4225           _added_methods[_added_methods_length++] = new_method;
4226           ++nj;
4227         } else {
4228           // deleted method
4229           _deleted_methods[_deleted_methods_length++] = old_method;
4230           ++oj;
4231         }
4232       }
4233     }
4234   }
4235   assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
4236   assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
4237 }
4238 
4239 
4240 void VM_RedefineClasses::swap_annotations(InstanceKlass* the_class,
4241                                           InstanceKlass* scratch_class) {
4242   // Swap annotation fields values
4243   Annotations* old_annotations = the_class->annotations();
4244   the_class->set_annotations(scratch_class->annotations());
4245   scratch_class->set_annotations(old_annotations);
4246 }
4247 
4248 
4249 // Install the redefinition of a class:
4250 //    - house keeping (flushing breakpoints and caches, deoptimizing
4251 //      dependent compiled code)
4252 //    - replacing parts in the_class with parts from scratch_class
4253 //    - adding a weak reference to track the obsolete but interesting
4254 //      parts of the_class
4255 //    - adjusting constant pool caches and vtables in other classes
4256 //      that refer to methods in the_class. These adjustments use the
4257 //      ClassLoaderDataGraph::classes_do() facility which only allows
4258 //      a helper method to be specified. The interesting parameters
4259 //      that we would like to pass to the helper method are saved in
4260 //      static global fields in the VM operation.
4261 void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclass,
4262                                                InstanceKlass* scratch_class) {
4263 
4264   HandleMark hm(current);   // make sure handles from this call are freed
4265 
4266   if (log_is_enabled(Info, redefine, class, timer)) {
4267     _timer_rsc_phase1.start();
4268   }
4269 
4270   InstanceKlass* the_class = get_ik(the_jclass);
4271 
4272   // Set a flag to control and optimize adjusting method entries
4273   _has_redefined_Object |= the_class == vmClasses::Object_klass();
4274 
4275   // Remove all breakpoints in methods of this class
4276   JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
4277   jvmti_breakpoints.clearall_in_class_at_safepoint(the_class);
4278 
4279   _old_methods = the_class->methods();
4280   _new_methods = scratch_class->methods();
4281   _the_class = the_class;
4282   compute_added_deleted_matching_methods();
4283   update_jmethod_ids();
4284 
4285   _any_class_has_resolved_methods = the_class->has_resolved_methods() || _any_class_has_resolved_methods;
4286 
4287   // Attach new constant pool to the original klass. The original
4288   // klass still refers to the old constant pool (for now).
4289   scratch_class->constants()->set_pool_holder(the_class);
4290 
4291 #if 0
4292   // In theory, with constant pool merging in place we should be able
4293   // to save space by using the new, merged constant pool in place of
4294   // the old constant pool(s). By "pool(s)" I mean the constant pool in
4295   // the klass version we are replacing now and any constant pool(s) in
4296   // previous versions of klass. Nice theory, doesn't work in practice.
4297   // When this code is enabled, even simple programs throw NullPointer
4298   // exceptions. I'm guessing that this is caused by some constant pool
4299   // cache difference between the new, merged constant pool and the
4300   // constant pool that was just being used by the klass. I'm keeping
4301   // this code around to archive the idea, but the code has to remain
4302   // disabled for now.
4303 
4304   // Attach each old method to the new constant pool. This can be
4305   // done here since we are past the bytecode verification and
4306   // constant pool optimization phases.
4307   for (int i = _old_methods->length() - 1; i >= 0; i--) {
4308     Method* method = _old_methods->at(i);
4309     method->set_constants(scratch_class->constants());
4310   }
4311 
4312   // NOTE: this doesn't work because you can redefine the same class in two
4313   // threads, each getting their own constant pool data appended to the
4314   // original constant pool.  In order for the new methods to work when they
4315   // become old methods, they need to keep their updated copy of the constant pool.
4316 
4317   {
4318     // walk all previous versions of the klass
4319     InstanceKlass *ik = the_class;
4320     PreviousVersionWalker pvw(ik);
4321     do {
4322       ik = pvw.next_previous_version();
4323       if (ik != nullptr) {
4324 
4325         // attach previous version of klass to the new constant pool
4326         ik->set_constants(scratch_class->constants());
4327 
4328         // Attach each method in the previous version of klass to the
4329         // new constant pool
4330         Array<Method*>* prev_methods = ik->methods();
4331         for (int i = prev_methods->length() - 1; i >= 0; i--) {
4332           Method* method = prev_methods->at(i);
4333           method->set_constants(scratch_class->constants());
4334         }
4335       }
4336     } while (ik != nullptr);
4337   }
4338 #endif
4339 
4340   // Replace methods and constantpool
4341   the_class->set_methods(_new_methods);
4342   scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
4343                                           // and to be able to undo operation easily.
4344 
4345   Array<int>* old_ordering = the_class->method_ordering();
4346   the_class->set_method_ordering(scratch_class->method_ordering());
4347   scratch_class->set_method_ordering(old_ordering);
4348 
4349   ConstantPool* old_constants = the_class->constants();
4350   the_class->set_constants(scratch_class->constants());
4351   scratch_class->set_constants(old_constants);  // See the previous comment.
4352 #if 0
4353   // We are swapping the guts of "the new class" with the guts of "the
4354   // class". Since the old constant pool has just been attached to "the
4355   // new class", it seems logical to set the pool holder in the old
4356   // constant pool also. However, doing this will change the observable
4357   // class hierarchy for any old methods that are still executing. A
4358   // method can query the identity of its "holder" and this query uses
4359   // the method's constant pool link to find the holder. The change in
4360   // holding class from "the class" to "the new class" can confuse
4361   // things.
4362   //
4363   // Setting the old constant pool's holder will also cause
4364   // verification done during vtable initialization below to fail.
4365   // During vtable initialization, the vtable's class is verified to be
4366   // a subtype of the method's holder. The vtable's class is "the
4367   // class" and the method's holder is gotten from the constant pool
4368   // link in the method itself. For "the class"'s directly implemented
4369   // methods, the method holder is "the class" itself (as gotten from
4370   // the new constant pool). The check works fine in this case. The
4371   // check also works fine for methods inherited from super classes.
4372   //
4373   // Miranda methods are a little more complicated. A miranda method is
4374   // provided by an interface when the class implementing the interface
4375   // does not provide its own method.  These interfaces are implemented
4376   // internally as an InstanceKlass. These special instanceKlasses
4377   // share the constant pool of the class that "implements" the
4378   // interface. By sharing the constant pool, the method holder of a
4379   // miranda method is the class that "implements" the interface. In a
4380   // non-redefine situation, the subtype check works fine. However, if
4381   // the old constant pool's pool holder is modified, then the check
4382   // fails because there is no class hierarchy relationship between the
4383   // vtable's class and "the new class".
4384 
4385   old_constants->set_pool_holder(scratch_class());
4386 #endif
4387 
4388   // track number of methods that are EMCP for add_previous_version() call below
4389   int emcp_method_count = check_methods_and_mark_as_obsolete();
4390   transfer_old_native_function_registrations(the_class);
4391 
4392   if (scratch_class->get_cached_class_file() != the_class->get_cached_class_file()) {
4393     // 1. the_class doesn't have a cache yet, scratch_class does have a cache.
4394     // 2. The same class can be present twice in the scratch classes list or there
4395     // are multiple concurrent RetransformClasses calls on different threads.
4396     // the_class and scratch_class have the same cached bytes, but different buffers.
4397     // In such cases we need to deallocate one of the buffers.
4398     // 3. RedefineClasses and the_class has cached bytes from a previous transformation.
4399     // In the case we need to use class bytes from scratch_class.
4400     if (the_class->get_cached_class_file() != nullptr) {
4401       os::free(the_class->get_cached_class_file());
4402     }
4403     the_class->set_cached_class_file(scratch_class->get_cached_class_file());
4404   }
4405 
4406   // null out in scratch class to not delete twice.  The class to be redefined
4407   // always owns these bytes.
4408   scratch_class->set_cached_class_file(nullptr);
4409 
4410   // Replace inner_classes
4411   Array<u2>* old_inner_classes = the_class->inner_classes();
4412   the_class->set_inner_classes(scratch_class->inner_classes());
4413   scratch_class->set_inner_classes(old_inner_classes);
4414 
4415   // Initialize the vtable and interface table after
4416   // methods have been rewritten
4417   // no exception should happen here since we explicitly
4418   // do not check loader constraints.
4419   // compare_and_normalize_class_versions has already checked:
4420   //  - classloaders unchanged, signatures unchanged
4421   //  - all instanceKlasses for redefined classes reused & contents updated
4422   the_class->vtable().initialize_vtable();
4423   the_class->itable().initialize_itable();
4424 
4425   // Leave arrays of jmethodIDs and itable index cache unchanged
4426 
4427   // Copy the "source debug extension" attribute from new class version
4428   the_class->set_source_debug_extension(
4429     scratch_class->source_debug_extension(),
4430     scratch_class->source_debug_extension() == nullptr ? 0 :
4431     (int)strlen(scratch_class->source_debug_extension()));
4432 
4433   // Use of javac -g could be different in the old and the new
4434   if (scratch_class->has_localvariable_table() !=
4435       the_class->has_localvariable_table()) {
4436     the_class->set_has_localvariable_table(scratch_class->has_localvariable_table());
4437   }
4438 
4439   swap_annotations(the_class, scratch_class);
4440 
4441   // Replace minor version number of class file
4442   u2 old_minor_version = the_class->constants()->minor_version();
4443   the_class->constants()->set_minor_version(scratch_class->constants()->minor_version());
4444   scratch_class->constants()->set_minor_version(old_minor_version);
4445 
4446   // Replace major version number of class file
4447   u2 old_major_version = the_class->constants()->major_version();
4448   the_class->constants()->set_major_version(scratch_class->constants()->major_version());
4449   scratch_class->constants()->set_major_version(old_major_version);
4450 
4451   // Replace CP indexes for class and name+type of enclosing method
4452   u2 old_class_idx  = the_class->enclosing_method_class_index();
4453   u2 old_method_idx = the_class->enclosing_method_method_index();
4454   the_class->set_enclosing_method_indices(
4455     scratch_class->enclosing_method_class_index(),
4456     scratch_class->enclosing_method_method_index());
4457   scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);
4458 
4459   if (!the_class->has_been_redefined()) {
4460     the_class->set_has_been_redefined();
4461   }
4462 
4463   // Scratch class is unloaded but still needs cleaning, and skipping for CDS.
4464   scratch_class->set_is_scratch_class();
4465 
4466   // keep track of previous versions of this class
4467   the_class->add_previous_version(scratch_class, emcp_method_count);
4468 
4469   _timer_rsc_phase1.stop();
4470   if (log_is_enabled(Info, redefine, class, timer)) {
4471     _timer_rsc_phase2.start();
4472   }
4473 
4474   if (the_class->oop_map_cache() != nullptr) {
4475     // Flush references to any obsolete methods from the oop map cache
4476     // so that obsolete methods are not pinned.
4477     the_class->oop_map_cache()->flush_obsolete_entries();
4478   }
4479 
4480   increment_class_counter(the_class);
4481 
4482   if (EventClassRedefinition::is_enabled()) {
4483     EventClassRedefinition event;
4484     event.set_classModificationCount(java_lang_Class::classRedefinedCount(the_class->java_mirror()));
4485     event.set_redefinedClass(the_class);
4486     event.set_redefinitionId(_id);
4487     event.commit();
4488   }
4489 
4490   {
4491     ResourceMark rm(current);
4492     // increment the classRedefinedCount field in the_class and in any
4493     // direct and indirect subclasses of the_class
4494     log_info(redefine, class, load)
4495       ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
4496        the_class->external_name(), java_lang_Class::classRedefinedCount(the_class->java_mirror()), os::available_memory() >> 10);
4497     Events::log_redefinition(current, "redefined class name=%s, count=%d",
4498                              the_class->external_name(),
4499                              java_lang_Class::classRedefinedCount(the_class->java_mirror()));
4500 
4501   }
4502   _timer_rsc_phase2.stop();
4503 
4504 } // end redefine_single_class()
4505 
4506 
4507 // Increment the classRedefinedCount field in the specific InstanceKlass
4508 // and in all direct and indirect subclasses.
4509 void VM_RedefineClasses::increment_class_counter(InstanceKlass* ik) {
4510   for (ClassHierarchyIterator iter(ik); !iter.done(); iter.next()) {
4511     // Only update instanceKlasses
4512     Klass* sub = iter.klass();
4513     if (sub->is_instance_klass()) {
4514       oop class_mirror = InstanceKlass::cast(sub)->java_mirror();
4515       Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
4516       int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
4517       java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
4518 
4519       if (class_oop != _the_class) {
4520         // _the_class count is printed at end of redefine_single_class()
4521         log_debug(redefine, class, subclass)("updated count in subclass=%s to %d", ik->external_name(), new_count);
4522       }
4523     }
4524   }
4525 }
4526 
4527 void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
4528   bool no_old_methods = true;  // be optimistic
4529 
4530   // Both array and instance classes have vtables.
4531   // a vtable should never contain old or obsolete methods
4532   ResourceMark rm(_thread);
4533   if (k->vtable_length() > 0 &&
4534       !k->vtable().check_no_old_or_obsolete_entries()) {
4535     if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4536       log_trace(redefine, class, obsolete, metadata)
4537         ("klassVtable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
4538          k->signature_name());
4539       k->vtable().dump_vtable();
4540     }
4541     no_old_methods = false;
4542   }
4543 
4544   if (k->is_instance_klass()) {
4545     HandleMark hm(_thread);
4546     InstanceKlass *ik = InstanceKlass::cast(k);
4547 
4548     // an itable should never contain old or obsolete methods
4549     if (ik->itable_length() > 0 &&
4550         !ik->itable().check_no_old_or_obsolete_entries()) {
4551       if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4552         log_trace(redefine, class, obsolete, metadata)
4553           ("klassItable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
4554            ik->signature_name());
4555         ik->itable().dump_itable();
4556       }
4557       no_old_methods = false;
4558     }
4559 
4560     // the constant pool cache should never contain non-deleted old or obsolete methods
4561     if (ik->constants() != nullptr &&
4562         ik->constants()->cache() != nullptr &&
4563         !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
4564       if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4565         log_trace(redefine, class, obsolete, metadata)
4566           ("cp-cache::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
4567            ik->signature_name());
4568         ik->constants()->cache()->dump_cache();
4569       }
4570       no_old_methods = false;
4571     }
4572   }
4573 
4574   // print and fail guarantee if old methods are found.
4575   if (!no_old_methods) {
4576     if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4577       dump_methods();
4578     } else {
4579       log_trace(redefine, class)("Use the '-Xlog:redefine+class*:' option "
4580         "to see more info about the following guarantee() failure.");
4581     }
4582     guarantee(false, "OLD and/or OBSOLETE method(s) found");
4583   }
4584 }
4585 
4586 u8 VM_RedefineClasses::next_id() {
4587   while (true) {
4588     u8 id = _id_counter;
4589     u8 next_id = id + 1;
4590     u8 result = Atomic::cmpxchg(&_id_counter, id, next_id);
4591     if (result == id) {
4592       return next_id;
4593     }
4594   }
4595 }
4596 
4597 void VM_RedefineClasses::dump_methods() {
4598   int j;
4599   log_trace(redefine, class, dump)("_old_methods --");
4600   for (j = 0; j < _old_methods->length(); ++j) {
4601     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4602     Method* m = _old_methods->at(j);
4603     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4604     m->access_flags().print_on(&log_stream);
4605     log_stream.print(" --  ");
4606     m->print_name(&log_stream);
4607     log_stream.cr();
4608   }
4609   log_trace(redefine, class, dump)("_new_methods --");
4610   for (j = 0; j < _new_methods->length(); ++j) {
4611     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4612     Method* m = _new_methods->at(j);
4613     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4614     m->access_flags().print_on(&log_stream);
4615     log_stream.print(" --  ");
4616     m->print_name(&log_stream);
4617     log_stream.cr();
4618   }
4619   log_trace(redefine, class, dump)("_matching_methods --");
4620   for (j = 0; j < _matching_methods_length; ++j) {
4621     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4622     Method* m = _matching_old_methods[j];
4623     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4624     m->access_flags().print_on(&log_stream);
4625     log_stream.print(" --  ");
4626     m->print_name();
4627     log_stream.cr();
4628 
4629     m = _matching_new_methods[j];
4630     log_stream.print("      (%5d)  ", m->vtable_index());
4631     m->access_flags().print_on(&log_stream);
4632     log_stream.cr();
4633   }
4634   log_trace(redefine, class, dump)("_deleted_methods --");
4635   for (j = 0; j < _deleted_methods_length; ++j) {
4636     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4637     Method* m = _deleted_methods[j];
4638     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4639     m->access_flags().print_on(&log_stream);
4640     log_stream.print(" --  ");
4641     m->print_name(&log_stream);
4642     log_stream.cr();
4643   }
4644   log_trace(redefine, class, dump)("_added_methods --");
4645   for (j = 0; j < _added_methods_length; ++j) {
4646     LogStreamHandle(Trace, redefine, class, dump) log_stream;
4647     Method* m = _added_methods[j];
4648     log_stream.print("%4d  (%5d)  ", j, m->vtable_index());
4649     m->access_flags().print_on(&log_stream);
4650     log_stream.print(" --  ");
4651     m->print_name(&log_stream);
4652     log_stream.cr();
4653   }
4654 }
4655 
4656 void VM_RedefineClasses::print_on_error(outputStream* st) const {
4657   VM_Operation::print_on_error(st);
4658   if (_the_class != nullptr) {
4659     ResourceMark rm;
4660     st->print_cr(", redefining class %s", _the_class->external_name());
4661   }
4662 }