1 /*
2 * Copyright (c) 2003, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotMetaspace.hpp"
26 #include "cds/cdsConfig.hpp"
27 #include "classfile/classFileStream.hpp"
28 #include "classfile/classLoaderDataGraph.hpp"
29 #include "classfile/classLoadInfo.hpp"
30 #include "classfile/javaClasses.inline.hpp"
31 #include "classfile/klassFactory.hpp"
32 #include "classfile/metadataOnStackMark.hpp"
33 #include "classfile/stackMapTable.hpp"
34 #include "classfile/symbolTable.hpp"
35 #include "classfile/verifier.hpp"
36 #include "classfile/vmClasses.hpp"
37 #include "classfile/vmSymbols.hpp"
38 #include "code/codeCache.hpp"
39 #include "compiler/compileBroker.hpp"
40 #include "interpreter/oopMapCache.hpp"
41 #include "interpreter/rewriter.hpp"
42 #include "jfr/jfrEvents.hpp"
43 #include "logging/logStream.hpp"
44 #include "memory/metadataFactory.hpp"
45 #include "memory/resourceArea.hpp"
46 #include "memory/universe.hpp"
47 #include "oops/annotations.hpp"
48 #include "oops/bsmAttribute.inline.hpp"
49 #include "oops/constantPool.inline.hpp"
50 #include "oops/fieldStreams.inline.hpp"
51 #include "oops/klass.inline.hpp"
52 #include "oops/klassVtable.hpp"
53 #include "oops/method.hpp"
54 #include "oops/oop.inline.hpp"
55 #include "oops/recordComponent.hpp"
56 #include "prims/jvmtiImpl.hpp"
57 #include "prims/jvmtiRedefineClasses.hpp"
58 #include "prims/jvmtiThreadState.inline.hpp"
59 #include "prims/methodComparator.hpp"
60 #include "prims/resolvedMethodTable.hpp"
61 #include "runtime/atomicAccess.hpp"
62 #include "runtime/deoptimization.hpp"
63 #include "runtime/handles.inline.hpp"
64 #include "runtime/jniHandles.inline.hpp"
65 #include "runtime/relocator.hpp"
66 #include "runtime/safepointVerifiers.hpp"
67 #include "utilities/bitMap.inline.hpp"
68 #include "utilities/checkedCast.hpp"
69 #include "utilities/events.hpp"
70 #include "utilities/macros.hpp"
71 #if INCLUDE_JFR
72 #include "jfr/jfr.hpp"
73 #endif
74
// Scratch state shared across the phases of a single redefinition VM op.
// _old_methods/_new_methods are the full method arrays of the class being
// redefined; the _matching/_deleted/_added arrays partition them by how
// each method maps between the old and new class versions.
Array<Method*>* VM_RedefineClasses::_old_methods = nullptr;
Array<Method*>* VM_RedefineClasses::_new_methods = nullptr;
Method** VM_RedefineClasses::_matching_old_methods = nullptr;
Method** VM_RedefineClasses::_matching_new_methods = nullptr;
Method** VM_RedefineClasses::_deleted_methods = nullptr;
Method** VM_RedefineClasses::_added_methods = nullptr;
int VM_RedefineClasses::_matching_methods_length = 0;
int VM_RedefineClasses::_deleted_methods_length = 0;
int VM_RedefineClasses::_added_methods_length = 0;

// This flag is global as the constructor does not reset it:
// once java.lang.Object has been redefined it stays true for the
// lifetime of the VM.
bool VM_RedefineClasses::_has_redefined_Object = false;
// Monotonic id source for next_id(); identifies each redefinition op.
u8 VM_RedefineClasses::_id_counter = 0;
88
89 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
90 const jvmtiClassDefinition *class_defs,
91 JvmtiClassLoadKind class_load_kind) {
92 _class_count = class_count;
93 _class_defs = class_defs;
94 _class_load_kind = class_load_kind;
95 _any_class_has_resolved_methods = false;
96 _res = JVMTI_ERROR_NONE;
97 _the_class = nullptr;
98 _id = next_id();
99 }
100
101 static inline InstanceKlass* get_ik(jclass def) {
102 oop mirror = JNIHandles::resolve_non_null(def);
103 return java_lang_Class::as_InstanceKlass(mirror);
104 }
105
106 // If any of the classes are being redefined, wait
107 // Parallel constant pool merging leads to indeterminate constant pools.
// Marks every class in this request as "being redefined", waiting on
// RedefineClasses_lock until no other thread is redefining any of them.
// Classes already being redefined by THIS thread (recursive redefinition
// triggered by a class-file-load-hook) are skipped rather than waited on,
// which avoids self-deadlock.
void VM_RedefineClasses::lock_classes() {
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // Per-thread list of classes this thread currently has locked for
  // redefinition; created lazily below.
  GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();

  MonitorLocker ml(RedefineClasses_lock);

  if (redef_classes == nullptr) {
    redef_classes = new (mtClass) GrowableArray<Klass*>(1, mtClass);
    state->set_classes_being_redefined(redef_classes);
  }

  bool has_redefined;
  do {
    has_redefined = false;
    // Go through classes each time until none are being redefined. Skip
    // the ones that are being redefined by this thread currently. Class file
    // load hook event may trigger new class redefine when we are redefining
    // a class (after lock_classes()).
    for (int i = 0; i < _class_count; i++) {
      InstanceKlass* ik = get_ik(_class_defs[i].klass);
      // Check if we are currently redefining the class in this thread already.
      if (redef_classes->contains(ik)) {
        assert(ik->is_being_redefined(), "sanity");
      } else {
        if (ik->is_being_redefined()) {
          // Another thread owns it; wait for a notify and rescan from the
          // start, since the set of busy classes may have changed.
          ml.wait();
          has_redefined = true;
          break; // for loop
        }
      }
    }
  } while (has_redefined);

  // All requested classes are free (or already ours): claim them.
  for (int i = 0; i < _class_count; i++) {
    InstanceKlass* ik = get_ik(_class_defs[i].klass);
    redef_classes->push(ik); // Add to the _classes_being_redefined list
    ik->set_is_being_redefined(true);
  }
  ml.notify_all();
}
148
// Releases the "being redefined" claim taken by lock_classes(). Entries are
// popped in reverse push order; a class only has its flag cleared once no
// earlier (outer, recursive) redefinition on this thread still holds it.
void VM_RedefineClasses::unlock_classes() {
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();
  assert(redef_classes != nullptr, "_classes_being_redefined is not allocated");

  MonitorLocker ml(RedefineClasses_lock);

  for (int i = _class_count - 1; i >= 0; i--) {
    InstanceKlass* def_ik = get_ik(_class_defs[i].klass);
    if (redef_classes->length() > 0) {
      // Remove the class from _classes_being_redefined list
      Klass* k = redef_classes->pop();
      assert(def_ik == k, "unlocking wrong class");
    }
    assert(def_ik->is_being_redefined(),
           "should be being redefined to get here");

    // Unlock after we finish all redefines for this class within
    // the thread. Same class can be pushed to the list multiple
    // times (not more than once by each recursive redefinition).
    if (!redef_classes->contains(def_ik)) {
      def_ik->set_is_being_redefined(false);
    }
  }
  // Wake any threads blocked in lock_classes() waiting for these classes.
  ml.notify_all();
}
175
// Runs on the requesting JavaThread before the safepoint. Validates the
// request (JVMTI error codes per the RedefineClasses spec), locks the
// classes, and parses/loads the new ("scratch") class versions. Returns
// false with _res set to abort the VM op, true to proceed to doit().
bool VM_RedefineClasses::doit_prologue() {
  if (_class_count == 0) {
    // JVMTI spec: redefining zero classes is a no-op success.
    _res = JVMTI_ERROR_NONE;
    return false;
  }
  if (_class_defs == nullptr) {
    _res = JVMTI_ERROR_NULL_POINTER;
    return false;
  }

  // Per-definition sanity checks before doing any real work.
  for (int i = 0; i < _class_count; i++) {
    if (_class_defs[i].klass == nullptr) {
      _res = JVMTI_ERROR_INVALID_CLASS;
      return false;
    }
    if (_class_defs[i].class_byte_count == 0) {
      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
      return false;
    }
    if (_class_defs[i].class_bytes == nullptr) {
      _res = JVMTI_ERROR_NULL_POINTER;
      return false;
    }

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives, arrays, and hidden classes
    // cannot be redefined.
    if (!is_modifiable_class(mirror)) {
      _res = JVMTI_ERROR_UNMODIFIABLE_CLASS;
      return false;
    }
  }

  // Start timer after all the sanity checks; not quite accurate, but
  // better than adding a bunch of stop() calls.
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_vm_op_prologue.start();
  }

  lock_classes();
  // We first load new class versions in the prologue, because somewhere down the
  // call chain it is required that the current thread is a Java thread.
  _res = load_new_class_versions();
  if (_res != JVMTI_ERROR_NONE) {
    // free any successfully created classes, since none are redefined
    for (int i = 0; i < _class_count; i++) {
      if (_scratch_classes[i] != nullptr) {
        ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
        // Free the memory for this class at class unloading time. Not before
        // because CMS might think this is still live.
        InstanceKlass* ik = get_ik(_class_defs[i].klass);
        if (ik->get_cached_class_file() == _scratch_classes[i]->get_cached_class_file()) {
          // Don't double-free cached_class_file copied from the original class if error.
          _scratch_classes[i]->set_cached_class_file(nullptr);
        }
        cld->add_to_deallocate_list(InstanceKlass::cast(_scratch_classes[i]));
      }
    }
    // Free os::malloc allocated memory in load_new_class_version.
    os::free(_scratch_classes);
    _timer_vm_op_prologue.stop();
    unlock_classes();
    return false;
  }

  _timer_vm_op_prologue.stop();
  return true;
}
244
// The safepoint phase: installs each scratch class over its original,
// flushes dependent compiled code, and fixes up all metadata that refers
// to the redefined classes. Runs in the VM thread at a safepoint.
void VM_RedefineClasses::doit() {
  Thread* current = Thread::current();

  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_vm_op_doit.start();
  }

#if INCLUDE_CDS
  if (CDSConfig::is_using_archive()) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!AOTMetaspace::remap_shared_readonly_as_readwrite()) {
      log_info(redefine, class, load)("failed to remap shared readonly space to readwrite, private");
      _res = JVMTI_ERROR_INTERNAL;
      _timer_vm_op_doit.stop();
      return;
    }
  }
#endif

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack(/*walk_all_metadata*/true, /*redefinition_walk*/true);
  HandleMark hm(current);   // make sure any handles created are deleted
                            // before the stack walk again.

  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(current, _class_defs[i].klass, _scratch_classes[i]);
  }

  // Flush all compiled code that depends on the classes redefined.
  flush_dependent_code();

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved classes.
  // Have to do this after all classes are redefined and all methods that
  // are redefined are marked as old.
  AdjustAndCleanMetadata adjust_and_clean_metadata(current);
  ClassLoaderDataGraph::classes_do(&adjust_and_clean_metadata);

  // JSR-292 support
  if (_any_class_has_resolved_methods) {
    bool trace_name_printed = false;
    ResolvedMethodTable::adjust_method_entries(&trace_name_printed);
  }

  // Increment flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::increment_redefinition_count();

  // check_class() is optionally called for product bits, but is
  // always called for non-product bits.
#ifdef PRODUCT
  if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
#endif
    log_trace(redefine, class, obsolete, metadata)("calling check_class");
    CheckClass check_class(current);
    ClassLoaderDataGraph::classes_do(&check_class);
#ifdef PRODUCT
  }
#endif

  // Clean up any metadata now unreferenced while MetadataOnStackMark is set.
  ClassLoaderDataGraph::clean_deallocate_lists(false);

  _timer_vm_op_doit.stop();
}
314
// Runs on the requesting JavaThread after the safepoint: releases the
// class locks, frees the scratch-class array, and reports timing.
void VM_RedefineClasses::doit_epilogue() {
  unlock_classes();

  // Free os::malloc allocated memory.
  os::free(_scratch_classes);

  // Reset the_class to null for error printing.
  _the_class = nullptr;

  if (log_is_enabled(Info, redefine, class, timer)) {
    // Used to have separate timers for "doit" and "all", but the timer
    // overhead skewed the measurements.
    julong doit_time = _timer_vm_op_doit.milliseconds();
    julong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;

    log_info(redefine, class, timer)
      ("vm_op: all=" JULONG_FORMAT "  prologue=" JULONG_FORMAT "  doit=" JULONG_FORMAT,
       all_time, (julong)_timer_vm_op_prologue.milliseconds(), doit_time);
    log_info(redefine, class, timer)
      ("redefine_single_class: phase1=" JULONG_FORMAT "  phase2=" JULONG_FORMAT,
       (julong)_timer_rsc_phase1.milliseconds(), (julong)_timer_rsc_phase2.milliseconds());
  }
}
338
339 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
340 // classes for primitives cannot be redefined
341 if (java_lang_Class::is_primitive(klass_mirror)) {
342 return false;
343 }
344 Klass* k = java_lang_Class::as_Klass(klass_mirror);
345 // classes for arrays cannot be redefined
346 if (k == nullptr || !k->is_instance_klass()) {
347 return false;
348 }
349
350 // Cannot redefine or retransform a hidden class.
351 if (InstanceKlass::cast(k)->is_hidden()) {
352 return false;
353 }
354 if (InstanceKlass::cast(k) == vmClasses::Continuation_klass()) {
355 // Don't redefine Continuation class. See 8302779.
356 return false;
357 }
358 return true;
359 }
360
361 // Append the current entry at scratch_i in scratch_cp to *merge_cp_p
362 // where the end of *merge_cp_p is specified by *merge_cp_length_p. For
363 // direct CP entries, there is just the current entry to append. For
364 // indirect and double-indirect CP entries, there are zero or more
365 // referenced CP entries along with the current entry to append.
366 // Indirect and double-indirect CP entries are handled by recursive
367 // calls to append_entry() as needed. The referenced CP entries are
368 // always appended to *merge_cp_p before the referee CP entry. These
369 // referenced CP entries may already exist in *merge_cp_p in which case
370 // there is nothing extra to append and only the current entry is
371 // appended.
// Appends scratch_cp entry scratch_i to *merge_cp_p (see the block comment
// above for the full contract). Every case ends with the same epilogue:
// record an index mapping when the entry landed at a different index, then
// bump *merge_cp_length_p by the number of CP slots the entry occupies.
void VM_RedefineClasses::append_entry(const constantPoolHandle& scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    //
    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Class:
    case JVM_CONSTANT_UnresolvedClass:
    {
      // Make sure the referenced Utf8 name is in *merge_cp_p first.
      int name_i = scratch_cp->klass_name_index_at(scratch_i);
      int new_name_i = find_or_append_indirect_entry(scratch_cp, name_i, merge_cp_p,
                                                     merge_cp_length_p);

      if (new_name_i != name_i) {
        log_trace(redefine, class, constantpool)
          ("Class entry@%d name_index change: %d to %d",
           *merge_cp_length_p, name_i, new_name_i);
      }

      (*merge_cp_p)->temp_unresolved_klass_at_put(*merge_cp_length_p, new_name_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // Double/Long occupy two CP slots (JVMS 4.4.5).
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // Ensure both referenced Utf8 entries exist in *merge_cp_p first.
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p);

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d name_ref_index change: %d to %d",
           *merge_cp_length_p, name_ref_i, new_name_ref_i);
      }
      if (new_signature_ref_i != signature_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
           *merge_cp_length_p, signature_ref_i, new_signature_ref_i);
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Append the referenced Class and NameAndType entries first (which
      // may in turn recursively append their own referents).
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                                  merge_cp_p, merge_cp_length_p);

      const char *entry_name = nullptr;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d class_index changed: %d to %d", entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i);
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d name_and_type_index changed: %d to %d",
           entry_name, *merge_cp_length_p, name_and_type_ref_i, new_name_and_type_ref_i);
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodType entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      // ref_kind is a small constant (invokestatic, getfield, ...) and is
      // copied as-is; only the referenced CP index may need remapping.
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodHandle entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Dynamic:  // fall through
    case JVM_CONSTANT_InvokeDynamic:
    {
      // Index of the bootstrap specifier in the BSM array
      int old_bs_i = scratch_cp->bootstrap_methods_attribute_index(scratch_i);
      int new_bs_i = find_or_append_bsm_entry(scratch_cp, old_bs_i, merge_cp_p,
                                              merge_cp_length_p);
      // The bootstrap method NameAndType_info index
      int old_ref_i = scratch_cp->bootstrap_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                    merge_cp_length_p);
      if (new_bs_i != old_bs_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d bootstrap_method_attr_index change: %d to %d",
           *merge_cp_length_p, old_bs_i, new_bs_i);
      }
      if (new_ref_i != old_ref_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d name_and_type_index change: %d to %d", *merge_cp_length_p, old_ref_i, new_ref_i);
      }

      if (scratch_cp->tag_at(scratch_i).is_dynamic_constant()) {
        (*merge_cp_p)->dynamic_constant_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      } else {
        (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      }
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      // (bad_value is only here so it shows up in a crash dump/debugger)
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
631
632
// Returns the index in *merge_cp_p of an entry equal to scratch_cp entry
// ref_i, appending it (via append_entry(), possibly recursively) when no
// equal entry exists yet. Records an index mapping when the result differs
// from ref_i.
u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  int new_ref_i = ref_i;
  // Fast path: the same index in *merge_cp_p already holds an equal entry.
  // The length guard rejects forward references into not-yet-merged slots.
  bool match = (ref_i < *merge_cp_length_p) &&
               scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);

  if (!match) {
    // forward reference in *merge_cp_p or not a direct match
    int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p);
    if (found_i != 0) {
      guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
      // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
      new_ref_i = found_i;
      map_index(scratch_cp, ref_i, found_i);
    } else {
      // no match found so we have to append this entry to *merge_cp_p
      append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p);
      // The above call to append_entry() can only append one entry
      // so the post call query of *merge_cp_length_p is only for
      // the sake of consistency.
      new_ref_i = *merge_cp_length_p - 1;
    }
  }

  // constant pool indices are u2, unless the merged constant pool overflows which
  // we don't check for.
  return checked_cast<u2>(new_ref_i);
} // end find_or_append_indirect_entry()
662
663
664 // Append a bootstrap specifier into the merge_cp BSM entries that is semantically equal
665 // to the scratch_cp BSM entries' bootstrap specifier passed by the old_bs_i index.
666 // Recursively append new merge_cp entries referenced by the new bootstrap specifier.
// Appends a copy of scratch_cp's bootstrap specifier old_bs_i to the merged
// BSM attribute (via _bsmae_iter), remapping its method ref and every
// argument ref into *merge_cp_p indices. Returns the new specifier's index.
int VM_RedefineClasses::append_bsm_entry(const constantPoolHandle& scratch_cp, const int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  BSMAttributeEntry* old_bsme = scratch_cp->bsm_attribute_entry(old_bs_i);
  // Make sure the bootstrap method (a MethodHandle CP entry) is merged first.
  u2 old_ref_i = old_bsme->bootstrap_method_index();
  u2 new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                               merge_cp_length_p);
  if (new_ref_i != old_ref_i) {
    log_trace(redefine, class, constantpool)
      ("BSM attribute entry@%d bootstrap method ref_index change: %d to %d", _bsmae_iter.current_offset() - 1, old_ref_i, new_ref_i);
  }

  // Reserve the new entry first, then fill in the remapped argument refs.
  const int new_bs_i = _bsmae_iter.current_offset();
  BSMAttributeEntry* new_bsme =
    _bsmae_iter.reserve_new_entry(new_ref_i, old_bsme->argument_count());
  assert(new_bsme != nullptr, "must be");
  for (int i = 0; i < new_bsme->argument_count(); i++) {
    u2 old_arg_ref_i = old_bsme->argument(i);
    u2 new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                     merge_cp_length_p);
    new_bsme->set_argument(i, new_arg_ref_i);

    if (new_arg_ref_i != old_arg_ref_i) {
      log_trace(redefine, class, constantpool)
        ("BSM attribute entry@%d bootstrap method argument ref_index change: %d to %d",
         _bsmae_iter.current_offset() - 1, old_arg_ref_i, new_arg_ref_i);
    }
  }
  // This is only for the logging
  map_bsm_index(old_bs_i, new_bs_i);
  return new_bs_i;
} // end append_bsm_entry()
699
700
// BSM analog of find_or_append_indirect_entry(): returns the index in the
// merged BSM attribute of a specifier equal to scratch_cp's old_bs_i,
// appending a remapped copy when none exists yet.
int VM_RedefineClasses::find_or_append_bsm_entry(const constantPoolHandle& scratch_cp,
       int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // Number of BSM entries merged so far; entries at or past this offset
  // have not been written yet and must not be compared against.
  const int max_offset_in_merge = _bsmae_iter.current_offset();
  int new_bs_i = old_bs_i; // bootstrap specifier index
  // Has the old_bs_i index been used already? Check if it's the same so we know
  // whether or not a remapping is required.
  bool match = (old_bs_i < max_offset_in_merge) &&
               scratch_cp->compare_bootstrap_entry_to(old_bs_i, *merge_cp_p, old_bs_i);

  if (!match) {
    // forward reference in *merge_cp_p or not a direct match
    int found_i = scratch_cp->find_matching_bsm_entry(old_bs_i, *merge_cp_p,
                                                      max_offset_in_merge);
    if (found_i != -1) {
      guarantee(found_i != old_bs_i, "compare_bootstrap_entry_to() and find_matching_bsm_entry() disagree");
      // found a matching BSM entry somewhere else in *merge_cp_p so just need a mapping
      new_bs_i = found_i;
      map_bsm_index(old_bs_i, found_i);
    } else {
      // no match found so we have to append this bootstrap specifier to *merge_cp_p
      new_bs_i = append_bsm_entry(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p);
    }
  }
  return new_bs_i;
} // end find_or_append_bsm_entry()
727
728
// Seals the merged BSM attribute on merge_cp, dumps the old->new BSM index
// map when tracing, and resets the per-merge BSM bookkeeping state.
void VM_RedefineClasses::finalize_bsm_entries_merge(const constantPoolHandle& merge_cp, TRAPS) {
  if (merge_cp->bsm_entries().number_of_entries() == 0) {
    // No bootstrap specifiers were merged; nothing to finalize.
    return;
  }
  // Finished extending the BSMAEs
  merge_cp->end_extension(_bsmae_iter, CHECK);

  if (log_is_enabled(Trace, redefine, class, constantpool)) {
    // don't want to loop unless we are tracing
    int count = 0;
    // Entries left at -1 were never remapped; skip them.
    for (int i = 1; i < _bsm_index_map_p->length(); i++) {
      int value = _bsm_index_map_p->at(i);
      if (value != -1) {
        log_trace(redefine, class, constantpool)("bsm_index_map[%d]: old=%d new=%d", count, i, value);
        count++;
      }
    }
  }
  // Clean-up: reset per-merge state so the next merge starts fresh.
  _bsm_index_map_p = nullptr;
  _bsm_index_map_count = 0;
  _bsmae_iter = BSMAttributeEntries::InsertionIterator();
} // end finalize_bsmentries_merge()
752
753 // Symbol* comparator for qsort
754 // The caller must have an active ResourceMark.
755 static int symcmp(const void* a, const void* b) {
756 char* astr = (*(Symbol**)a)->as_C_string();
757 char* bstr = (*(Symbol**)b)->as_C_string();
758 return strcmp(astr, bstr);
759 }
760
761 // The caller must have an active ResourceMark.
// Verifies that a class attribute backed by an array of CP class indices
// (e.g. PermittedSubclasses, NestMembers) is unchanged between the_class
// and scratch_class, comparing as unordered sets of class names. Returns
// JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED on any
// difference (length, contents, or presence/absence of the attribute).
// The caller must have an active ResourceMark.
static jvmtiError check_attribute_arrays(const char* attr_name,
           InstanceKlass* the_class, InstanceKlass* scratch_class,
           Array<u2>* the_array, Array<u2>* scr_array) {
  // An absent attribute is represented by the shared empty-array sentinel,
  // so the_array is always a valid object and reading its length below is
  // safe even when the attribute does not exist.
  bool the_array_exists = the_array != Universe::the_empty_short_array();
  bool scr_array_exists = scr_array != Universe::the_empty_short_array();

  int array_len = the_array->length();
  if (the_array_exists && scr_array_exists) {
    if (array_len != scr_array->length()) {
      log_trace(redefine, class)
        ("redefined class %s attribute change error: %s len=%d changed to len=%d",
         the_class->external_name(), attr_name, array_len, scr_array->length());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
    }

    // The order of entries in the attribute array is not specified so we
    // have to explicitly check for the same contents. We do this by copying
    // the referenced symbols into their own arrays, sorting them and then
    // comparing each element pair.

    Symbol** the_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);
    Symbol** scr_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);

    if (the_syms == nullptr || scr_syms == nullptr) {
      return JVMTI_ERROR_OUT_OF_MEMORY;
    }

    // Resolve each CP index to its class-name Symbol in the respective pool.
    for (int i = 0; i < array_len; i++) {
      int the_cp_index = the_array->at(i);
      int scr_cp_index = scr_array->at(i);
      the_syms[i] = the_class->constants()->klass_name_at(the_cp_index);
      scr_syms[i] = scratch_class->constants()->klass_name_at(scr_cp_index);
    }

    qsort(the_syms, array_len, sizeof(Symbol*), symcmp);
    qsort(scr_syms, array_len, sizeof(Symbol*), symcmp);

    // After sorting, equal sets imply pairwise-equal Symbol* pointers
    // (Symbols are interned).
    for (int i = 0; i < array_len; i++) {
      if (the_syms[i] != scr_syms[i]) {
        log_info(redefine, class)
          ("redefined class %s attribute change error: %s[%d]: %s changed to %s",
           the_class->external_name(), attr_name, i,
           the_syms[i]->as_C_string(), scr_syms[i]->as_C_string());
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
      }
    }
  } else if (the_array_exists ^ scr_array_exists) {
    // Attribute present in exactly one of the two versions.
    const char* action_str = (the_array_exists) ? "removed" : "added";
    log_info(redefine, class)
      ("redefined class %s attribute change error: %s attribute %s",
       the_class->external_name(), attr_name, action_str);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
  }
  return JVMTI_ERROR_NONE;
}
817
818 static jvmtiError check_nest_attributes(InstanceKlass* the_class,
819 InstanceKlass* scratch_class) {
820 // Check whether the class NestHost attribute has been changed.
821 Thread* thread = Thread::current();
822 ResourceMark rm(thread);
823 u2 the_nest_host_idx = the_class->nest_host_index();
824 u2 scr_nest_host_idx = scratch_class->nest_host_index();
825
826 if (the_nest_host_idx != 0 && scr_nest_host_idx != 0) {
827 Symbol* the_sym = the_class->constants()->klass_name_at(the_nest_host_idx);
828 Symbol* scr_sym = scratch_class->constants()->klass_name_at(scr_nest_host_idx);
829 if (the_sym != scr_sym) {
830 log_info(redefine, class, nestmates)
831 ("redefined class %s attribute change error: NestHost class: %s replaced with: %s",
832 the_class->external_name(), the_sym->as_C_string(), scr_sym->as_C_string());
833 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
834 }
835 } else if ((the_nest_host_idx == 0) ^ (scr_nest_host_idx == 0)) {
836 const char* action_str = (the_nest_host_idx != 0) ? "removed" : "added";
837 log_info(redefine, class, nestmates)
838 ("redefined class %s attribute change error: NestHost attribute %s",
839 the_class->external_name(), action_str);
840 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
841 }
842
843 // Check whether the class NestMembers attribute has been changed.
844 return check_attribute_arrays("NestMembers",
845 the_class, scratch_class,
846 the_class->nest_members(),
847 scratch_class->nest_members());
848 }
849
850 // Return an error status if the class Record attribute was changed.
851 static jvmtiError check_record_attribute(InstanceKlass* the_class, InstanceKlass* scratch_class) {
852 // Get lists of record components.
853 Array<RecordComponent*>* the_record = the_class->record_components();
854 Array<RecordComponent*>* scr_record = scratch_class->record_components();
855 bool the_record_exists = the_record != nullptr;
856 bool scr_record_exists = scr_record != nullptr;
857
858 if (the_record_exists && scr_record_exists) {
859 int the_num_components = the_record->length();
860 int scr_num_components = scr_record->length();
861 if (the_num_components != scr_num_components) {
862 log_info(redefine, class, record)
863 ("redefined class %s attribute change error: Record num_components=%d changed to num_components=%d",
864 the_class->external_name(), the_num_components, scr_num_components);
865 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
866 }
867
868 // Compare each field in each record component.
869 ConstantPool* the_cp = the_class->constants();
870 ConstantPool* scr_cp = scratch_class->constants();
871 for (int x = 0; x < the_num_components; x++) {
872 RecordComponent* the_component = the_record->at(x);
873 RecordComponent* scr_component = scr_record->at(x);
874 const Symbol* const the_name = the_cp->symbol_at(the_component->name_index());
875 const Symbol* const scr_name = scr_cp->symbol_at(scr_component->name_index());
876 const Symbol* const the_descr = the_cp->symbol_at(the_component->descriptor_index());
877 const Symbol* const scr_descr = scr_cp->symbol_at(scr_component->descriptor_index());
878 if (the_name != scr_name || the_descr != scr_descr) {
879 log_info(redefine, class, record)
880 ("redefined class %s attribute change error: Record name_index, descriptor_index, and/or attributes_count changed",
881 the_class->external_name());
882 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
883 }
884
885 int the_gen_sig = the_component->generic_signature_index();
886 int scr_gen_sig = scr_component->generic_signature_index();
887 const Symbol* const the_gen_sig_sym = (the_gen_sig == 0 ? nullptr :
888 the_cp->symbol_at(the_component->generic_signature_index()));
889 const Symbol* const scr_gen_sig_sym = (scr_gen_sig == 0 ? nullptr :
890 scr_cp->symbol_at(scr_component->generic_signature_index()));
891 if (the_gen_sig_sym != scr_gen_sig_sym) {
892 log_info(redefine, class, record)
893 ("redefined class %s attribute change error: Record generic_signature attribute changed",
894 the_class->external_name());
895 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
896 }
897
898 // It's okay if a record component's annotations were changed.
899 }
900
901 } else if (the_record_exists ^ scr_record_exists) {
902 const char* action_str = (the_record_exists) ? "removed" : "added";
903 log_info(redefine, class, record)
904 ("redefined class %s attribute change error: Record attribute %s",
905 the_class->external_name(), action_str);
906 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
907 }
908
909 return JVMTI_ERROR_NONE;
910 }
911
912
913 static jvmtiError check_permitted_subclasses_attribute(InstanceKlass* the_class,
914 InstanceKlass* scratch_class) {
915 Thread* thread = Thread::current();
916 ResourceMark rm(thread);
917
918 // Check whether the class PermittedSubclasses attribute has been changed.
919 return check_attribute_arrays("PermittedSubclasses",
920 the_class, scratch_class,
921 the_class->permitted_subclasses(),
922 scratch_class->permitted_subclasses());
923 }
924
925 static bool can_add_or_delete(Method* m) {
926 // Compatibility mode
927 return (AllowRedefinitionToAddDeleteMethods &&
928 (m->is_private() && (m->is_static() || m->is_final())));
929 }
930
// Verify that the_class can be redefined to scratch_class: superclass,
// interfaces, fields, and class/field/method modifiers must be unchanged,
// as must the nest, Record and PermittedSubclasses attributes. Methods may
// only be added or deleted where can_add_or_delete() permits. As a side
// effect, reorders scratch_class's overloaded methods to match the_class's
// order and keeps method idnums stable across the redefinition.
// Returns JVMTI_ERROR_NONE on success, or a specific
// JVMTI_ERROR_UNSUPPORTED_REDEFINITION_* / JVMTI_ERROR_* code on failure.
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
             InstanceKlass* the_class,
             InstanceKlass* scratch_class) {
  int i;

  // Check superclasses, or rather their names, since superclasses themselves can be
  // requested to replace.
  // Check for null superclass first since this might be java.lang.Object
  if (the_class->super() != scratch_class->super() &&
      (the_class->super() == nullptr || scratch_class->super() == nullptr ||
       the_class->super()->name() !=
       scratch_class->super()->name())) {
    log_info(redefine, class, normalize)
      ("redefined class %s superclass change error: superclass changed from %s to %s.",
       the_class->external_name(),
       the_class->super() == nullptr ? "null" : the_class->super()->external_name(),
       scratch_class->super() == nullptr ? "null" : scratch_class->super()->external_name());
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }

  // Check if the number, names and order of directly implemented interfaces are the same.
  // I think in principle we should just check if the sets of names of directly implemented
  // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
  // .java file, also changes in .class file) should not matter. However, comparing sets is
  // technically a bit more difficult, and, more importantly, I am not sure at present that the
  // order of interfaces does not matter on the implementation level, i.e. that the VM does not
  // rely on it somewhere.
  Array<InstanceKlass*>* k_interfaces = the_class->local_interfaces();
  Array<InstanceKlass*>* k_new_interfaces = scratch_class->local_interfaces();
  int n_intfs = k_interfaces->length();
  if (n_intfs != k_new_interfaces->length()) {
    log_info(redefine, class, normalize)
      ("redefined class %s interfaces change error: number of implemented interfaces changed from %d to %d.",
       the_class->external_name(), n_intfs, k_new_interfaces->length());
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }
  for (i = 0; i < n_intfs; i++) {
    if (k_interfaces->at(i)->name() !=
        k_new_interfaces->at(i)->name()) {
      log_info(redefine, class, normalize)
        ("redefined class %s interfaces change error: interface changed from %s to %s.",
         the_class->external_name(),
         k_interfaces->at(i)->external_name(), k_new_interfaces->at(i)->external_name());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
    }
  }

  // Check whether class is in the error init state.
  if (the_class->is_in_error_state()) {
    log_info(redefine, class, normalize)
      ("redefined class %s is in error init state.", the_class->external_name());
    // TBD #5057930: special error code is needed in 1.6
    return JVMTI_ERROR_INVALID_CLASS;
  }

  // Check whether the nest-related attributes have been changed.
  jvmtiError err = check_nest_attributes(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the Record attribute has been changed.
  err = check_record_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the PermittedSubclasses attribute has been changed.
  err = check_permitted_subclasses_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether class modifiers are the same.
  u2 old_flags = the_class->access_flags().as_class_flags();
  u2 new_flags = scratch_class->access_flags().as_class_flags();
  if (old_flags != new_flags) {
    log_info(redefine, class, normalize)
        ("redefined class %s modifiers change error: modifiers changed from %d to %d.",
         the_class->external_name(), old_flags, new_flags);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
  }

  // Check if the number, names, types and order of fields declared in these classes
  // are the same.
  JavaFieldStream old_fs(the_class);
  JavaFieldStream new_fs(scratch_class);
  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
    // name and signature
    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
    Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
    Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
      log_info(redefine, class, normalize)
          ("redefined class %s fields change error: field %s %s changed to %s %s.",
           the_class->external_name(),
           sig_sym1->as_C_string(), name_sym1->as_C_string(),
           sig_sym2->as_C_string(), name_sym2->as_C_string());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // offset
    if (old_fs.offset() != new_fs.offset()) {
      log_info(redefine, class, normalize)
          ("redefined class %s field %s change error: offset changed from %d to %d.",
           the_class->external_name(), name_sym2->as_C_string(), old_fs.offset(), new_fs.offset());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // access
    old_flags = old_fs.access_flags().as_field_flags();
    new_flags = new_fs.access_flags().as_field_flags();
    if (old_flags != new_flags) {
      log_info(redefine, class, normalize)
          ("redefined class %s field %s change error: modifiers changed from %d to %d.",
           the_class->external_name(), name_sym2->as_C_string(), old_flags, new_flags);
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
  }

  // If both streams aren't done then we have a differing number of
  // fields.
  if (!old_fs.done() || !new_fs.done()) {
    const char* action = old_fs.done() ? "added" : "deleted";
    log_info(redefine, class, normalize)
        ("redefined class %s fields change error: some fields were %s.",
         the_class->external_name(), action);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
  }

  // Do a parallel walk through the old and new methods. Detect
  // cases where they match (exist in both), have been added in
  // the new methods, or have been deleted (exist only in the
  // old methods). The class file parser places methods in order
  // by method name, but does not order overloaded methods by
  // signature. In order to determine what fate befell the methods,
  // this code places the overloaded new methods that have matching
  // old methods in the same order as the old methods and places
  // new overloaded methods at the end of overloaded methods of
  // that name. The code for this order normalization is adapted
  // from the algorithm used in InstanceKlass::find_method().
  // Since we are swapping out of order entries as we find them,
  // we only have to search forward through the overloaded methods.
  // Methods which are added and have the same name as an existing
  // method (but different signature) will be put at the end of
  // the methods with that name, and the name mismatch code will
  // handle them.
  Array<Method*>* k_old_methods(the_class->methods());
  Array<Method*>* k_new_methods(scratch_class->methods());
  int n_old_methods = k_old_methods->length();
  int n_new_methods = k_new_methods->length();
  Thread* thread = Thread::current();

  int ni = 0;
  int oi = 0;
  while (true) {
    Method* k_old_method;
    Method* k_new_method;
    enum { matched, added, deleted, undetermined } method_was = undetermined;

    if (oi >= n_old_methods) {
      if (ni >= n_new_methods) {
        break; // we've looked at everything, done
      }
      // New method at the end
      k_new_method = k_new_methods->at(ni);
      method_was = added;
    } else if (ni >= n_new_methods) {
      // Old method, at the end, is deleted
      k_old_method = k_old_methods->at(oi);
      method_was = deleted;
    } else {
      // There are more methods in both the old and new lists
      k_old_method = k_old_methods->at(oi);
      k_new_method = k_new_methods->at(ni);
      if (k_old_method->name() != k_new_method->name()) {
        // Methods are sorted by method name, so a mismatch means added
        // or deleted
        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
          method_was = added;
        } else {
          method_was = deleted;
        }
      } else if (k_old_method->signature() == k_new_method->signature()) {
        // Both the name and signature match
        method_was = matched;
      } else {
        // The name matches, but the signature doesn't, which means we have to
        // search forward through the new overloaded methods.
        int nj;  // outside the loop for post-loop check
        for (nj = ni + 1; nj < n_new_methods; nj++) {
          Method* m = k_new_methods->at(nj);
          if (k_old_method->name() != m->name()) {
            // reached another method name so no more overloaded methods
            method_was = deleted;
            break;
          }
          if (k_old_method->signature() == m->signature()) {
            // found a match so swap the methods
            k_new_methods->at_put(ni, m);
            k_new_methods->at_put(nj, k_new_method);
            k_new_method = m;
            method_was = matched;
            break;
          }
        }

        if (nj >= n_new_methods) {
          // reached the end without a match; so method was deleted
          method_was = deleted;
        }
      }
    }

    switch (method_was) {
    case matched:
      // methods match, be sure modifiers do too
      old_flags = k_old_method->access_flags().as_method_flags();
      new_flags = k_new_method->access_flags().as_method_flags();
      // JVM_ACC_NATIVE is masked out: a method may legally toggle native
      // (e.g. agent instrumentation) without failing redefinition.
      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
        log_info(redefine, class, normalize)
          ("redefined class %s method %s modifiers error: modifiers changed from %d to %d",
           the_class->external_name(), k_old_method->name_and_sig_as_C_string(), old_flags, new_flags);
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
      }
      {
        // Keep the idnum of the matched method stable: move the old
        // method's idnum onto the new method, displacing any current
        // holder of that idnum in scratch_class.
        u2 new_num = k_new_method->method_idnum();
        u2 old_num = k_old_method->method_idnum();
        if (new_num != old_num) {
          Method* idnum_owner = scratch_class->method_with_idnum(old_num);
          if (idnum_owner != nullptr) {
            // There is already a method assigned this idnum -- switch them
            // Take current and original idnum from the new_method
            idnum_owner->set_method_idnum(new_num);
            idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
          }
          // Take current and original idnum from the old_method
          k_new_method->set_method_idnum(old_num);
          k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum());
          if (thread->has_pending_exception()) {
            return JVMTI_ERROR_OUT_OF_MEMORY;
          }
        }
      }
      log_trace(redefine, class, normalize)
        ("Method matched: new: %s [%d] == old: %s [%d]",
         k_new_method->name_and_sig_as_C_string(), ni, k_old_method->name_and_sig_as_C_string(), oi);
      // advance to next pair of methods
      ++oi;
      ++ni;
      break;
    case added:
      // method added, see if it is OK
      if (!can_add_or_delete(k_new_method)) {
        log_info(redefine, class, normalize)
          ("redefined class %s methods error: added method: %s [%d]",
           the_class->external_name(), k_new_method->name_and_sig_as_C_string(), ni);
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
      }
      {
        // Assign the added method a fresh idnum from the old class so it
        // doesn't collide with any existing method's idnum.
        u2 num = the_class->next_method_idnum();
        if (num == ConstMethod::UNSET_IDNUM) {
          // cannot add any more methods
          log_info(redefine, class, normalize)
            ("redefined class %s methods error: can't create ID for new method %s [%d]",
             the_class->external_name(), k_new_method->name_and_sig_as_C_string(), ni);
          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
        }
        u2 new_num = k_new_method->method_idnum();
        Method* idnum_owner = scratch_class->method_with_idnum(num);
        if (idnum_owner != nullptr) {
          // There is already a method assigned this idnum -- switch them
          // Take current and original idnum from the new_method
          idnum_owner->set_method_idnum(new_num);
          idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
        }
        k_new_method->set_method_idnum(num);
        k_new_method->set_orig_method_idnum(num);
        if (thread->has_pending_exception()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        }
      }
      log_trace(redefine, class, normalize)
        ("Method added: new: %s [%d]", k_new_method->name_and_sig_as_C_string(), ni);
      ++ni; // advance to next new method
      break;
    case deleted:
      // method deleted, see if it is OK
      if (!can_add_or_delete(k_old_method)) {
        log_info(redefine, class, normalize)
          ("redefined class %s methods error: deleted method %s [%d]",
           the_class->external_name(), k_old_method->name_and_sig_as_C_string(), oi);
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
      }
      log_trace(redefine, class, normalize)
        ("Method deleted: old: %s [%d]", k_old_method->name_and_sig_as_C_string(), oi);
      ++oi; // advance to next old method
      break;
    default:
      ShouldNotReachHere();
    }
  }

  return JVMTI_ERROR_NONE;
}
1235
1236
1237 // Find new constant pool index value for old constant pool index value
1238 // by searching the index map. Returns zero (0) if there is no mapped
1239 // value for the old constant pool index.
1240 u2 VM_RedefineClasses::find_new_index(int old_index) {
1241 if (_index_map_count == 0) {
1242 // map is empty so nothing can be found
1243 return 0;
1244 }
1245
1246 if (old_index < 1 || old_index >= _index_map_p->length()) {
1247 // The old_index is out of range so it is not mapped. This should
1248 // not happen in regular constant pool merging use, but it can
1249 // happen if a corrupt annotation is processed.
1250 return 0;
1251 }
1252
1253 int value = _index_map_p->at(old_index);
1254 if (value == -1) {
1255 // the old_index is not mapped
1256 return 0;
1257 }
1258
1259 // constant pool indices are u2, unless the merged constant pool overflows which
1260 // we don't check for.
1261 return checked_cast<u2>(value);
1262 } // end find_new_index()
1263
1264
1265 // Find new bootstrap specifier index value for old bootstrap specifier index
1266 // value by searching the index map. Returns unused index (-1) if there is
1267 // no mapped value for the old bootstrap specifier index.
1268 int VM_RedefineClasses::find_new_bsm_index(int old_index) {
1269 if (_bsm_index_map_count == 0) {
1270 // map is empty so nothing can be found
1271 return -1;
1272 }
1273
1274 if (old_index == -1 || old_index >= _bsm_index_map_p->length()) {
1275 // The old_index is out of range so it is not mapped.
1276 // This should not happen in regular constant pool merging use.
1277 return -1;
1278 }
1279
1280 int value = _bsm_index_map_p->at(old_index);
1281 if (value == -1) {
1282 // the old_index is not mapped
1283 return -1;
1284 }
1285
1286 return value;
1287 } // end find_new_bsm_index()
1288
1289
// The bug 6214132 caused the verification to fail.
// 1. What's done in RedefineClasses() before verification:
//  a) A reference to the class being redefined (_the_class) and a
//     reference to new version of the class (_scratch_class) are
//     saved here for use during the bytecode verification phase of
//     RedefineClasses.
//  b) The _java_mirror field from _the_class is copied to the
//     _java_mirror field in _scratch_class. This means that a jclass
//     returned for _the_class or _scratch_class will refer to the
//     same Java mirror. The verifier will see the "one true mirror"
//     for the class being verified.
// 2. See comments in JvmtiThreadState for what is done during verification.

// RAII helper: while in scope, records the old/new class pair in the
// JvmtiThreadState and makes scratch_class share the_class's Java mirror;
// both effects are undone (in reverse order) on destruction.
class RedefineVerifyMark : public StackObj {
 private:
  JvmtiThreadState* _state;
  InstanceKlass*    _scratch_class;
  OopHandle         _scratch_mirror;  // scratch_class's original mirror, restored in the dtor

 public:

  RedefineVerifyMark(InstanceKlass* the_class, InstanceKlass* scratch_class,
                     JvmtiThreadState* state) : _state(state), _scratch_class(scratch_class)
  {
    // Let JVM_* functions called by the verifier redirect the_class to
    // scratch_class.
    _state->set_class_versions_map(the_class, scratch_class);
    // Swap the_class's mirror into scratch_class, keeping the displaced
    // mirror handle for restoration.
    _scratch_mirror = the_class->java_mirror_handle();  // this is a copy that is swapped
    _scratch_class->swap_java_mirror_handle(_scratch_mirror);
  }

  ~RedefineVerifyMark() {
    // Restore the scratch class's mirror, so when scratch_class is removed
    // the correct mirror pointing to it can be cleared.
    _scratch_class->swap_java_mirror_handle(_scratch_mirror);
    _state->clear_class_versions_map();
  }
};
1326
1327
// Parse, check, verify and prepare the new version of every class being
// redefined. For each entry in _class_defs this parses the class bytes
// (delivering the ClassFileLoadHook event), links the old class if needed,
// runs the compatibility checks, verifies the new bytecodes, merges the
// constant pools and rewrites the new class. The resulting scratch classes
// are stored in _scratch_classes; on error the caller deallocates them.
// Returns JVMTI_ERROR_NONE on success or a JVMTI error code.
jvmtiError VM_RedefineClasses::load_new_class_versions() {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (InstanceKlass**)
    os::malloc(sizeof(InstanceKlass*) * _class_count, mtClass);
  if (_scratch_classes == nullptr) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = nullptr;
  }

  JavaThread* current = JavaThread::current();
  ResourceMark rm(current);

  JvmtiThreadState *state = JvmtiThreadState::state_for(current);
  // state can only be null if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != nullptr, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(current);
    InstanceKlass* the_class = get_ik(_class_defs[i].klass);
    physical_memory_size_type avail_mem = 0;
    // Return value ignored - defaulting to 0 on failure.
    (void)os::available_memory(avail_mem);
    log_debug(redefine, class, load)
      ("loading name=%s kind=%d (avail_mem=" PHYS_MEM_TYPE_FORMAT "K)",
       the_class->external_name(), _class_load_kind, avail_mem >> 10);

    ClassFileStream st((u1*)_class_defs[i].class_bytes,
                       _class_defs[i].class_byte_count,
                       "__VM_RedefineClasses__");

    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(the_class, _class_load_kind);

    JavaThread* THREAD = current; // For exception macros.
    ExceptionMark em(THREAD);
    Handle protection_domain(THREAD, the_class->protection_domain());
    ClassLoadInfo cl_info(protection_domain);
    // Parse and create a class from the bytes, but this class isn't added
    // to the dictionary, so do not call resolve_from_stream.
    InstanceKlass* scratch_class = KlassFactory::create_from_stream(&st,
                                                      the_class->name(),
                                                      the_class->class_loader_data(),
                                                      cl_info,
                                                      THREAD);

    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = scratch_class;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("create_from_stream exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;

      // Map the parse exception to the corresponding JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        oop message = java_lang_Throwable::message(PENDING_EXCEPTION);
        if (message != nullptr) {
          char* ex_msg = java_lang_String::as_utf8_string(message);
          log_info(redefine, class, load, exceptions)("link_class exception: '%s %s'",
                   ex_name->as_C_string(), ex_msg);
        } else {
          log_info(redefine, class, load, exceptions)("link_class exception: '%s'",
                   ex_name->as_C_string());
        }
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
          return JVMTI_ERROR_INVALID_CLASS;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(the_class, scratch_class, state);
      Verifier::verify(scratch_class, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("verify_byte_codes exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    } else if (res != JVMTI_ERROR_NONE) {
      return res;
    }

#ifdef ASSERT
    {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(the_class, scratch_class, state);
        Verifier::verify(scratch_class, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        log_info(redefine, class, load, exceptions)
          ("verify_byte_codes post merge-CP exception: '%s'", ex_name->as_C_string());
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }
#endif // ASSERT

    // Rewrite the new class's bytecodes and link its methods.
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)
        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }
    // Return value ignored - defaulting to 0 on failure.
    (void)os::available_memory(avail_mem);
    log_debug(redefine, class, load)
      ("loaded name=%s (avail_mem=" PHYS_MEM_TYPE_FORMAT "K)", the_class->external_name(), avail_mem >> 10);
  }

  return JVMTI_ERROR_NONE;
}
1533
1534
1535 // Map old_index to new_index as needed. scratch_cp is only needed
1536 // for log calls.
1537 void VM_RedefineClasses::map_index(const constantPoolHandle& scratch_cp,
1538 int old_index, int new_index) {
1539 if (find_new_index(old_index) != 0) {
1540 // old_index is already mapped
1541 return;
1542 }
1543
1544 if (old_index == new_index) {
1545 // no mapping is needed
1546 return;
1547 }
1548
1549 _index_map_p->at_put(old_index, new_index);
1550 _index_map_count++;
1551
1552 log_trace(redefine, class, constantpool)
1553 ("mapped tag %d at index %d to %d", scratch_cp->tag_at(old_index).value(), old_index, new_index);
1554 } // end map_index()
1555
1556
1557 // Map old_index to new_index as needed.
1558 void VM_RedefineClasses::map_bsm_index(int old_index, int new_index) {
1559 if (old_index == new_index) {
1560 // no mapping is needed
1561 return;
1562 }
1563 _bsm_index_map_p->at_put(old_index, new_index);
1564 _bsm_index_map_count++;
1565 log_trace(redefine, class, constantpool)("mapped bootstrap specifier at index %d to %d", old_index, new_index);
1566 } // end map_bsm_index()
1567
1568
// Merge old_cp and scratch_cp and return the results of the merge via
// merge_cp_p. The number of entries in merge_cp_p is returned via
// merge_cp_length_p. The entries in old_cp occupy the same locations
// in merge_cp_p. Also creates a map of indices from entries in
// scratch_cp to the corresponding entry in merge_cp_p. Index map
// entries are only created for entries in scratch_cp that occupy a
// different location in merge_cp_p.
//
// Returns false if merge_cp_p is too small to hold old_cp. Exceptions
// raised by the bootstrap-method merging helpers unwind through the
// CHECK_false macros with a false return value.
bool VM_RedefineClasses::merge_constant_pools(const constantPoolHandle& old_cp,
       const constantPoolHandle& scratch_cp, constantPoolHandle& merge_cp_p,
       int& merge_cp_length_p, TRAPS) {

  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if (merge_cp_p->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  log_info(redefine, class, constantpool)("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(), scratch_cp->length());

  {
    // Pass 0:
    // The old_cp is copied to merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i; // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        merge_cp_p->temp_unresolved_klass_at_put(old_i,
          old_cp->klass_name_index_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to merge_cp_p, but double and long take
        // two constant pool entries so skip the second (invalid) slot
        ConstantPool::copy_entry_to(old_cp, old_i, merge_cp_p, old_i);
        old_i++;
        break;

      default:
        // just copy the entry to merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, merge_cp_p, old_i);
        break;
      }
    } // end for each old_cp entry

    // Carry the old pool's bootstrap-method entries over to the merged
    // pool, then open the merged pool's BSM area for extension with
    // entries from scratch_cp (consumed via _bsmae_iter by append_entry
    // and finalized below).
    ConstantPool::copy_bsm_entries(old_cp, merge_cp_p, CHECK_false);
    _bsmae_iter = merge_cp_p->start_extension(scratch_cp, CHECK_false);

    // We don't need to sanity check that merge_cp_length_p is within
    // merge_cp_p bounds since we have the minimum on-entry check above.
    merge_cp_length_p = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  log_debug(redefine, class, constantpool)("after pass 0: merge_cp_len=%d", merge_cp_length_p);

  int scratch_i; // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, merge_cp_p, scratch_i);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, merge_cp_p);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to merge_cp_p.
      append_entry(scratch_cp, scratch_i, &merge_cp_p, &merge_cp_length_p);
    }
  }

  log_debug(redefine, class, constantpool)
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
     merge_cp_length_p, scratch_i, _index_map_count);

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, merge_cp_p);
      if (found_i != 0) {
        // Found a matching entry somewhere else in merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to merge_cp_p.
      append_entry(scratch_cp, scratch_i, &merge_cp_p, &merge_cp_length_p);
    }

    log_debug(redefine, class, constantpool)
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
       merge_cp_length_p, scratch_i, _index_map_count);
  }
  // close out the BSM extension opened in pass 0
  finalize_bsm_entries_merge(merge_cp_p, CHECK_false);

  return true;
} // end merge_constant_pools()
1732
1733
1734 // Scoped object to clean up the constant pool(s) created for merging
1735 class MergeCPCleaner {
1736 ClassLoaderData* _loader_data;
1737 ConstantPool* _cp;
1738 ConstantPool* _scratch_cp;
1739 public:
1740 MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1741 _loader_data(loader_data), _cp(merge_cp), _scratch_cp(nullptr) {}
1742 ~MergeCPCleaner() {
1743 _loader_data->add_to_deallocate_list(_cp);
1744 if (_scratch_cp != nullptr) {
1745 _loader_data->add_to_deallocate_list(_scratch_cp);
1746 }
1747 }
1748 void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1749 };
1750
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool.
//
// Returns JVMTI_ERROR_NONE on success; JVMTI_ERROR_OUT_OF_MEMORY if
// pool allocation fails; JVMTI_ERROR_INTERNAL if the merge fails, the
// merged pool would overflow a u2 index, or the class was redefined
// concurrently.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
       InstanceKlass* the_class, InstanceKlass* scratch_class,
       TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing. It
  // should be marked safe before discarding it. Even though
  // garbage, if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pools (may keep scratch_cp)
  merge_cp->increment_and_save_version(old_cp->version());
  scratch_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  _index_map_count = 0;
  // -1 marks "not mapped"; see map_index() and the trace loop below.
  _index_map_p = new intArray(scratch_cp->length(), scratch_cp->length(), -1);

  _bsm_index_map_count = 0;
  int bsm_data_len = scratch_cp->bsm_entries().array_length();
  _bsm_index_map_p = new intArray(bsm_data_len, bsm_data_len, -1);

  // reference to the cp holder is needed for reallocating the BSM attribute;
  // cleared again right after the merge so the temporary pool is not
  // left pointing at scratch_class
  merge_cp->set_pool_holder(scratch_class);
  bool result = merge_constant_pools(old_cp, scratch_cp, merge_cp,
                                     merge_cp_length, THREAD);
  merge_cp->set_pool_holder(nullptr);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  // ensure merged constant pool size does not overflow u2
  if (merge_cp_length > 0xFFFF) {
    log_warning(redefine, class, constantpool)("Merged constant pool overflow: %d entries", merge_cp_length);
    return JVMTI_ERROR_INTERNAL;
  }

  // Set dynamic constants attribute from the original CP.
  if (old_cp->has_dynamic_constant()) {
    scratch_cp->set_has_dynamic_constant();
  }

  log_info(redefine, class, constantpool)("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count);

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    // Copy attributes from scratch_cp to merge_cp
    merge_cp->copy_fields(scratch_cp());

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (log_is_enabled(Trace, redefine, class, constantpool)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          log_trace(redefine, class, constantpool)("index_map[%d]: old=%d new=%d", count, i, value);
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Copy attributes from scratch_cp to merge_cp (should be done after rewrite_cp_refs())
    merge_cp->copy_fields(scratch_cp());

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1902
1903
1904 // Rewrite constant pool references in klass scratch_class.
1905 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {
1906
1907 // rewrite constant pool references in the nest attributes:
1908 if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1909 // propagate failure back to caller
1910 return false;
1911 }
1912
1913 // rewrite constant pool references in the Record attribute:
1914 if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
1915 // propagate failure back to caller
1916 return false;
1917 }
1918
1919 // rewrite constant pool references in the PermittedSubclasses attribute:
1920 if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1921 // propagate failure back to caller
1922 return false;
1923 }
1924
1925 // rewrite constant pool references in the LoadableDescriptors attribute:
1926 if (!rewrite_cp_refs_in_loadable_descriptors_attribute(scratch_class)) {
1927 // propagate failure back to caller
1928 return false;
1929 }
1930
1931 // rewrite constant pool references in the methods:
1932 if (!rewrite_cp_refs_in_methods(scratch_class)) {
1933 // propagate failure back to caller
1934 return false;
1935 }
1936
1937 // rewrite constant pool references in the class_annotations:
1938 if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
1939 // propagate failure back to caller
1940 return false;
1941 }
1942
1943 // rewrite constant pool references in the fields_annotations:
1944 if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
1945 // propagate failure back to caller
1946 return false;
1947 }
1948
1949 // rewrite constant pool references in the methods_annotations:
1950 if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {
1951 // propagate failure back to caller
1952 return false;
1953 }
1954
1955 // rewrite constant pool references in the methods_parameter_annotations:
1956 if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class)) {
1957 // propagate failure back to caller
1958 return false;
1959 }
1960
1961 // rewrite constant pool references in the methods_default_annotations:
1962 if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class)) {
1963 // propagate failure back to caller
1964 return false;
1965 }
1966
1967 // rewrite constant pool references in the class_type_annotations:
1968 if (!rewrite_cp_refs_in_class_type_annotations(scratch_class)) {
1969 // propagate failure back to caller
1970 return false;
1971 }
1972
1973 // rewrite constant pool references in the fields_type_annotations:
1974 if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class)) {
1975 // propagate failure back to caller
1976 return false;
1977 }
1978
1979 // rewrite constant pool references in the methods_type_annotations:
1980 if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class)) {
1981 // propagate failure back to caller
1982 return false;
1983 }
1984
1985 // There can be type annotations in the Code part of a method_info attribute.
1986 // These annotations are not accessible, even by reflection.
1987 // Currently they are not even parsed by the ClassFileParser.
1988 // If runtime access is added they will also need to be rewritten.
1989
1990 // rewrite source file name index:
1991 u2 source_file_name_idx = scratch_class->source_file_name_index();
1992 if (source_file_name_idx != 0) {
1993 u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
1994 if (new_source_file_name_idx != 0) {
1995 scratch_class->set_source_file_name_index(new_source_file_name_idx);
1996 }
1997 }
1998
1999 // rewrite class generic signature index:
2000 u2 generic_signature_index = scratch_class->generic_signature_index();
2001 if (generic_signature_index != 0) {
2002 u2 new_generic_signature_index = find_new_index(generic_signature_index);
2003 if (new_generic_signature_index != 0) {
2004 scratch_class->set_generic_signature_index(new_generic_signature_index);
2005 }
2006 }
2007
2008 return true;
2009 } // end rewrite_cp_refs()
2010
// Rewrite constant pool references in the NestHost and NestMembers attributes.
bool VM_RedefineClasses::rewrite_cp_refs_in_nest_attributes(
       InstanceKlass* scratch_class) {

  u2 cp_index = scratch_class->nest_host_index();
  if (cp_index != 0) {
    // NOTE(review): map_index() records no entry when an index is
    // unchanged, and find_new_index() is treated elsewhere as returning
    // 0 for unmapped entries (see the non-zero checks guarding the
    // source_file_name / generic_signature updates in rewrite_cp_refs()).
    // The unconditional store here would therefore write 0 when the
    // nest-host entry kept its index — confirm this is intended.
    scratch_class->set_nest_host_index(find_new_index(cp_index));
  }
  Array<u2>* nest_members = scratch_class->nest_members();
  for (int i = 0; i < nest_members->length(); i++) {
    u2 cp_index = nest_members->at(i);
    // Same unconditional-store pattern as the nest host above.
    nest_members->at_put(i, find_new_index(cp_index));
  }
  // Always succeeds; the bool return mirrors the other attribute rewriters.
  return true;
}
2026
2027 // Rewrite constant pool references in the Record attribute.
2028 bool VM_RedefineClasses::rewrite_cp_refs_in_record_attribute(InstanceKlass* scratch_class) {
2029 Array<RecordComponent*>* components = scratch_class->record_components();
2030 if (components != nullptr) {
2031 for (int i = 0; i < components->length(); i++) {
2032 RecordComponent* component = components->at(i);
2033 u2 cp_index = component->name_index();
2034 component->set_name_index(find_new_index(cp_index));
2035 cp_index = component->descriptor_index();
2036 component->set_descriptor_index(find_new_index(cp_index));
2037 cp_index = component->generic_signature_index();
2038 if (cp_index != 0) {
2039 component->set_generic_signature_index(find_new_index(cp_index));
2040 }
2041
2042 AnnotationArray* annotations = component->annotations();
2043 if (annotations != nullptr && annotations->length() != 0) {
2044 int byte_i = 0; // byte index into annotations
2045 if (!rewrite_cp_refs_in_annotations_typeArray(annotations, byte_i)) {
2046 log_debug(redefine, class, annotation)("bad record_component_annotations at %d", i);
2047 // propagate failure back to caller
2048 return false;
2049 }
2050 }
2051
2052 AnnotationArray* type_annotations = component->type_annotations();
2053 if (type_annotations != nullptr && type_annotations->length() != 0) {
2054 int byte_i = 0; // byte index into annotations
2055 if (!rewrite_cp_refs_in_type_annotations_typeArray(type_annotations, byte_i, "record_info")) {
2056 log_debug(redefine, class, annotation)("bad record_component_type_annotations at %d", i);
2057 // propagate failure back to caller
2058 return false;
2059 }
2060 }
2061 }
2062 }
2063 return true;
2064 }
2065
2066 // Rewrite constant pool references in the PermittedSubclasses attribute.
2067 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
2068 InstanceKlass* scratch_class) {
2069
2070 Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
2071 assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
2072 for (int i = 0; i < permitted_subclasses->length(); i++) {
2073 u2 cp_index = permitted_subclasses->at(i);
2074 permitted_subclasses->at_put(i, find_new_index(cp_index));
2075 }
2076 return true;
2077 }
2078
2079 // Rewrite constant pool references in the LoadableDescriptors attribute.
2080 bool VM_RedefineClasses::rewrite_cp_refs_in_loadable_descriptors_attribute(
2081 InstanceKlass* scratch_class) {
2082
2083 Array<u2>* loadable_descriptors = scratch_class->loadable_descriptors();
2084 assert(loadable_descriptors != nullptr, "unexpected null loadable_descriptors");
2085 for (int i = 0; i < loadable_descriptors->length(); i++) {
2086 u2 cp_index = loadable_descriptors->at(i);
2087 loadable_descriptors->at_put(i, find_new_index(cp_index));
2088 }
2089 return true;
2090 }
2091
// Rewrite constant pool references in the methods.
// Returns false (after clearing the pending exception) if rewriting any
// method fails; the caller converts that into JVMTI_ERROR_INTERNAL.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == nullptr || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  JavaThread* THREAD = JavaThread::current(); // For exception macros.
  ExceptionMark em(THREAD);

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
      // even in the case of an exception. original method is on the
      // deallocation list. (This is why the install happens before the
      // pending-exception check below.)
      methods->at_put(i, new_method());
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string());
      // Need to clear pending exception here as the super caller sets
      // the JVMTI_ERROR_INTERNAL if the returned value is false.
      CLEAR_PENDING_EXCEPTION;
      return false;
    }
  }

  return true;
}
2128
2129
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// On return, *new_method_p is non-null only if the method had to be
// replaced (an ldc grown to ldc_w forced a relocation); the caller must
// then install the new Method* in the class.
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a NoSafepointVerifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  NoSafepointVerifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc carries a one-byte constant pool index
        u1 cp_index = *(bcp + 1);
        u2 new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            log_trace(redefine, class, constantpool)
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            // We checked that new_index fits in a u1 so this cast is safe
            *(bcp + 1) = (u1)new_index;
          } else {
            log_trace(redefine, class, constantpool)
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, nullptr /* no RelocatorListener needed */);
            methodHandle m;
            {
              // insert_space_at() can reach a safepoint (note the CHECK),
              // so the no-safepoint check must be paused around it.
              PauseNoSafepointVerifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray : // fall through
      case Bytecodes::_checkcast : // fall through
      case Bytecodes::_getfield : // fall through
      case Bytecodes::_getstatic : // fall through
      case Bytecodes::_instanceof : // fall through
      case Bytecodes::_invokedynamic : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial : // fall through
      case Bytecodes::_invokestatic : // fall through
      case Bytecodes::_invokevirtual : // fall through
      case Bytecodes::_ldc_w : // fall through
      case Bytecodes::_ldc2_w : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new : // fall through
      case Bytecodes::_putfield : // fall through
      case Bytecodes::_putstatic :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        u2 new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          log_trace(redefine, class, constantpool)
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),p2i(bcp), cp_index, new_index);
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
      default:
        break;
    }
  } // end for each bytecode
} // end rewrite_cp_refs_in_method()
2259
2260
2261 // Rewrite constant pool references in the class_annotations field.
2262 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(InstanceKlass* scratch_class) {
2263
2264 AnnotationArray* class_annotations = scratch_class->class_annotations();
2265 if (class_annotations == nullptr || class_annotations->length() == 0) {
2266 // no class_annotations so nothing to do
2267 return true;
2268 }
2269
2270 log_debug(redefine, class, annotation)("class_annotations length=%d", class_annotations->length());
2271
2272 int byte_i = 0; // byte index into class_annotations
2273 return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i);
2274 }
2275
2276
2277 // Rewrite constant pool references in an annotations typeArray. This
2278 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
2279 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
2280 //
2281 // annotations_typeArray {
2282 // u2 num_annotations;
2283 // annotation annotations[num_annotations];
2284 // }
2285 //
2286 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
2287 AnnotationArray* annotations_typeArray, int &byte_i_ref) {
2288
2289 if ((byte_i_ref + 2) > annotations_typeArray->length()) {
2290 // not enough room for num_annotations field
2291 log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2292 return false;
2293 }
2294
2295 u2 num_annotations = Bytes::get_Java_u2((address)
2296 annotations_typeArray->adr_at(byte_i_ref));
2297 byte_i_ref += 2;
2298
2299 log_debug(redefine, class, annotation)("num_annotations=%d", num_annotations);
2300
2301 int calc_num_annotations = 0;
2302 for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2303 if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray, byte_i_ref)) {
2304 log_debug(redefine, class, annotation)("bad annotation_struct at %d", calc_num_annotations);
2305 // propagate failure back to caller
2306 return false;
2307 }
2308 }
2309 assert(num_annotations == calc_num_annotations, "sanity check");
2310
2311 return true;
2312 } // end rewrite_cp_refs_in_annotations_typeArray()
2313
2314
2315 // Rewrite constant pool references in the annotation struct portion of
2316 // an annotations_typeArray. This "structure" is from section 4.8.15 of
2317 // the 2nd-edition of the VM spec:
2318 //
2319 // struct annotation {
2320 // u2 type_index;
2321 // u2 num_element_value_pairs;
2322 // {
2323 // u2 element_name_index;
2324 // element_value value;
2325 // } element_value_pairs[num_element_value_pairs];
2326 // }
2327 //
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref) {
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    // (u2 type_index + u2 num_element_value_pairs)
    log_debug(redefine, class, annotation)("length() is too small for annotation_struct");
    return false;
  }

  // type_index is a constant pool reference; the helper rewrites it in
  // place if the index was remapped and advances byte_i_ref past the u2.
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "type_index");

  // num_element_value_pairs is a plain count, not a cp reference
  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  log_debug(redefine, class, annotation)
    ("type_index=%d num_element_value_pairs=%d", type_index, num_element_value_pairs);

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      log_debug(redefine, class, annotation)("length() is too small for element_name_index");
      return false;
    }

    // element_name_index is also a constant pool reference
    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "element_name_index");

    log_debug(redefine, class, annotation)("element_name_index=%d", element_name_index);

    // recursively process the element_value; this may descend into
    // nested annotations and arrays of element_value
    if (!rewrite_cp_refs_in_element_value(annotations_typeArray, byte_i_ref)) {
      log_debug(redefine, class, annotation)("bad element_value at %d", calc_num_element_value_pairs);
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
2373
2374
2375 // Rewrite a constant pool reference at the current position in
2376 // annotations_typeArray if needed. Returns the original constant
2377 // pool reference if a rewrite was not needed or the new constant
2378 // pool reference if a rewrite was needed.
2379 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
2380 AnnotationArray* annotations_typeArray, int &byte_i_ref,
2381 const char * trace_mesg) {
2382
2383 address cp_index_addr = (address)
2384 annotations_typeArray->adr_at(byte_i_ref);
2385 u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
2386 u2 new_cp_index = find_new_index(old_cp_index);
2387 if (new_cp_index != 0) {
2388 log_debug(redefine, class, annotation)("mapped old %s=%d", trace_mesg, old_cp_index);
2389 Bytes::put_Java_u2(cp_index_addr, new_cp_index);
2390 old_cp_index = new_cp_index;
2391 }
2392 byte_i_ref += 2;
2393 return old_cp_index;
2394 }
2395
2396
2397 // Rewrite constant pool references in the element_value portion of an
2398 // annotations_typeArray. This "structure" is from section 4.8.15.1 of
2399 // the 2nd-edition of the VM spec:
2400 //
2401 // struct element_value {
2402 // u1 tag;
2403 // union {
2404 // u2 const_value_index;
2405 // {
2406 // u2 type_name_index;
2407 // u2 const_name_index;
2408 // } enum_const_value;
2409 // u2 class_info_index;
2410 // annotation annotation_value;
2411 // struct {
2412 // u2 num_values;
2413 // element_value values[num_values];
2414 // } array_value;
2415 // } value;
2416 // }
2417 //
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    log_debug(redefine, class, annotation)("length() is too small for a tag");
    return false;
  }

  // the tag selects which union member follows
  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  log_debug(redefine, class, annotation)("tag='%c'", tag);

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case JVM_SIGNATURE_BYTE:
    case JVM_SIGNATURE_CHAR:
    case JVM_SIGNATURE_DOUBLE:
    case JVM_SIGNATURE_FLOAT:
    case JVM_SIGNATURE_INT:
    case JVM_SIGNATURE_LONG:
    case JVM_SIGNATURE_SHORT:
    case JVM_SIGNATURE_BOOLEAN:

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        log_debug(redefine, class, annotation)("length() is too small for a const_value_index");
        return false;
      }

      // const_value_index is a cp reference; rewrite in place and advance
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "const_value_index");

      log_debug(redefine, class, annotation)("const_value_index=%d", const_value_index);
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        log_debug(redefine, class, annotation)("length() is too small for a enum_const_value");
        return false;
      }

      // both indices of enum_const_value are cp references
      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "type_name_index");

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "const_name_index");

      log_debug(redefine, class, annotation)
        ("type_name_index=%d const_name_index=%d", type_name_index, const_name_index);
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        log_debug(redefine, class, annotation)("length() is too small for a class_info_index");
        return false;
      }

      // class_info_index is a cp reference; rewrite in place and advance
      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "class_info_index");

      log_debug(redefine, class, annotation)("class_info_index=%d", class_info_index);
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray, byte_i_ref)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case JVM_SIGNATURE_ARRAY:
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        log_debug(redefine, class, annotation)("length() is too small for a num_values field");
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      log_debug(redefine, class, annotation)("num_values=%d", num_values);

      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        // recurse for each array element
        if (!rewrite_cp_refs_in_element_value(annotations_typeArray, byte_i_ref)) {
          log_debug(redefine, class, annotation)("bad nested element_value at %d", calc_num_values);
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      // unknown tag: the element_value cannot be parsed further
      log_debug(redefine, class, annotation)("bad tag=0x%x", tag);
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
2543
2544
2545 // Rewrite constant pool references in a fields_annotations field.
2546 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
2547 InstanceKlass* scratch_class) {
2548
2549 Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
2550
2551 if (fields_annotations == nullptr || fields_annotations->length() == 0) {
2552 // no fields_annotations so nothing to do
2553 return true;
2554 }
2555
2556 log_debug(redefine, class, annotation)("fields_annotations length=%d", fields_annotations->length());
2557
2558 for (int i = 0; i < fields_annotations->length(); i++) {
2559 AnnotationArray* field_annotations = fields_annotations->at(i);
2560 if (field_annotations == nullptr || field_annotations->length() == 0) {
2561 // this field does not have any annotations so skip it
2562 continue;
2563 }
2564
2565 int byte_i = 0; // byte index into field_annotations
2566 if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i)) {
2567 log_debug(redefine, class, annotation)("bad field_annotations at %d", i);
2568 // propagate failure back to caller
2569 return false;
2570 }
2571 }
2572
2573 return true;
2574 } // end rewrite_cp_refs_in_fields_annotations()
2575
2576
2577 // Rewrite constant pool references in a methods_annotations field.
2578 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
2579 InstanceKlass* scratch_class) {
2580
2581 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2582 Method* m = scratch_class->methods()->at(i);
2583 AnnotationArray* method_annotations = m->constMethod()->method_annotations();
2584
2585 if (method_annotations == nullptr || method_annotations->length() == 0) {
2586 // this method does not have any annotations so skip it
2587 continue;
2588 }
2589
2590 int byte_i = 0; // byte index into method_annotations
2591 if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i)) {
2592 log_debug(redefine, class, annotation)("bad method_annotations at %d", i);
2593 // propagate failure back to caller
2594 return false;
2595 }
2596 }
2597
2598 return true;
2599 } // end rewrite_cp_refs_in_methods_annotations()
2600
2601
2602 // Rewrite constant pool references in a methods_parameter_annotations
2603 // field. This "structure" is adapted from the
2604 // RuntimeVisibleParameterAnnotations_attribute described in section
2605 // 4.8.17 of the 2nd-edition of the VM spec:
2606 //
2607 // methods_parameter_annotations_typeArray {
2608 // u1 num_parameters;
2609 // {
2610 // u2 num_annotations;
2611 // annotation annotations[num_annotations];
2612 // } parameter_annotations[num_parameters];
2613 // }
2614 //
2615 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
2616 InstanceKlass* scratch_class) {
2617
2618 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2619 Method* m = scratch_class->methods()->at(i);
2620 AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
2621 if (method_parameter_annotations == nullptr
2622 || method_parameter_annotations->length() == 0) {
2623 // this method does not have any parameter annotations so skip it
2624 continue;
2625 }
2626
2627 if (method_parameter_annotations->length() < 1) {
2628 // not enough room for a num_parameters field
2629 log_debug(redefine, class, annotation)("length() is too small for a num_parameters field at %d", i);
2630 return false;
2631 }
2632
2633 int byte_i = 0; // byte index into method_parameter_annotations
2634
2635 u1 num_parameters = method_parameter_annotations->at(byte_i);
2636 byte_i++;
2637
2638 log_debug(redefine, class, annotation)("num_parameters=%d", num_parameters);
2639
2640 int calc_num_parameters = 0;
2641 for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
2642 if (!rewrite_cp_refs_in_annotations_typeArray(method_parameter_annotations, byte_i)) {
2643 log_debug(redefine, class, annotation)("bad method_parameter_annotations at %d", calc_num_parameters);
2644 // propagate failure back to caller
2645 return false;
2646 }
2647 }
2648 assert(num_parameters == calc_num_parameters, "sanity check");
2649 }
2650
2651 return true;
2652 } // end rewrite_cp_refs_in_methods_parameter_annotations()
2653
2654
2655 // Rewrite constant pool references in a methods_default_annotations
2656 // field. This "structure" is adapted from the AnnotationDefault_attribute
2657 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2658 //
2659 // methods_default_annotations_typeArray {
2660 // element_value default_value;
2661 // }
2662 //
2663 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2664 InstanceKlass* scratch_class) {
2665
2666 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2667 Method* m = scratch_class->methods()->at(i);
2668 AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
2669 if (method_default_annotations == nullptr
2670 || method_default_annotations->length() == 0) {
2671 // this method does not have any default annotations so skip it
2672 continue;
2673 }
2674
2675 int byte_i = 0; // byte index into method_default_annotations
2676
2677 if (!rewrite_cp_refs_in_element_value(
2678 method_default_annotations, byte_i)) {
2679 log_debug(redefine, class, annotation)("bad default element_value at %d", i);
2680 // propagate failure back to caller
2681 return false;
2682 }
2683 }
2684
2685 return true;
2686 } // end rewrite_cp_refs_in_methods_default_annotations()
2687
2688
2689 // Rewrite constant pool references in a class_type_annotations field.
2690 bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
2691 InstanceKlass* scratch_class) {
2692
2693 AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
2694 if (class_type_annotations == nullptr || class_type_annotations->length() == 0) {
2695 // no class_type_annotations so nothing to do
2696 return true;
2697 }
2698
2699 log_debug(redefine, class, annotation)("class_type_annotations length=%d", class_type_annotations->length());
2700
2701 int byte_i = 0; // byte index into class_type_annotations
2702 return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
2703 byte_i, "ClassFile");
2704 } // end rewrite_cp_refs_in_class_type_annotations()
2705
2706
2707 // Rewrite constant pool references in a fields_type_annotations field.
2708 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(InstanceKlass* scratch_class) {
2709
2710 Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
2711 if (fields_type_annotations == nullptr || fields_type_annotations->length() == 0) {
2712 // no fields_type_annotations so nothing to do
2713 return true;
2714 }
2715
2716 log_debug(redefine, class, annotation)("fields_type_annotations length=%d", fields_type_annotations->length());
2717
2718 for (int i = 0; i < fields_type_annotations->length(); i++) {
2719 AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
2720 if (field_type_annotations == nullptr || field_type_annotations->length() == 0) {
2721 // this field does not have any annotations so skip it
2722 continue;
2723 }
2724
2725 int byte_i = 0; // byte index into field_type_annotations
2726 if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
2727 byte_i, "field_info")) {
2728 log_debug(redefine, class, annotation)("bad field_type_annotations at %d", i);
2729 // propagate failure back to caller
2730 return false;
2731 }
2732 }
2733
2734 return true;
2735 } // end rewrite_cp_refs_in_fields_type_annotations()
2736
2737
2738 // Rewrite constant pool references in a methods_type_annotations field.
2739 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
2740 InstanceKlass* scratch_class) {
2741
2742 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2743 Method* m = scratch_class->methods()->at(i);
2744 AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
2745
2746 if (method_type_annotations == nullptr || method_type_annotations->length() == 0) {
2747 // this method does not have any annotations so skip it
2748 continue;
2749 }
2750
2751 log_debug(redefine, class, annotation)("methods type_annotations length=%d", method_type_annotations->length());
2752
2753 int byte_i = 0; // byte index into method_type_annotations
2754 if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
2755 byte_i, "method_info")) {
2756 log_debug(redefine, class, annotation)("bad method_type_annotations at %d", i);
2757 // propagate failure back to caller
2758 return false;
2759 }
2760 }
2761
2762 return true;
2763 } // end rewrite_cp_refs_in_methods_type_annotations()
2764
2765
2766 // Rewrite constant pool references in a type_annotations
2767 // field. This "structure" is adapted from the
2768 // RuntimeVisibleTypeAnnotations_attribute described in
2769 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2770 //
2771 // type_annotations_typeArray {
2772 // u2 num_annotations;
2773 // type_annotation annotations[num_annotations];
2774 // }
2775 //
2776 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
2777 AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2778 const char * location_mesg) {
2779
2780 if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2781 // not enough room for num_annotations field
2782 log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2783 return false;
2784 }
2785
2786 u2 num_annotations = Bytes::get_Java_u2((address)
2787 type_annotations_typeArray->adr_at(byte_i_ref));
2788 byte_i_ref += 2;
2789
2790 log_debug(redefine, class, annotation)("num_type_annotations=%d", num_annotations);
2791
2792 int calc_num_annotations = 0;
2793 for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2794 if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
2795 byte_i_ref, location_mesg)) {
2796 log_debug(redefine, class, annotation)("bad type_annotation_struct at %d", calc_num_annotations);
2797 // propagate failure back to caller
2798 return false;
2799 }
2800 }
2801 assert(num_annotations == calc_num_annotations, "sanity check");
2802
2803 if (byte_i_ref != type_annotations_typeArray->length()) {
2804 log_debug(redefine, class, annotation)
2805 ("read wrong amount of bytes at end of processing type_annotations_typeArray (%d of %d bytes were read)",
2806 byte_i_ref, type_annotations_typeArray->length());
2807 return false;
2808 }
2809
2810 return true;
2811 } // end rewrite_cp_refs_in_type_annotations_typeArray()
2812
2813
2814 // Rewrite constant pool references in a type_annotation
2815 // field. This "structure" is adapted from the
2816 // RuntimeVisibleTypeAnnotations_attribute described in
2817 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2818 //
2819 // type_annotation {
2820 // u1 target_type;
2821 // union {
2822 // type_parameter_target;
2823 // supertype_target;
2824 // type_parameter_bound_target;
2825 // empty_target;
2826 // method_formal_parameter_target;
2827 // throws_target;
2828 // localvar_target;
2829 // catch_target;
2830 // offset_target;
2831 // type_argument_target;
2832 // } target_info;
2833 // type_path target_path;
2834 // annotation anno;
2835 // }
2836 //
2837 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
2838 AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2839 const char * location_mesg) {
2840
2841 if (!skip_type_annotation_target(type_annotations_typeArray,
2842 byte_i_ref, location_mesg)) {
2843 return false;
2844 }
2845
2846 if (!skip_type_annotation_type_path(type_annotations_typeArray, byte_i_ref)) {
2847 return false;
2848 }
2849
2850 if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray, byte_i_ref)) {
2851 return false;
2852 }
2853
2854 return true;
2855 } // end rewrite_cp_refs_in_type_annotation_struct()
2856
2857
2858 // Read, verify and skip over the target_type and target_info part
2859 // so that rewriting can continue in the later parts of the struct.
2860 //
2861 // u1 target_type;
2862 // union {
2863 // type_parameter_target;
2864 // supertype_target;
2865 // type_parameter_bound_target;
2866 // empty_target;
2867 // method_formal_parameter_target;
2868 // throws_target;
2869 // localvar_target;
2870 // catch_target;
2871 // offset_target;
2872 // type_argument_target;
2873 // } target_info;
2874 //
bool VM_RedefineClasses::skip_type_annotation_target(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
       const char * location_mesg) {

  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a target_type let alone the rest of a type_annotation
    log_debug(redefine, class, annotation)("length() is too small for a target_type");
    return false;
  }

  // target_type selects which target_info union member follows
  u1 target_type = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;
  log_debug(redefine, class, annotation)("target_type=0x%.2x", target_type);
  log_debug(redefine, class, annotation)("location=%s", location_mesg);

  // Skip over target_info
  switch (target_type) {
    case 0x00:
    // kind: type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x01:
    // kind: type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_target {
      //   u1 type_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)("type_parameter_target: type_parameter_index=%d", type_parameter_index);
    } break;

    case 0x10:
    // kind: type in extends clause of class or interface declaration
    //       or in implements clause of interface declaration
    // location: ClassFile

    {
      // struct:
      // supertype_target {
      //   u2 supertype_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a supertype_target");
        return false;
      }

      u2 supertype_index = Bytes::get_Java_u2((address)
                             type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("supertype_target: supertype_index=%d", supertype_index);
    } break;

    case 0x11:
    // kind: type in bound of type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x12:
    // kind: type in bound of type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_bound_target {
      //   u1 type_parameter_index;
      //   u1 bound_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_bound_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;
      u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", type_parameter_index, bound_index);
    } break;

    case 0x13:
    // kind: type in field declaration
    // location: field_info
    case 0x14:
    // kind: return type of method, or type of newly constructed object
    // location: method_info
    case 0x15:
    // kind: receiver type of method or constructor
    // location: method_info

    {
      // struct:
      // empty_target {
      // }
      //
      // nothing to skip: empty_target has no fields
      log_debug(redefine, class, annotation)("empty_target");
    } break;

    case 0x16:
    // kind: type in formal parameter declaration of method, constructor, or lambda expression
    // location: method_info

    {
      // struct:
      // formal_parameter_target {
      //   u1 formal_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a formal_parameter_target");
        return false;
      }

      u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("formal_parameter_target: formal_parameter_index=%d", formal_parameter_index);
    } break;

    case 0x17:
    // kind: type in throws clause of method or constructor
    // location: method_info

    {
      // struct:
      // throws_target {
      //   u2 throws_type_index
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a throws_target");
        return false;
      }

      u2 throws_type_index = Bytes::get_Java_u2((address)
                               type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("throws_target: throws_type_index=%d", throws_type_index);
    } break;

    case 0x40:
    // kind: type in local variable declaration
    // location: Code
    case 0x41:
    // kind: type in resource variable declaration
    // location: Code

    {
      // struct:
      // localvar_target {
      //   u2 table_length;
      //   struct {
      //     u2 start_pc;
      //     u2 length;
      //     u2 index;
      //   } table[table_length];
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        // not enough room for a table_length let alone the rest of a localvar_target
        log_debug(redefine, class, annotation)("length() is too small for a localvar_target table_length");
        return false;
      }

      u2 table_length = Bytes::get_Java_u2((address)
                          type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("localvar_target: table_length=%d", table_length);

      int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
      // table_length is a u2 (max 65535), so table_size (max 393210)
      // cannot overflow an int
      int table_size = table_length * table_struct_size;

      if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
        // not enough room for a table
        log_debug(redefine, class, annotation)("length() is too small for a table array of length %d", table_length);
        return false;
      }

      // Skip over table
      byte_i_ref += table_size;
    } break;

    case 0x42:
    // kind: type in exception parameter declaration
    // location: Code

    {
      // struct:
      // catch_target {
      //   u2 exception_table_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a catch_target");
        return false;
      }

      u2 exception_table_index = Bytes::get_Java_u2((address)
                                   type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("catch_target: exception_table_index=%d", exception_table_index);
    } break;

    case 0x43:
    // kind: type in instanceof expression
    // location: Code
    case 0x44:
    // kind: type in new expression
    // location: Code
    case 0x45:
    // kind: type in method reference expression using ::new
    // location: Code
    case 0x46:
    // kind: type in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // offset_target {
      //   u2 offset;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a offset_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("offset_target: offset=%d", offset);
    } break;

    case 0x47:
    // kind: type in cast expression
    // location: Code
    case 0x48:
    // kind: type argument for generic constructor in new expression or
    //       explicit constructor invocation statement
    // location: Code
    case 0x49:
    // kind: type argument for generic method in method invocation expression
    // location: Code
    case 0x4A:
    // kind: type argument for generic constructor in method reference expression using ::new
    // location: Code
    case 0x4B:
    // kind: type argument for generic method in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // type_argument_target {
      //   u2 offset;
      //   u1 type_argument_index;
      // }
      //
      if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_argument_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_argument_target: offset=%d, type_argument_index=%d", offset, type_argument_index);
    } break;

    default:
      // target_type values outside the set defined by JVMS 4.7.20-A
      // cannot be skipped safely, so parsing stops here
      log_debug(redefine, class, annotation)("unknown target_type");
#ifdef ASSERT
      ShouldNotReachHere();
#endif
      return false;
  }

  return true;
} // end skip_type_annotation_target()
3174
3175
3176 // Read, verify and skip over the type_path part so that rewriting
3177 // can continue in the later parts of the struct.
3178 //
3179 // type_path {
3180 // u1 path_length;
3181 // {
3182 // u1 type_path_kind;
3183 // u1 type_argument_index;
3184 // } path[path_length];
3185 // }
3186 //
3187 bool VM_RedefineClasses::skip_type_annotation_type_path(
3188 AnnotationArray* type_annotations_typeArray, int &byte_i_ref) {
3189
3190 if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
3191 // not enough room for a path_length let alone the rest of the type_path
3192 log_debug(redefine, class, annotation)("length() is too small for a type_path");
3193 return false;
3194 }
3195
3196 u1 path_length = type_annotations_typeArray->at(byte_i_ref);
3197 byte_i_ref += 1;
3198
3199 log_debug(redefine, class, annotation)("type_path: path_length=%d", path_length);
3200
3201 int calc_path_length = 0;
3202 for (; calc_path_length < path_length; calc_path_length++) {
3203 if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
3204 // not enough room for a path
3205 log_debug(redefine, class, annotation)
3206 ("length() is too small for path entry %d of %d", calc_path_length, path_length);
3207 return false;
3208 }
3209
3210 u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
3211 byte_i_ref += 1;
3212 u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
3213 byte_i_ref += 1;
3214
3215 log_debug(redefine, class, annotation)
3216 ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
3217 calc_path_length, type_path_kind, type_argument_index);
3218
3219 if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
3220 // not enough room for a path
3221 log_debug(redefine, class, annotation)("inconsistent type_path values");
3222 return false;
3223 }
3224 }
3225 assert(path_length == calc_path_length, "sanity check");
3226
3227 return true;
3228 } // end skip_type_annotation_type_path()
3229
3230
3231 // Rewrite constant pool references in the method's stackmap table.
3232 // These "structures" are adapted from the StackMapTable_attribute that
3233 // is described in section 4.8.4 of the 6.0 version of the VM spec
3234 // (dated 2005.10.26):
3235 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
3236 //
3237 // stack_map {
3238 // u2 number_of_entries;
3239 // stack_map_frame entries[number_of_entries];
3240 // }
3241 //
3242 void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
3243 const methodHandle& method) {
3244
3245 if (!method->has_stackmap_table()) {
3246 return;
3247 }
3248
3249 AnnotationArray* stackmap_data = method->stackmap_data();
3250 address stackmap_p = (address)stackmap_data->adr_at(0);
3251 address stackmap_end = stackmap_p + stackmap_data->length();
3252
3253 assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
3254 u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
3255 stackmap_p += 2;
3256
3257 log_debug(redefine, class, stackmap)("number_of_entries=%u", number_of_entries);
3258
3259 // walk through each stack_map_frame
3260 u2 calc_number_of_entries = 0;
3261 for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
3262 // The stack_map_frame structure is a u1 frame_type followed by
3263 // 0 or more bytes of data:
3264 //
3265 // union stack_map_frame {
3266 // same_frame;
3267 // same_locals_1_stack_item_frame;
3268 // same_locals_1_stack_item_frame_extended;
3269 // chop_frame;
3270 // same_frame_extended;
3271 // append_frame;
3272 // full_frame;
3273 // }
3274
3275 assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
3276 u1 frame_type = *stackmap_p;
3277 stackmap_p++;
3278
3279 if (frame_type == 246) { // EARLY_LARVAL
3280 // rewrite_cp_refs in unset fields and fall through.
3281 rewrite_cp_refs_in_early_larval_stackmaps(stackmap_p, stackmap_end, calc_number_of_entries, frame_type);
3282 // The larval frames point to the next frame, so advance to the next frame and fall through.
3283 frame_type = *stackmap_p;
3284 stackmap_p++;
3285 }
3286
3287 // same_frame {
3288 // u1 frame_type = SAME; /* 0-63 */
3289 // }
3290 if (frame_type <= StackMapReader::SAME_FRAME_END) {
3291 // nothing more to do for same_frame
3292 }
3293
3294 // same_locals_1_stack_item_frame {
3295 // u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
3296 // verification_type_info stack[1];
3297 // }
3298 else if (frame_type >= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_START &&
3299 frame_type <= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_END) {
3300 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3301 calc_number_of_entries, frame_type);
3302 }
3303
3304 // reserved for future use
3305 else if (frame_type >= StackMapReader::RESERVED_START &&
3306 frame_type <= StackMapReader::RESERVED_END) {
3307 // nothing more to do for reserved frame_types
3308 }
3309
3310 // same_locals_1_stack_item_frame_extended {
3311 // u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
3312 // u2 offset_delta;
3313 // verification_type_info stack[1];
3314 // }
3315 else if (frame_type == StackMapReader::SAME_LOCALS_1_STACK_ITEM_EXTENDED) {
3316 stackmap_p += 2;
3317 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3318 calc_number_of_entries, frame_type);
3319 }
3320
3321 // chop_frame {
3322 // u1 frame_type = CHOP; /* 248-250 */
3323 // u2 offset_delta;
3324 // }
3325 else if (frame_type >= StackMapReader::CHOP_FRAME_START &&
3326 frame_type <= StackMapReader::CHOP_FRAME_END) {
3327 stackmap_p += 2;
3328 }
3329
3330 // same_frame_extended {
3331 // u1 frame_type = SAME_EXTENDED; /* 251 */
3332 // u2 offset_delta;
3333 // }
3334 else if (frame_type == StackMapReader::SAME_FRAME_EXTENDED) {
3335 stackmap_p += 2;
3336 }
3337
3338 // append_frame {
3339 // u1 frame_type = APPEND; /* 252-254 */
3340 // u2 offset_delta;
3341 // verification_type_info locals[frame_type - SAME_EXTENDED];
3342 // }
3343 else if (frame_type >= StackMapReader::APPEND_FRAME_START &&
3344 frame_type <= StackMapReader::APPEND_FRAME_END) {
3345 assert(stackmap_p + 2 <= stackmap_end,
3346 "no room for offset_delta");
3347 stackmap_p += 2;
3348 u1 len = frame_type - StackMapReader::APPEND_FRAME_START + 1;
3349 for (u1 i = 0; i < len; i++) {
3350 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3351 calc_number_of_entries, frame_type);
3352 }
3353 }
3354
3355 // full_frame {
3356 // u1 frame_type = FULL_FRAME; /* 255 */
3357 // u2 offset_delta;
3358 // u2 number_of_locals;
3359 // verification_type_info locals[number_of_locals];
3360 // u2 number_of_stack_items;
3361 // verification_type_info stack[number_of_stack_items];
3362 // }
3363 else if (frame_type == StackMapReader::FULL_FRAME) {
3364 assert(stackmap_p + 2 + 2 <= stackmap_end,
3365 "no room for smallest full_frame");
3366 stackmap_p += 2;
3367
3368 u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
3369 stackmap_p += 2;
3370
3371 for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
3372 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3373 calc_number_of_entries, frame_type);
3374 }
3375
3376 // Use the largest size for the number_of_stack_items, but only get
3377 // the right number of bytes.
3378 u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
3379 stackmap_p += 2;
3380
3381 for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
3382 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3383 calc_number_of_entries, frame_type);
3384 }
3385 }
3386 } // end while there is a stack_map_frame
3387 assert(number_of_entries == calc_number_of_entries, "sanity check");
3388 } // end rewrite_cp_refs_in_stack_map_table()
3389
3390
3391 // Rewrite constant pool references in the verification type info
3392 // portion of the method's stackmap table. These "structures" are
3393 // adapted from the StackMapTable_attribute that is described in
3394 // section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
3395 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
3396 //
3397 // The verification_type_info structure is a u1 tag followed by 0 or
3398 // more bytes of data:
3399 //
3400 // union verification_type_info {
3401 // Top_variable_info;
3402 // Integer_variable_info;
3403 // Float_variable_info;
3404 // Long_variable_info;
3405 // Double_variable_info;
3406 // Null_variable_info;
3407 // UninitializedThis_variable_info;
3408 // Object_variable_info;
3409 // Uninitialized_variable_info;
3410 // }
3411 //
3412 void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
3413 address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3414 u1 frame_type) {
3415
3416 assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
3417 u1 tag = *stackmap_p_ref;
3418 stackmap_p_ref++;
3419
3420 switch (tag) {
3421 // Top_variable_info {
3422 // u1 tag = ITEM_Top; /* 0 */
3423 // }
3424 // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
3425 case 0: // fall through
3426
3427 // Integer_variable_info {
3428 // u1 tag = ITEM_Integer; /* 1 */
3429 // }
3430 case ITEM_Integer: // fall through
3431
3432 // Float_variable_info {
3433 // u1 tag = ITEM_Float; /* 2 */
3434 // }
3435 case ITEM_Float: // fall through
3436
3437 // Double_variable_info {
3438 // u1 tag = ITEM_Double; /* 3 */
3439 // }
3440 case ITEM_Double: // fall through
3441
3442 // Long_variable_info {
3443 // u1 tag = ITEM_Long; /* 4 */
3444 // }
3445 case ITEM_Long: // fall through
3446
3447 // Null_variable_info {
3448 // u1 tag = ITEM_Null; /* 5 */
3449 // }
3450 case ITEM_Null: // fall through
3451
3452 // UninitializedThis_variable_info {
3453 // u1 tag = ITEM_UninitializedThis; /* 6 */
3454 // }
3455 case ITEM_UninitializedThis:
3456 // nothing more to do for the above tag types
3457 break;
3458
3459 // Object_variable_info {
3460 // u1 tag = ITEM_Object; /* 7 */
3461 // u2 cpool_index;
3462 // }
3463 case ITEM_Object:
3464 {
3465 assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
3466 u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
3467 u2 new_cp_index = find_new_index(cpool_index);
3468 if (new_cp_index != 0) {
3469 log_debug(redefine, class, stackmap)("mapped old cpool_index=%d", cpool_index);
3470 Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3471 cpool_index = new_cp_index;
3472 }
3473 stackmap_p_ref += 2;
3474
3475 log_debug(redefine, class, stackmap)
3476 ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3477 } break;
3478
3479 // Uninitialized_variable_info {
3480 // u1 tag = ITEM_Uninitialized; /* 8 */
3481 // u2 offset;
3482 // }
3483 case ITEM_Uninitialized:
3484 assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3485 stackmap_p_ref += 2;
3486 break;
3487
3488 default:
3489 log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3490 ShouldNotReachHere();
3491 break;
3492 } // end switch (tag)
3493 } // end rewrite_cp_refs_in_verification_type_info()
3494
3495
3496 void VM_RedefineClasses::rewrite_cp_refs_in_early_larval_stackmaps(
3497 address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3498 u1 frame_type) {
3499
3500 u2 num_early_larval_stackmaps = Bytes::get_Java_u2(stackmap_p_ref);
3501 stackmap_p_ref += 2;
3502
3503 for (u2 i = 0; i < num_early_larval_stackmaps; i++) {
3504
3505 u2 name_and_ref_index = Bytes::get_Java_u2(stackmap_p_ref);
3506 u2 new_cp_index = find_new_index(name_and_ref_index);
3507 if (new_cp_index != 0) {
3508 log_debug(redefine, class, stackmap)("mapped old name_and_ref_index=%d", name_and_ref_index);
3509 Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3510 name_and_ref_index = new_cp_index;
3511 }
3512 log_debug(redefine, class, stackmap)
3513 ("frame_i=%u, frame_type=%u, name_and_ref_index=%d", frame_i, frame_type, name_and_ref_index);
3514
3515 stackmap_p_ref += 2;
3516 }
3517 } // rewrite_cp_refs_in_early_larval_stackmaps
3518
3519 // Change the constant pool associated with klass scratch_class to scratch_cp.
3520 // scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
3521 // and the smaller constant pool is associated with scratch_class.
3522 void VM_RedefineClasses::set_new_constant_pool(
3523 ClassLoaderData* loader_data,
3524 InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3525 int scratch_cp_length, TRAPS) {
3526 assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3527
3528 // scratch_cp is a merged constant pool and has enough space for a
3529 // worst case merge situation. We want to associate the minimum
3530 // sized constant pool with the klass to save space.
3531 ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3532 constantPoolHandle smaller_cp(THREAD, cp);
3533
3534 // preserve version() value in the smaller copy
3535 int version = scratch_cp->version();
3536 assert(version != 0, "sanity check");
3537 smaller_cp->set_version(version);
3538
3539 // attach klass to new constant pool
3540 // reference to the cp holder is needed for reallocating the BSM attribute
3541 smaller_cp->set_pool_holder(scratch_class);
3542
3543 smaller_cp->copy_fields(scratch_cp());
3544
3545 scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
3546 if (HAS_PENDING_EXCEPTION) {
3547 // Exception is handled in the caller
3548 loader_data->add_to_deallocate_list(smaller_cp());
3549 return;
3550 }
3551 scratch_cp = smaller_cp;
3552
3553 // attach new constant pool to klass
3554 scratch_class->set_constants(scratch_cp());
3555 scratch_cp->initialize_unresolved_klasses(loader_data, CHECK);
3556
3557 int i; // for portability
3558
3559 // update each field in klass to use new constant pool indices as needed
3560 int java_fields;
3561 int injected_fields;
3562 bool update_required = false;
3563 GrowableArray<FieldInfo>* fields = FieldInfoStream::create_FieldInfoArray(scratch_class->fieldinfo_stream(), &java_fields, &injected_fields);
3564 for (int i = 0; i < java_fields; i++) {
3565 FieldInfo* fi = fields->adr_at(i);
3566 jshort cur_index = fi->name_index();
3567 jshort new_index = find_new_index(cur_index);
3568 if (new_index != 0) {
3569 log_trace(redefine, class, constantpool)("field-name_index change: %d to %d", cur_index, new_index);
3570 fi->set_name_index(new_index);
3571 update_required = true;
3572 }
3573 cur_index = fi->signature_index();
3574 new_index = find_new_index(cur_index);
3575 if (new_index != 0) {
3576 log_trace(redefine, class, constantpool)("field-signature_index change: %d to %d", cur_index, new_index);
3577 fi->set_signature_index(new_index);
3578 update_required = true;
3579 }
3580 cur_index = fi->initializer_index();
3581 new_index = find_new_index(cur_index);
3582 if (new_index != 0) {
3583 log_trace(redefine, class, constantpool)("field-initval_index change: %d to %d", cur_index, new_index);
3584 fi->set_initializer_index(new_index);
3585 update_required = true;
3586 }
3587 cur_index = fi->generic_signature_index();
3588 new_index = find_new_index(cur_index);
3589 if (new_index != 0) {
3590 log_trace(redefine, class, constantpool)("field-generic_signature change: %d to %d", cur_index, new_index);
3591 fi->set_generic_signature_index(new_index);
3592 update_required = true;
3593 }
3594 }
3595 if (update_required) {
3596 Array<u1>* old_stream = scratch_class->fieldinfo_stream();
3597 assert(fields->length() == (java_fields + injected_fields), "Must be");
3598 Array<u1>* new_fis = FieldInfoStream::create_FieldInfoStream(fields, java_fields, injected_fields, scratch_class->class_loader_data(), CHECK);
3599 scratch_class->set_fieldinfo_stream(new_fis);
3600 MetadataFactory::free_array<u1>(scratch_class->class_loader_data(), old_stream);
3601
3602 Array<u1>* old_table = scratch_class->fieldinfo_search_table();
3603 Array<u1>* search_table = FieldInfoStream::create_search_table(scratch_class->constants(), new_fis, scratch_class->class_loader_data(), CHECK);
3604 scratch_class->set_fieldinfo_search_table(search_table);
3605 MetadataFactory::free_array<u1>(scratch_class->class_loader_data(), old_table);
3606
3607 DEBUG_ONLY(FieldInfoStream::validate_search_table(scratch_class->constants(), new_fis, search_table));
3608 }
3609
3610 // Update constant pool indices in the inner classes info to use
3611 // new constant indices as needed. The inner classes info is a
3612 // quadruple:
3613 // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
3614 InnerClassesIterator iter(scratch_class);
3615 for (; !iter.done(); iter.next()) {
3616 int cur_index = iter.inner_class_info_index();
3617 if (cur_index == 0) {
3618 continue; // JVM spec. allows null inner class refs so skip it
3619 }
3620 u2 new_index = find_new_index(cur_index);
3621 if (new_index != 0) {
3622 log_trace(redefine, class, constantpool)("inner_class_info change: %d to %d", cur_index, new_index);
3623 iter.set_inner_class_info_index(new_index);
3624 }
3625 cur_index = iter.outer_class_info_index();
3626 new_index = find_new_index(cur_index);
3627 if (new_index != 0) {
3628 log_trace(redefine, class, constantpool)("outer_class_info change: %d to %d", cur_index, new_index);
3629 iter.set_outer_class_info_index(new_index);
3630 }
3631 cur_index = iter.inner_name_index();
3632 new_index = find_new_index(cur_index);
3633 if (new_index != 0) {
3634 log_trace(redefine, class, constantpool)("inner_name change: %d to %d", cur_index, new_index);
3635 iter.set_inner_name_index(new_index);
3636 }
3637 } // end for each inner class
3638
3639 // Attach each method in klass to the new constant pool and update
3640 // to use new constant pool indices as needed:
3641 Array<Method*>* methods = scratch_class->methods();
3642 for (i = methods->length() - 1; i >= 0; i--) {
3643 methodHandle method(THREAD, methods->at(i));
3644 method->set_constants(scratch_cp());
3645
3646 u2 new_index = find_new_index(method->name_index());
3647 if (new_index != 0) {
3648 log_trace(redefine, class, constantpool)
3649 ("method-name_index change: %d to %d", method->name_index(), new_index);
3650 method->set_name_index(new_index);
3651 }
3652 new_index = find_new_index(method->signature_index());
3653 if (new_index != 0) {
3654 log_trace(redefine, class, constantpool)
3655 ("method-signature_index change: %d to %d", method->signature_index(), new_index);
3656 method->set_signature_index(new_index);
3657 }
3658 new_index = find_new_index(method->generic_signature_index());
3659 if (new_index != 0) {
3660 log_trace(redefine, class, constantpool)
3661 ("method-generic_signature_index change: %d to %d", method->generic_signature_index(), new_index);
3662 method->constMethod()->set_generic_signature_index(new_index);
3663 }
3664
3665 // Update constant pool indices in the method's checked exception
3666 // table to use new constant indices as needed.
3667 int cext_length = method->checked_exceptions_length();
3668 if (cext_length > 0) {
3669 CheckedExceptionElement * cext_table =
3670 method->checked_exceptions_start();
3671 for (int j = 0; j < cext_length; j++) {
3672 int cur_index = cext_table[j].class_cp_index;
3673 int new_index = find_new_index(cur_index);
3674 if (new_index != 0) {
3675 log_trace(redefine, class, constantpool)("cext-class_cp_index change: %d to %d", cur_index, new_index);
3676 cext_table[j].class_cp_index = (u2)new_index;
3677 }
3678 } // end for each checked exception table entry
3679 } // end if there are checked exception table entries
3680
3681 // Update each catch type index in the method's exception table
3682 // to use new constant pool indices as needed. The exception table
3683 // holds quadruple entries of the form:
3684 // (beg_bci, end_bci, handler_bci, klass_index)
3685
3686 ExceptionTable ex_table(method());
3687 int ext_length = ex_table.length();
3688
3689 for (int j = 0; j < ext_length; j ++) {
3690 int cur_index = ex_table.catch_type_index(j);
3691 u2 new_index = find_new_index(cur_index);
3692 if (new_index != 0) {
3693 log_trace(redefine, class, constantpool)("ext-klass_index change: %d to %d", cur_index, new_index);
3694 ex_table.set_catch_type_index(j, new_index);
3695 }
3696 } // end for each exception table entry
3697
3698 // Update constant pool indices in the method's local variable
3699 // table to use new constant indices as needed. The local variable
3700 // table hold sextuple entries of the form:
3701 // (start_pc, length, name_index, descriptor_index, signature_index, slot)
3702 int lvt_length = method->localvariable_table_length();
3703 if (lvt_length > 0) {
3704 LocalVariableTableElement * lv_table =
3705 method->localvariable_table_start();
3706 for (int j = 0; j < lvt_length; j++) {
3707 int cur_index = lv_table[j].name_cp_index;
3708 int new_index = find_new_index(cur_index);
3709 if (new_index != 0) {
3710 log_trace(redefine, class, constantpool)("lvt-name_cp_index change: %d to %d", cur_index, new_index);
3711 lv_table[j].name_cp_index = (u2)new_index;
3712 }
3713 cur_index = lv_table[j].descriptor_cp_index;
3714 new_index = find_new_index(cur_index);
3715 if (new_index != 0) {
3716 log_trace(redefine, class, constantpool)("lvt-descriptor_cp_index change: %d to %d", cur_index, new_index);
3717 lv_table[j].descriptor_cp_index = (u2)new_index;
3718 }
3719 cur_index = lv_table[j].signature_cp_index;
3720 new_index = find_new_index(cur_index);
3721 if (new_index != 0) {
3722 log_trace(redefine, class, constantpool)("lvt-signature_cp_index change: %d to %d", cur_index, new_index);
3723 lv_table[j].signature_cp_index = (u2)new_index;
3724 }
3725 } // end for each local variable table entry
3726 } // end if there are local variable table entries
3727
3728 // Update constant pool indices in the method's method_parameters.
3729 int mp_length = method->method_parameters_length();
3730 if (mp_length > 0) {
3731 MethodParametersElement* elem = method->method_parameters_start();
3732 for (int j = 0; j < mp_length; j++) {
3733 const int cp_index = elem[j].name_cp_index;
3734 const int new_cp_index = find_new_index(cp_index);
3735 if (new_cp_index != 0) {
3736 elem[j].name_cp_index = (u2)new_cp_index;
3737 }
3738 }
3739 }
3740
3741 rewrite_cp_refs_in_stack_map_table(method);
3742 } // end for each method
3743 } // end set_new_constant_pool()
3744
3745
3746 // Unevolving classes may point to methods of the_class directly
3747 // from their constant pool caches, itables, and/or vtables. We
3748 // use the ClassLoaderDataGraph::classes_do() facility and this helper
3749 // to fix up these pointers. MethodData also points to old methods and
3750 // must be cleaned.
3751
3752 // Adjust cpools and vtables closure
3753 void VM_RedefineClasses::AdjustAndCleanMetadata::do_klass(Klass* k) {
3754
3755 // This is a very busy routine. We don't want too much tracing
3756 // printed out.
3757 bool trace_name_printed = false;
3758
3759 // If the class being redefined is java.lang.Object, we need to fix all
3760 // array class vtables also. The _has_redefined_Object flag is global.
3761 // Once the java.lang.Object has been redefined (by the current or one
3762 // of the previous VM_RedefineClasses operations) we have to always
3763 // adjust method entries for array classes.
3764 if (k->is_array_klass() && _has_redefined_Object) {
3765 k->vtable().adjust_method_entries(&trace_name_printed);
3766
3767 } else if (k->is_instance_klass()) {
3768 HandleMark hm(_thread);
3769 InstanceKlass *ik = InstanceKlass::cast(k);
3770
3771 // Clean MethodData of this class's methods so they don't refer to
3772 // old methods that are no longer running.
3773 Array<Method*>* methods = ik->methods();
3774 int num_methods = methods->length();
3775 for (int index = 0; index < num_methods; ++index) {
3776 if (methods->at(index)->method_data() != nullptr) {
3777 methods->at(index)->method_data()->clean_weak_method_links();
3778 }
3779 }
3780
3781 // Adjust all vtables, default methods and itables, to clean out old methods.
3782 ResourceMark rm(_thread);
3783 if (ik->vtable_length() > 0) {
3784 ik->vtable().adjust_method_entries(&trace_name_printed);
3785 ik->adjust_default_methods(&trace_name_printed);
3786 }
3787
3788 if (ik->itable_length() > 0) {
3789 ik->itable().adjust_method_entries(&trace_name_printed);
3790 }
3791
3792 // The constant pools in other classes (other_cp) can refer to
3793 // old methods. We have to update method information in
3794 // other_cp's cache. If other_cp has a previous version, then we
3795 // have to repeat the process for each previous version. The
3796 // constant pool cache holds the Method*s for non-virtual
3797 // methods and for virtual, final methods.
3798 //
3799 // Special case: if the current class is being redefined by the current
3800 // VM_RedefineClasses operation, then new_cp has already been attached
3801 // to the_class and old_cp has already been added as a previous version.
3802 // The new_cp doesn't have any cached references to old methods so it
3803 // doesn't need to be updated and we could optimize by skipping it.
3804 // However, the current class can be marked as being redefined by another
3805 // VM_RedefineClasses operation which has already executed its doit_prologue
3806 // and needs cpcache method entries adjusted. For simplicity, the cpcache
3807 // update is done unconditionally. It should result in doing nothing for
3808 // classes being redefined by the current VM_RedefineClasses operation.
3809 // Method entries in the previous version(s) are adjusted as well.
3810 ConstantPoolCache* cp_cache;
3811
3812 // this klass' constant pool cache may need adjustment
3813 ConstantPool* other_cp = ik->constants();
3814 cp_cache = other_cp->cache();
3815 if (cp_cache != nullptr) {
3816 cp_cache->adjust_method_entries(&trace_name_printed);
3817 }
3818
3819 // the previous versions' constant pool caches may need adjustment
3820 for (InstanceKlass* pv_node = ik->previous_versions();
3821 pv_node != nullptr;
3822 pv_node = pv_node->previous_versions()) {
3823 cp_cache = pv_node->constants()->cache();
3824 if (cp_cache != nullptr) {
3825 cp_cache->adjust_method_entries(&trace_name_printed);
3826 }
3827 }
3828 }
3829 }
3830
3831 void VM_RedefineClasses::update_jmethod_ids() {
3832 for (int j = 0; j < _matching_methods_length; ++j) {
3833 Method* old_method = _matching_old_methods[j];
3834 // The method_idnum should be within the range of 1..number-of-methods
3835 // until incremented later for obsolete methods.
3836 // The increment is so if a jmethodID is created for an old obsolete method
3837 // it gets a new jmethodID cache slot in the InstanceKlass.
3838 // They're cleaned out later when all methods of the previous version are purged.
3839 assert(old_method->method_idnum() <= _old_methods->length(),
3840 "shouldn't be incremented yet for obsolete methods");
3841 jmethodID jmid = old_method->find_jmethod_id_or_null();
3842 if (jmid != nullptr) {
3843 // There is a jmethodID, change it to point to the new method
3844 Method* new_method = _matching_new_methods[j];
3845 Method::change_method_associated_with_jmethod_id(jmid, new_method);
3846 assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
3847 "should be replaced");
3848 }
3849 }
3850 }
3851
3852 int VM_RedefineClasses::check_methods_and_mark_as_obsolete() {
3853 int emcp_method_count = 0;
3854 int obsolete_count = 0;
3855 int old_index = 0;
3856 for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
3857 Method* old_method = _matching_old_methods[j];
3858 Method* new_method = _matching_new_methods[j];
3859 Method* old_array_method;
3860
3861 // Maintain an old_index into the _old_methods array by skipping
3862 // deleted methods
3863 while ((old_array_method = _old_methods->at(old_index)) != old_method) {
3864 ++old_index;
3865 }
3866
3867 if (MethodComparator::methods_EMCP(old_method, new_method)) {
3868 // The EMCP definition from JSR-163 requires the bytecodes to be
3869 // the same with the exception of constant pool indices which may
3870 // differ. However, the constants referred to by those indices
3871 // must be the same.
3872 //
3873 // We use methods_EMCP() for comparison since constant pool
3874 // merging can remove duplicate constant pool entries that were
3875 // present in the old method and removed from the rewritten new
3876 // method. A faster binary comparison function would consider the
3877 // old and new methods to be different when they are actually
3878 // EMCP.
3879 //
3880 // The old and new methods are EMCP and you would think that we
3881 // could get rid of one of them here and now and save some space.
3882 // However, the concept of EMCP only considers the bytecodes and
3883 // the constant pool entries in the comparison. Other things,
3884 // e.g., the line number table (LNT) or the local variable table
3885 // (LVT) don't count in the comparison. So the new (and EMCP)
3886 // method can have a new LNT that we need so we can't just
3887 // overwrite the new method with the old method.
3888 //
3889 // When this routine is called, we have already attached the new
3890 // methods to the_class so the old methods are effectively
3891 // overwritten. However, if an old method is still executing,
3892 // then the old method cannot be collected until sometime after
3893 // the old method call has returned. So the overwriting of old
3894 // methods by new methods will save us space except for those
3895 // (hopefully few) old methods that are still executing.
3896 //
3897 // A method refers to a ConstMethod* and this presents another
3898 // possible avenue to space savings. The ConstMethod* in the
3899 // new method contains possibly new attributes (LNT, LVT, etc).
3900 // At first glance, it seems possible to save space by replacing
3901 // the ConstMethod* in the old method with the ConstMethod*
3902 // from the new method. The old and new methods would share the
3903 // same ConstMethod* and we would save the space occupied by
3904 // the old ConstMethod*. However, the ConstMethod* contains
3905 // a back reference to the containing method. Sharing the
3906 // ConstMethod* between two methods could lead to confusion in
3907 // the code that uses the back reference. This would lead to
3908 // brittle code that could be broken in non-obvious ways now or
3909 // in the future.
3910 //
3911 // Another possibility is to copy the ConstMethod* from the new
3912 // method to the old method and then overwrite the new method with
3913 // the old method. Since the ConstMethod* contains the bytecodes
3914 // for the method embedded in the oop, this option would change
3915 // the bytecodes out from under any threads executing the old
3916 // method and make the thread's bcp invalid. Since EMCP requires
3917 // that the bytecodes be the same modulo constant pool indices, it
3918 // is straight forward to compute the correct new bcp in the new
3919 // ConstMethod* from the old bcp in the old ConstMethod*. The
3920 // time consuming part would be searching all the frames in all
3921 // of the threads to find all of the calls to the old method.
3922 //
3923 // It looks like we will have to live with the limited savings
3924 // that we get from effectively overwriting the old methods
3925 // when the new methods are attached to the_class.
3926
3927 // Count number of methods that are EMCP. The method will be marked
3928 // old but not obsolete if it is EMCP.
3929 emcp_method_count++;
3930
3931 // An EMCP method is _not_ obsolete. An obsolete method has a
3932 // different jmethodID than the current method. An EMCP method
3933 // has the same jmethodID as the current method. Having the
3934 // same jmethodID for all EMCP versions of a method allows for
3935 // a consistent view of the EMCP methods regardless of which
3936 // EMCP method you happen to have in hand. For example, a
3937 // breakpoint set in one EMCP method will work for all EMCP
3938 // versions of the method including the current one.
3939 } else {
3940 // mark obsolete methods as such
3941 old_method->set_is_obsolete();
3942 obsolete_count++;
3943
3944 // obsolete methods need a unique idnum so they become new entries in
3945 // the jmethodID cache in InstanceKlass
3946 assert(old_method->method_idnum() == new_method->method_idnum(), "must match");
3947 u2 num = InstanceKlass::cast(_the_class)->next_method_idnum();
3948 if (num != ConstMethod::UNSET_IDNUM) {
3949 old_method->set_method_idnum(num);
3950 }
3951
3952 // With tracing we try not to "yack" too much. The position of
3953 // this trace assumes there are fewer obsolete methods than
3954 // EMCP methods.
3955 if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3956 ResourceMark rm;
3957 log_trace(redefine, class, obsolete, mark)
3958 ("mark %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3959 }
3960 }
3961 old_method->set_is_old();
3962 }
3963 for (int i = 0; i < _deleted_methods_length; ++i) {
3964 Method* old_method = _deleted_methods[i];
3965
3966 assert(!old_method->has_vtable_index(),
3967 "cannot delete methods with vtable entries");;
3968
3969 // Mark all deleted methods as old, obsolete and deleted
3970 old_method->set_is_deleted();
3971 old_method->set_is_old();
3972 old_method->set_is_obsolete();
3973 ++obsolete_count;
3974 // With tracing we try not to "yack" too much. The position of
3975 // this trace assumes there are fewer obsolete methods than
3976 // EMCP methods.
3977 if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3978 ResourceMark rm;
3979 log_trace(redefine, class, obsolete, mark)
3980 ("mark deleted %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3981 }
3982 }
3983 assert((emcp_method_count + obsolete_count) == _old_methods->length(),
3984 "sanity check");
3985 log_trace(redefine, class, obsolete, mark)("EMCP_cnt=%d, obsolete_cnt=%d", emcp_method_count, obsolete_count);
3986 return emcp_method_count;
3987 }
3988
3989 // This internal class transfers the native function registration from old methods
3990 // to new methods. It is designed to handle both the simple case of unchanged
3991 // native methods and the complex cases of native method prefixes being added and/or
3992 // removed.
3993 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
3994 //
3995 // This class is used after the new methods have been installed in "the_class".
3996 //
3997 // So, for example, the following must be handled. Where 'm' is a method and
3998 // a number followed by an underscore is a prefix.
3999 //
4000 // Old Name New Name
4001 // Simple transfer to new method m -> m
4002 // Add prefix m -> 1_m
4003 // Remove prefix 1_m -> m
4004 // Simultaneous add of prefixes m -> 3_2_1_m
4005 // Simultaneous removal of prefixes 3_2_1_m -> m
4006 // Simultaneous add and remove 1_m -> 2_m
4007 // Same, caused by prefix removal only 3_2_1_m -> 3_2_m
4008 //
4009 class TransferNativeFunctionRegistration {
4010 private:
4011 InstanceKlass* the_class;
4012 int prefix_count;
4013 char** prefixes;
4014
4015 // Recursively search the binary tree of possibly prefixed method names.
4016 // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilient to agents not cleaning up intermediate methods.
4018 // Branch at each depth in the binary tree is:
4019 // (1) without the prefix.
4020 // (2) with the prefix.
4021 // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
4022 Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
4023 Symbol* signature) {
4024 TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
4025 if (name_symbol != nullptr) {
4026 Method* method = the_class->lookup_method(name_symbol, signature);
4027 if (method != nullptr) {
4028 // Even if prefixed, intermediate methods must exist.
4029 if (method->is_native()) {
4030 // Wahoo, we found a (possibly prefixed) version of the method, return it.
4031 return method;
4032 }
4033 if (depth < prefix_count) {
4034 // Try applying further prefixes (other than this one).
4035 method = search_prefix_name_space(depth+1, name_str, name_len, signature);
4036 if (method != nullptr) {
4037 return method; // found
4038 }
4039
4040 // Try adding this prefix to the method name and see if it matches
4041 // another method name.
4042 char* prefix = prefixes[depth];
4043 size_t prefix_len = strlen(prefix);
4044 size_t trial_len = name_len + prefix_len;
4045 char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
4046 strcpy(trial_name_str, prefix);
4047 strcat(trial_name_str, name_str);
4048 method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
4049 signature);
4050 if (method != nullptr) {
4051 // If found along this branch, it was prefixed, mark as such
4052 method->set_is_prefixed_native();
4053 return method; // found
4054 }
4055 }
4056 }
4057 }
4058 return nullptr; // This whole branch bore nothing
4059 }
4060
4061 // Return the method name with old prefixes stripped away.
4062 char* method_name_without_prefixes(Method* method) {
4063 Symbol* name = method->name();
4064 char* name_str = name->as_utf8();
4065
4066 // Old prefixing may be defunct, strip prefixes, if any.
4067 for (int i = prefix_count-1; i >= 0; i--) {
4068 char* prefix = prefixes[i];
4069 size_t prefix_len = strlen(prefix);
4070 if (strncmp(prefix, name_str, prefix_len) == 0) {
4071 name_str += prefix_len;
4072 }
4073 }
4074 return name_str;
4075 }
4076
4077 // Strip any prefixes off the old native method, then try to find a
4078 // (possibly prefixed) new native that matches it.
4079 Method* strip_and_search_for_new_native(Method* method) {
4080 ResourceMark rm;
4081 char* name_str = method_name_without_prefixes(method);
4082 return search_prefix_name_space(0, name_str, strlen(name_str),
4083 method->signature());
4084 }
4085
4086 public:
4087
4088 // Construct a native method transfer processor for this class.
4089 TransferNativeFunctionRegistration(InstanceKlass* _the_class) {
4090 assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
4091
4092 the_class = _the_class;
4093 prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
4094 }
4095
4096 // Attempt to transfer any of the old or deleted methods that are native
4097 void transfer_registrations(Method** old_methods, int methods_length) {
4098 for (int j = 0; j < methods_length; j++) {
4099 Method* old_method = old_methods[j];
4100
4101 if (old_method->is_native() && old_method->has_native_function()) {
4102 Method* new_method = strip_and_search_for_new_native(old_method);
4103 if (new_method != nullptr) {
4104 // Actually set the native function in the new method.
4105 // Redefine does not send events (except CFLH), certainly not this
4106 // behind the scenes re-registration.
4107 new_method->set_native_function(old_method->native_function(),
4108 !Method::native_bind_event_is_interesting);
4109 }
4110 }
4111 }
4112 }
4113 };
4114
4115 // Don't lose the association between a native method and its JNI function.
4116 void VM_RedefineClasses::transfer_old_native_function_registrations(InstanceKlass* the_class) {
4117 TransferNativeFunctionRegistration transfer(the_class);
4118 transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
4119 transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
4120 }
4121
4122 // Deoptimize all compiled code that depends on the classes redefined.
4123 //
4124 // If the can_redefine_classes capability is obtained in the onload
4125 // phase or 'AlwaysRecordEvolDependencies' is true, then the compiler has
4126 // recorded all dependencies from startup. In that case we need only
4127 // deoptimize and throw away all compiled code that depends on the class.
4128 //
4129 // If can_redefine_classes is obtained sometime after the onload phase
4130 // (and 'AlwaysRecordEvolDependencies' is false) then the dependency
4131 // information may be incomplete. In that case the first call to
4132 // RedefineClasses causes all compiled code to be thrown away. As
4133 // can_redefine_classes has been obtained then all future compilations will
4134 // record dependencies so second and subsequent calls to RedefineClasses
4135 // need only throw away code that depends on the class.
4136 //
4137
4138 void VM_RedefineClasses::flush_dependent_code() {
4139 assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
4140 assert(JvmtiExport::all_dependencies_are_recorded() || !AlwaysRecordEvolDependencies, "sanity check");
4141
4142 DeoptimizationScope deopt_scope;
4143
4144 // This is the first redefinition, mark all the nmethods for deoptimization
4145 if (!JvmtiExport::all_dependencies_are_recorded()) {
4146 CodeCache::mark_all_nmethods_for_evol_deoptimization(&deopt_scope);
4147 log_debug(redefine, class, nmethod)("Marked all nmethods for deopt");
4148 } else {
4149 CodeCache::mark_dependents_for_evol_deoptimization(&deopt_scope);
4150 log_debug(redefine, class, nmethod)("Marked dependent nmethods for deopt");
4151 }
4152
4153 deopt_scope.deoptimize_marked();
4154
4155 // From now on we know that the dependency information is complete
4156 JvmtiExport::set_all_dependencies_are_recorded(true);
4157 }
4158
// Partition the old and new method arrays into matching, deleted, and added
// sets via a merge-style walk. Assumes both arrays are ordered consistently
// by method name (fast_compare order) — TODO confirm against the sorting done
// during class file parsing.
void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  Method* old_method;
  Method* new_method;

  // Worst-case sizing (resource-area): every old method may match or be
  // deleted, every new method may be an addition.
  _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _added_methods = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
  _deleted_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());

  _matching_methods_length = 0;
  _deleted_methods_length = 0;
  _added_methods_length = 0;

  int nj = 0;  // cursor into _new_methods
  int oj = 0;  // cursor into _old_methods
  while (true) {
    if (oj >= _old_methods->length()) {
      if (nj >= _new_methods->length()) {
        break; // we've looked at everything, done
      }
      // New method at the end
      new_method = _new_methods->at(nj);
      _added_methods[_added_methods_length++] = new_method;
      ++nj;
    } else if (nj >= _new_methods->length()) {
      // Old method, at the end, is deleted
      old_method = _old_methods->at(oj);
      _deleted_methods[_deleted_methods_length++] = old_method;
      ++oj;
    } else {
      old_method = _old_methods->at(oj);
      new_method = _new_methods->at(nj);
      if (old_method->name() == new_method->name()) {
        if (old_method->signature() == new_method->signature()) {
          // Same name and signature: a matching pair; record both sides at
          // the same index so they stay aligned.
          _matching_old_methods[_matching_methods_length  ] = old_method;
          _matching_new_methods[_matching_methods_length++] = new_method;
          ++nj;
          ++oj;
        } else {
          // added overloaded have already been moved to the end,
          // so this is a deleted overloaded method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      } else { // names don't match
        if (old_method->name()->fast_compare(new_method->name()) > 0) {
          // new method
          _added_methods[_added_methods_length++] = new_method;
          ++nj;
        } else {
          // deleted method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      }
    }
  }
  // Every old method must be either matched or deleted; every new method
  // must be either matched or added.
  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
}
4219
4220
4221 void VM_RedefineClasses::swap_annotations(InstanceKlass* the_class,
4222 InstanceKlass* scratch_class) {
4223 // Swap annotation fields values
4224 Annotations* old_annotations = the_class->annotations();
4225 the_class->set_annotations(scratch_class->annotations());
4226 scratch_class->set_annotations(old_annotations);
4227 }
4228
4229
4230 // Install the redefinition of a class:
4231 // - house keeping (flushing breakpoints and caches, deoptimizing
4232 // dependent compiled code)
4233 // - replacing parts in the_class with parts from scratch_class
4234 // - adding a weak reference to track the obsolete but interesting
4235 // parts of the_class
4236 // - adjusting constant pool caches and vtables in other classes
4237 // that refer to methods in the_class. These adjustments use the
4238 // ClassLoaderDataGraph::classes_do() facility which only allows
4239 // a helper method to be specified. The interesting parameters
4240 // that we would like to pass to the helper method are saved in
4241 // static global fields in the VM operation.
void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclass,
                                               InstanceKlass* scratch_class) {

  HandleMark hm(current);   // make sure handles from this call are freed

  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_rsc_phase1.start();
  }

  InstanceKlass* the_class = get_ik(the_jclass);

  // Set a flag to control and optimize adjusting method entries
  _has_redefined_Object |= the_class == vmClasses::Object_klass();

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class);

  // Stash the two method arrays in the static fields the diff/adjust helpers
  // read, then classify methods as matching/added/deleted.
  _old_methods = the_class->methods();
  _new_methods = scratch_class->methods();
  _the_class = the_class;
  compute_added_deleted_matching_methods();
  update_jmethod_ids();

  _any_class_has_resolved_methods = the_class->has_resolved_methods() || _any_class_has_resolved_methods;

  // Attach new constant pool to the original klass. The original
  // klass still refers to the old constant pool (for now).
  scratch_class->constants()->set_pool_holder(the_class);

#if 0
  // In theory, with constant pool merging in place we should be able
  // to save space by using the new, merged constant pool in place of
  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  // the klass version we are replacing now and any constant pool(s) in
  // previous versions of klass. Nice theory, doesn't work in practice.
  // When this code is enabled, even simple programs throw NullPointer
  // exceptions. I'm guessing that this is caused by some constant pool
  // cache difference between the new, merged constant pool and the
  // constant pool that was just being used by the klass. I'm keeping
  // this code around to archive the idea, but the code has to remain
  // disabled for now.

  // Attach each old method to the new constant pool. This can be
  // done here since we are past the bytecode verification and
  // constant pool optimization phases.
  for (int i = _old_methods->length() - 1; i >= 0; i--) {
    Method* method = _old_methods->at(i);
    method->set_constants(scratch_class->constants());
  }

  // NOTE: this doesn't work because you can redefine the same class in two
  // threads, each getting their own constant pool data appended to the
  // original constant pool. In order for the new methods to work when they
  // become old methods, they need to keep their updated copy of the constant pool.

  {
    // walk all previous versions of the klass
    InstanceKlass *ik = the_class;
    PreviousVersionWalker pvw(ik);
    do {
      ik = pvw.next_previous_version();
      if (ik != nullptr) {

        // attach previous version of klass to the new constant pool
        ik->set_constants(scratch_class->constants());

        // Attach each method in the previous version of klass to the
        // new constant pool
        Array<Method*>* prev_methods = ik->methods();
        for (int i = prev_methods->length() - 1; i >= 0; i--) {
          Method* method = prev_methods->at(i);
          method->set_constants(scratch_class->constants());
        }
      }
    } while (ik != nullptr);
  }
#endif

  // Replace methods and constantpool
  the_class->set_methods(_new_methods);
  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
                                                // and to be able to undo operation easily.

  Array<int>* old_ordering = the_class->method_ordering();
  the_class->set_method_ordering(scratch_class->method_ordering());
  scratch_class->set_method_ordering(old_ordering);

  ConstantPool* old_constants = the_class->constants();
  the_class->set_constants(scratch_class->constants());
  scratch_class->set_constants(old_constants);  // See the previous comment.
#if 0
  // We are swapping the guts of "the new class" with the guts of "the
  // class". Since the old constant pool has just been attached to "the
  // new class", it seems logical to set the pool holder in the old
  // constant pool also. However, doing this will change the observable
  // class hierarchy for any old methods that are still executing. A
  // method can query the identity of its "holder" and this query uses
  // the method's constant pool link to find the holder. The change in
  // holding class from "the class" to "the new class" can confuse
  // things.
  //
  // Setting the old constant pool's holder will also cause
  // verification done during vtable initialization below to fail.
  // During vtable initialization, the vtable's class is verified to be
  // a subtype of the method's holder. The vtable's class is "the
  // class" and the method's holder is gotten from the constant pool
  // link in the method itself. For "the class"'s directly implemented
  // methods, the method holder is "the class" itself (as gotten from
  // the new constant pool). The check works fine in this case. The
  // check also works fine for methods inherited from super classes.
  //
  // Miranda methods are a little more complicated. A miranda method is
  // provided by an interface when the class implementing the interface
  // does not provide its own method. These interfaces are implemented
  // internally as an InstanceKlass. These special instanceKlasses
  // share the constant pool of the class that "implements" the
  // interface. By sharing the constant pool, the method holder of a
  // miranda method is the class that "implements" the interface. In a
  // non-redefine situation, the subtype check works fine. However, if
  // the old constant pool's pool holder is modified, then the check
  // fails because there is no class hierarchy relationship between the
  // vtable's class and "the new class".

  old_constants->set_pool_holder(scratch_class());
#endif

  // track number of methods that are EMCP for add_previous_version() call below
  int emcp_method_count = check_methods_and_mark_as_obsolete();
  transfer_old_native_function_registrations(the_class);

  if (scratch_class->get_cached_class_file() != the_class->get_cached_class_file()) {
    // 1. the_class doesn't have a cache yet, scratch_class does have a cache.
    // 2. The same class can be present twice in the scratch classes list or there
    //    are multiple concurrent RetransformClasses calls on different threads.
    //    the_class and scratch_class have the same cached bytes, but different buffers.
    //    In such cases we need to deallocate one of the buffers.
    // 3. RedefineClasses and the_class has cached bytes from a previous transformation.
    //    In that case we need to use class bytes from scratch_class.
    if (the_class->get_cached_class_file() != nullptr) {
      os::free(the_class->get_cached_class_file());
    }
    the_class->set_cached_class_file(scratch_class->get_cached_class_file());
  }

  // null out in scratch class to not delete twice. The class to be redefined
  // always owns these bytes.
  scratch_class->set_cached_class_file(nullptr);

  // Replace inner_classes
  Array<u2>* old_inner_classes = the_class->inner_classes();
  the_class->set_inner_classes(scratch_class->inner_classes());
  scratch_class->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  // no exception should happen here since we explicitly
  // do not check loader constraints.
  // compare_and_normalize_class_versions has already checked:
  //  - classloaders unchanged, signatures unchanged
  //  - all instanceKlasses for redefined classes reused & contents updated
  the_class->vtable().initialize_vtable();
  the_class->itable().initialize_itable();

  // Update jmethodID cache if present.
  the_class->update_methods_jmethod_cache();

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(
    scratch_class->source_debug_extension(),
    scratch_class->source_debug_extension() == nullptr ? 0 :
    (int)strlen(scratch_class->source_debug_extension()));

  // Use of javac -g could be different in the old and the new
  if (scratch_class->has_localvariable_table() !=
      the_class->has_localvariable_table()) {
    the_class->set_has_localvariable_table(scratch_class->has_localvariable_table());
  }

  swap_annotations(the_class, scratch_class);

  // Replace minor version number of class file
  u2 old_minor_version = the_class->constants()->minor_version();
  the_class->constants()->set_minor_version(scratch_class->constants()->minor_version());
  scratch_class->constants()->set_minor_version(old_minor_version);

  // Replace major version number of class file
  u2 old_major_version = the_class->constants()->major_version();
  the_class->constants()->set_major_version(scratch_class->constants()->major_version());
  scratch_class->constants()->set_major_version(old_major_version);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx = the_class->enclosing_method_class_index();
  u2 old_method_idx = the_class->enclosing_method_method_index();
  the_class->set_enclosing_method_indices(
    scratch_class->enclosing_method_class_index(),
    scratch_class->enclosing_method_method_index());
  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);

  if (!the_class->has_been_redefined()) {
    the_class->set_has_been_redefined();
  }

  // Scratch class is unloaded but still needs cleaning, and skipping for CDS.
  scratch_class->set_is_scratch_class();

  // keep track of previous versions of this class
  the_class->add_previous_version(scratch_class, emcp_method_count);

  JFR_ONLY(Jfr::on_klass_redefinition(the_class, scratch_class);)

  _timer_rsc_phase1.stop();
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_rsc_phase2.start();
  }

  if (the_class->oop_map_cache() != nullptr) {
    // Flush references to any obsolete methods from the oop map cache
    // so that obsolete methods are not pinned.
    the_class->oop_map_cache()->flush_obsolete_entries();
  }

  increment_class_counter(the_class);

  if (EventClassRedefinition::is_enabled()) {
    EventClassRedefinition event;
    event.set_classModificationCount(java_lang_Class::classRedefinedCount(the_class->java_mirror()));
    event.set_redefinedClass(the_class);
    event.set_redefinitionId(_id);
    event.commit();
  }

  {
    ResourceMark rm(current);
    // increment the classRedefinedCount field in the_class and in any
    // direct and indirect subclasses of the_class
    physical_memory_size_type avail_mem = 0;
    // Return value ignored - defaulting to 0 on failure.
    (void)os::available_memory(avail_mem);
    log_info(redefine, class, load)
      ("redefined name=%s, count=%d (avail_mem=" PHYS_MEM_TYPE_FORMAT "K)",
       the_class->external_name(), java_lang_Class::classRedefinedCount(the_class->java_mirror()), avail_mem >> 10);
    Events::log_redefinition(current, "redefined class name=%s, count=%d",
                             the_class->external_name(),
                             java_lang_Class::classRedefinedCount(the_class->java_mirror()));

  }
  _timer_rsc_phase2.stop();

} // end redefine_single_class()
4492
4493
4494 // Increment the classRedefinedCount field in the specific InstanceKlass
4495 // and in all direct and indirect subclasses.
4496 void VM_RedefineClasses::increment_class_counter(InstanceKlass* ik) {
4497 for (ClassHierarchyIterator iter(ik); !iter.done(); iter.next()) {
4498 // Only update instanceKlasses
4499 Klass* sub = iter.klass();
4500 if (sub->is_instance_klass()) {
4501 oop class_mirror = InstanceKlass::cast(sub)->java_mirror();
4502 Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
4503 int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
4504 java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
4505
4506 if (class_oop != _the_class) {
4507 // _the_class count is printed at end of redefine_single_class()
4508 log_debug(redefine, class, subclass)("updated count in subclass=%s to %d", ik->external_name(), new_count);
4509 }
4510 }
4511 }
4512 }
4513
// Verification closure: after redefinition, no vtable, itable, or constant
// pool cache anywhere may still reference an old or obsolete method.
// Fails a guarantee() (fatal) if any such entry is found.
void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
  bool no_old_methods = true;  // be optimistic

  // Both array and instance classes have vtables.
  // a vtable should never contain old or obsolete methods
  ResourceMark rm(_thread);
  if (k->vtable_length() > 0 &&
      !k->vtable().check_no_old_or_obsolete_entries()) {
    if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
      log_trace(redefine, class, obsolete, metadata)
        ("klassVtable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
         k->signature_name());
      k->vtable().dump_vtable();
    }
    no_old_methods = false;
  }

  if (k->is_instance_klass()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // an itable should never contain old or obsolete methods
    if (ik->itable_length() > 0 &&
        !ik->itable().check_no_old_or_obsolete_entries()) {
      if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
        log_trace(redefine, class, obsolete, metadata)
          ("klassItable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name());
        ik->itable().dump_itable();
      }
      no_old_methods = false;
    }

    // the constant pool cache should never contain non-deleted old or obsolete methods
    if (ik->constants() != nullptr &&
        ik->constants()->cache() != nullptr &&
        !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
      if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
        log_trace(redefine, class, obsolete, metadata)
          ("cp-cache::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
           ik->signature_name());
        ik->constants()->cache()->dump_cache();
      }
      no_old_methods = false;
    }
  }

  // print and fail guarantee if old methods are found.
  if (!no_old_methods) {
    if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
      dump_methods();
    } else {
      log_trace(redefine, class)("Use the '-Xlog:redefine+class*:' option "
        "to see more info about the following guarantee() failure.");
    }
    guarantee(false, "OLD and/or OBSOLETE method(s) found");
  }
}
4572
4573 u8 VM_RedefineClasses::next_id() {
4574 while (true) {
4575 u8 id = _id_counter;
4576 u8 next_id = id + 1;
4577 u8 result = AtomicAccess::cmpxchg(&_id_counter, id, next_id);
4578 if (result == id) {
4579 return next_id;
4580 }
4581 }
4582 }
4583
4584 void VM_RedefineClasses::dump_methods() {
4585 int j;
4586 log_trace(redefine, class, dump)("_old_methods --");
4587 for (j = 0; j < _old_methods->length(); ++j) {
4588 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4589 Method* m = _old_methods->at(j);
4590 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4591 m->access_flags().print_on(&log_stream);
4592 log_stream.print(" -- ");
4593 m->print_name(&log_stream);
4594 log_stream.cr();
4595 }
4596 log_trace(redefine, class, dump)("_new_methods --");
4597 for (j = 0; j < _new_methods->length(); ++j) {
4598 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4599 Method* m = _new_methods->at(j);
4600 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4601 m->access_flags().print_on(&log_stream);
4602 log_stream.print(" -- ");
4603 m->print_name(&log_stream);
4604 log_stream.cr();
4605 }
4606 log_trace(redefine, class, dump)("_matching_methods --");
4607 for (j = 0; j < _matching_methods_length; ++j) {
4608 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4609 Method* m = _matching_old_methods[j];
4610 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4611 m->access_flags().print_on(&log_stream);
4612 log_stream.print(" -- ");
4613 m->print_name();
4614 log_stream.cr();
4615
4616 m = _matching_new_methods[j];
4617 log_stream.print(" (%5d) ", m->vtable_index());
4618 m->access_flags().print_on(&log_stream);
4619 log_stream.cr();
4620 }
4621 log_trace(redefine, class, dump)("_deleted_methods --");
4622 for (j = 0; j < _deleted_methods_length; ++j) {
4623 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4624 Method* m = _deleted_methods[j];
4625 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4626 m->access_flags().print_on(&log_stream);
4627 log_stream.print(" -- ");
4628 m->print_name(&log_stream);
4629 log_stream.cr();
4630 }
4631 log_trace(redefine, class, dump)("_added_methods --");
4632 for (j = 0; j < _added_methods_length; ++j) {
4633 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4634 Method* m = _added_methods[j];
4635 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4636 m->access_flags().print_on(&log_stream);
4637 log_stream.print(" -- ");
4638 m->print_name(&log_stream);
4639 log_stream.cr();
4640 }
4641 }
4642
4643 void VM_RedefineClasses::print_on_error(outputStream* st) const {
4644 VM_Operation::print_on_error(st);
4645 if (_the_class != nullptr) {
4646 ResourceMark rm;
4647 st->print_cr(", redefining class %s", _the_class->external_name());
4648 }
4649 }