      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage, JVM_CONSTANT_UnresolvedClassInError should not be here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()


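// Find the location of the scratch_cp entry at ref_i in the merged constant
// pool. If the scratch entry already matches *merge_cp_p at the same index,
// ref_i can be reused as-is; otherwise, as the name says, the entry must be
// found elsewhere in (or appended to) the merged pool and its index returned.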
u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  int new_ref_i = ref_i;
  bool match = (ref_i < *merge_cp_length_p) &&
               scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);

  if (!match) {
bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {

  // rewrite constant pool references in the nest attributes:
  if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the Record attribute:
  if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the PermittedSubclasses attribute:
  if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the LoadableDescriptors attribute:
  if (!rewrite_cp_refs_in_loadable_descriptors_attribute(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods:
  if (!rewrite_cp_refs_in_methods(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the class_annotations:
  if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the fields_annotations:
  if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_annotations:
  if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {
        }
      }
    }
  }
  return true;
}

// Rewrite constant pool references in the PermittedSubclasses attribute.
bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
       InstanceKlass* scratch_class) {

  Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
  assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
  for (int i = 0; i < permitted_subclasses->length(); i++) {
    u2 cp_index = permitted_subclasses->at(i);
    permitted_subclasses->at_put(i, find_new_index(cp_index));
  }
  return true;
}

// Rewrite constant pool references in the LoadableDescriptors attribute.
bool VM_RedefineClasses::rewrite_cp_refs_in_loadable_descriptors_attribute(
       InstanceKlass* scratch_class) {

  Array<u2>* loadable_descriptors = scratch_class->loadable_descriptors();
  assert(loadable_descriptors != nullptr, "unexpected null loadable_descriptors");
  for (int i = 0; i < loadable_descriptors->length(); i++) {
    u2 cp_index = loadable_descriptors->at(i);
    loadable_descriptors->at_put(i, find_new_index(cp_index));
  }
  return true;
}

// Rewrite constant pool references in the methods.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == nullptr || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  JavaThread* THREAD = JavaThread::current(); // For exception macros.
  ExceptionMark em(THREAD);

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
  // walk through each stack_map_frame
  u2 calc_number_of_entries = 0;
  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
    // The stack_map_frame structure is a u1 frame_type followed by
    // 0 or more bytes of data:
    //
    // union stack_map_frame {
    //   same_frame;
    //   same_locals_1_stack_item_frame;
    //   same_locals_1_stack_item_frame_extended;
    //   chop_frame;
    //   same_frame_extended;
    //   append_frame;
    //   full_frame;
    // }

    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
    u1 frame_type = *stackmap_p;
    stackmap_p++;

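    // early_larval_frame {
    //   u1 frame_type = EARLY_LARVAL; /* 246 */
    //   u2 number_of_unset_fields;
    //   u2 unset_fields[number_of_unset_fields]; // constant pool indices
    //   followed by the base frame being wrapped
    // }
    // (layout as consumed by rewrite_cp_refs_in_early_larval_stackmaps() below)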
    if (frame_type == 246) { // EARLY_LARVAL
      // Rewrite the cp refs in this frame's unset fields table.
      rewrite_cp_refs_in_early_larval_stackmaps(stackmap_p, stackmap_end, calc_number_of_entries, frame_type);
      // An early_larval frame wraps a base frame, so read the wrapped
      // frame's type and fall through to the frame handling below.
      assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
      frame_type = *stackmap_p;
      stackmap_p++;
    }

    // same_frame {
    //   u1 frame_type = SAME; /* 0-63 */
    // }
    if (frame_type <= StackMapReader::SAME_FRAME_END) {
      // nothing more to do for same_frame
    }

    // same_locals_1_stack_item_frame {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
    //   verification_type_info stack[1];
    // }
    else if (frame_type >= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_START &&
             frame_type <= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_END) {
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type);
    }

    // reserved for future use
    else if (frame_type >= StackMapReader::RESERVED_START &&
             frame_type <= StackMapReader::RESERVED_END) {
3488 ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3489 } break;
3490
3491 // Uninitialized_variable_info {
3492 // u1 tag = ITEM_Uninitialized; /* 8 */
3493 // u2 offset;
3494 // }
3495 case ITEM_Uninitialized:
3496 assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3497 stackmap_p_ref += 2;
3498 break;
3499
3500 default:
3501 log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3502 ShouldNotReachHere();
3503 break;
3504 } // end switch (tag)
3505 } // end rewrite_cp_refs_in_verification_type_info()
3506
3507
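// Rewrite the constant pool references in the unset fields table of an
// early_larval stack map frame. On entry, stackmap_p_ref points just past
// the frame_type byte; on return, it points at the first byte of the
// wrapped base frame.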
void VM_RedefineClasses::rewrite_cp_refs_in_early_larval_stackmaps(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type) {

  assert(stackmap_p_ref + 2 <= stackmap_end, "no room for number of unset fields");
  u2 num_early_larval_stackmaps = Bytes::get_Java_u2(stackmap_p_ref);
  stackmap_p_ref += 2;

  for (u2 i = 0; i < num_early_larval_stackmaps; i++) {
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for unset field index");
    u2 name_and_ref_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(name_and_ref_index);
    if (new_cp_index != 0) {
      // the index was remapped during the constant pool merge
      log_debug(redefine, class, stackmap)("mapped old name_and_ref_index=%d", name_and_ref_index);
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      name_and_ref_index = new_cp_index;
    }
    log_debug(redefine, class, stackmap)
      ("frame_i=%u, frame_type=%u, name_and_ref_index=%d", frame_i, frame_type, name_and_ref_index);

    stackmap_p_ref += 2;
  }
} // end rewrite_cp_refs_in_early_larval_stackmaps()


// Change the constant pool associated with klass scratch_class to scratch_cp.
// scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
// and the smaller constant pool is associated with scratch_class.
void VM_RedefineClasses::set_new_constant_pool(
       ClassLoaderData* loader_data,
       InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
       int scratch_cp_length, TRAPS) {
  assert(scratch_cp->length() >= scratch_cp_length, "sanity check");

  // scratch_cp is a merged constant pool and has enough space for a
  // worst case merge situation. We want to associate the minimum
  // sized constant pool with the klass to save space.
  ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
  constantPoolHandle smaller_cp(THREAD, cp);

  // preserve version() value in the smaller copy
  int version = scratch_cp->version();
  assert(version != 0, "sanity check");
  smaller_cp->set_version(version);