      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
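
// Illustrative aside (not VM code): a minimal sketch of the index-mapping
// bookkeeping assumed above. map_index() records a scratch-pool index that
// landed at a different merged-pool index, and find_new_index() (used by the
// rewriters below) retrieves it later, with 0 meaning "no mapping recorded".
// The names ToyIndexMap/toy_map_index/toy_find_new_index are hypothetical.
#include <cstdint>
#include <vector>

struct ToyIndexMap {
  std::vector<uint16_t> new_index;  // old index -> new index, 0 = unmapped

  explicit ToyIndexMap(size_t old_pool_len) : new_index(old_pool_len, 0) {}

  void toy_map_index(uint16_t old_i, uint16_t merged_i) {
    new_index[old_i] = merged_i;
  }
  uint16_t toy_find_new_index(uint16_t old_i) const {
    return new_index[old_i];  // 0 when the entry kept its original index
  }
};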


u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  int new_ref_i = ref_i;
  bool match = (ref_i < *merge_cp_length_p) &&
               scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);

  if (!match) {

// ...
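
// Illustrative aside (not VM code): find-or-append in miniature. If the entry
// at the same index in the merged pool already matches, reuse that index;
// otherwise append the entry and record the remapping so later rewriters can
// patch references. (The real routine may also search the merged pool for an
// equal entry elsewhere before appending.) Uses the hypothetical ToyIndexMap
// from the aside above.
static uint16_t toy_find_or_append(std::vector<int>& merged_pool,
                                   int scratch_entry, uint16_t ref_i,
                                   ToyIndexMap& map) {
  if ((size_t)ref_i < merged_pool.size() && merged_pool[ref_i] == scratch_entry) {
    return ref_i;  // the reference needs no rewriting
  }
  merged_pool.push_back(scratch_entry);  // append at the current end
  uint16_t new_ref_i = (uint16_t)(merged_pool.size() - 1);
  if (new_ref_i != ref_i) {
    map.toy_map_index(ref_i, new_ref_i);  // record old -> new
  }
  return new_ref_i;
}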

bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {

  // rewrite constant pool references in the nest attributes:
  if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the Record attribute:
  if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the PermittedSubclasses attribute:
  if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the LoadableDescriptors attribute:
  if (!rewrite_cp_refs_in_loadable_descriptors_attribute(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods:
  if (!rewrite_cp_refs_in_methods(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the class_annotations:
  if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the fields_annotations:
  if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
    // propagate failure back to caller
    return false;
  }

  // rewrite constant pool references in the methods_annotations:
  if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {

// ...
        }
      }
    }
  }
  return true;
}

// Rewrite constant pool references in the PermittedSubclasses attribute.
bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
       InstanceKlass* scratch_class) {

  Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
  assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
  for (int i = 0; i < permitted_subclasses->length(); i++) {
    u2 cp_index = permitted_subclasses->at(i);
    permitted_subclasses->at_put(i, find_new_index(cp_index));
  }
  return true;
}

// Rewrite constant pool references in the LoadableDescriptors attribute.
bool VM_RedefineClasses::rewrite_cp_refs_in_loadable_descriptors_attribute(
       InstanceKlass* scratch_class) {

  Array<u2>* loadable_descriptors = scratch_class->loadable_descriptors();
  assert(loadable_descriptors != nullptr, "unexpected null loadable_descriptors");
  for (int i = 0; i < loadable_descriptors->length(); i++) {
    u2 cp_index = loadable_descriptors->at(i);
    loadable_descriptors->at_put(i, find_new_index(cp_index));
  }
  return true;
}
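
// Illustrative aside (not VM code): both attribute rewriters above share one
// pattern -- walk an array of u2 constant pool indexes and overwrite each one
// with its index in the merged pool. Sketched with the hypothetical
// ToyIndexMap from the first aside:
static void toy_rewrite_u2_indexes(std::vector<uint16_t>& attr_indexes,
                                   const ToyIndexMap& map) {
  for (size_t i = 0; i < attr_indexes.size(); i++) {
    attr_indexes[i] = map.toy_find_new_index(attr_indexes[i]);
  }
}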

// Rewrite constant pool references in the methods.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == nullptr || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  JavaThread* THREAD = JavaThread::current(); // For exception macros.
  ExceptionMark em(THREAD);

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version

// ...
  // walk through each stack_map_frame
  u2 calc_number_of_entries = 0;
  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
    // The stack_map_frame structure is a u1 frame_type followed by
    // 0 or more bytes of data:
    //
    // union stack_map_frame {
    //   same_frame;
    //   same_locals_1_stack_item_frame;
    //   same_locals_1_stack_item_frame_extended;
    //   chop_frame;
    //   same_frame_extended;
    //   append_frame;
    //   full_frame;
    // }

    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
    u1 frame_type = *stackmap_p;
    stackmap_p++;
    if (frame_type == 246) { // EARLY_LARVAL
      // Rewrite the cp refs in the unset-fields list, then fall through.
      rewrite_cp_refs_in_early_larval_stackmaps(stackmap_p, stackmap_end, calc_number_of_entries, frame_type);
      // An early_larval frame wraps the frame that follows it, so read the
      // wrapped frame's type and fall through to the normal handling below.
      assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
      frame_type = *stackmap_p;
      stackmap_p++;
    }

    // same_frame {
    //   u1 frame_type = SAME; /* 0-63 */
    // }
    if (frame_type <= StackMapReader::SAME_FRAME_END) {
      // nothing more to do for same_frame
    }

    // same_locals_1_stack_item_frame {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
    //   verification_type_info stack[1];
    // }
    else if (frame_type >= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_START &&
             frame_type <= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_END) {
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type);
    }

    // reserved for future use
    else if (frame_type >= StackMapReader::RESERVED_START &&
             frame_type <= StackMapReader::RESERVED_END) {

// ...

      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
    } break;

    // Uninitialized_variable_info {
    //   u1 tag = ITEM_Uninitialized; /* 8 */
    //   u2 offset;
    // }
    case ITEM_Uninitialized:
      assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
      stackmap_p_ref += 2;
      break;

    default:
      log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
      ShouldNotReachHere();
      break;
  } // end switch (tag)
} // end rewrite_cp_refs_in_verification_type_info()
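
// Illustrative aside (not VM code): the frame_type ranges the stack-map
// walker earlier in this section dispatches on, per JVMS 4.7.4, with 246
// taken as EARLY_LARVAL as in the code above (a Valhalla draft carve-out
// from the reserved range). The Toy* names are hypothetical.
enum class ToyFrameKind {
  SAME,                       // 0-63
  SAME_LOCALS_1_STACK_ITEM,   // 64-127
  RESERVED,                   // 128-245
  EARLY_LARVAL,               // 246 (Valhalla draft)
  SAME_LOCALS_1_EXTENDED,     // 247
  CHOP,                       // 248-250
  SAME_FRAME_EXTENDED,        // 251
  APPEND,                     // 252-254
  FULL_FRAME                  // 255
};

static ToyFrameKind toy_classify_frame_type(unsigned char t) {
  if (t <= 63)  return ToyFrameKind::SAME;
  if (t <= 127) return ToyFrameKind::SAME_LOCALS_1_STACK_ITEM;
  if (t <= 245) return ToyFrameKind::RESERVED;
  if (t == 246) return ToyFrameKind::EARLY_LARVAL;
  if (t == 247) return ToyFrameKind::SAME_LOCALS_1_EXTENDED;
  if (t <= 250) return ToyFrameKind::CHOP;
  if (t == 251) return ToyFrameKind::SAME_FRAME_EXTENDED;
  if (t <= 254) return ToyFrameKind::APPEND;
  return ToyFrameKind::FULL_FRAME;
}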


void VM_RedefineClasses::rewrite_cp_refs_in_early_larval_stackmaps(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type) {

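  // Layout consumed below, with field names following the Valhalla
  // class-file draft (the frame_type byte, 246, has already been read
  // by the caller, which also handles the wrapped frame):
  //
  // early_larval_frame {
  //   u1 frame_type = EARLY_LARVAL; /* 246 */
  //   u2 number_of_unset_fields;
  //   u2 unset_fields[number_of_unset_fields];
  //   stack_map_frame frame;
  // }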
  assert(stackmap_p_ref + 2 <= stackmap_end, "no room for number_of_unset_fields");
  u2 number_of_unset_fields = Bytes::get_Java_u2(stackmap_p_ref);
  stackmap_p_ref += 2;

  for (u2 i = 0; i < number_of_unset_fields; i++) {
    // each unset field is a cp index (a NameAndType reference)
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for unset field index");
    u2 name_and_ref_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(name_and_ref_index);
    if (new_cp_index != 0) {
      log_debug(redefine, class, stackmap)("mapped old name_and_ref_index=%d", name_and_ref_index);
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      name_and_ref_index = new_cp_index;
    }
    log_debug(redefine, class, stackmap)
      ("frame_i=%u, frame_type=%u, name_and_ref_index=%d", frame_i, frame_type, name_and_ref_index);

    stackmap_p_ref += 2;
  }
} // end rewrite_cp_refs_in_early_larval_stackmaps()

// Change the constant pool associated with klass scratch_class to scratch_cp.
// scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
// and the smaller constant pool is associated with scratch_class.
void VM_RedefineClasses::set_new_constant_pool(
       ClassLoaderData* loader_data,
       InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
       int scratch_cp_length, TRAPS) {
  assert(scratch_cp->length() >= scratch_cp_length, "sanity check");

  // scratch_cp is a merged constant pool and has enough space for a
  // worst case merge situation. We want to associate the minimum
  // sized constant pool with the klass to save space.
  ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
  constantPoolHandle smaller_cp(THREAD, cp);

  // preserve version() value in the smaller copy
  int version = scratch_cp->version();
  assert(version != 0, "sanity check");
  smaller_cp->set_version(version);

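// Illustrative aside (not VM code): the shrink step in miniature. The merged
// pool was sized for the worst case, so only the first scratch_cp_length
// entries are live; installing a right-sized copy avoids carrying the slack
// for the lifetime of the class. toy_shrink_pool is hypothetical.
#include <vector>

static std::vector<int> toy_shrink_pool(const std::vector<int>& merged_pool,
                                        int used_len) {
  // copy only the live prefix into an exact-size allocation
  return std::vector<int>(merged_pool.begin(), merged_pool.begin() + used_len);
}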