< prev index next >

src/hotspot/share/prims/jvmtiRedefineClasses.cpp

Print this page

 600       if (scratch_i != *merge_cp_length_p) {
 601         // The new entry in *merge_cp_p is at a different index than
 602         // the new entry in scratch_cp so we need to map the index values.
 603         map_index(scratch_cp, scratch_i, *merge_cp_length_p);
 604       }
 605       (*merge_cp_length_p)++;
 606     } break;
 607 
 608     // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
 609     // ClassIndex
 610     case JVM_CONSTANT_ClassIndex: // fall through
 611 
 612     // Invalid is used as the tag for the second constant pool entry
 613     // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
 614     // not be seen by itself.
 615     case JVM_CONSTANT_Invalid: // fall through
 616 
 617     // At this stage, String could be here, but not StringIndex
 618     case JVM_CONSTANT_StringIndex: // fall through
 619 
 620     // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
 621     // here
 622     case JVM_CONSTANT_UnresolvedClassInError: // fall through
 623 
 624     default:
 625     {
 626       // leave a breadcrumb
 627       jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
 628       ShouldNotReachHere();
 629     } break;
 630   } // end switch tag value
 631 } // end append_entry()
 632 
 633 
 634 u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
 635       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {
 636 
 637   int new_ref_i = ref_i;
 638   bool match = (ref_i < *merge_cp_length_p) &&
 639                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);
 640 
 641   if (!match) {

1906 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {
1907 
1908   // rewrite constant pool references in the nest attributes:
1909   if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1910     // propagate failure back to caller
1911     return false;
1912   }
1913 
1914   // rewrite constant pool references in the Record attribute:
1915   if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
1916     // propagate failure back to caller
1917     return false;
1918   }
1919 
1920   // rewrite constant pool references in the PermittedSubclasses attribute:
1921   if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1922     // propagate failure back to caller
1923     return false;
1924   }
1925 






1926   // rewrite constant pool references in the methods:
1927   if (!rewrite_cp_refs_in_methods(scratch_class)) {
1928     // propagate failure back to caller
1929     return false;
1930   }
1931 
1932   // rewrite constant pool references in the class_annotations:
1933   if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
1934     // propagate failure back to caller
1935     return false;
1936   }
1937 
1938   // rewrite constant pool references in the fields_annotations:
1939   if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
1940     // propagate failure back to caller
1941     return false;
1942   }
1943 
1944   // rewrite constant pool references in the methods_annotations:
1945   if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {

2054         }
2055       }
2056     }
2057   }
2058   return true;
2059 }
2060 
2061 // Rewrite constant pool references in the PermittedSubclasses attribute.
2062 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
2063        InstanceKlass* scratch_class) {
2064 
2065   Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
2066   assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
2067   for (int i = 0; i < permitted_subclasses->length(); i++) {
2068     u2 cp_index = permitted_subclasses->at(i);
2069     permitted_subclasses->at_put(i, find_new_index(cp_index));
2070   }
2071   return true;
2072 }
2073 













2074 // Rewrite constant pool references in the methods.
2075 bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {
2076 
2077   Array<Method*>* methods = scratch_class->methods();
2078 
2079   if (methods == nullptr || methods->length() == 0) {
2080     // no methods so nothing to do
2081     return true;
2082   }
2083 
2084   JavaThread* THREAD = JavaThread::current(); // For exception macros.
2085   ExceptionMark em(THREAD);
2086 
2087   // rewrite constant pool references in the methods:
2088   for (int i = methods->length() - 1; i >= 0; i--) {
2089     methodHandle method(THREAD, methods->at(i));
2090     methodHandle new_method;
2091     rewrite_cp_refs_in_method(method, &new_method, THREAD);
2092     if (!new_method.is_null()) {
2093       // the method has been replaced so save the new method version

3241   // walk through each stack_map_frame
3242   u2 calc_number_of_entries = 0;
3243   for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
3244     // The stack_map_frame structure is a u1 frame_type followed by
3245     // 0 or more bytes of data:
3246     //
3247     // union stack_map_frame {
3248     //   same_frame;
3249     //   same_locals_1_stack_item_frame;
3250     //   same_locals_1_stack_item_frame_extended;
3251     //   chop_frame;
3252     //   same_frame_extended;
3253     //   append_frame;
3254     //   full_frame;
3255     // }
3256 
3257     assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
3258     u1 frame_type = *stackmap_p;
3259     stackmap_p++;
3260 








3261     // same_frame {
3262     //   u1 frame_type = SAME; /* 0-63 */
3263     // }
3264     if (frame_type <= StackMapReader::SAME_FRAME_END) {
3265       // nothing more to do for same_frame
3266     }
3267 
3268     // same_locals_1_stack_item_frame {
3269     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
3270     //   verification_type_info stack[1];
3271     // }
3272     else if (frame_type >= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_START &&
3273              frame_type <= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_END) {
3274       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3275         calc_number_of_entries, frame_type);
3276     }
3277 
3278     // reserved for future use
3279     else if (frame_type >= StackMapReader::RESERVED_START &&
3280              frame_type <= StackMapReader::RESERVED_END) {

3450       ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3451   } break;
3452 
3453   // Uninitialized_variable_info {
3454   //   u1 tag = ITEM_Uninitialized; /* 8 */
3455   //   u2 offset;
3456   // }
3457   case ITEM_Uninitialized:
3458     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3459     stackmap_p_ref += 2;
3460     break;
3461 
3462   default:
3463     log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3464     ShouldNotReachHere();
3465     break;
3466   } // end switch (tag)
3467 } // end rewrite_cp_refs_in_verification_type_info()
3468 
3469 























3470 // Change the constant pool associated with klass scratch_class to scratch_cp.
3471 // scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
3472 // and the smaller constant pool is associated with scratch_class.
3473 void VM_RedefineClasses::set_new_constant_pool(
3474        ClassLoaderData* loader_data,
3475        InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3476        int scratch_cp_length, TRAPS) {
3477   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3478 
3479   // scratch_cp is a merged constant pool and has enough space for a
3480   // worst case merge situation. We want to associate the minimum
3481   // sized constant pool with the klass to save space.
3482   ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3483   constantPoolHandle smaller_cp(THREAD, cp);
3484 
3485   // preserve version() value in the smaller copy
3486   int version = scratch_cp->version();
3487   assert(version != 0, "sanity check");
3488   smaller_cp->set_version(version);
3489 

 600       if (scratch_i != *merge_cp_length_p) {
 601         // The new entry in *merge_cp_p is at a different index than
 602         // the new entry in scratch_cp so we need to map the index values.
 603         map_index(scratch_cp, scratch_i, *merge_cp_length_p);
 604       }
 605       (*merge_cp_length_p)++;
 606     } break;
 607 
 608     // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
 609     // ClassIndex
 610     case JVM_CONSTANT_ClassIndex: // fall through
 611 
 612     // Invalid is used as the tag for the second constant pool entry
 613     // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
 614     // not be seen by itself.
 615     case JVM_CONSTANT_Invalid: // fall through
 616 
 617     // At this stage, String could be here, but not StringIndex
 618     case JVM_CONSTANT_StringIndex: // fall through
 619 
 620     // At this stage JVM_CONSTANT_UnresolvedClassInError should not be here

 621     case JVM_CONSTANT_UnresolvedClassInError: // fall through
 622 
 623     default:
 624     {
 625       // leave a breadcrumb
 626       jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
 627       ShouldNotReachHere();
 628     } break;
 629   } // end switch tag value
 630 } // end append_entry()
 631 
 632 
 633 u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
 634       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {
 635 
 636   int new_ref_i = ref_i;
 637   bool match = (ref_i < *merge_cp_length_p) &&
 638                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);
 639 
 640   if (!match) {

1905 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {
1906 
1907   // rewrite constant pool references in the nest attributes:
1908   if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1909     // propagate failure back to caller
1910     return false;
1911   }
1912 
1913   // rewrite constant pool references in the Record attribute:
1914   if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
1915     // propagate failure back to caller
1916     return false;
1917   }
1918 
1919   // rewrite constant pool references in the PermittedSubclasses attribute:
1920   if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1921     // propagate failure back to caller
1922     return false;
1923   }
1924 
1925   // rewrite constant pool references in the LoadableDescriptors attribute:
1926   if (!rewrite_cp_refs_in_loadable_descriptors_attribute(scratch_class)) {
1927     // propagate failure back to caller
1928     return false;
1929   }
1930 
1931   // rewrite constant pool references in the methods:
1932   if (!rewrite_cp_refs_in_methods(scratch_class)) {
1933     // propagate failure back to caller
1934     return false;
1935   }
1936 
1937   // rewrite constant pool references in the class_annotations:
1938   if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
1939     // propagate failure back to caller
1940     return false;
1941   }
1942 
1943   // rewrite constant pool references in the fields_annotations:
1944   if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
1945     // propagate failure back to caller
1946     return false;
1947   }
1948 
1949   // rewrite constant pool references in the methods_annotations:
1950   if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {

2059         }
2060       }
2061     }
2062   }
2063   return true;
2064 }
2065 
2066 // Rewrite constant pool references in the PermittedSubclasses attribute.
2067 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
2068        InstanceKlass* scratch_class) {
2069 
2070   Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
2071   assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
2072   for (int i = 0; i < permitted_subclasses->length(); i++) {
2073     u2 cp_index = permitted_subclasses->at(i);
2074     permitted_subclasses->at_put(i, find_new_index(cp_index));
2075   }
2076   return true;
2077 }
2078 
2079 // Rewrite constant pool references in the LoadableDescriptors attribute.
2080 bool VM_RedefineClasses::rewrite_cp_refs_in_loadable_descriptors_attribute(
2081        InstanceKlass* scratch_class) {
2082 
2083   Array<u2>* loadable_descriptors = scratch_class->loadable_descriptors();
2084   assert(loadable_descriptors != nullptr, "unexpected null loadable_descriptors");
2085   for (int i = 0; i < loadable_descriptors->length(); i++) {
2086     u2 cp_index = loadable_descriptors->at(i);
2087     loadable_descriptors->at_put(i, find_new_index(cp_index));
2088   }
2089   return true;
2090 }
2091 
2092 // Rewrite constant pool references in the methods.
2093 bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {
2094 
2095   Array<Method*>* methods = scratch_class->methods();
2096 
2097   if (methods == nullptr || methods->length() == 0) {
2098     // no methods so nothing to do
2099     return true;
2100   }
2101 
2102   JavaThread* THREAD = JavaThread::current(); // For exception macros.
2103   ExceptionMark em(THREAD);
2104 
2105   // rewrite constant pool references in the methods:
2106   for (int i = methods->length() - 1; i >= 0; i--) {
2107     methodHandle method(THREAD, methods->at(i));
2108     methodHandle new_method;
2109     rewrite_cp_refs_in_method(method, &new_method, THREAD);
2110     if (!new_method.is_null()) {
2111       // the method has been replaced so save the new method version

3259   // walk through each stack_map_frame
3260   u2 calc_number_of_entries = 0;
3261   for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
3262     // The stack_map_frame structure is a u1 frame_type followed by
3263     // 0 or more bytes of data:
3264     //
3265     // union stack_map_frame {
3266     //   same_frame;
3267     //   same_locals_1_stack_item_frame;
3268     //   same_locals_1_stack_item_frame_extended;
3269     //   chop_frame;
3270     //   same_frame_extended;
3271     //   append_frame;
3272     //   full_frame;
3273     // }
3274 
3275     assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
3276     u1 frame_type = *stackmap_p;
3277     stackmap_p++;
3278 
3279    if (frame_type == 246) {  // EARLY_LARVAL
3280      // Rewrite the constant pool refs in the unset fields, then fall through.
3281      rewrite_cp_refs_in_early_larval_stackmaps(stackmap_p, stackmap_end, calc_number_of_entries, frame_type);
3282      // The larval frames point to the next frame, so advance to the next frame and fall through.
3283      frame_type = *stackmap_p;
3284      stackmap_p++;
3285    }
3286 
3287     // same_frame {
3288     //   u1 frame_type = SAME; /* 0-63 */
3289     // }
3290     if (frame_type <= StackMapReader::SAME_FRAME_END) {
3291       // nothing more to do for same_frame
3292     }
3293 
3294     // same_locals_1_stack_item_frame {
3295     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
3296     //   verification_type_info stack[1];
3297     // }
3298     else if (frame_type >= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_START &&
3299              frame_type <= StackMapReader::SAME_LOCALS_1_STACK_ITEM_FRAME_END) {
3300       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3301         calc_number_of_entries, frame_type);
3302     }
3303 
3304     // reserved for future use
3305     else if (frame_type >= StackMapReader::RESERVED_START &&
3306              frame_type <= StackMapReader::RESERVED_END) {

3476       ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3477   } break;
3478 
3479   // Uninitialized_variable_info {
3480   //   u1 tag = ITEM_Uninitialized; /* 8 */
3481   //   u2 offset;
3482   // }
3483   case ITEM_Uninitialized:
3484     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3485     stackmap_p_ref += 2;
3486     break;
3487 
3488   default:
3489     log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3490     ShouldNotReachHere();
3491     break;
3492   } // end switch (tag)
3493 } // end rewrite_cp_refs_in_verification_type_info()
3494 
3495 
3496 void VM_RedefineClasses::rewrite_cp_refs_in_early_larval_stackmaps(
3497        address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3498        u1 frame_type) {
3499 
3500     u2 num_early_larval_stackmaps = Bytes::get_Java_u2(stackmap_p_ref);
3501     stackmap_p_ref += 2;
3502 
3503     for (u2 i = 0; i < num_early_larval_stackmaps; i++) {
3504 
3505       u2 name_and_ref_index = Bytes::get_Java_u2(stackmap_p_ref);
3506       u2 new_cp_index = find_new_index(name_and_ref_index);
3507       if (new_cp_index != 0) {
3508         log_debug(redefine, class, stackmap)("mapped old name_and_ref_index=%d", name_and_ref_index);
3509         Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3510         name_and_ref_index = new_cp_index;
3511       }
3512       log_debug(redefine, class, stackmap)
3513         ("frame_i=%u, frame_type=%u, name_and_ref_index=%d", frame_i, frame_type, name_and_ref_index);
3514 
3515       stackmap_p_ref += 2;
3516     }
3517 } // rewrite_cp_refs_in_early_larval_stackmaps
3518 
3519 // Change the constant pool associated with klass scratch_class to scratch_cp.
3520 // scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
3521 // and the smaller constant pool is associated with scratch_class.
3522 void VM_RedefineClasses::set_new_constant_pool(
3523        ClassLoaderData* loader_data,
3524        InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3525        int scratch_cp_length, TRAPS) {
3526   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3527 
3528   // scratch_cp is a merged constant pool and has enough space for a
3529   // worst case merge situation. We want to associate the minimum
3530   // sized constant pool with the klass to save space.
3531   ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3532   constantPoolHandle smaller_cp(THREAD, cp);
3533 
3534   // preserve version() value in the smaller copy
3535   int version = scratch_cp->version();
3536   assert(version != 0, "sanity check");
3537   smaller_cp->set_version(version);
3538 
< prev index next >