< prev index next >

src/hotspot/share/prims/jvmtiRedefineClasses.cpp

Print this page

  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "cds/cdsConfig.hpp"
  26 #include "cds/metaspaceShared.hpp"
  27 #include "classfile/classFileStream.hpp"
  28 #include "classfile/classLoaderDataGraph.hpp"
  29 #include "classfile/classLoadInfo.hpp"
  30 #include "classfile/javaClasses.inline.hpp"

  31 #include "classfile/metadataOnStackMark.hpp"

  32 #include "classfile/symbolTable.hpp"
  33 #include "classfile/klassFactory.hpp"
  34 #include "classfile/verifier.hpp"
  35 #include "classfile/vmClasses.hpp"
  36 #include "classfile/vmSymbols.hpp"
  37 #include "code/codeCache.hpp"
  38 #include "compiler/compileBroker.hpp"
  39 #include "interpreter/oopMapCache.hpp"
  40 #include "interpreter/rewriter.hpp"
  41 #include "jfr/jfrEvents.hpp"
  42 #include "logging/logStream.hpp"
  43 #include "memory/metadataFactory.hpp"
  44 #include "memory/resourceArea.hpp"
  45 #include "memory/universe.hpp"
  46 #include "oops/annotations.hpp"
  47 #include "oops/constantPool.hpp"
  48 #include "oops/fieldStreams.inline.hpp"
  49 #include "oops/klass.inline.hpp"
  50 #include "oops/klassVtable.hpp"
  51 #include "oops/method.hpp"
  52 #include "oops/oop.inline.hpp"
  53 #include "oops/recordComponent.hpp"

 594       if (scratch_i != *merge_cp_length_p) {
 595         // The new entry in *merge_cp_p is at a different index than
 596         // the new entry in scratch_cp so we need to map the index values.
 597         map_index(scratch_cp, scratch_i, *merge_cp_length_p);
 598       }
 599       (*merge_cp_length_p)++;
 600     } break;
 601 
 602     // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
 603     // ClassIndex
 604     case JVM_CONSTANT_ClassIndex: // fall through
 605 
 606     // Invalid is used as the tag for the second constant pool entry
 607     // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
 608     // not be seen by itself.
 609     case JVM_CONSTANT_Invalid: // fall through
 610 
 611     // At this stage, String could be here, but not StringIndex
 612     case JVM_CONSTANT_StringIndex: // fall through
 613 
 614     // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
 615     // here
 616     case JVM_CONSTANT_UnresolvedClassInError: // fall through
 617 
 618     default:
 619     {
 620       // leave a breadcrumb
 621       jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
 622       ShouldNotReachHere();
 623     } break;
 624   } // end switch tag value
 625 } // end append_entry()
 626 
 627 
 628 u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
 629       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {
 630 
 631   int new_ref_i = ref_i;
 632   bool match = (ref_i < *merge_cp_length_p) &&
 633                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);
 634 
 635   if (!match) {

1919 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {
1920 
1921   // rewrite constant pool references in the nest attributes:
1922   if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1923     // propagate failure back to caller
1924     return false;
1925   }
1926 
1927   // rewrite constant pool references in the Record attribute:
1928   if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
1929     // propagate failure back to caller
1930     return false;
1931   }
1932 
1933   // rewrite constant pool references in the PermittedSubclasses attribute:
1934   if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1935     // propagate failure back to caller
1936     return false;
1937   }
1938 






1939   // rewrite constant pool references in the methods:
1940   if (!rewrite_cp_refs_in_methods(scratch_class)) {
1941     // propagate failure back to caller
1942     return false;
1943   }
1944 
1945   // rewrite constant pool references in the class_annotations:
1946   if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
1947     // propagate failure back to caller
1948     return false;
1949   }
1950 
1951   // rewrite constant pool references in the fields_annotations:
1952   if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
1953     // propagate failure back to caller
1954     return false;
1955   }
1956 
1957   // rewrite constant pool references in the methods_annotations:
1958   if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {

2067         }
2068       }
2069     }
2070   }
2071   return true;
2072 }
2073 
2074 // Rewrite constant pool references in the PermittedSubclasses attribute.
2075 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
2076        InstanceKlass* scratch_class) {
2077 
2078   Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
2079   assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
2080   for (int i = 0; i < permitted_subclasses->length(); i++) {
2081     u2 cp_index = permitted_subclasses->at(i);
2082     permitted_subclasses->at_put(i, find_new_index(cp_index));
2083   }
2084   return true;
2085 }
2086 













2087 // Rewrite constant pool references in the methods.
2088 bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {
2089 
2090   Array<Method*>* methods = scratch_class->methods();
2091 
2092   if (methods == nullptr || methods->length() == 0) {
2093     // no methods so nothing to do
2094     return true;
2095   }
2096 
2097   JavaThread* THREAD = JavaThread::current(); // For exception macros.
2098   ExceptionMark em(THREAD);
2099 
2100   // rewrite constant pool references in the methods:
2101   for (int i = methods->length() - 1; i >= 0; i--) {
2102     methodHandle method(THREAD, methods->at(i));
2103     methodHandle new_method;
2104     rewrite_cp_refs_in_method(method, &new_method, THREAD);
2105     if (!new_method.is_null()) {
2106       // the method has been replaced so save the new method version

3254   // walk through each stack_map_frame
3255   u2 calc_number_of_entries = 0;
3256   for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
3257     // The stack_map_frame structure is a u1 frame_type followed by
3258     // 0 or more bytes of data:
3259     //
3260     // union stack_map_frame {
3261     //   same_frame;
3262     //   same_locals_1_stack_item_frame;
3263     //   same_locals_1_stack_item_frame_extended;
3264     //   chop_frame;
3265     //   same_frame_extended;
3266     //   append_frame;
3267     //   full_frame;
3268     // }
3269 
3270     assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
3271     u1 frame_type = *stackmap_p;
3272     stackmap_p++;
3273 








3274     // same_frame {
3275     //   u1 frame_type = SAME; /* 0-63 */
3276     // }
3277     if (frame_type <= 63) {
3278       // nothing more to do for same_frame
3279     }
3280 
3281     // same_locals_1_stack_item_frame {
3282     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
3283     //   verification_type_info stack[1];
3284     // }
3285     else if (frame_type >= 64 && frame_type <= 127) {
3286       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3287         calc_number_of_entries, frame_type);
3288     }
3289 
3290     // reserved for future use
3291     else if (frame_type >= 128 && frame_type <= 246) {
3292       // nothing more to do for reserved frame_types
3293     }
3294 
3295     // same_locals_1_stack_item_frame_extended {
3296     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
3297     //   u2 offset_delta;
3298     //   verification_type_info stack[1];
3299     // }
3300     else if (frame_type == 247) {
3301       stackmap_p += 2;
3302       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3303         calc_number_of_entries, frame_type);
3304     }
3305 
3306     // chop_frame {
3307     //   u1 frame_type = CHOP; /* 248-250 */
3308     //   u2 offset_delta;
3309     // }
3310     else if (frame_type >= 248 && frame_type <= 250) {
3311       stackmap_p += 2;

3459       ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3460   } break;
3461 
3462   // Uninitialized_variable_info {
3463   //   u1 tag = ITEM_Uninitialized; /* 8 */
3464   //   u2 offset;
3465   // }
3466   case ITEM_Uninitialized:
3467     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3468     stackmap_p_ref += 2;
3469     break;
3470 
3471   default:
3472     log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3473     ShouldNotReachHere();
3474     break;
3475   } // end switch (tag)
3476 } // end rewrite_cp_refs_in_verification_type_info()
3477 
3478 























3479 // Change the constant pool associated with klass scratch_class to scratch_cp.
3480 // scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
3481 // and the smaller constant pool is associated with scratch_class.
3482 void VM_RedefineClasses::set_new_constant_pool(
3483        ClassLoaderData* loader_data,
3484        InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3485        int scratch_cp_length, TRAPS) {
3486   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3487 
3488   // scratch_cp is a merged constant pool and has enough space for a
3489   // worst case merge situation. We want to associate the minimum
3490   // sized constant pool with the klass to save space.
3491   ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3492   constantPoolHandle smaller_cp(THREAD, cp);
3493 
3494   // preserve version() value in the smaller copy
3495   int version = scratch_cp->version();
3496   assert(version != 0, "sanity check");
3497   smaller_cp->set_version(version);
3498 

  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "cds/cdsConfig.hpp"
  26 #include "cds/metaspaceShared.hpp"
  27 #include "classfile/classFileStream.hpp"
  28 #include "classfile/classLoaderDataGraph.hpp"
  29 #include "classfile/classLoadInfo.hpp"
  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "classfile/klassFactory.hpp"
  32 #include "classfile/metadataOnStackMark.hpp"
  33 #include "classfile/stackMapTable.hpp"
  34 #include "classfile/symbolTable.hpp"

  35 #include "classfile/verifier.hpp"
  36 #include "classfile/vmClasses.hpp"
  37 #include "classfile/vmSymbols.hpp"
  38 #include "code/codeCache.hpp"
  39 #include "compiler/compileBroker.hpp"
  40 #include "interpreter/oopMapCache.hpp"
  41 #include "interpreter/rewriter.hpp"
  42 #include "jfr/jfrEvents.hpp"
  43 #include "logging/logStream.hpp"
  44 #include "memory/metadataFactory.hpp"
  45 #include "memory/resourceArea.hpp"
  46 #include "memory/universe.hpp"
  47 #include "oops/annotations.hpp"
  48 #include "oops/constantPool.hpp"
  49 #include "oops/fieldStreams.inline.hpp"
  50 #include "oops/klass.inline.hpp"
  51 #include "oops/klassVtable.hpp"
  52 #include "oops/method.hpp"
  53 #include "oops/oop.inline.hpp"
  54 #include "oops/recordComponent.hpp"

 595       if (scratch_i != *merge_cp_length_p) {
 596         // The new entry in *merge_cp_p is at a different index than
 597         // the new entry in scratch_cp so we need to map the index values.
 598         map_index(scratch_cp, scratch_i, *merge_cp_length_p);
 599       }
 600       (*merge_cp_length_p)++;
 601     } break;
 602 
 603     // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
 604     // ClassIndex
 605     case JVM_CONSTANT_ClassIndex: // fall through
 606 
 607     // Invalid is used as the tag for the second constant pool entry
 608     // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
 609     // not be seen by itself.
 610     case JVM_CONSTANT_Invalid: // fall through
 611 
 612     // At this stage, String could be here, but not StringIndex
 613     case JVM_CONSTANT_StringIndex: // fall through
 614 
 615     // At this stage JVM_CONSTANT_UnresolvedClassInError should not be here

 616     case JVM_CONSTANT_UnresolvedClassInError: // fall through
 617 
 618     default:
 619     {
 620       // leave a breadcrumb
 621       jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
 622       ShouldNotReachHere();
 623     } break;
 624   } // end switch tag value
 625 } // end append_entry()
 626 
 627 
 628 u2 VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
 629       int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {
 630 
 631   int new_ref_i = ref_i;
 632   bool match = (ref_i < *merge_cp_length_p) &&
 633                scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);
 634 
 635   if (!match) {

1919 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class) {
1920 
1921   // rewrite constant pool references in the nest attributes:
1922   if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1923     // propagate failure back to caller
1924     return false;
1925   }
1926 
1927   // rewrite constant pool references in the Record attribute:
1928   if (!rewrite_cp_refs_in_record_attribute(scratch_class)) {
1929     // propagate failure back to caller
1930     return false;
1931   }
1932 
1933   // rewrite constant pool references in the PermittedSubclasses attribute:
1934   if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1935     // propagate failure back to caller
1936     return false;
1937   }
1938 
1939   // rewrite constant pool references in the LoadableDescriptors attribute:
1940   if (!rewrite_cp_refs_in_loadable_descriptors_attribute(scratch_class)) {
1941     // propagate failure back to caller
1942     return false;
1943   }
1944 
1945   // rewrite constant pool references in the methods:
1946   if (!rewrite_cp_refs_in_methods(scratch_class)) {
1947     // propagate failure back to caller
1948     return false;
1949   }
1950 
1951   // rewrite constant pool references in the class_annotations:
1952   if (!rewrite_cp_refs_in_class_annotations(scratch_class)) {
1953     // propagate failure back to caller
1954     return false;
1955   }
1956 
1957   // rewrite constant pool references in the fields_annotations:
1958   if (!rewrite_cp_refs_in_fields_annotations(scratch_class)) {
1959     // propagate failure back to caller
1960     return false;
1961   }
1962 
1963   // rewrite constant pool references in the methods_annotations:
1964   if (!rewrite_cp_refs_in_methods_annotations(scratch_class)) {

2073         }
2074       }
2075     }
2076   }
2077   return true;
2078 }
2079 
2080 // Rewrite constant pool references in the PermittedSubclasses attribute.
2081 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
2082        InstanceKlass* scratch_class) {
2083 
2084   Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
2085   assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
2086   for (int i = 0; i < permitted_subclasses->length(); i++) {
2087     u2 cp_index = permitted_subclasses->at(i);
2088     permitted_subclasses->at_put(i, find_new_index(cp_index));
2089   }
2090   return true;
2091 }
2092 
2093 // Rewrite constant pool references in the LoadableDescriptors attribute.
2094 bool VM_RedefineClasses::rewrite_cp_refs_in_loadable_descriptors_attribute(
2095        InstanceKlass* scratch_class) {
2096 
2097   Array<u2>* loadable_descriptors = scratch_class->loadable_descriptors();
2098   assert(loadable_descriptors != nullptr, "unexpected null loadable_descriptors");
2099   for (int i = 0; i < loadable_descriptors->length(); i++) {
2100     u2 cp_index = loadable_descriptors->at(i);
2101     loadable_descriptors->at_put(i, find_new_index(cp_index));
2102   }
2103   return true;
2104 }
2105 
2106 // Rewrite constant pool references in the methods.
2107 bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class) {
2108 
2109   Array<Method*>* methods = scratch_class->methods();
2110 
2111   if (methods == nullptr || methods->length() == 0) {
2112     // no methods so nothing to do
2113     return true;
2114   }
2115 
2116   JavaThread* THREAD = JavaThread::current(); // For exception macros.
2117   ExceptionMark em(THREAD);
2118 
2119   // rewrite constant pool references in the methods:
2120   for (int i = methods->length() - 1; i >= 0; i--) {
2121     methodHandle method(THREAD, methods->at(i));
2122     methodHandle new_method;
2123     rewrite_cp_refs_in_method(method, &new_method, THREAD);
2124     if (!new_method.is_null()) {
2125       // the method has been replaced so save the new method version

3273   // walk through each stack_map_frame
3274   u2 calc_number_of_entries = 0;
3275   for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
3276     // The stack_map_frame structure is a u1 frame_type followed by
3277     // 0 or more bytes of data:
3278     //
3279     // union stack_map_frame {
3280     //   same_frame;
3281     //   same_locals_1_stack_item_frame;
3282     //   same_locals_1_stack_item_frame_extended;
3283     //   chop_frame;
3284     //   same_frame_extended;
3285     //   append_frame;
3286     //   full_frame;
3287     // }
3288 
3289     assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
3290     u1 frame_type = *stackmap_p;
3291     stackmap_p++;
3292 
3293    if (frame_type == 246) {  // EARLY_LARVAL
3295      // Rewrite constant pool references in the unset fields, then fall through.
3295      rewrite_cp_refs_in_early_larval_stackmaps(stackmap_p, stackmap_end, calc_number_of_entries, frame_type);
3296      // The larval frames point to the next frame, so advance to the next frame and fall through.
3297      frame_type = *stackmap_p;
3298      stackmap_p++;
3299    }
3300 
3301     // same_frame {
3302     //   u1 frame_type = SAME; /* 0-63 */
3303     // }
3304     if (frame_type <= 63) {
3305       // nothing more to do for same_frame
3306     }
3307 
3308     // same_locals_1_stack_item_frame {
3309     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
3310     //   verification_type_info stack[1];
3311     // }
3312     else if (frame_type >= 64 && frame_type <= 127) {
3313       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3314         calc_number_of_entries, frame_type);
3315     }
3316 
3317     // reserved for future use
3318     else if (frame_type >= 128 && frame_type <= 245) {
3319       // nothing more to do for reserved frame_types
3320     }
3321 
3322     // same_locals_1_stack_item_frame_extended {
3323     //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
3324     //   u2 offset_delta;
3325     //   verification_type_info stack[1];
3326     // }
3327     else if (frame_type == 247) {
3328       stackmap_p += 2;
3329       rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
3330         calc_number_of_entries, frame_type);
3331     }
3332 
3333     // chop_frame {
3334     //   u1 frame_type = CHOP; /* 248-250 */
3335     //   u2 offset_delta;
3336     // }
3337     else if (frame_type >= 248 && frame_type <= 250) {
3338       stackmap_p += 2;

3486       ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
3487   } break;
3488 
3489   // Uninitialized_variable_info {
3490   //   u1 tag = ITEM_Uninitialized; /* 8 */
3491   //   u2 offset;
3492   // }
3493   case ITEM_Uninitialized:
3494     assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
3495     stackmap_p_ref += 2;
3496     break;
3497 
3498   default:
3499     log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
3500     ShouldNotReachHere();
3501     break;
3502   } // end switch (tag)
3503 } // end rewrite_cp_refs_in_verification_type_info()
3504 
3505 
3506 void VM_RedefineClasses::rewrite_cp_refs_in_early_larval_stackmaps(
3507        address& stackmap_p_ref, address stackmap_end, u2 frame_i,
3508        u1 frame_type) {
3509 
3510     u2 num_early_larval_stackmaps = Bytes::get_Java_u2(stackmap_p_ref);
3511     stackmap_p_ref += 2;
3512 
3513     for (u2 i = 0; i < num_early_larval_stackmaps; i++) {
3514 
3515       u2 name_and_ref_index = Bytes::get_Java_u2(stackmap_p_ref);
3516       u2 new_cp_index = find_new_index(name_and_ref_index);
3517       if (new_cp_index != 0) {
3518         log_debug(redefine, class, stackmap)("mapped old name_and_ref_index=%d", name_and_ref_index);
3519         Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
3520         name_and_ref_index = new_cp_index;
3521       }
3522       log_debug(redefine, class, stackmap)
3523         ("frame_i=%u, frame_type=%u, name_and_ref_index=%d", frame_i, frame_type, name_and_ref_index);
3524 
3525       stackmap_p_ref += 2;
3526     }
3527 } // rewrite_cp_refs_in_early_larval_stackmaps
3528 
3529 // Change the constant pool associated with klass scratch_class to scratch_cp.
3530 // scratch_cp_length elements are copied from scratch_cp to a smaller constant pool
3531 // and the smaller constant pool is associated with scratch_class.
3532 void VM_RedefineClasses::set_new_constant_pool(
3533        ClassLoaderData* loader_data,
3534        InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
3535        int scratch_cp_length, TRAPS) {
3536   assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
3537 
3538   // scratch_cp is a merged constant pool and has enough space for a
3539   // worst case merge situation. We want to associate the minimum
3540   // sized constant pool with the klass to save space.
3541   ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
3542   constantPoolHandle smaller_cp(THREAD, cp);
3543 
3544   // preserve version() value in the smaller copy
3545   int version = scratch_cp->version();
3546   assert(version != 0, "sanity check");
3547   smaller_cp->set_version(version);
3548 
< prev index next >