
src/hotspot/share/oops/resolvedMethodEntry.cpp

*** 63,10 ***
--- 63,85 ---
      ArchiveBuilder::current()->mark_and_relocate_to_buffered_addr(&_method);
    }
    if (bytecode1() == Bytecodes::_invokeinterface) {
      ArchiveBuilder::current()->mark_and_relocate_to_buffered_addr(&_entry_specific._interface_klass);
    }
+ #if 0
+   // OLD CODE ... some of it may need to be salvaged.
+   Bytecodes::Code invoke_code = bytecode_1();
+   if (invoke_code != (Bytecodes::Code)0) {
+     Metadata* f1 = f1_ord();
+     if (f1 != nullptr) {
+       ArchiveBuilder::current()->mark_and_relocate_to_buffered_addr(&_f1);
+       switch (invoke_code) {
+       case Bytecodes::_invokeinterface:
+         assert(0, "not implemented");
+         //assert(f1->is_klass(), "");
+         //ArchiveBuilder::current()->mark_and_relocate_to_buffered_addr(&_f2); // f2 is interface method
+         return false;
+       case Bytecodes::_invokestatic:
+         // For safety, we support invokestatic only for invoking methods in MethodHandle.
+         // FIXME -- further restrict it to linkToStatic(), etc?
+         assert(bytecode_2() == (Bytecodes::Code)0, "must be");
+         assert(f1->is_method(), "");
+         assert(f1_as_method()->method_holder()->name()->equals("java/lang/invoke/MethodHandle") ||
+                f1_as_method()->method_holder()->name()->equals("java/lang/invoke/MethodHandleNatives"), "sanity");
+         return true;
+       case Bytecodes::_invokespecial:
+         assert(f1->is_method(), "must be");
+         // Also need to work on bytecode_2() below.
+         break;
+       case Bytecodes::_invokehandle:
+         assert(bytecode_2() == (Bytecodes::Code)0, "must be");
+         assert(f1->is_method(), "");
+         return true;
+       default:
+         ShouldNotReachHere();
+         break;
+       }
+     }
+   }
+ 
+   // TODO test case: can invokespecial and invokevirtual share the same CP entry?
+   invoke_code = bytecode_2();
+   if (invoke_code != (Bytecodes::Code)0) {
+     assert(invoke_code == Bytecodes::_invokevirtual, "must be");
+     if (is_vfinal()) {
+       // f2 is the vfinal (resolved final) method
+       ArchiveBuilder::current()->mark_and_relocate_to_buffered_addr(&_f2);
+     } else {
+       // f2 is vtable index, no need to mark
+       if (DynamicDumpSharedSpaces) {
+         // InstanceKlass::methods() has been re-sorted, so we need to
+         // update the vtable_index.
+         int holder_index = src_cp->uncached_klass_ref_index_at(constant_pool_index());
+         Klass* src_klass = src_cp->resolved_klass_at(holder_index);
+         Method* src_m = src_klass->method_at_vtable(f2_as_index());
+         if (!ArchiveBuilder::current()->is_in_mapped_static_archive(src_m->method_holder()) &&
+             !ArchiveBuilder::current()->is_in_mapped_static_archive(src_m)) {
+           Klass* buffered_klass = ArchiveBuilder::current()->get_buffered_addr(src_klass);
+           Method* buffered_m = ArchiveBuilder::current()->get_buffered_addr(src_m);
+           int vtable_index;
+           if (src_m->method_holder()->is_interface()) { // default or miranda method
+             assert(src_m->vtable_index() < 0, "must be");
+             assert(buffered_klass->is_instance_klass(), "must be");
+             vtable_index = InstanceKlass::cast(buffered_klass)->vtable_index_of_interface_method(buffered_m);
+             assert(vtable_index >= 0, "must be");
+           } else {
+             vtable_index = buffered_m->vtable_index();
+             assert(vtable_index >= 0, "must be");
+           }
+           if (_f2 != vtable_index) {
+             log_trace(cds, resolve)("vtable_index changed %d => %d", (int)_f2, vtable_index);
+             _f2 = vtable_index;
+           }
+         }
+       }
+     }
+   }
+ 
+ #endif
  }
  #endif
  
  void ResolvedMethodEntry::print_on(outputStream* st) const {
    st->print_cr("Method Entry:");