< prev index next >

src/hotspot/share/runtime/deoptimization.cpp

Print this page




  33 #include "code/scopeDesc.hpp"
  34 #include "interpreter/bytecode.hpp"
  35 #include "interpreter/interpreter.hpp"
  36 #include "interpreter/oopMapCache.hpp"
  37 #include "memory/allocation.inline.hpp"
  38 #include "memory/oopFactory.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "oops/constantPool.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "oops/fieldStreams.hpp"
  47 #include "oops/typeArrayOop.inline.hpp"
  48 #include "oops/verifyOopClosure.hpp"
  49 #include "prims/jvmtiThreadState.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/deoptimization.hpp"
  53 #include "runtime/fieldDescriptor.hpp"
  54 #include "runtime/fieldDescriptor.inline.hpp"
  55 #include "runtime/frame.inline.hpp"
  56 #include "runtime/jniHandles.inline.hpp"
  57 #include "runtime/handles.inline.hpp"
  58 #include "runtime/interfaceSupport.inline.hpp"
  59 #include "runtime/safepointVerifiers.hpp"
  60 #include "runtime/sharedRuntime.hpp"
  61 #include "runtime/signature.hpp"
  62 #include "runtime/stubRoutines.hpp"
  63 #include "runtime/thread.hpp"
  64 #include "runtime/threadSMR.hpp"
  65 #include "runtime/vframe.hpp"
  66 #include "runtime/vframeArray.hpp"
  67 #include "runtime/vframe_hp.hpp"
  68 #include "utilities/events.hpp"
  69 #include "utilities/preserveException.hpp"
  70 #include "utilities/xmlstream.hpp"
  71 
  72 
  73 bool DeoptimizationMarker::_is_active = false;
  74 
  75 Deoptimization::UnrollBlock::UnrollBlock(int  size_of_deoptimized_frame,
  76                                          int  caller_adjustment,


 218       // by analyzing bytecode in deoptimized frames. This is why this flag
 219       // is set during method compilation (see Compile::Process_OopMap_Node()).
 220       // If the previous frame was popped or if we are dispatching an exception,
 221       // we don't have an oop result.
 222       bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 223       Handle return_value;
 224       if (save_oop_result) {
 225         // Reallocation may trigger GC. If deoptimization happened on return from
 226         // call which returns oop we need to save it since it is not in oopmap.
 227         oop result = deoptee.saved_oop_result(&map);
 228         assert(oopDesc::is_oop_or_null(result), "must be oop");
 229         return_value = Handle(thread, result);
 230         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 231         if (TraceDeoptimization) {
 232           ttyLocker ttyl;
 233           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 234         }
 235       }
 236       if (objects != NULL) {
 237         JRT_BLOCK
 238           realloc_failures = realloc_objects(thread, &deoptee, &map, objects, THREAD);
 239         JRT_END
 240         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 241         reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
 242 #ifndef PRODUCT
 243         if (TraceDeoptimization) {
 244           ttyLocker ttyl;
 245           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 246           print_objects(objects, realloc_failures);
 247         }
 248 #endif
 249       }
 250       if (save_oop_result) {
 251         // Restore result.
 252         deoptee.set_saved_oop_result(&map, return_value());
 253       }
 254 #if !INCLUDE_JVMCI
 255     }
 256     if (EliminateLocks) {
 257 #endif // INCLUDE_JVMCI
 258 #ifndef PRODUCT


 615 
 616   // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
 617   // but makes the entry a little slower. There is however a little dance we have to
 618   // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro
 619   ResetNoHandleMark rnhm; // No-op in release/product versions
 620   HandleMark hm;
 621 
 622   frame stub_frame = thread->last_frame();
 623 
 624   // Since the frame to unpack is the top frame of this thread, the vframe_array_head
 625   // must point to the vframeArray for the unpack frame.
 626   vframeArray* array = thread->vframe_array_head();
 627 
 628 #ifndef PRODUCT
 629   if (TraceDeoptimization) {
 630     ttyLocker ttyl;
 631     tty->print_cr("DEOPT UNPACKING thread " INTPTR_FORMAT " vframeArray " INTPTR_FORMAT " mode %d",
 632                   p2i(thread), p2i(array), exec_mode);
 633   }
 634 #endif
 635   Events::log_deopt_message(thread, "DEOPT UNPACKING pc=" INTPTR_FORMAT " sp=" INTPTR_FORMAT " mode %d",
 636               p2i(stub_frame.pc()), p2i(stub_frame.sp()), exec_mode);
 637 
 638   UnrollBlock* info = array->unroll_block();
 639 
 640   // Unpack the interpreter frames and any adapter frame (c2 only) we might create.
 641   array->unpack_to_stack(stub_frame, exec_mode, info->caller_actual_parameters());
 642 
 643   BasicType bt = info->return_type();
 644 
 645   // If we have an exception pending, claim that the return type is an oop
 646   // so the deopt_blob does not overwrite the exception_oop.
 647 
 648   if (exec_mode == Unpack_exception)
 649     bt = T_OBJECT;
 650 
 651   // Cleanup thread deopt data
 652   cleanup_deopt_info(thread, array);
 653 
 654 #ifndef PRODUCT
 655   if (VerifyStack) {


 796 
 797   if (SafepointSynchronize::is_at_safepoint()) {
 798     DeoptimizeMarkedTC deopt(false);
 799     // Make the dependent methods not entrant
 800     CodeCache::make_marked_nmethods_not_entrant();
 801     Threads::java_threads_do(&deopt);
 802   } else {
 803     // Make the dependent methods not entrant
 804     {
 805       MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 806       CodeCache::make_marked_nmethods_not_entrant();
 807     }
 808     DeoptimizeMarkedTC deopt(true);
 809     Handshake::execute(&deopt);
 810   }
 811 }
 812 
 813 Deoptimization::DeoptAction Deoptimization::_unloaded_action
 814   = Deoptimization::Action_reinterpret;
 815 
 816 
 817 
 818 #if INCLUDE_JVMCI || INCLUDE_AOT
 819 template<typename CacheType>
 820 class BoxCacheBase : public CHeapObj<mtCompiler> {
 821 protected:
 822   static InstanceKlass* find_cache_klass(Symbol* klass_name, TRAPS) {
 823     ResourceMark rm;
 824     char* klass_name_str = klass_name->as_C_string();
 825     Klass* k = SystemDictionary::find(klass_name, Handle(), Handle(), THREAD);
 826     guarantee(k != NULL, "%s must be loaded", klass_name_str);
 827     InstanceKlass* ik = InstanceKlass::cast(k);
 828     guarantee(ik->is_initialized(), "%s must be initialized", klass_name_str);
 829     CacheType::compute_offsets(ik);
 830     return ik;
 831   }
 832 };
 833 
// Per-primitive-type cache of the canonical box objects (backed by the Java-side
// cache array, e.g. Integer$IntegerCache.cache). Used by get_cached_box() below
// when rematerializing scalar-replaced autoboxes during deoptimization.
 834 template<typename PrimitiveType, typename CacheType, typename BoxType> class BoxCache  : public BoxCacheBase<CacheType> {
  // Primitive value range covered by the cache array: [_low, _high].
 835   PrimitiveType _low;
 836   PrimitiveType _high;
  // Global JNI handle keeping the Java cache array reachable across GCs.
 837   jobject _cache;
 838 protected:
 839   static BoxCache<PrimitiveType, CacheType, BoxType> *_singleton;
 840   BoxCache(Thread* thread) {
 841     InstanceKlass* ik = BoxCacheBase<CacheType>::find_cache_klass(CacheType::symbol(), thread);
 842     objArrayOop cache = CacheType::cache(ik);
 843     assert(cache->length() > 0, "Empty cache");
  // The first element's value defines the low bound; entries are consecutive.
 844     _low = BoxType::value(cache->obj_at(0));
 845     _high = _low + cache->length() - 1;
 846     _cache = JNIHandles::make_global(Handle(thread, cache));
 847   }
 848   ~BoxCache() {
 849     JNIHandles::destroy_global(_cache);
 850   }
 851 public:
  // Lazily create the process-wide singleton. Racing threads each build an
  // instance; the first to install it via replace_if_null wins, losers delete
  // theirs. No lock is held during construction.
 852   static BoxCache<PrimitiveType, CacheType, BoxType>* singleton(Thread* thread) {
 853     if (_singleton == NULL) {
 854       BoxCache<PrimitiveType, CacheType, BoxType>* s = new BoxCache<PrimitiveType, CacheType, BoxType>(thread);
 855       if (!Atomic::replace_if_null(s, &_singleton)) {
 856         delete s;
 857       }
 858     }
 859     return _singleton;
 860   }
  // Return the cached box for 'value', or NULL if 'value' is outside [_low, _high].
 861   oop lookup(PrimitiveType value) {
 862     if (_low <= value && value <= _high) {
 863       int offset = value - _low;
 864       return objArrayOop(JNIHandles::resolve_non_null(_cache))->obj_at(offset);
 865     }
 866     return NULL;
 867   }
 868 };
 869 
 870 typedef BoxCache<jint, java_lang_Integer_IntegerCache, java_lang_Integer> IntegerBoxCache;
 871 typedef BoxCache<jlong, java_lang_Long_LongCache, java_lang_Long> LongBoxCache;
 872 typedef BoxCache<jchar, java_lang_Character_CharacterCache, java_lang_Character> CharacterBoxCache;
 873 typedef BoxCache<jshort, java_lang_Short_ShortCache, java_lang_Short> ShortBoxCache;
 874 typedef BoxCache<jbyte, java_lang_Byte_ByteCache, java_lang_Byte> ByteBoxCache;
 875 
 876 template<> BoxCache<jint, java_lang_Integer_IntegerCache, java_lang_Integer>* BoxCache<jint, java_lang_Integer_IntegerCache, java_lang_Integer>::_singleton = NULL;
 877 template<> BoxCache<jlong, java_lang_Long_LongCache, java_lang_Long>* BoxCache<jlong, java_lang_Long_LongCache, java_lang_Long>::_singleton = NULL;
 878 template<> BoxCache<jchar, java_lang_Character_CharacterCache, java_lang_Character>* BoxCache<jchar, java_lang_Character_CharacterCache, java_lang_Character>::_singleton = NULL;
 879 template<> BoxCache<jshort, java_lang_Short_ShortCache, java_lang_Short>* BoxCache<jshort, java_lang_Short_ShortCache, java_lang_Short>::_singleton = NULL;
 880 template<> BoxCache<jbyte, java_lang_Byte_ByteCache, java_lang_Byte>* BoxCache<jbyte, java_lang_Byte_ByteCache, java_lang_Byte>::_singleton = NULL;
 881 
// Boolean variant of the box cache: there are only two canonical instances
// (Boolean.TRUE / Boolean.FALSE), kept alive via two global JNI handles.
 882 class BooleanBoxCache : public BoxCacheBase<java_lang_Boolean> {
 883   jobject _true_cache;
 884   jobject _false_cache;
 885 protected:
 886   static BooleanBoxCache *_singleton;
 887   BooleanBoxCache(Thread *thread) {
 888     InstanceKlass* ik = find_cache_klass(java_lang_Boolean::symbol(), thread);
 889     _true_cache = JNIHandles::make_global(Handle(thread, java_lang_Boolean::get_TRUE(ik)));
 890     _false_cache = JNIHandles::make_global(Handle(thread, java_lang_Boolean::get_FALSE(ik)));
 891   }
 892   ~BooleanBoxCache() {
 893     JNIHandles::destroy_global(_true_cache);
 894     JNIHandles::destroy_global(_false_cache);
 895   }
 896 public:
  // Lazy lock-free singleton: first thread to install its instance wins,
  // racing losers delete theirs (same pattern as BoxCache::singleton above).
 897   static BooleanBoxCache* singleton(Thread* thread) {
 898     if (_singleton == NULL) {
 899       BooleanBoxCache* s = new BooleanBoxCache(thread);
 900       if (!Atomic::replace_if_null(s, &_singleton)) {
 901         delete s;
 902       }
 903     }
 904     return _singleton;
 905   }
  // Map any non-zero jboolean to the canonical TRUE box, zero to FALSE.
 906   oop lookup(jboolean value) {
 907     if (value != 0) {
 908       return JNIHandles::resolve_non_null(_true_cache);
 909     }
 910     return JNIHandles::resolve_non_null(_false_cache);
 911   }
 912 };
 913 
 914 BooleanBoxCache* BooleanBoxCache::_singleton = NULL;
 915 
 916 oop Deoptimization::get_cached_box(AutoBoxObjectValue* bv, frame* fr, RegisterMap* reg_map, TRAPS) {
 917    Klass* k = java_lang_Class::as_Klass(bv->klass()->as_ConstantOopReadValue()->value()());
 918    BasicType box_type = SystemDictionary::box_klass_type(k);
 919    if (box_type != T_OBJECT) {
 920      StackValue* value = StackValue::create_stack_value(fr, reg_map, bv->field_at(0));
 921      switch(box_type) {
 922        case T_INT:     return IntegerBoxCache::singleton(THREAD)->lookup(value->get_int());
 923        case T_LONG: {
 924                        StackValue* low = StackValue::create_stack_value(fr, reg_map, bv->field_at(1));
 925                        jlong res = (jlong)low->get_int();
 926                        return LongBoxCache::singleton(THREAD)->lookup(res);
 927                      }
 928        case T_CHAR:    return CharacterBoxCache::singleton(THREAD)->lookup(value->get_int());
 929        case T_SHORT:   return ShortBoxCache::singleton(THREAD)->lookup(value->get_int());
 930        case T_BYTE:    return ByteBoxCache::singleton(THREAD)->lookup(value->get_int());
 931        case T_BOOLEAN: return BooleanBoxCache::singleton(THREAD)->lookup(value->get_int());
 932        default:;
 933      }
 934    }
 935    return NULL;
 936 }
 937 #endif // INCLUDE_JVMCI || INCLUDE_AOT
 938 
 939 #if COMPILER2_OR_JVMCI
 940 bool Deoptimization::realloc_objects(JavaThread* thread, frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, TRAPS) {
 941   Handle pending_exception(THREAD, thread->pending_exception());
 942   const char* exception_file = thread->exception_file();
 943   int exception_line = thread->exception_line();
 944   thread->clear_pending_exception();
 945 
 946   bool failures = false;
 947 
 948   for (int i = 0; i < objects->length(); i++) {
 949     assert(objects->at(i)->is_object(), "invalid debug information");
 950     ObjectValue* sv = (ObjectValue*) objects->at(i);
 951 
 952     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 953     oop obj = NULL;
 954 
 955     if (k->is_instance_klass()) {
 956 #if INCLUDE_JVMCI || INCLUDE_AOT
 957       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 958       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 959         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 960         obj = get_cached_box(abv, fr, reg_map, THREAD);
 961         if (obj != NULL) {
 962           // Set the flag to indicate the box came from a cache, so that we can skip the field reassignment for it.
 963           abv->set_cached(true);
 964         }
 965       }
 966 #endif // INCLUDE_JVMCI || INCLUDE_AOT
 967       InstanceKlass* ik = InstanceKlass::cast(k);
 968       if (obj == NULL) {
 969         obj = ik->allocate_instance(THREAD);
 970       }
 971     } else if (k->is_typeArray_klass()) {
 972       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
 973       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
 974       int len = sv->field_size() / type2size[ak->element_type()];
 975       obj = ak->allocate(len, THREAD);
 976     } else if (k->is_objArray_klass()) {
 977       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
 978       obj = ak->allocate(sv->field_size(), THREAD);
 979     }
 980 
 981     if (obj == NULL) {
 982       failures = true;
 983     }
 984 
 985     assert(sv->value().is_null(), "redundant reallocation");
 986     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
 987     CLEAR_PENDING_EXCEPTION;
 988     sv->set_value(obj);
 989   }
 990 


1223         ShouldNotReachHere();
1224     }
1225     svIndex++;
1226   }
1227   return svIndex;
1228 }
1229 
1230 // restore fields of all eliminated objects and arrays
1231 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
1232   for (int i = 0; i < objects->length(); i++) {
1233     ObjectValue* sv = (ObjectValue*) objects->at(i);
1234     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1235     Handle obj = sv->value();
1236     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1237     if (PrintDeoptimizationDetails) {
1238       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1239     }
1240     if (obj.is_null()) {
1241       continue;
1242     }
1243 #if INCLUDE_JVMCI || INCLUDE_AOT
1244     // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
1245     if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
1246       continue;
1247     }
1248 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1249     if (k->is_instance_klass()) {
1250       InstanceKlass* ik = InstanceKlass::cast(k);
1251       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
1252     } else if (k->is_typeArray_klass()) {
1253       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1254       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1255     } else if (k->is_objArray_klass()) {
1256       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1257     }
1258   }
1259 }
1260 
1261 
1262 // relock objects for which synchronization was eliminated
1263 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1264   for (int i = 0; i < monitors->length(); i++) {
1265     MonitorInfo* mon_info = monitors->at(i);
1266     if (mon_info->eliminated()) {
1267       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1268       if (!mon_info->owner_is_scalar_replaced()) {


1299 
1300     tty->print("     object <" INTPTR_FORMAT "> of type ", p2i(sv->value()()));
1301     k->print_value();
1302     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1303     if (obj.is_null()) {
1304       tty->print(" allocation failed");
1305     } else {
1306       tty->print(" allocated (%d bytes)", obj->size() * HeapWordSize);
1307     }
1308     tty->cr();
1309 
1310     if (Verbose && !obj.is_null()) {
1311       k->oop_print_on(obj(), tty);
1312     }
1313   }
1314 }
1315 #endif
1316 #endif // COMPILER2_OR_JVMCI
1317 
1318 vframeArray* Deoptimization::create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures) {
1319   Events::log_deopt_message(thread, "DEOPT PACKING pc=" INTPTR_FORMAT " sp=" INTPTR_FORMAT, p2i(fr.pc()), p2i(fr.sp()));
1320 
1321 #ifndef PRODUCT
1322   if (PrintDeoptimizationDetails) {
1323     ttyLocker ttyl;
1324     tty->print("DEOPT PACKING thread " INTPTR_FORMAT " ", p2i(thread));
1325     fr.print_on(tty);
1326     tty->print_cr("     Virtual frames (innermost first):");
1327     for (int index = 0; index < chunk->length(); index++) {
1328       compiledVFrame* vf = chunk->at(index);
1329       tty->print("       %2d - ", index);
1330       vf->print_value();
1331       int bci = chunk->at(index)->raw_bci();
1332       const char* code_name;
1333       if (bci == SynchronizationEntryBCI) {
1334         code_name = "sync entry";
1335       } else {
1336         Bytecodes::Code code = vf->method()->code_at(bci);
1337         code_name = Bytecodes::name(code);
1338       }
1339       tty->print(" - %s", code_name);


1659   // uncommon_trap() is called at the beginning of the uncommon trap
1660   // handler. Note this fact before we start generating temporary frames
1661   // that can confuse an asynchronous stack walker. This counter is
1662   // decremented at the end of unpack_frames().
1663   thread->inc_in_deopt_handler();
1664 
1665   // We need to update the map if we have biased locking.
1666 #if INCLUDE_JVMCI
1667   // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
1668   RegisterMap reg_map(thread, true);
1669 #else
1670   RegisterMap reg_map(thread, UseBiasedLocking);
1671 #endif
1672   frame stub_frame = thread->last_frame();
1673   frame fr = stub_frame.sender(&reg_map);
1674   // Make sure the calling nmethod is not getting deoptimized and removed
1675   // before we are done with it.
1676   nmethodLocker nl(fr.pc());
1677 
1678   // Log a message
1679   Events::log_deopt_message(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
1680               trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
1681 
1682   {
1683     ResourceMark rm;
1684 
1685     // Revoke biases of any monitors in the frame to ensure we can migrate them
1686     revoke_biases_of_monitors(thread, fr, &reg_map);
1687 
1688     DeoptReason reason = trap_request_reason(trap_request);
1689     DeoptAction action = trap_request_action(trap_request);
1690 #if INCLUDE_JVMCI
1691     int debug_id = trap_request_debug_id(trap_request);
1692 #endif
1693     jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
1694 
1695     vframe*  vf  = vframe::new_vframe(&fr, &reg_map, thread);
1696     compiledVFrame* cvf = compiledVFrame::cast(vf);
1697 
1698     CompiledMethod* nm = cvf->code();
1699 


2059       if ((reason != Reason_rtm_state_change) && (trap_mdo != NULL) &&
2060           UseRTMDeopt && (nm->as_nmethod()->rtm_state() != ProfileRTM)) {
2061         trap_mdo->atomic_set_rtm_state(ProfileRTM);
2062       }
2063 #endif
2064       // For code aging we count traps separately here, using make_not_entrant()
2065       // as a guard against simultaneous deopts in multiple threads.
2066       if (reason == Reason_tenured && trap_mdo != NULL) {
2067         trap_mdo->inc_tenure_traps();
2068       }
2069     }
2070 
2071     if (inc_recompile_count) {
2072       trap_mdo->inc_overflow_recompile_count();
2073       if ((uint)trap_mdo->overflow_recompile_count() >
2074           (uint)PerBytecodeRecompilationCutoff) {
2075         // Give up on the method containing the bad BCI.
2076         if (trap_method() == nm->method()) {
2077           make_not_compilable = true;
2078         } else {
2079           trap_method->set_not_compilable("overflow_recompile_count > PerBytecodeRecompilationCutoff", CompLevel_full_optimization);
2080           // But give grace to the enclosing nm->method().
2081         }
2082       }
2083     }
2084 
2085     // Reprofile
2086     if (reprofile) {
2087       CompilationPolicy::policy()->reprofile(trap_scope, nm->is_osr_method());
2088     }
2089 
2090     // Give up compiling
2091     if (make_not_compilable && !nm->method()->is_not_compilable(CompLevel_full_optimization)) {
2092       assert(make_not_entrant, "consistent");
2093       nm->method()->set_not_compilable("give up compiling", CompLevel_full_optimization);
2094     }
2095 
2096   } // Free marked resources
2097 
2098 }
2099 JRT_END
2100 
2101 ProfileData*
2102 Deoptimization::query_update_method_data(MethodData* trap_mdo,
2103                                          int trap_bci,
2104                                          Deoptimization::DeoptReason reason,
2105                                          bool update_total_trap_count,
2106 #if INCLUDE_JVMCI
2107                                          bool is_osr,
2108 #endif
2109                                          Method* compiled_method,
2110                                          //outputs:
2111                                          uint& ret_this_trap_count,
2112                                          bool& ret_maybe_prior_trap,
2113                                          bool& ret_maybe_prior_recompile) {




  33 #include "code/scopeDesc.hpp"
  34 #include "interpreter/bytecode.hpp"
  35 #include "interpreter/interpreter.hpp"
  36 #include "interpreter/oopMapCache.hpp"
  37 #include "memory/allocation.inline.hpp"
  38 #include "memory/oopFactory.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "oops/constantPool.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "oops/fieldStreams.hpp"
  47 #include "oops/typeArrayOop.inline.hpp"
  48 #include "oops/verifyOopClosure.hpp"
  49 #include "prims/jvmtiThreadState.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/deoptimization.hpp"


  53 #include "runtime/frame.inline.hpp"

  54 #include "runtime/handles.inline.hpp"
  55 #include "runtime/interfaceSupport.inline.hpp"
  56 #include "runtime/safepointVerifiers.hpp"
  57 #include "runtime/sharedRuntime.hpp"
  58 #include "runtime/signature.hpp"
  59 #include "runtime/stubRoutines.hpp"
  60 #include "runtime/thread.hpp"
  61 #include "runtime/threadSMR.hpp"
  62 #include "runtime/vframe.hpp"
  63 #include "runtime/vframeArray.hpp"
  64 #include "runtime/vframe_hp.hpp"
  65 #include "utilities/events.hpp"
  66 #include "utilities/preserveException.hpp"
  67 #include "utilities/xmlstream.hpp"
  68 
  69 
  70 bool DeoptimizationMarker::_is_active = false;
  71 
  72 Deoptimization::UnrollBlock::UnrollBlock(int  size_of_deoptimized_frame,
  73                                          int  caller_adjustment,


 215       // by analyzing bytecode in deoptimized frames. This is why this flag
 216       // is set during method compilation (see Compile::Process_OopMap_Node()).
 217       // If the previous frame was popped or if we are dispatching an exception,
 218       // we don't have an oop result.
 219       bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 220       Handle return_value;
 221       if (save_oop_result) {
 222         // Reallocation may trigger GC. If deoptimization happened on return from
 223         // call which returns oop we need to save it since it is not in oopmap.
 224         oop result = deoptee.saved_oop_result(&map);
 225         assert(oopDesc::is_oop_or_null(result), "must be oop");
 226         return_value = Handle(thread, result);
 227         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 228         if (TraceDeoptimization) {
 229           ttyLocker ttyl;
 230           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 231         }
 232       }
 233       if (objects != NULL) {
 234         JRT_BLOCK
 235           realloc_failures = realloc_objects(thread, &deoptee, objects, THREAD);
 236         JRT_END
 237         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 238         reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
 239 #ifndef PRODUCT
 240         if (TraceDeoptimization) {
 241           ttyLocker ttyl;
 242           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 243           print_objects(objects, realloc_failures);
 244         }
 245 #endif
 246       }
 247       if (save_oop_result) {
 248         // Restore result.
 249         deoptee.set_saved_oop_result(&map, return_value());
 250       }
 251 #if !INCLUDE_JVMCI
 252     }
 253     if (EliminateLocks) {
 254 #endif // INCLUDE_JVMCI
 255 #ifndef PRODUCT


 612 
 613   // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
 614   // but makes the entry a little slower. There is however a little dance we have to
 615   // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro
 616   ResetNoHandleMark rnhm; // No-op in release/product versions
 617   HandleMark hm;
 618 
 619   frame stub_frame = thread->last_frame();
 620 
 621   // Since the frame to unpack is the top frame of this thread, the vframe_array_head
 622   // must point to the vframeArray for the unpack frame.
 623   vframeArray* array = thread->vframe_array_head();
 624 
 625 #ifndef PRODUCT
 626   if (TraceDeoptimization) {
 627     ttyLocker ttyl;
 628     tty->print_cr("DEOPT UNPACKING thread " INTPTR_FORMAT " vframeArray " INTPTR_FORMAT " mode %d",
 629                   p2i(thread), p2i(array), exec_mode);
 630   }
 631 #endif
 632   Events::log(thread, "DEOPT UNPACKING pc=" INTPTR_FORMAT " sp=" INTPTR_FORMAT " mode %d",
 633               p2i(stub_frame.pc()), p2i(stub_frame.sp()), exec_mode);
 634 
 635   UnrollBlock* info = array->unroll_block();
 636 
 637   // Unpack the interpreter frames and any adapter frame (c2 only) we might create.
 638   array->unpack_to_stack(stub_frame, exec_mode, info->caller_actual_parameters());
 639 
 640   BasicType bt = info->return_type();
 641 
 642   // If we have an exception pending, claim that the return type is an oop
 643   // so the deopt_blob does not overwrite the exception_oop.
 644 
 645   if (exec_mode == Unpack_exception)
 646     bt = T_OBJECT;
 647 
 648   // Cleanup thread deopt data
 649   cleanup_deopt_info(thread, array);
 650 
 651 #ifndef PRODUCT
 652   if (VerifyStack) {


 793 
 794   if (SafepointSynchronize::is_at_safepoint()) {
 795     DeoptimizeMarkedTC deopt(false);
 796     // Make the dependent methods not entrant
 797     CodeCache::make_marked_nmethods_not_entrant();
 798     Threads::java_threads_do(&deopt);
 799   } else {
 800     // Make the dependent methods not entrant
 801     {
 802       MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 803       CodeCache::make_marked_nmethods_not_entrant();
 804     }
 805     DeoptimizeMarkedTC deopt(true);
 806     Handshake::execute(&deopt);
 807   }
 808 }
 809 
 810 Deoptimization::DeoptAction Deoptimization::_unloaded_action
 811   = Deoptimization::Action_reinterpret;
 812 



























































































































 813 #if COMPILER2_OR_JVMCI
 814 bool Deoptimization::realloc_objects(JavaThread* thread, frame* fr, GrowableArray<ScopeValue*>* objects, TRAPS) {
 815   Handle pending_exception(THREAD, thread->pending_exception());
 816   const char* exception_file = thread->exception_file();
 817   int exception_line = thread->exception_line();
 818   thread->clear_pending_exception();
 819 
 820   bool failures = false;
 821 
 822   for (int i = 0; i < objects->length(); i++) {
 823     assert(objects->at(i)->is_object(), "invalid debug information");
 824     ObjectValue* sv = (ObjectValue*) objects->at(i);
 825 
 826     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 827     oop obj = NULL;
 828 
 829     if (k->is_instance_klass()) {











 830       InstanceKlass* ik = InstanceKlass::cast(k);
 831       obj = ik->allocate_instance(THREAD);


 832     } else if (k->is_typeArray_klass()) {
 833       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
 834       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
 835       int len = sv->field_size() / type2size[ak->element_type()];
 836       obj = ak->allocate(len, THREAD);
 837     } else if (k->is_objArray_klass()) {
 838       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
 839       obj = ak->allocate(sv->field_size(), THREAD);
 840     }
 841 
 842     if (obj == NULL) {
 843       failures = true;
 844     }
 845 
 846     assert(sv->value().is_null(), "redundant reallocation");
 847     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
 848     CLEAR_PENDING_EXCEPTION;
 849     sv->set_value(obj);
 850   }
 851 


1084         ShouldNotReachHere();
1085     }
1086     svIndex++;
1087   }
1088   return svIndex;
1089 }
1090 
1091 // restore fields of all eliminated objects and arrays
// Restore the field contents of all eliminated (scalar-replaced) objects and
// arrays from the debug info. Objects with a NULL handle (failed reallocation,
// only legal when realloc_failures is set) are skipped.
1092 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
1093   for (int i = 0; i < objects->length(); i++) {
1094     ObjectValue* sv = (ObjectValue*) objects->at(i);
1095     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1096     Handle obj = sv->value();
1097     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1098     if (PrintDeoptimizationDetails) {
1099       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1100     }
  // Skip objects whose reallocation failed; there is nothing to fill in.
1101     if (obj.is_null()) {
1102       continue;
1103     }
1104 




  // Dispatch on the object's kind: plain instance, primitive array, or oop array.
1105     if (k->is_instance_klass()) {
1106       InstanceKlass* ik = InstanceKlass::cast(k);
1107       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
1108     } else if (k->is_typeArray_klass()) {
1109       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1110       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1111     } else if (k->is_objArray_klass()) {
1112       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1113     }
1114   }
1115 }
1116 
1117 
1118 // relock objects for which synchronization was eliminated
1119 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1120   for (int i = 0; i < monitors->length(); i++) {
1121     MonitorInfo* mon_info = monitors->at(i);
1122     if (mon_info->eliminated()) {
1123       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1124       if (!mon_info->owner_is_scalar_replaced()) {


1155 
1156     tty->print("     object <" INTPTR_FORMAT "> of type ", p2i(sv->value()()));
1157     k->print_value();
1158     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1159     if (obj.is_null()) {
1160       tty->print(" allocation failed");
1161     } else {
1162       tty->print(" allocated (%d bytes)", obj->size() * HeapWordSize);
1163     }
1164     tty->cr();
1165 
1166     if (Verbose && !obj.is_null()) {
1167       k->oop_print_on(obj(), tty);
1168     }
1169   }
1170 }
1171 #endif
1172 #endif // COMPILER2_OR_JVMCI
1173 
1174 vframeArray* Deoptimization::create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures) {
1175   Events::log(thread, "DEOPT PACKING pc=" INTPTR_FORMAT " sp=" INTPTR_FORMAT, p2i(fr.pc()), p2i(fr.sp()));
1176 
1177 #ifndef PRODUCT
1178   if (PrintDeoptimizationDetails) {
1179     ttyLocker ttyl;
1180     tty->print("DEOPT PACKING thread " INTPTR_FORMAT " ", p2i(thread));
1181     fr.print_on(tty);
1182     tty->print_cr("     Virtual frames (innermost first):");
1183     for (int index = 0; index < chunk->length(); index++) {
1184       compiledVFrame* vf = chunk->at(index);
1185       tty->print("       %2d - ", index);
1186       vf->print_value();
1187       int bci = chunk->at(index)->raw_bci();
1188       const char* code_name;
1189       if (bci == SynchronizationEntryBCI) {
1190         code_name = "sync entry";
1191       } else {
1192         Bytecodes::Code code = vf->method()->code_at(bci);
1193         code_name = Bytecodes::name(code);
1194       }
1195       tty->print(" - %s", code_name);


1515   // uncommon_trap() is called at the beginning of the uncommon trap
1516   // handler. Note this fact before we start generating temporary frames
1517   // that can confuse an asynchronous stack walker. This counter is
1518   // decremented at the end of unpack_frames().
1519   thread->inc_in_deopt_handler();
1520 
1521   // We need to update the map if we have biased locking.
1522 #if INCLUDE_JVMCI
1523   // JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
1524   RegisterMap reg_map(thread, true);
1525 #else
1526   RegisterMap reg_map(thread, UseBiasedLocking);
1527 #endif
1528   frame stub_frame = thread->last_frame();
1529   frame fr = stub_frame.sender(&reg_map);
1530   // Make sure the calling nmethod is not getting deoptimized and removed
1531   // before we are done with it.
1532   nmethodLocker nl(fr.pc());
1533 
1534   // Log a message
1535   Events::log(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
1536               trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
1537 
1538   {
1539     ResourceMark rm;
1540 
1541     // Revoke biases of any monitors in the frame to ensure we can migrate them
1542     revoke_biases_of_monitors(thread, fr, &reg_map);
1543 
1544     DeoptReason reason = trap_request_reason(trap_request);
1545     DeoptAction action = trap_request_action(trap_request);
1546 #if INCLUDE_JVMCI
1547     int debug_id = trap_request_debug_id(trap_request);
1548 #endif
1549     jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
1550 
1551     vframe*  vf  = vframe::new_vframe(&fr, &reg_map, thread);
1552     compiledVFrame* cvf = compiledVFrame::cast(vf);
1553 
1554     CompiledMethod* nm = cvf->code();
1555 


1915       if ((reason != Reason_rtm_state_change) && (trap_mdo != NULL) &&
1916           UseRTMDeopt && (nm->as_nmethod()->rtm_state() != ProfileRTM)) {
1917         trap_mdo->atomic_set_rtm_state(ProfileRTM);
1918       }
1919 #endif
1920       // For code aging we count traps separately here, using make_not_entrant()
1921       // as a guard against simultaneous deopts in multiple threads.
1922       if (reason == Reason_tenured && trap_mdo != NULL) {
1923         trap_mdo->inc_tenure_traps();
1924       }
1925     }
1926 
1927     if (inc_recompile_count) {
1928       trap_mdo->inc_overflow_recompile_count();
1929       if ((uint)trap_mdo->overflow_recompile_count() >
1930           (uint)PerBytecodeRecompilationCutoff) {
1931         // Give up on the method containing the bad BCI.
1932         if (trap_method() == nm->method()) {
1933           make_not_compilable = true;
1934         } else {
1935           trap_method->set_not_compilable(CompLevel_full_optimization, true, "overflow_recompile_count > PerBytecodeRecompilationCutoff");
1936           // But give grace to the enclosing nm->method().
1937         }
1938       }
1939     }
1940 
1941     // Reprofile
1942     if (reprofile) {
1943       CompilationPolicy::policy()->reprofile(trap_scope, nm->is_osr_method());
1944     }
1945 
1946     // Give up compiling
1947     if (make_not_compilable && !nm->method()->is_not_compilable(CompLevel_full_optimization)) {
1948       assert(make_not_entrant, "consistent");
1949       nm->method()->set_not_compilable(CompLevel_full_optimization);
1950     }
1951 
1952   } // Free marked resources
1953 
1954 }
1955 JRT_END
1956 
1957 ProfileData*
1958 Deoptimization::query_update_method_data(MethodData* trap_mdo,
1959                                          int trap_bci,
1960                                          Deoptimization::DeoptReason reason,
1961                                          bool update_total_trap_count,
1962 #if INCLUDE_JVMCI
1963                                          bool is_osr,
1964 #endif
1965                                          Method* compiled_method,
1966                                          //outputs:
1967                                          uint& ret_this_trap_count,
1968                                          bool& ret_maybe_prior_trap,
1969                                          bool& ret_maybe_prior_recompile) {


< prev index next >