< prev index next >

src/hotspot/share/runtime/deoptimization.cpp

Print this page




  28 #include "classfile/systemDictionary.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/debugInfoRec.hpp"
  31 #include "code/nmethod.hpp"
  32 #include "code/pcDesc.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "interpreter/bytecode.hpp"
  35 #include "interpreter/interpreter.hpp"
  36 #include "interpreter/oopMapCache.hpp"
  37 #include "memory/allocation.inline.hpp"
  38 #include "memory/oopFactory.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "oops/constantPool.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "oops/fieldStreams.hpp"
  47 #include "oops/typeArrayOop.inline.hpp"



  48 #include "oops/verifyOopClosure.hpp"
  49 #include "prims/jvmtiThreadState.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/deoptimization.hpp"
  53 #include "runtime/fieldDescriptor.hpp"
  54 #include "runtime/fieldDescriptor.inline.hpp"
  55 #include "runtime/frame.inline.hpp"
  56 #include "runtime/jniHandles.inline.hpp"
  57 #include "runtime/handles.inline.hpp"
  58 #include "runtime/interfaceSupport.inline.hpp"
  59 #include "runtime/safepointVerifiers.hpp"
  60 #include "runtime/sharedRuntime.hpp"
  61 #include "runtime/signature.hpp"
  62 #include "runtime/stubRoutines.hpp"
  63 #include "runtime/thread.hpp"
  64 #include "runtime/threadSMR.hpp"
  65 #include "runtime/vframe.hpp"
  66 #include "runtime/vframeArray.hpp"
  67 #include "runtime/vframe_hp.hpp"


 202 
 203 #if COMPILER2_OR_JVMCI
 204   // Reallocate the non-escaping objects and restore their fields. Then
 205   // relock objects if synchronization on them was eliminated.
 206 #if !INCLUDE_JVMCI
 207   if (DoEscapeAnalysis || EliminateNestedLocks) {
 208     if (EliminateAllocations) {
 209 #endif // INCLUDE_JVMCI
 210       assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
 211       GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
 212 
 213       // The flag return_oop() indicates call sites which return oop
 214       // in compiled code. Such sites include java method calls,
 215       // runtime calls (for example, used to allocate new objects/arrays
 216       // on slow code path) and any other calls generated in compiled code.
 217       // It is not guaranteed that we can get such information here only
 218       // by analyzing bytecode in deoptimized frames. This is why this flag
 219       // is set during method compilation (see Compile::Process_OopMap_Node()).
 220       // If the previous frame was popped or if we are dispatching an exception,
 221       // we don't have an oop result.
 222       bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 223       Handle return_value;











 224       if (save_oop_result) {
 225         // Reallocation may trigger GC. If deoptimization happened on return from
 226         // call which returns oop we need to save it since it is not in oopmap.
 227         oop result = deoptee.saved_oop_result(&map);
 228         assert(oopDesc::is_oop_or_null(result), "must be oop");
 229         return_value = Handle(thread, result);
 230         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 231         if (TraceDeoptimization) {
 232           ttyLocker ttyl;
 233           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 234         }
 235       }
 236       if (objects != NULL) {

 237         JRT_BLOCK
 238           realloc_failures = realloc_objects(thread, &deoptee, &map, objects, THREAD);






 239         JRT_END
 240         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 241         reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);
 242 #ifndef PRODUCT
 243         if (TraceDeoptimization) {
 244           ttyLocker ttyl;
 245           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 246           print_objects(objects, realloc_failures);
 247         }
 248 #endif
 249       }
 250       if (save_oop_result) {
 251         // Restore result.
 252         deoptee.set_saved_oop_result(&map, return_value());

 253       }
 254 #if !INCLUDE_JVMCI
 255     }
 256     if (EliminateLocks) {
 257 #endif // INCLUDE_JVMCI
 258 #ifndef PRODUCT
 259       bool first = true;
 260 #endif
 261       for (int i = 0; i < chunk->length(); i++) {
 262         compiledVFrame* cvf = chunk->at(i);
 263         assert (cvf->scope() != NULL,"expect only compiled java frames");
 264         GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 265         if (monitors->is_nonempty()) {
 266           relock_objects(monitors, thread, realloc_failures);
 267 #ifndef PRODUCT
 268           if (PrintDeoptimizationDetails) {
 269             ttyLocker ttyl;
 270             for (int j = 0; j < monitors->length(); j++) {
 271               MonitorInfo* mi = monitors->at(j);
 272               if (mi->eliminated()) {


 469   // its caller's stack by. If the caller is a compiled frame then
 470   // we pretend that the callee has no parameters so that the
 471   // extension counts for the full amount of locals and not just
 472   // locals-parms. This is because without a c2i adapter the parm
 473   // area as created by the compiled frame will not be usable by
 474   // the interpreter. (Depending on the calling convention there
 475   // may not even be enough space).
 476 
 477   // QQQ I'd rather see this pushed down into last_frame_adjust
 478   // and have it take the sender (aka caller).
 479 
 480   if (deopt_sender.is_compiled_frame() || caller_was_method_handle) {
 481     caller_adjustment = last_frame_adjust(0, callee_locals);
 482   } else if (callee_locals > callee_parameters) {
 483     // The caller frame may need extending to accommodate
 484     // non-parameter locals of the first unpacked interpreted frame.
 485     // Compute that adjustment.
 486     caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
 487   }
 488 
 489   // If the sender is deoptimized we must retrieve the address of the handler
 490   // since the frame will "magically" show the original pc before the deopt
 491   // and we'd undo the deopt.
 492 
 493   frame_pcs[0] = deopt_sender.raw_pc();
 494 
 495   assert(CodeCache::find_blob_unsafe(frame_pcs[0]) != NULL, "bad pc");
 496 
 497 #if INCLUDE_JVMCI
 498   if (exceptionObject() != NULL) {
 499     thread->set_exception_oop(exceptionObject());
 500     exec_mode = Unpack_exception;
 501   }
 502 #endif
 503 
 504   if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
 505     assert(thread->has_pending_exception(), "should have thrown OOME");
 506     thread->set_exception_oop(thread->pending_exception());
 507     thread->clear_pending_exception();
 508     exec_mode = Unpack_exception;
 509   }


 951 
 952     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 953     oop obj = NULL;
 954 
 955     if (k->is_instance_klass()) {
 956 #if INCLUDE_JVMCI || INCLUDE_AOT
 957       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 958       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 959         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 960         obj = get_cached_box(abv, fr, reg_map, THREAD);
 961         if (obj != NULL) {
 962           // Set the flag to indicate the box came from a cache, so that we can skip the field reassignment for it.
 963           abv->set_cached(true);
 964         }
 965       }
 966 #endif // INCLUDE_JVMCI || INCLUDE_AOT
 967       InstanceKlass* ik = InstanceKlass::cast(k);
 968       if (obj == NULL) {
 969         obj = ik->allocate_instance(THREAD);
 970       }




 971     } else if (k->is_typeArray_klass()) {
 972       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
 973       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
 974       int len = sv->field_size() / type2size[ak->element_type()];
 975       obj = ak->allocate(len, THREAD);
 976     } else if (k->is_objArray_klass()) {
 977       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
 978       obj = ak->allocate(sv->field_size(), THREAD);
 979     }
 980 
 981     if (obj == NULL) {
 982       failures = true;
 983     }
 984 
 985     assert(sv->value().is_null(), "redundant reallocation");
 986     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
 987     CLEAR_PENDING_EXCEPTION;
 988     sv->set_value(obj);
 989   }
 990 
 991   if (failures) {
 992     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
 993   } else if (pending_exception.not_null()) {
 994     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
 995   }
 996 
 997   return failures;
 998 }
 999 















1000 // restore elements of an eliminated type array
1001 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
1002   int index = 0;
1003   intptr_t val;
1004 
1005   for (int i = 0; i < sv->field_size(); i++) {
1006     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1007     switch(type) {
1008     case T_LONG: case T_DOUBLE: {
1009       assert(value->type() == T_INT, "Agreement.");
1010       StackValue* low =
1011         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
1012 #ifdef _LP64
1013       jlong res = (jlong)low->get_int();
1014 #else
1015 #ifdef SPARC
1016       // For SPARC we have to swap high and low words.
1017       jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int());
1018 #else
1019       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());


1090         ShouldNotReachHere();
1091     }
1092     index++;
1093   }
1094 }
1095 
1096 
1097 // restore fields of an eliminated object array
1098 void Deoptimization::reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
1099   for (int i = 0; i < sv->field_size(); i++) {
1100     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1101     assert(value->type() == T_OBJECT, "object element expected");
1102     obj->obj_at_put(i, value->get_obj()());
1103   }
1104 }
1105 
1106 class ReassignedField {
1107 public:
1108   int _offset;
1109   BasicType _type;

1110 public:
1111   ReassignedField() {
1112     _offset = 0;
1113     _type = T_ILLEGAL;

1114   }
1115 };
1116 
1117 int compare(ReassignedField* left, ReassignedField* right) {
1118   return left->_offset - right->_offset;
1119 }
1120 
1121 // Restore fields of an eliminated instance object using the same field order
1122 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1123 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
1124   if (klass->superklass() != NULL) {
1125     svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal);
1126   }
1127 
1128   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1129   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
1130     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1131       ReassignedField field;
1132       field._offset = fs.offset();
1133       field._type = FieldType::basic_type(fs.signature());









1134       fields->append(field);
1135     }
1136   }
1137   fields->sort(compare);
1138   for (int i = 0; i < fields->length(); i++) {
1139     intptr_t val;
1140     ScopeValue* scope_field = sv->field_at(svIndex);
1141     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1142     int offset = fields->at(i)._offset;
1143     BasicType type = fields->at(i)._type;
1144     switch (type) {
1145       case T_OBJECT: case T_ARRAY:

1146         assert(value->type() == T_OBJECT, "Agreement.");
1147         obj->obj_field_put(offset, value->get_obj()());
1148         break;
1149 









1150       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
1151       case T_INT: case T_FLOAT: { // 4 bytes.
1152         assert(value->type() == T_INT, "Agreement.");
1153         bool big_value = false;
1154         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {
1155           if (scope_field->is_location()) {
1156             Location::Type type = ((LocationValue*) scope_field)->location().type();
1157             if (type == Location::dbl || type == Location::lng) {
1158               big_value = true;
1159             }
1160           }
1161           if (scope_field->is_constant_int()) {
1162             ScopeValue* next_scope_field = sv->field_at(svIndex + 1);
1163             if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) {
1164               big_value = true;
1165             }
1166           }
1167         }
1168 
1169         if (big_value) {


1210       case T_BYTE:
1211         assert(value->type() == T_INT, "Agreement.");
1212         val = value->get_int();
1213         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1214         break;
1215 
1216       case T_BOOLEAN:
1217         assert(value->type() == T_INT, "Agreement.");
1218         val = value->get_int();
1219         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1220         break;
1221 
1222       default:
1223         ShouldNotReachHere();
1224     }
1225     svIndex++;
1226   }
1227   return svIndex;
1228 }
1229 














// restore fields of all eliminated objects and arrays
//
// Walks the scalar-replaced objects recorded in the scope debug info and
// copies the values captured at deoptimization time into the freshly
// reallocated heap objects (allocated earlier by realloc_objects()).
// If realloc_failures is true, some entries may hold a null handle and
// are skipped. skip_internal suppresses reassignment of internal fields.
void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
  for (int i = 0; i < objects->length(); i++) {
    ObjectValue* sv = (ObjectValue*) objects->at(i);
    Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
    Handle obj = sv->value();
    // A null handle here is only legal if the earlier reallocation failed.
    assert(obj.not_null() || realloc_failures, "reallocation was missed");
    if (PrintDeoptimizationDetails) {
      tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
    }
    if (obj.is_null()) {
      // Nothing to fill in for a failed reallocation.
      continue;
    }
#if INCLUDE_JVMCI || INCLUDE_AOT
    // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
    if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
      continue;
    }
#endif // INCLUDE_JVMCI || INCLUDE_AOT
    // Dispatch on the kind of object: instance, primitive array or object array.
    if (k->is_instance_klass()) {
      InstanceKlass* ik = InstanceKlass::cast(k);
      reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
    } else if (k->is_typeArray_klass()) {
      TypeArrayKlass* ak = TypeArrayKlass::cast(k);
      reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
    } else if (k->is_objArray_klass()) {
      reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
    }
  }
}
1260 
1261 
1262 // relock objects for which synchronization was eliminated
1263 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1264   for (int i = 0; i < monitors->length(); i++) {
1265     MonitorInfo* mon_info = monitors->at(i);
1266     if (mon_info->eliminated()) {
1267       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1268       if (!mon_info->owner_is_scalar_replaced()) {
1269         Handle obj(thread, mon_info->owner());
1270         markOop mark = obj->mark();
1271         if (UseBiasedLocking && mark->has_bias_pattern()) {




  28 #include "classfile/systemDictionary.hpp"
  29 #include "code/codeCache.hpp"
  30 #include "code/debugInfoRec.hpp"
  31 #include "code/nmethod.hpp"
  32 #include "code/pcDesc.hpp"
  33 #include "code/scopeDesc.hpp"
  34 #include "interpreter/bytecode.hpp"
  35 #include "interpreter/interpreter.hpp"
  36 #include "interpreter/oopMapCache.hpp"
  37 #include "memory/allocation.inline.hpp"
  38 #include "memory/oopFactory.hpp"
  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "oops/constantPool.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "oops/fieldStreams.hpp"
  47 #include "oops/typeArrayOop.inline.hpp"
  48 #include "oops/valueArrayKlass.hpp"
  49 #include "oops/valueArrayOop.hpp"
  50 #include "oops/valueKlass.hpp"
  51 #include "oops/verifyOopClosure.hpp"
  52 #include "prims/jvmtiThreadState.hpp"
  53 #include "runtime/biasedLocking.hpp"
  54 #include "runtime/compilationPolicy.hpp"
  55 #include "runtime/deoptimization.hpp"
  56 #include "runtime/fieldDescriptor.hpp"
  57 #include "runtime/fieldDescriptor.inline.hpp"
  58 #include "runtime/frame.inline.hpp"
  59 #include "runtime/jniHandles.inline.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/interfaceSupport.inline.hpp"
  62 #include "runtime/safepointVerifiers.hpp"
  63 #include "runtime/sharedRuntime.hpp"
  64 #include "runtime/signature.hpp"
  65 #include "runtime/stubRoutines.hpp"
  66 #include "runtime/thread.hpp"
  67 #include "runtime/threadSMR.hpp"
  68 #include "runtime/vframe.hpp"
  69 #include "runtime/vframeArray.hpp"
  70 #include "runtime/vframe_hp.hpp"


 205 
 206 #if COMPILER2_OR_JVMCI
 207   // Reallocate the non-escaping objects and restore their fields. Then
 208   // relock objects if synchronization on them was eliminated.
 209 #if !INCLUDE_JVMCI
 210   if (DoEscapeAnalysis || EliminateNestedLocks) {
 211     if (EliminateAllocations) {
 212 #endif // INCLUDE_JVMCI
 213       assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
 214       GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
 215 
 216       // The flag return_oop() indicates call sites which return oop
 217       // in compiled code. Such sites include java method calls,
 218       // runtime calls (for example, used to allocate new objects/arrays
 219       // on slow code path) and any other calls generated in compiled code.
 220       // It is not guaranteed that we can get such information here only
 221       // by analyzing bytecode in deoptimized frames. This is why this flag
 222       // is set during method compilation (see Compile::Process_OopMap_Node()).
 223       // If the previous frame was popped or if we are dispatching an exception,
 224       // we don't have an oop result.
 225       ScopeDesc* scope = chunk->at(0)->scope();
 226       bool save_oop_result = scope->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 227       // In case of the return of multiple values, we must take care
 228       // of all oop return values.
 229       GrowableArray<Handle> return_oops;
 230       ValueKlass* vk = NULL;
 231       if (save_oop_result && scope->return_vt()) {
 232         vk = ValueKlass::returned_value_klass(map);
 233         if (vk != NULL) {
 234           vk->save_oop_fields(map, return_oops);
 235           save_oop_result = false;
 236         }
 237       }
 238       if (save_oop_result) {
 239         // Reallocation may trigger GC. If deoptimization happened on return from
 240         // call which returns oop we need to save it since it is not in oopmap.
 241         oop result = deoptee.saved_oop_result(&map);
 242         assert(oopDesc::is_oop_or_null(result), "must be oop");
 243         return_oops.push(Handle(thread, result));
 244         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 245         if (TraceDeoptimization) {
 246           ttyLocker ttyl;
 247           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 248         }
 249       }
 250       if (objects != NULL || vk != NULL) {
 251         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 252         JRT_BLOCK
 253           if (vk != NULL) {
 254             realloc_failures = realloc_value_type_result(vk, map, return_oops, THREAD);
 255           }
 256           if (objects != NULL) {
 257             realloc_failures = realloc_failures || realloc_objects(thread, &deoptee, &map, objects, THREAD);
 258             reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal, THREAD);
 259           }
 260         JRT_END


 261 #ifndef PRODUCT
 262         if (TraceDeoptimization) {
 263           ttyLocker ttyl;
 264           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 265           print_objects(objects, realloc_failures);
 266         }
 267 #endif
 268       }
 269       if (save_oop_result || vk != NULL) {
 270         // Restore result.
 271         assert(return_oops.length() == 1, "no value type");
 272         deoptee.set_saved_oop_result(&map, return_oops.pop()());
 273       }
 274 #if !INCLUDE_JVMCI
 275     }
 276     if (EliminateLocks) {
 277 #endif // INCLUDE_JVMCI
 278 #ifndef PRODUCT
 279       bool first = true;
 280 #endif
 281       for (int i = 0; i < chunk->length(); i++) {
 282         compiledVFrame* cvf = chunk->at(i);
 283         assert (cvf->scope() != NULL,"expect only compiled java frames");
 284         GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 285         if (monitors->is_nonempty()) {
 286           relock_objects(monitors, thread, realloc_failures);
 287 #ifndef PRODUCT
 288           if (PrintDeoptimizationDetails) {
 289             ttyLocker ttyl;
 290             for (int j = 0; j < monitors->length(); j++) {
 291               MonitorInfo* mi = monitors->at(j);
 292               if (mi->eliminated()) {


 489   // its caller's stack by. If the caller is a compiled frame then
 490   // we pretend that the callee has no parameters so that the
 491   // extension counts for the full amount of locals and not just
 492   // locals-parms. This is because without a c2i adapter the parm
 493   // area as created by the compiled frame will not be usable by
 494   // the interpreter. (Depending on the calling convention there
 495   // may not even be enough space).
 496 
 497   // QQQ I'd rather see this pushed down into last_frame_adjust
 498   // and have it take the sender (aka caller).
 499 
 500   if (deopt_sender.is_compiled_frame() || caller_was_method_handle) {
 501     caller_adjustment = last_frame_adjust(0, callee_locals);
 502   } else if (callee_locals > callee_parameters) {
 503     // The caller frame may need extending to accommodate
 504     // non-parameter locals of the first unpacked interpreted frame.
 505     // Compute that adjustment.
 506     caller_adjustment = last_frame_adjust(callee_parameters, callee_locals);
 507   }
 508 
 509   // If the sender is deoptimized we must retrieve the address of the handler
 510   // since the frame will "magically" show the original pc before the deopt
 511   // and we'd undo the deopt.
 512 
 513   frame_pcs[0] = deopt_sender.raw_pc();
 514 
 515   assert(CodeCache::find_blob_unsafe(frame_pcs[0]) != NULL, "bad pc");
 516 
 517 #if INCLUDE_JVMCI
 518   if (exceptionObject() != NULL) {
 519     thread->set_exception_oop(exceptionObject());
 520     exec_mode = Unpack_exception;
 521   }
 522 #endif
 523 
 524   if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
 525     assert(thread->has_pending_exception(), "should have thrown OOME");
 526     thread->set_exception_oop(thread->pending_exception());
 527     thread->clear_pending_exception();
 528     exec_mode = Unpack_exception;
 529   }


 971 
 972     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 973     oop obj = NULL;
 974 
 975     if (k->is_instance_klass()) {
 976 #if INCLUDE_JVMCI || INCLUDE_AOT
 977       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 978       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 979         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 980         obj = get_cached_box(abv, fr, reg_map, THREAD);
 981         if (obj != NULL) {
 982           // Set the flag to indicate the box came from a cache, so that we can skip the field reassignment for it.
 983           abv->set_cached(true);
 984         }
 985       }
 986 #endif // INCLUDE_JVMCI || INCLUDE_AOT
 987       InstanceKlass* ik = InstanceKlass::cast(k);
 988       if (obj == NULL) {
 989         obj = ik->allocate_instance(THREAD);
 990       }
 991     } else if (k->is_valueArray_klass()) {
 992       ValueArrayKlass* ak = ValueArrayKlass::cast(k);
 993       // Value type array must be zeroed because not all memory is reassigned
 994       obj = ak->allocate(sv->field_size(), THREAD);
 995     } else if (k->is_typeArray_klass()) {
 996       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
 997       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
 998       int len = sv->field_size() / type2size[ak->element_type()];
 999       obj = ak->allocate(len, THREAD);
1000     } else if (k->is_objArray_klass()) {
1001       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
1002       obj = ak->allocate(sv->field_size(), THREAD);
1003     }
1004 
1005     if (obj == NULL) {
1006       failures = true;
1007     }
1008 
1009     assert(sv->value().is_null(), "redundant reallocation");
1010     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
1011     CLEAR_PENDING_EXCEPTION;
1012     sv->set_value(obj);
1013   }
1014 
1015   if (failures) {
1016     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
1017   } else if (pending_exception.not_null()) {
1018     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
1019   }
1020 
1021   return failures;
1022 }
1023 
// We're deoptimizing at the return of a call, value type fields are
// in registers. When we go back to the interpreter, it will expect a
// reference to a value type instance. Allocate and initialize it from
// the register values here.
//
// On success, return_oops is replaced by a single handle to the new
// instance and false is returned. If the allocation fails, the pending
// exception (if any) is discarded, a realloc-specific OOME is thrown,
// and true is returned via THROW_OOP_.
bool Deoptimization::realloc_value_type_result(ValueKlass* vk, const RegisterMap& map, GrowableArray<Handle>& return_oops, TRAPS) {
  oop new_vt = vk->realloc_result(map, return_oops, THREAD);
  if (new_vt == NULL) {
    // Allocation failed: replace whatever is pending with the shared OOME.
    CLEAR_PENDING_EXCEPTION;
    THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), true);
  }
  // The saved oop fields are now owned by the new instance; hand back a
  // single handle to it.
  return_oops.clear();
  return_oops.push(Handle(THREAD, new_vt));
  return false;
}
1038 
1039 // restore elements of an eliminated type array
1040 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
1041   int index = 0;
1042   intptr_t val;
1043 
1044   for (int i = 0; i < sv->field_size(); i++) {
1045     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1046     switch(type) {
1047     case T_LONG: case T_DOUBLE: {
1048       assert(value->type() == T_INT, "Agreement.");
1049       StackValue* low =
1050         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
1051 #ifdef _LP64
1052       jlong res = (jlong)low->get_int();
1053 #else
1054 #ifdef SPARC
1055       // For SPARC we have to swap high and low words.
1056       jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int());
1057 #else
1058       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());


1129         ShouldNotReachHere();
1130     }
1131     index++;
1132   }
1133 }
1134 
1135 
1136 // restore fields of an eliminated object array
1137 void Deoptimization::reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
1138   for (int i = 0; i < sv->field_size(); i++) {
1139     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1140     assert(value->type() == T_OBJECT, "object element expected");
1141     obj->obj_at_put(i, value->get_obj()());
1142   }
1143 }
1144 
1145 class ReassignedField {
1146 public:
1147   int _offset;
1148   BasicType _type;
1149   InstanceKlass* _klass;
1150 public:
1151   ReassignedField() {
1152     _offset = 0;
1153     _type = T_ILLEGAL;
1154     _klass = NULL;
1155   }
1156 };
1157 
1158 int compare(ReassignedField* left, ReassignedField* right) {
1159   return left->_offset - right->_offset;
1160 }
1161 
1162 // Restore fields of an eliminated instance object using the same field order
1163 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1164 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal, int base_offset, TRAPS) {
1165   if (klass->superklass() != NULL) {
1166     svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal, 0, CHECK_0);
1167   }
1168 
1169   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1170   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
1171     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1172       ReassignedField field;
1173       field._offset = fs.offset();
1174       field._type = FieldType::basic_type(fs.signature());
1175       if (field._type == T_VALUETYPE) {
1176         field._type = T_OBJECT;
1177       }
1178       if (fs.is_flattened()) {
1179         // Resolve klass of flattened value type field
1180         Klass* vk = klass->get_value_field_klass(fs.index());
1181         field._klass = ValueKlass::cast(vk);
1182         field._type = T_VALUETYPE;
1183       }
1184       fields->append(field);
1185     }
1186   }
1187   fields->sort(compare);
1188   for (int i = 0; i < fields->length(); i++) {
1189     intptr_t val;
1190     ScopeValue* scope_field = sv->field_at(svIndex);
1191     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1192     int offset = base_offset + fields->at(i)._offset;
1193     BasicType type = fields->at(i)._type;
1194     switch (type) {
1195       case T_OBJECT:
1196       case T_ARRAY:
1197         assert(value->type() == T_OBJECT, "Agreement.");
1198         obj->obj_field_put(offset, value->get_obj()());
1199         break;
1200 
1201       case T_VALUETYPE: {
1202         // Recursively re-assign flattened value type fields
1203         InstanceKlass* vk = fields->at(i)._klass;
1204         assert(vk != NULL, "must be resolved");
1205         offset -= ValueKlass::cast(vk)->first_field_offset(); // Adjust offset to omit oop header
1206         svIndex = reassign_fields_by_klass(vk, fr, reg_map, sv, svIndex, obj, skip_internal, offset, CHECK_0);
1207         continue; // Continue because we don't need to increment svIndex
1208       }
1209 
1210       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
1211       case T_INT: case T_FLOAT: { // 4 bytes.
1212         assert(value->type() == T_INT, "Agreement.");
1213         bool big_value = false;
1214         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {
1215           if (scope_field->is_location()) {
1216             Location::Type type = ((LocationValue*) scope_field)->location().type();
1217             if (type == Location::dbl || type == Location::lng) {
1218               big_value = true;
1219             }
1220           }
1221           if (scope_field->is_constant_int()) {
1222             ScopeValue* next_scope_field = sv->field_at(svIndex + 1);
1223             if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) {
1224               big_value = true;
1225             }
1226           }
1227         }
1228 
1229         if (big_value) {


1270       case T_BYTE:
1271         assert(value->type() == T_INT, "Agreement.");
1272         val = value->get_int();
1273         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1274         break;
1275 
1276       case T_BOOLEAN:
1277         assert(value->type() == T_INT, "Agreement.");
1278         val = value->get_int();
1279         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1280         break;
1281 
1282       default:
1283         ShouldNotReachHere();
1284     }
1285     svIndex++;
1286   }
1287   return svIndex;
1288 }
1289 
1290 // restore fields of an eliminated value type array
1291 void Deoptimization::reassign_value_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, valueArrayOop obj, ValueArrayKlass* vak, TRAPS) {
1292   ValueKlass* vk = vak->element_klass();
1293   assert(vk->flatten_array(), "should only be used for flattened value type arrays");
1294   // Adjust offset to omit oop header
1295   int base_offset = arrayOopDesc::base_offset_in_bytes(T_VALUETYPE) - ValueKlass::cast(vk)->first_field_offset();
1296   // Initialize all elements of the flattened value type array
1297   for (int i = 0; i < sv->field_size(); i++) {
1298     ScopeValue* val = sv->field_at(i);
1299     int offset = base_offset + (i << Klass::layout_helper_log2_element_size(vak->layout_helper()));
1300     reassign_fields_by_klass(vk, fr, reg_map, val->as_ObjectValue(), 0, (oop)obj, false /* skip_internal */, offset, CHECK);
1301   }
1302 }
1303 
1304 // restore fields of all eliminated objects and arrays
1305 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal, TRAPS) {
1306   for (int i = 0; i < objects->length(); i++) {
1307     ObjectValue* sv = (ObjectValue*) objects->at(i);
1308     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1309     Handle obj = sv->value();
1310     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1311     if (PrintDeoptimizationDetails) {
1312       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1313     }
1314     if (obj.is_null()) {
1315       continue;
1316     }
1317 #if INCLUDE_JVMCI || INCLUDE_AOT
1318     // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
1319     if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
1320       continue;
1321     }
1322 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1323     if (k->is_instance_klass()) {
1324       InstanceKlass* ik = InstanceKlass::cast(k);
1325       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal, 0, CHECK);
1326     } else if (k->is_valueArray_klass()) {
1327       ValueArrayKlass* vak = ValueArrayKlass::cast(k);
1328       reassign_value_array_elements(fr, reg_map, sv, (valueArrayOop) obj(), vak, CHECK);
1329     } else if (k->is_typeArray_klass()) {
1330       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1331       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1332     } else if (k->is_objArray_klass()) {
1333       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1334     }
1335   }
1336 }
1337 
1338 
1339 // relock objects for which synchronization was eliminated
1340 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1341   for (int i = 0; i < monitors->length(); i++) {
1342     MonitorInfo* mon_info = monitors->at(i);
1343     if (mon_info->eliminated()) {
1344       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1345       if (!mon_info->owner_is_scalar_replaced()) {
1346         Handle obj(thread, mon_info->owner());
1347         markOop mark = obj->mark();
1348         if (UseBiasedLocking && mark->has_bias_pattern()) {


< prev index next >