< prev index next >

src/hotspot/share/c1/c1_Runtime1.cpp

Print this page

   1 /*
   2  * Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeBlob.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/scopeDesc.hpp"
  38 #include "code/vtableStubs.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/c1/barrierSetC1.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/bytecode.hpp"
  46 #include "interpreter/interpreter.hpp"
  47 #include "jfr/support/jfrIntrinsics.hpp"
  48 #include "logging/log.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "oops/access.inline.hpp"



  53 #include "oops/objArrayKlass.hpp"
  54 #include "oops/objArrayOop.inline.hpp"
  55 #include "oops/oop.inline.hpp"
  56 #include "prims/jvmtiExport.hpp"
  57 #include "runtime/atomicAccess.hpp"
  58 #include "runtime/fieldDescriptor.inline.hpp"
  59 #include "runtime/frame.inline.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/interfaceSupport.inline.hpp"
  62 #include "runtime/javaCalls.hpp"
  63 #include "runtime/sharedRuntime.hpp"
  64 #include "runtime/stackWatermarkSet.hpp"
  65 #include "runtime/stubInfo.hpp"
  66 #include "runtime/stubRoutines.hpp"
  67 #include "runtime/vframe.inline.hpp"
  68 #include "runtime/vframeArray.hpp"
  69 #include "runtime/vm_version.hpp"
  70 #include "utilities/copy.hpp"
  71 #include "utilities/events.hpp"
  72 

  97 
  98 
// Record the number of arguments of the runtime call generated for this
// stub. The value is latched the first time it is set (while _num_rt_args
// is still 0); any later call must pass the same count — the assert below
// catches an attempt to change it.
void StubAssembler::set_num_rt_args(int args) {
  if (_num_rt_args == 0) {
    _num_rt_args = args;
  }
  assert(_num_rt_args == args, "can't change the number of args");
}
 105 
 106 // Implementation of Runtime1
 107 CodeBlob* Runtime1::_blobs[StubInfo::C1_STUB_COUNT];
 108 
 109 #ifndef PRODUCT
 110 // statistics
 111 uint Runtime1::_generic_arraycopystub_cnt = 0;
 112 uint Runtime1::_arraycopy_slowcase_cnt = 0;
 113 uint Runtime1::_arraycopy_checkcast_cnt = 0;
 114 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
 115 uint Runtime1::_new_type_array_slowcase_cnt = 0;
 116 uint Runtime1::_new_object_array_slowcase_cnt = 0;

 117 uint Runtime1::_new_instance_slowcase_cnt = 0;
 118 uint Runtime1::_new_multi_array_slowcase_cnt = 0;





 119 uint Runtime1::_monitorenter_slowcase_cnt = 0;
 120 uint Runtime1::_monitorexit_slowcase_cnt = 0;
 121 uint Runtime1::_patch_code_slowcase_cnt = 0;
 122 uint Runtime1::_throw_range_check_exception_count = 0;
 123 uint Runtime1::_throw_index_exception_count = 0;
 124 uint Runtime1::_throw_div0_exception_count = 0;
 125 uint Runtime1::_throw_null_pointer_exception_count = 0;
 126 uint Runtime1::_throw_class_cast_exception_count = 0;
 127 uint Runtime1::_throw_incompatible_class_change_error_count = 0;


 128 uint Runtime1::_throw_count = 0;
 129 
 130 static uint _byte_arraycopy_stub_cnt = 0;
 131 static uint _short_arraycopy_stub_cnt = 0;
 132 static uint _int_arraycopy_stub_cnt = 0;
 133 static uint _long_arraycopy_stub_cnt = 0;
 134 static uint _oop_arraycopy_stub_cnt = 0;
 135 
 136 address Runtime1::arraycopy_count_address(BasicType type) {
 137   switch (type) {
 138   case T_BOOLEAN:
 139   case T_BYTE:   return (address)&_byte_arraycopy_stub_cnt;
 140   case T_CHAR:
 141   case T_SHORT:  return (address)&_short_arraycopy_stub_cnt;
 142   case T_FLOAT:
 143   case T_INT:    return (address)&_int_arraycopy_stub_cnt;
 144   case T_DOUBLE:
 145   case T_LONG:   return (address)&_long_arraycopy_stub_cnt;
 146   case T_ARRAY:
 147   case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;

 355   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
 356   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
 357   FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
 358   FUNCTION_CASE(entry, StubRoutines::dexp());
 359   FUNCTION_CASE(entry, StubRoutines::dlog());
 360   FUNCTION_CASE(entry, StubRoutines::dlog10());
 361   FUNCTION_CASE(entry, StubRoutines::dpow());
 362   FUNCTION_CASE(entry, StubRoutines::dsin());
 363   FUNCTION_CASE(entry, StubRoutines::dcos());
 364   FUNCTION_CASE(entry, StubRoutines::dtan());
 365   FUNCTION_CASE(entry, StubRoutines::dsinh());
 366   FUNCTION_CASE(entry, StubRoutines::dtanh());
 367   FUNCTION_CASE(entry, StubRoutines::dcbrt());
 368 
 369 #undef FUNCTION_CASE
 370 
 371   // Soft float adds more runtime names.
 372   return pd_name_for_address(entry);
 373 }
 374 
 375 
 376 JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
 377 #ifndef PRODUCT
 378   if (PrintC1Statistics) {
 379     _new_instance_slowcase_cnt++;
 380   }
 381 #endif
 382   assert(klass->is_klass(), "not a class");
 383   Handle holder(current, klass->klass_holder()); // keep the klass alive
 384   InstanceKlass* h = InstanceKlass::cast(klass);
 385   h->check_valid_for_instantiation(true, CHECK);
 386   // make sure klass is initialized
 387   h->initialize(CHECK);
 388   // allocate instance and return via TLS
 389   oop obj = h->allocate_instance(CHECK);
 390   current->set_vm_result_oop(obj);
 391 JRT_END
 392 



 393 
 394 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
 395 #ifndef PRODUCT
 396   if (PrintC1Statistics) {
 397     _new_type_array_slowcase_cnt++;
 398   }
 399 #endif
 400   // Note: no handle for klass needed since they are not used
 401   //       anymore after new_typeArray() and no GC can happen before.
 402   //       (This may have to change if this code changes!)
 403   assert(klass->is_klass(), "not a class");
 404   BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
 405   oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
 406   current->set_vm_result_oop(obj);
 407   // This is pretty rare but this runtime patch is stressful to deoptimization
 408   // if we deoptimize here so force a deopt to stress the path.
 409   if (DeoptimizeALot) {
 410     deopt_caller(current);
 411   }
 412 

 418   if (PrintC1Statistics) {
 419     _new_object_array_slowcase_cnt++;
 420   }
 421 #endif
 422   // Note: no handle for klass needed since they are not used
 423   //       anymore after new_objArray() and no GC can happen before.
 424   //       (This may have to change if this code changes!)
 425   assert(array_klass->is_klass(), "not a class");
 426   Handle holder(current, array_klass->klass_holder()); // keep the klass alive
 427   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 428   objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
 429   current->set_vm_result_oop(obj);
 430   // This is pretty rare but this runtime patch is stressful to deoptimization
 431   // if we deoptimize here so force a deopt to stress the path.
 432   if (DeoptimizeALot) {
 433     deopt_caller(current);
 434   }
 435 JRT_END
 436 
 437 


























 438 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
 439 #ifndef PRODUCT
 440   if (PrintC1Statistics) {
 441     _new_multi_array_slowcase_cnt++;
 442   }
 443 #endif
 444   assert(klass->is_klass(), "not a class");
 445   assert(rank >= 1, "rank must be nonzero");
 446   Handle holder(current, klass->klass_holder()); // keep the klass alive
 447   oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
 448   current->set_vm_result_oop(obj);
 449 JRT_END
 450 
 451 
































































































// Fallback entry installed for stubs that are not implemented on this
// platform; merely reports which stub id was requested.
JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, StubId id))
  tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
JRT_END
 455 
 456 
 457 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
 458   ResourceMark rm(current);
 459   const char* klass_name = obj->klass()->external_name();
 460   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
 461 JRT_END
 462 
 463 
 464 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
 465 // associated with the top activation record. The inlinee (that is possibly included in the enclosing
 466 // method) method is passed as an argument. In order to do that it is embedded in the code as
 467 // a constant.
 468 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
 469   nmethod* osr_nm = nullptr;
 470   methodHandle method(current, m);
 471 

 749     _throw_class_cast_exception_count++;
 750   }
 751 #endif
 752   ResourceMark rm(current);
 753   char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
 754   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
 755 JRT_END
 756 
 757 
// Raise java.lang.IncompatibleClassChangeError (no message text).
JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _throw_incompatible_class_change_error_count++;
  }
#endif
  ResourceMark rm(current);
  SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
JRT_END
 767 
 768 













// Slow-path monitor enter for C1-compiled code. 'lock' is the
// BasicObjectLock slot in the caller's frame; compiled code has already
// stored the object into it (checked by the assert). The actual locking is
// delegated to the shared runtime helper.
JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _monitorenter_slowcase_cnt++;
  }
#endif
  assert(obj == lock->obj(), "must match");
  SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
JRT_END
 778 
 779 
 780 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
 781   assert(current == JavaThread::current(), "pre-condition");
 782 #ifndef PRODUCT
 783   if (PrintC1Statistics) {
 784     _monitorexit_slowcase_cnt++;
 785   }
 786 #endif
 787   assert(current->last_Java_sp(), "last_Java_sp must be set");
 788   oop obj = lock->obj();

 951                       RegisterMap::WalkContinuation::skip);
 952   frame runtime_frame = current->last_frame();
 953   frame caller_frame = runtime_frame.sender(&reg_map);
 954 
 955   // last java frame on stack
 956   vframeStream vfst(current, true);
 957   assert(!vfst.at_end(), "Java frame must exist");
 958 
 959   methodHandle caller_method(current, vfst.method());
 960   // Note that caller_method->code() may not be same as caller_code because of OSR's
 961   // Note also that in the presence of inlining it is not guaranteed
 962   // that caller_method() == caller_code->method()
 963 
 964   int bci = vfst.bci();
 965   Bytecodes::Code code = caller_method()->java_code_at(bci);
 966 
 967   // this is used by assertions in the access_field_patching_id
 968   BasicType patch_field_type = T_ILLEGAL;
 969   bool deoptimize_for_volatile = false;
 970   bool deoptimize_for_atomic = false;



 971   int patch_field_offset = -1;
 972   Klass* init_klass = nullptr; // klass needed by load_klass_patching code
 973   Klass* load_klass = nullptr; // klass needed by load_klass_patching code
 974   Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
 975   Handle appendix(current, nullptr); // oop needed by appendix_patching code
 976   bool load_klass_or_mirror_patch_id =
 977     (stub_id == StubId::c1_load_klass_patching_id || stub_id == StubId::c1_load_mirror_patching_id);
 978 
 979   if (stub_id == StubId::c1_access_field_patching_id) {
 980 
 981     Bytecode_field field_access(caller_method, bci);
 982     fieldDescriptor result; // initialize class if needed
 983     Bytecodes::Code code = field_access.code();
 984     constantPoolHandle constants(current, caller_method->constants());
 985     LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
 986     patch_field_offset = result.offset();
 987 
 988     // If we're patching a field which is volatile then at compile it
 989     // must not have been know to be volatile, so the generated code
 990     // isn't correct for a volatile reference.  The nmethod has to be

 994     // used for patching references to oops which don't need special
 995     // handling in the volatile case.
 996 
 997     deoptimize_for_volatile = result.access_flags().is_volatile();
 998 
 999     // If we are patching a field which should be atomic, then
1000     // the generated code is not correct either, force deoptimizing.
1001     // We need to only cover T_LONG and T_DOUBLE fields, as we can
1002     // break access atomicity only for them.
1003 
1004     // Strictly speaking, the deoptimization on 64-bit platforms
1005     // is unnecessary, and T_LONG stores on 32-bit platforms need
1006     // to be handled by special patching code when AlwaysAtomicAccesses
1007     // becomes product feature. At this point, we are still going
1008     // for the deoptimization for consistency against volatile
1009     // accesses.
1010 
1011     patch_field_type = result.field_type();
1012     deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1013 













1014   } else if (load_klass_or_mirror_patch_id) {
1015     Klass* k = nullptr;
1016     switch (code) {
1017       case Bytecodes::_putstatic:
1018       case Bytecodes::_getstatic:
1019         { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1020           init_klass = klass;
1021           mirror = Handle(current, klass->java_mirror());
1022         }
1023         break;
1024       case Bytecodes::_new:
1025         { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1026           k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1027         }
1028         break;
1029       case Bytecodes::_multianewarray:
1030         { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1031           k = caller_method->constants()->klass_at(mna.index(), CHECK);
1032         }
1033         break;
1034       case Bytecodes::_instanceof:
1035         { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1036           k = caller_method->constants()->klass_at(io.index(), CHECK);
1037         }
1038         break;
1039       case Bytecodes::_checkcast:
1040         { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1041           k = caller_method->constants()->klass_at(cc.index(), CHECK);
1042         }
1043         break;
1044       case Bytecodes::_anewarray:
1045         { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1046           Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1047           k = ek->array_klass(CHECK);






1048         }
1049         break;
1050       case Bytecodes::_ldc:
1051       case Bytecodes::_ldc_w:
1052       case Bytecodes::_ldc2_w:
1053         {
1054           Bytecode_loadconstant cc(caller_method, bci);
1055           oop m = cc.resolve_constant(CHECK);
1056           mirror = Handle(current, m);
1057         }
1058         break;
1059       default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1060     }
1061     load_klass = k;
1062   } else if (stub_id == StubId::c1_load_appendix_patching_id) {
1063     Bytecode_invoke bytecode(caller_method, bci);
1064     Bytecodes::Code bc = bytecode.invoke_code();
1065 
1066     CallInfo info;
1067     constantPoolHandle pool(current, caller_method->constants());
1068     int index = bytecode.index();
1069     LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1070     switch (bc) {
1071       case Bytecodes::_invokehandle: {
1072         ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1073         appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1074         break;
1075       }
1076       case Bytecodes::_invokedynamic: {
1077         appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1078         break;
1079       }
1080       default: fatal("unexpected bytecode for load_appendix_patching_id");
1081     }
1082   } else {
1083     ShouldNotReachHere();
1084   }
1085 
1086   if (deoptimize_for_volatile || deoptimize_for_atomic) {




1087     // At compile time we assumed the field wasn't volatile/atomic but after
1088     // loading it turns out it was volatile/atomic so we have to throw the
1089     // compiled code out and let it be regenerated.
1090     if (TracePatching) {
1091       if (deoptimize_for_volatile) {
1092         tty->print_cr("Deoptimizing for patching volatile field reference");
1093       }
1094       if (deoptimize_for_atomic) {
1095         tty->print_cr("Deoptimizing for patching atomic field reference");
1096       }









1097     }
1098 
1099     // It's possible the nmethod was invalidated in the last
1100     // safepoint, but if it's still alive then make it not_entrant.
1101     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1102     if (nm != nullptr) {
1103       nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1104     }
1105 
1106     Deoptimization::deoptimize_frame(current, caller_frame.id());
1107 
1108     // Return to the now deoptimized frame.
1109   }
1110 
1111   // Now copy code back
1112 
1113   {
1114     MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1115     //
1116     // Deoptimization may have happened while we waited for the lock.

1527 #ifndef PRODUCT
// Dump the C1 runtime slow-path counters (declared above, maintained only
// in non-product builds) together with a few SharedRuntime resolution/IC
// counters, followed by the inline-cache miss histogram.
void Runtime1::print_statistics() {
  tty->print_cr("C1 Runtime statistics:");
  tty->print_cr(" _resolve_invoke_virtual_cnt:     %u", SharedRuntime::_resolve_virtual_ctr);
  tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
  tty->print_cr(" _resolve_invoke_static_cnt:      %u", SharedRuntime::_resolve_static_ctr);
  tty->print_cr(" _handle_wrong_method_cnt:        %u", SharedRuntime::_wrong_method_ctr);
  tty->print_cr(" _ic_miss_cnt:                    %u", SharedRuntime::_ic_miss_ctr);
  tty->print_cr(" _generic_arraycopystub_cnt:      %u", _generic_arraycopystub_cnt);
  tty->print_cr(" _byte_arraycopy_cnt:             %u", _byte_arraycopy_stub_cnt);
  tty->print_cr(" _short_arraycopy_cnt:            %u", _short_arraycopy_stub_cnt);
  tty->print_cr(" _int_arraycopy_cnt:              %u", _int_arraycopy_stub_cnt);
  tty->print_cr(" _long_arraycopy_cnt:             %u", _long_arraycopy_stub_cnt);
  tty->print_cr(" _oop_arraycopy_cnt:              %u", _oop_arraycopy_stub_cnt);
  tty->print_cr(" _arraycopy_slowcase_cnt:         %u", _arraycopy_slowcase_cnt);
  tty->print_cr(" _arraycopy_checkcast_cnt:        %u", _arraycopy_checkcast_cnt);
  tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);

  tty->print_cr(" _new_type_array_slowcase_cnt:    %u", _new_type_array_slowcase_cnt);
  tty->print_cr(" _new_object_array_slowcase_cnt:  %u", _new_object_array_slowcase_cnt);
  tty->print_cr(" _new_instance_slowcase_cnt:      %u", _new_instance_slowcase_cnt);
  tty->print_cr(" _new_multi_array_slowcase_cnt:   %u", _new_multi_array_slowcase_cnt);
  tty->print_cr(" _monitorenter_slowcase_cnt:      %u", _monitorenter_slowcase_cnt);
  tty->print_cr(" _monitorexit_slowcase_cnt:       %u", _monitorexit_slowcase_cnt);
  tty->print_cr(" _patch_code_slowcase_cnt:        %u", _patch_code_slowcase_cnt);

  tty->print_cr(" _throw_range_check_exception_count:            %u:", _throw_range_check_exception_count);
  tty->print_cr(" _throw_index_exception_count:                  %u:", _throw_index_exception_count);
  tty->print_cr(" _throw_div0_exception_count:                   %u:", _throw_div0_exception_count);
  tty->print_cr(" _throw_null_pointer_exception_count:           %u:", _throw_null_pointer_exception_count);
  tty->print_cr(" _throw_class_cast_exception_count:             %u:", _throw_class_cast_exception_count);
  tty->print_cr(" _throw_incompatible_class_change_error_count:  %u:", _throw_incompatible_class_change_error_count);
  tty->print_cr(" _throw_count:                                  %u:", _throw_count);

  SharedRuntime::print_ic_miss_histogram();
  tty->cr();
}
1564 #endif // PRODUCT

   1 /*
   2  * Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeBlob.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/scopeDesc.hpp"
  38 #include "code/vtableStubs.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/c1/barrierSetC1.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/bytecode.hpp"
  46 #include "interpreter/interpreter.hpp"
  47 #include "jfr/support/jfrIntrinsics.hpp"
  48 #include "logging/log.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "oops/access.inline.hpp"
  53 #include "oops/arrayProperties.hpp"
  54 #include "oops/flatArrayKlass.hpp"
  55 #include "oops/flatArrayOop.inline.hpp"
  56 #include "oops/objArrayKlass.hpp"
  57 #include "oops/objArrayOop.inline.hpp"
  58 #include "oops/oop.inline.hpp"
  59 #include "prims/jvmtiExport.hpp"
  60 #include "runtime/atomicAccess.hpp"
  61 #include "runtime/fieldDescriptor.inline.hpp"
  62 #include "runtime/frame.inline.hpp"
  63 #include "runtime/handles.inline.hpp"
  64 #include "runtime/interfaceSupport.inline.hpp"
  65 #include "runtime/javaCalls.hpp"
  66 #include "runtime/sharedRuntime.hpp"
  67 #include "runtime/stackWatermarkSet.hpp"
  68 #include "runtime/stubInfo.hpp"
  69 #include "runtime/stubRoutines.hpp"
  70 #include "runtime/vframe.inline.hpp"
  71 #include "runtime/vframeArray.hpp"
  72 #include "runtime/vm_version.hpp"
  73 #include "utilities/copy.hpp"
  74 #include "utilities/events.hpp"
  75 

 100 
 101 
// Record the number of arguments of the runtime call generated for this
// stub. The value is latched the first time it is set (while _num_rt_args
// is still 0); any later call must pass the same count — the assert below
// catches an attempt to change it.
void StubAssembler::set_num_rt_args(int args) {
  if (_num_rt_args == 0) {
    _num_rt_args = args;
  }
  assert(_num_rt_args == args, "can't change the number of args");
}
 108 
 109 // Implementation of Runtime1
 110 CodeBlob* Runtime1::_blobs[StubInfo::C1_STUB_COUNT];
 111 
 112 #ifndef PRODUCT
 113 // statistics
 114 uint Runtime1::_generic_arraycopystub_cnt = 0;
 115 uint Runtime1::_arraycopy_slowcase_cnt = 0;
 116 uint Runtime1::_arraycopy_checkcast_cnt = 0;
 117 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
 118 uint Runtime1::_new_type_array_slowcase_cnt = 0;
 119 uint Runtime1::_new_object_array_slowcase_cnt = 0;
 120 uint Runtime1::_new_null_free_array_slowcase_cnt = 0;
 121 uint Runtime1::_new_instance_slowcase_cnt = 0;
 122 uint Runtime1::_new_multi_array_slowcase_cnt = 0;
 123 uint Runtime1::_load_flat_array_slowcase_cnt = 0;
 124 uint Runtime1::_store_flat_array_slowcase_cnt = 0;
 125 uint Runtime1::_substitutability_check_slowcase_cnt = 0;
 126 uint Runtime1::_buffer_inline_args_slowcase_cnt = 0;
 127 uint Runtime1::_buffer_inline_args_no_receiver_slowcase_cnt = 0;
 128 uint Runtime1::_monitorenter_slowcase_cnt = 0;
 129 uint Runtime1::_monitorexit_slowcase_cnt = 0;
 130 uint Runtime1::_patch_code_slowcase_cnt = 0;
 131 uint Runtime1::_throw_range_check_exception_count = 0;
 132 uint Runtime1::_throw_index_exception_count = 0;
 133 uint Runtime1::_throw_div0_exception_count = 0;
 134 uint Runtime1::_throw_null_pointer_exception_count = 0;
 135 uint Runtime1::_throw_class_cast_exception_count = 0;
 136 uint Runtime1::_throw_incompatible_class_change_error_count = 0;
 137 uint Runtime1::_throw_illegal_monitor_state_exception_count = 0;
 138 uint Runtime1::_throw_identity_exception_count = 0;
 139 uint Runtime1::_throw_count = 0;
 140 
 141 static uint _byte_arraycopy_stub_cnt = 0;
 142 static uint _short_arraycopy_stub_cnt = 0;
 143 static uint _int_arraycopy_stub_cnt = 0;
 144 static uint _long_arraycopy_stub_cnt = 0;
 145 static uint _oop_arraycopy_stub_cnt = 0;
 146 
 147 address Runtime1::arraycopy_count_address(BasicType type) {
 148   switch (type) {
 149   case T_BOOLEAN:
 150   case T_BYTE:   return (address)&_byte_arraycopy_stub_cnt;
 151   case T_CHAR:
 152   case T_SHORT:  return (address)&_short_arraycopy_stub_cnt;
 153   case T_FLOAT:
 154   case T_INT:    return (address)&_int_arraycopy_stub_cnt;
 155   case T_DOUBLE:
 156   case T_LONG:   return (address)&_long_arraycopy_stub_cnt;
 157   case T_ARRAY:
 158   case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;

 366   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
 367   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
 368   FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
 369   FUNCTION_CASE(entry, StubRoutines::dexp());
 370   FUNCTION_CASE(entry, StubRoutines::dlog());
 371   FUNCTION_CASE(entry, StubRoutines::dlog10());
 372   FUNCTION_CASE(entry, StubRoutines::dpow());
 373   FUNCTION_CASE(entry, StubRoutines::dsin());
 374   FUNCTION_CASE(entry, StubRoutines::dcos());
 375   FUNCTION_CASE(entry, StubRoutines::dtan());
 376   FUNCTION_CASE(entry, StubRoutines::dsinh());
 377   FUNCTION_CASE(entry, StubRoutines::dtanh());
 378   FUNCTION_CASE(entry, StubRoutines::dcbrt());
 379 
 380 #undef FUNCTION_CASE
 381 
 382   // Soft float adds more runtime names.
 383   return pd_name_for_address(entry);
 384 }
 385 
 386 static void allocate_instance(JavaThread* current, Klass* klass, TRAPS) {

 387 #ifndef PRODUCT
 388   if (PrintC1Statistics) {
 389     Runtime1::_new_instance_slowcase_cnt++;
 390   }
 391 #endif
 392   assert(klass->is_klass(), "not a class");
 393   Handle holder(current, klass->klass_holder()); // keep the klass alive
 394   InstanceKlass* h = InstanceKlass::cast(klass);
 395   h->check_valid_for_instantiation(true, CHECK);
 396   // make sure klass is initialized
 397   h->initialize(CHECK);
 398   // allocate instance and return via TLS
 399   oop obj = h->allocate_instance(CHECK);
 400   current->set_vm_result_oop(obj);
 401 JRT_END
 402 
// C1 slow-path entry: allocate a new instance of 'klass'; all of the work
// (validity check, initialization, allocation) happens in allocate_instance.
JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
  allocate_instance(current, klass, CHECK);
JRT_END
 406 
 407 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
 408 #ifndef PRODUCT
 409   if (PrintC1Statistics) {
 410     _new_type_array_slowcase_cnt++;
 411   }
 412 #endif
 413   // Note: no handle for klass needed since they are not used
 414   //       anymore after new_typeArray() and no GC can happen before.
 415   //       (This may have to change if this code changes!)
 416   assert(klass->is_klass(), "not a class");
 417   BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
 418   oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
 419   current->set_vm_result_oop(obj);
 420   // This is pretty rare but this runtime patch is stressful to deoptimization
 421   // if we deoptimize here so force a deopt to stress the path.
 422   if (DeoptimizeALot) {
 423     deopt_caller(current);
 424   }
 425 

 431   if (PrintC1Statistics) {
 432     _new_object_array_slowcase_cnt++;
 433   }
 434 #endif
 435   // Note: no handle for klass needed since they are not used
 436   //       anymore after new_objArray() and no GC can happen before.
 437   //       (This may have to change if this code changes!)
 438   assert(array_klass->is_klass(), "not a class");
 439   Handle holder(current, array_klass->klass_holder()); // keep the klass alive
 440   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 441   objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
 442   current->set_vm_result_oop(obj);
 443   // This is pretty rare but this runtime patch is stressful to deoptimization
 444   // if we deoptimize here so force a deopt to stress the path.
 445   if (DeoptimizeALot) {
 446     deopt_caller(current);
 447   }
 448 JRT_END
 449 
 450 
 451 JRT_ENTRY(void, Runtime1::new_null_free_array(JavaThread* current, Klass* array_klass, jint length))
 452   NOT_PRODUCT(_new_null_free_array_slowcase_cnt++;)
 453   // TODO 8350865 This is dead code since 8325660 because null-free arrays can only be created via the factory methods that are not yet implemented in C1. Should probably be fixed by 8265122.
 454 
 455   // Note: no handle for klass needed since they are not used
 456   //       anymore after new_objArray() and no GC can happen before.
 457   //       (This may have to change if this code changes!)
 458   assert(array_klass->is_klass(), "not a class");
 459   Handle holder(THREAD, array_klass->klass_holder()); // keep the klass alive
 460   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 461   assert(elem_klass->is_inline_klass(), "must be");
 462   // Logically creates elements, ensure klass init
 463   elem_klass->initialize(CHECK);
 464 
 465   const ArrayProperties props = ArrayProperties::Default().with_null_restricted();
 466   arrayOop obj = oopFactory::new_objArray(elem_klass, length, props, CHECK);
 467 
 468   current->set_vm_result_oop(obj);
 469   // This is pretty rare but this runtime patch is stressful to deoptimization
 470   // if we deoptimize here so force a deopt to stress the path.
 471   if (DeoptimizeALot) {
 472     deopt_caller(current);
 473   }
 474 JRT_END
 475 
 476 
 477 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
 478 #ifndef PRODUCT
 479   if (PrintC1Statistics) {
 480     _new_multi_array_slowcase_cnt++;
 481   }
 482 #endif
 483   assert(klass->is_klass(), "not a class");
 484   assert(rank >= 1, "rank must be nonzero");
 485   Handle holder(current, klass->klass_holder()); // keep the klass alive
 486   oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
 487   current->set_vm_result_oop(obj);
 488 JRT_END
 489 
 490 
 491 static void profile_flat_array(JavaThread* current, bool load, bool null_free) {
 492   ResourceMark rm(current);
 493   vframeStream vfst(current, true);
 494   assert(!vfst.at_end(), "Java frame must exist");
 495   // Check if array access profiling is enabled
 496   if (vfst.nm()->comp_level() != CompLevel_full_profile || !C1UpdateMethodData) {
 497     return;
 498   }
 499   int bci = vfst.bci();
 500   Method* method = vfst.method();
 501   MethodData* md = method->method_data();
 502   if (md != nullptr) {
 503     // Lock to access ProfileData, and ensure lock is not broken by a safepoint
 504     MutexLocker ml(md->extra_data_lock(), Mutex::_no_safepoint_check_flag);
 505 
 506     ProfileData* data = md->bci_to_data(bci);
 507     assert(data != nullptr, "incorrect profiling entry");
 508     if (data->is_ArrayLoadData()) {
 509       assert(load, "should be an array load");
 510       ArrayLoadData* load_data = (ArrayLoadData*) data;
 511       load_data->set_flat_array();
 512       if (null_free) {
 513         load_data->set_null_free_array();
 514       }
 515     } else {
 516       assert(data->is_ArrayStoreData(), "");
 517       assert(!load, "should be an array store");
 518       ArrayStoreData* store_data = (ArrayStoreData*) data;
 519       store_data->set_flat_array();
 520       if (null_free) {
 521         store_data->set_null_free_array();
 522       }
 523     }
 524   }
 525 }
 526 
 527 JRT_ENTRY(void, Runtime1::load_flat_array(JavaThread* current, flatArrayOopDesc* array, int index))
 528   assert(array->klass()->is_flatArray_klass(), "should not be called");
 529   profile_flat_array(current, true, array->is_null_free_array());
 530 
 531   NOT_PRODUCT(_load_flat_array_slowcase_cnt++;)
 532   assert(array->length() > 0 && index < array->length(), "already checked");
 533   flatArrayHandle vah(current, array);
 534   oop obj = array->obj_at(index, CHECK);
 535   current->set_vm_result_oop(obj);
 536 JRT_END
 537 
// Slow-path store of an element into a flat array from C1-compiled code.
JRT_ENTRY(void, Runtime1::store_flat_array(JavaThread* current, flatArrayOopDesc* array, int index, oopDesc* value))
  // TODO 8350865 We can call here with a non-flat array because of LIR_Assembler::emit_opFlattenedArrayCheck
  if (array->klass()->is_flatArray_klass()) {
    profile_flat_array(current, false, array->is_null_free_array());
  }

  NOT_PRODUCT(_store_flat_array_slowcase_cnt++;)
  // Storing null into a null-restricted array must throw NullPointerException.
  if (value == nullptr && array->is_null_free_array()) {
    SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_NullPointerException());
  } else {
    assert(array->klass()->is_flatArray_klass(), "should not be called");
    array->obj_at_put(index, value, CHECK);
  }
JRT_END
 552 
 553 JRT_ENTRY(int, Runtime1::substitutability_check(JavaThread* current, oopDesc* left, oopDesc* right))
 554   NOT_PRODUCT(_substitutability_check_slowcase_cnt++;)
 555   JavaCallArguments args;
 556   args.push_oop(Handle(THREAD, left));
 557   args.push_oop(Handle(THREAD, right));
 558   JavaValue result(T_BOOLEAN);
 559   JavaCalls::call_static(&result,
 560                          vmClasses::ValueObjectMethods_klass(),
 561                          vmSymbols::isSubstitutable_name(),
 562                          vmSymbols::object_object_boolean_signature(),
 563                          &args, CHECK_0);
 564   return result.get_jboolean() ? 1 : 0;
 565 JRT_END
 566 
 567 
 568 extern "C" void ps();
 569 
// Common implementation for the buffer_inline_args entries: heap-buffers the
// inline-type arguments of 'm' (optionally including the receiver) and stores
// the result in the thread's vm_result_oop slot.
void Runtime1::buffer_inline_args_impl(JavaThread* current, Method* m, bool allocate_receiver) {
  JavaThread* THREAD = current; // Needed by the CHECK macro below, which expands to THREAD.
  methodHandle method(current, m); // We are inside the verified_entry or verified_inline_ro_entry of this method.
  oop obj = SharedRuntime::allocate_inline_types_impl(current, method, allocate_receiver, CHECK);
  current->set_vm_result_oop(obj);
}
 576 
// Buffer the inline-type arguments of 'method', including the receiver.
JRT_ENTRY(void, Runtime1::buffer_inline_args(JavaThread* current, Method* method))
  NOT_PRODUCT(_buffer_inline_args_slowcase_cnt++;)
  buffer_inline_args_impl(current, method, true);
JRT_END
 581 
// Buffer the inline-type arguments of 'method', excluding the receiver.
JRT_ENTRY(void, Runtime1::buffer_inline_args_no_receiver(JavaThread* current, Method* method))
  NOT_PRODUCT(_buffer_inline_args_no_receiver_slowcase_cnt++;)
  buffer_inline_args_impl(current, method, false);
JRT_END
 586 
// Fallback entry used when a stub id has no generated entry point yet;
// just reports the missing stub to tty.
JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, StubId id))
  tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
JRT_END
 590 
 591 
 592 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
 593   ResourceMark rm(current);
 594   const char* klass_name = obj->klass()->external_name();
 595   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
 596 JRT_END
 597 
 598 
 599 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
 600 // associated with the top activation record. The inlinee (that is possibly included in the enclosing
 601 // method) method is passed as an argument. In order to do that it is embedded in the code as
 602 // a constant.
 603 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
 604   nmethod* osr_nm = nullptr;
 605   methodHandle method(current, m);
 606 

 884     _throw_class_cast_exception_count++;
 885   }
 886 #endif
 887   ResourceMark rm(current);
 888   char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
 889   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
 890 JRT_END
 891 
 892 
// Throw IncompatibleClassChangeError (no message) from C1-compiled code.
JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _throw_incompatible_class_change_error_count++;
  }
#endif
  ResourceMark rm(current);
  SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
JRT_END
 902 
 903 
// Throw IllegalMonitorStateException (no message) from C1-compiled code.
JRT_ENTRY(void, Runtime1::throw_illegal_monitor_state_exception(JavaThread* current))
  NOT_PRODUCT(_throw_illegal_monitor_state_exception_count++;)
  ResourceMark rm(current);
  SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IllegalMonitorStateException());
JRT_END
 909 
 910 JRT_ENTRY(void, Runtime1::throw_identity_exception(JavaThread* current, oopDesc* object))
 911   NOT_PRODUCT(_throw_identity_exception_count++;)
 912   ResourceMark rm(current);
 913   char* message = SharedRuntime::generate_identity_exception_message(current, object->klass());
 914   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IdentityException(), message);
 915 JRT_END
 916 
// Slow path for monitorenter from C1-compiled code: the inlined fast path
// failed, so delegate to the shared runtime helper.
JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _monitorenter_slowcase_cnt++;
  }
#endif
  // The compiled code stored the object into the BasicObjectLock slot already.
  assert(obj == lock->obj(), "must match");
  SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
JRT_END
 926 
 927 
 928 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
 929   assert(current == JavaThread::current(), "pre-condition");
 930 #ifndef PRODUCT
 931   if (PrintC1Statistics) {
 932     _monitorexit_slowcase_cnt++;
 933   }
 934 #endif
 935   assert(current->last_Java_sp(), "last_Java_sp must be set");
 936   oop obj = lock->obj();

1099                       RegisterMap::WalkContinuation::skip);
1100   frame runtime_frame = current->last_frame();
1101   frame caller_frame = runtime_frame.sender(&reg_map);
1102 
1103   // last java frame on stack
1104   vframeStream vfst(current, true);
1105   assert(!vfst.at_end(), "Java frame must exist");
1106 
1107   methodHandle caller_method(current, vfst.method());
1108   // Note that caller_method->code() may not be same as caller_code because of OSR's
1109   // Note also that in the presence of inlining it is not guaranteed
1110   // that caller_method() == caller_code->method()
1111 
1112   int bci = vfst.bci();
1113   Bytecodes::Code code = caller_method()->java_code_at(bci);
1114 
1115   // this is used by assertions in the access_field_patching_id
1116   BasicType patch_field_type = T_ILLEGAL;
1117   bool deoptimize_for_volatile = false;
1118   bool deoptimize_for_atomic = false;
1119   bool deoptimize_for_null_free = false;
1120   bool deoptimize_for_flat = false;
1121   bool deoptimize_for_strict_static = false;
1122   int patch_field_offset = -1;
1123   Klass* init_klass = nullptr; // klass needed by load_klass_patching code
1124   Klass* load_klass = nullptr; // klass needed by load_klass_patching code
1125   Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
1126   Handle appendix(current, nullptr); // oop needed by appendix_patching code
1127   bool load_klass_or_mirror_patch_id =
1128     (stub_id == StubId::c1_load_klass_patching_id || stub_id == StubId::c1_load_mirror_patching_id);
1129 
1130   if (stub_id == StubId::c1_access_field_patching_id) {
1131 
1132     Bytecode_field field_access(caller_method, bci);
1133     fieldDescriptor result; // initialize class if needed
1134     Bytecodes::Code code = field_access.code();
1135     constantPoolHandle constants(current, caller_method->constants());
1136     LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
1137     patch_field_offset = result.offset();
1138 
1139     // If we're patching a field which is volatile then at compile it
1140     // must not have been know to be volatile, so the generated code
1141     // isn't correct for a volatile reference.  The nmethod has to be

1145     // used for patching references to oops which don't need special
1146     // handling in the volatile case.
1147 
1148     deoptimize_for_volatile = result.access_flags().is_volatile();
1149 
1150     // If we are patching a field which should be atomic, then
1151     // the generated code is not correct either, force deoptimizing.
1152     // We need to only cover T_LONG and T_DOUBLE fields, as we can
1153     // break access atomicity only for them.
1154 
1155     // Strictly speaking, the deoptimization on 64-bit platforms
1156     // is unnecessary, and T_LONG stores on 32-bit platforms need
1157     // to be handled by special patching code when AlwaysAtomicAccesses
1158     // becomes product feature. At this point, we are still going
1159     // for the deoptimization for consistency against volatile
1160     // accesses.
1161 
1162     patch_field_type = result.field_type();
1163     deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1164 
1165     // The field we are patching is null-free. Deoptimize and regenerate
1166     // the compiled code if we patch a putfield/putstatic because it
1167     // does not contain the required null check.
1168     deoptimize_for_null_free = result.is_null_free_inline_type() && (field_access.is_putfield() || field_access.is_putstatic());
1169 
1170     // The field we are patching is flat. Deoptimize and regenerate
1171     // the compiled code which can't handle the layout of the flat
1172     // field because it was unknown at compile time.
1173     deoptimize_for_flat = result.is_flat();
1174 
1175     // Strict statics may require tracking if their class is not fully initialized.
1176     // For now we can bail out of the compiler and let the interpreter handle it.
1177     deoptimize_for_strict_static = result.is_strict_static_unset();
1178   } else if (load_klass_or_mirror_patch_id) {
1179     Klass* k = nullptr;
1180     switch (code) {
1181       case Bytecodes::_putstatic:
1182       case Bytecodes::_getstatic:
1183         { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1184           init_klass = klass;
1185           mirror = Handle(current, klass->java_mirror());
1186         }
1187         break;
1188       case Bytecodes::_new:
1189         { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1190           k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1191         }
1192         break;
1193       case Bytecodes::_multianewarray:
1194         { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1195           k = caller_method->constants()->klass_at(mna.index(), CHECK);
1196         }
1197         break;
1198       case Bytecodes::_instanceof:
1199         { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1200           k = caller_method->constants()->klass_at(io.index(), CHECK);
1201         }
1202         break;
1203       case Bytecodes::_checkcast:
1204         { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1205           k = caller_method->constants()->klass_at(cc.index(), CHECK);
1206         }
1207         break;
1208       case Bytecodes::_anewarray:
1209         { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1210           Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1211           k = ek->array_klass(CHECK);
1212           if (!k->is_typeArray_klass() && !k->is_refArray_klass() && !k->is_flatArray_klass()) {
1213             k = ObjArrayKlass::cast(k)->klass_with_properties(ArrayProperties::Default(), THREAD);
1214           }
1215           if (k->is_flatArray_klass()) {
1216             deoptimize_for_flat = true;
1217           }
1218         }
1219         break;
1220       case Bytecodes::_ldc:
1221       case Bytecodes::_ldc_w:
1222       case Bytecodes::_ldc2_w:
1223         {
1224           Bytecode_loadconstant cc(caller_method, bci);
1225           oop m = cc.resolve_constant(CHECK);
1226           mirror = Handle(current, m);
1227         }
1228         break;
1229       default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1230     }
1231     load_klass = k;
1232   } else if (stub_id == StubId::c1_load_appendix_patching_id) {
1233     Bytecode_invoke bytecode(caller_method, bci);
1234     Bytecodes::Code bc = bytecode.invoke_code();
1235 
1236     CallInfo info;
1237     constantPoolHandle pool(current, caller_method->constants());
1238     int index = bytecode.index();
1239     LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1240     switch (bc) {
1241       case Bytecodes::_invokehandle: {
1242         ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1243         appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1244         break;
1245       }
1246       case Bytecodes::_invokedynamic: {
1247         appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1248         break;
1249       }
1250       default: fatal("unexpected bytecode for load_appendix_patching_id");
1251     }
1252   } else {
1253     ShouldNotReachHere();
1254   }
1255 
1256   if (deoptimize_for_volatile  ||
1257       deoptimize_for_atomic    ||
1258       deoptimize_for_null_free ||
1259       deoptimize_for_flat      ||
1260       deoptimize_for_strict_static) {
1261     // At compile time we assumed the field wasn't volatile/atomic but after
1262     // loading it turns out it was volatile/atomic so we have to throw the
1263     // compiled code out and let it be regenerated.
1264     if (TracePatching) {
1265       if (deoptimize_for_volatile) {
1266         tty->print_cr("Deoptimizing for patching volatile field reference");
1267       }
1268       if (deoptimize_for_atomic) {
1269         tty->print_cr("Deoptimizing for patching atomic field reference");
1270       }
1271       if (deoptimize_for_null_free) {
1272         tty->print_cr("Deoptimizing for patching null-free field reference");
1273       }
1274       if (deoptimize_for_flat) {
1275         tty->print_cr("Deoptimizing for patching flat field or array reference");
1276       }
1277       if (deoptimize_for_strict_static) {
1278         tty->print_cr("Deoptimizing for patching strict static field reference");
1279       }
1280     }
1281 
1282     // It's possible the nmethod was invalidated in the last
1283     // safepoint, but if it's still alive then make it not_entrant.
1284     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1285     if (nm != nullptr) {
1286       nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1287     }
1288 
1289     Deoptimization::deoptimize_frame(current, caller_frame.id());
1290 
1291     // Return to the now deoptimized frame.
1292   }
1293 
1294   // Now copy code back
1295 
1296   {
1297     MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1298     //
1299     // Deoptimization may have happened while we waited for the lock.

1710 #ifndef PRODUCT
// Dump all C1 runtime statistics counters to tty (non-product builds only).
// NOTE(review): presumably invoked at VM shutdown when statistics printing is
// requested — confirm against the caller, which is outside this file chunk.
void Runtime1::print_statistics() {
  tty->print_cr("C1 Runtime statistics:");
  // Call-resolution and arraycopy counters maintained by SharedRuntime/stubs.
  tty->print_cr(" _resolve_invoke_virtual_cnt:     %u", SharedRuntime::_resolve_virtual_ctr);
  tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
  tty->print_cr(" _resolve_invoke_static_cnt:      %u", SharedRuntime::_resolve_static_ctr);
  tty->print_cr(" _handle_wrong_method_cnt:        %u", SharedRuntime::_wrong_method_ctr);
  tty->print_cr(" _ic_miss_cnt:                    %u", SharedRuntime::_ic_miss_ctr);
  tty->print_cr(" _generic_arraycopystub_cnt:      %u", _generic_arraycopystub_cnt);
  tty->print_cr(" _byte_arraycopy_cnt:             %u", _byte_arraycopy_stub_cnt);
  tty->print_cr(" _short_arraycopy_cnt:            %u", _short_arraycopy_stub_cnt);
  tty->print_cr(" _int_arraycopy_cnt:              %u", _int_arraycopy_stub_cnt);
  tty->print_cr(" _long_arraycopy_cnt:             %u", _long_arraycopy_stub_cnt);
  tty->print_cr(" _oop_arraycopy_cnt:              %u", _oop_arraycopy_stub_cnt);
  tty->print_cr(" _arraycopy_slowcase_cnt:         %u", _arraycopy_slowcase_cnt);
  tty->print_cr(" _arraycopy_checkcast_cnt:        %u", _arraycopy_checkcast_cnt);
  tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);

  // Allocation and flat-array slow-path counters (incremented in the
  // JRT entries earlier in this file).
  tty->print_cr(" _new_type_array_slowcase_cnt:    %u", _new_type_array_slowcase_cnt);
  tty->print_cr(" _new_object_array_slowcase_cnt:  %u", _new_object_array_slowcase_cnt);
  tty->print_cr(" _new_null_free_array_slowcase_cnt: %u", _new_null_free_array_slowcase_cnt);
  tty->print_cr(" _new_instance_slowcase_cnt:      %u", _new_instance_slowcase_cnt);
  tty->print_cr(" _new_multi_array_slowcase_cnt:   %u", _new_multi_array_slowcase_cnt);
  tty->print_cr(" _load_flat_array_slowcase_cnt:   %u", _load_flat_array_slowcase_cnt);
  tty->print_cr(" _store_flat_array_slowcase_cnt:  %u", _store_flat_array_slowcase_cnt);
  tty->print_cr(" _substitutability_check_slowcase_cnt: %u", _substitutability_check_slowcase_cnt);
  tty->print_cr(" _buffer_inline_args_slowcase_cnt:%u", _buffer_inline_args_slowcase_cnt);
  tty->print_cr(" _buffer_inline_args_no_receiver_slowcase_cnt:%u", _buffer_inline_args_no_receiver_slowcase_cnt);

  // Locking and code-patching slow-path counters.
  tty->print_cr(" _monitorenter_slowcase_cnt:      %u", _monitorenter_slowcase_cnt);
  tty->print_cr(" _monitorexit_slowcase_cnt:       %u", _monitorexit_slowcase_cnt);
  tty->print_cr(" _patch_code_slowcase_cnt:        %u", _patch_code_slowcase_cnt);

  // Exception-throw counters (incremented in the throw_* entries above).
  tty->print_cr(" _throw_range_check_exception_count:            %u:", _throw_range_check_exception_count);
  tty->print_cr(" _throw_index_exception_count:                  %u:", _throw_index_exception_count);
  tty->print_cr(" _throw_div0_exception_count:                   %u:", _throw_div0_exception_count);
  tty->print_cr(" _throw_null_pointer_exception_count:           %u:", _throw_null_pointer_exception_count);
  tty->print_cr(" _throw_class_cast_exception_count:             %u:", _throw_class_cast_exception_count);
  tty->print_cr(" _throw_incompatible_class_change_error_count:  %u:", _throw_incompatible_class_change_error_count);
  tty->print_cr(" _throw_illegal_monitor_state_exception_count:  %u:", _throw_illegal_monitor_state_exception_count);
  tty->print_cr(" _throw_identity_exception_count:               %u:", _throw_identity_exception_count);
  tty->print_cr(" _throw_count:                                  %u:", _throw_count);

  SharedRuntime::print_ic_miss_histogram();
  tty->cr();
}
1756 #endif // PRODUCT
< prev index next >