< prev index next >

src/hotspot/share/c1/c1_Runtime1.cpp

Print this page

   1 /*
   2  * Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeBlob.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/scopeDesc.hpp"
  38 #include "code/vtableStubs.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/c1/barrierSetC1.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/bytecode.hpp"
  46 #include "interpreter/interpreter.hpp"
  47 #include "jfr/support/jfrIntrinsics.hpp"
  48 #include "logging/log.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "oops/access.inline.hpp"




  53 #include "oops/objArrayKlass.hpp"
  54 #include "oops/objArrayOop.inline.hpp"
  55 #include "oops/oop.inline.hpp"

  56 #include "prims/jvmtiExport.hpp"
  57 #include "runtime/atomicAccess.hpp"
  58 #include "runtime/fieldDescriptor.inline.hpp"
  59 #include "runtime/frame.inline.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/interfaceSupport.inline.hpp"
  62 #include "runtime/javaCalls.hpp"
  63 #include "runtime/sharedRuntime.hpp"
  64 #include "runtime/stackWatermarkSet.hpp"
  65 #include "runtime/stubInfo.hpp"
  66 #include "runtime/stubRoutines.hpp"
  67 #include "runtime/vframe.inline.hpp"
  68 #include "runtime/vframeArray.hpp"
  69 #include "runtime/vm_version.hpp"
  70 #include "utilities/copy.hpp"
  71 #include "utilities/events.hpp"
  72 
  73 
  74 // Implementation of StubAssembler
  75 

  97 
  98 
  99 void StubAssembler::set_num_rt_args(int args) {
 100   if (_num_rt_args == 0) {
 101     _num_rt_args = args;
 102   }
 103   assert(_num_rt_args == args, "can't change the number of args");
 104 }
 105 
 106 // Implementation of Runtime1
 107 CodeBlob* Runtime1::_blobs[StubInfo::C1_STUB_COUNT];
 108 
 109 #ifndef PRODUCT
 110 // statistics
 111 uint Runtime1::_generic_arraycopystub_cnt = 0;
 112 uint Runtime1::_arraycopy_slowcase_cnt = 0;
 113 uint Runtime1::_arraycopy_checkcast_cnt = 0;
 114 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
 115 uint Runtime1::_new_type_array_slowcase_cnt = 0;
 116 uint Runtime1::_new_object_array_slowcase_cnt = 0;

 117 uint Runtime1::_new_instance_slowcase_cnt = 0;
 118 uint Runtime1::_new_multi_array_slowcase_cnt = 0;





 119 uint Runtime1::_monitorenter_slowcase_cnt = 0;
 120 uint Runtime1::_monitorexit_slowcase_cnt = 0;
 121 uint Runtime1::_patch_code_slowcase_cnt = 0;
 122 uint Runtime1::_throw_range_check_exception_count = 0;
 123 uint Runtime1::_throw_index_exception_count = 0;
 124 uint Runtime1::_throw_div0_exception_count = 0;
 125 uint Runtime1::_throw_null_pointer_exception_count = 0;
 126 uint Runtime1::_throw_class_cast_exception_count = 0;
 127 uint Runtime1::_throw_incompatible_class_change_error_count = 0;


 128 uint Runtime1::_throw_count = 0;
 129 
 130 static uint _byte_arraycopy_stub_cnt = 0;
 131 static uint _short_arraycopy_stub_cnt = 0;
 132 static uint _int_arraycopy_stub_cnt = 0;
 133 static uint _long_arraycopy_stub_cnt = 0;
 134 static uint _oop_arraycopy_stub_cnt = 0;
 135 
 136 address Runtime1::arraycopy_count_address(BasicType type) {
 137   switch (type) {
 138   case T_BOOLEAN:
 139   case T_BYTE:   return (address)&_byte_arraycopy_stub_cnt;
 140   case T_CHAR:
 141   case T_SHORT:  return (address)&_short_arraycopy_stub_cnt;
 142   case T_FLOAT:
 143   case T_INT:    return (address)&_int_arraycopy_stub_cnt;
 144   case T_DOUBLE:
 145   case T_LONG:   return (address)&_long_arraycopy_stub_cnt;
 146   case T_ARRAY:
 147   case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;

 353   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
 354   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
 355   FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
 356   FUNCTION_CASE(entry, StubRoutines::dexp());
 357   FUNCTION_CASE(entry, StubRoutines::dlog());
 358   FUNCTION_CASE(entry, StubRoutines::dlog10());
 359   FUNCTION_CASE(entry, StubRoutines::dpow());
 360   FUNCTION_CASE(entry, StubRoutines::dsin());
 361   FUNCTION_CASE(entry, StubRoutines::dcos());
 362   FUNCTION_CASE(entry, StubRoutines::dtan());
 363   FUNCTION_CASE(entry, StubRoutines::dsinh());
 364   FUNCTION_CASE(entry, StubRoutines::dtanh());
 365   FUNCTION_CASE(entry, StubRoutines::dcbrt());
 366 
 367 #undef FUNCTION_CASE
 368 
 369   // Soft float adds more runtime names.
 370   return pd_name_for_address(entry);
 371 }
 372 
 373 
 374 JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
 375 #ifndef PRODUCT
 376   if (PrintC1Statistics) {
 377     _new_instance_slowcase_cnt++;
 378   }
 379 #endif
 380   assert(klass->is_klass(), "not a class");
 381   Handle holder(current, klass->klass_holder()); // keep the klass alive
 382   InstanceKlass* h = InstanceKlass::cast(klass);
 383   h->check_valid_for_instantiation(true, CHECK);
 384   // make sure klass is initialized
 385   h->initialize(CHECK);
 386   // allocate instance and return via TLS
 387   oop obj = h->allocate_instance(CHECK);
 388   current->set_vm_result_oop(obj);
 389 JRT_END
 390 



 391 
 392 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
 393 #ifndef PRODUCT
 394   if (PrintC1Statistics) {
 395     _new_type_array_slowcase_cnt++;
 396   }
 397 #endif
 398   // Note: no handle for klass needed since they are not used
 399   //       anymore after new_typeArray() and no GC can happen before.
 400   //       (This may have to change if this code changes!)
 401   assert(klass->is_klass(), "not a class");
 402   BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
 403   oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
 404   current->set_vm_result_oop(obj);
 405   // This is pretty rare but this runtime patch is stressful to deoptimization
 406   // if we deoptimize here so force a deopt to stress the path.
 407   if (DeoptimizeALot) {
 408     deopt_caller(current);
 409   }
 410 

 415 #ifndef PRODUCT
 416   if (PrintC1Statistics) {
 417     _new_object_array_slowcase_cnt++;
 418   }
 419 #endif
 420   // Note: no handle for klass needed since they are not used
 421   //       anymore after new_objArray() and no GC can happen before.
 422   //       (This may have to change if this code changes!)
 423   assert(array_klass->is_klass(), "not a class");
 424   Handle holder(current, array_klass->klass_holder()); // keep the klass alive
 425   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 426   objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
 427   current->set_vm_result_oop(obj);
 428   // This is pretty rare but this runtime patch is stressful to deoptimization
 429   // if we deoptimize here so force a deopt to stress the path.
 430   if (DeoptimizeALot) {
 431     deopt_caller(current);
 432   }
 433 JRT_END
 434 

























 435 
 436 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
 437 #ifndef PRODUCT
 438   if (PrintC1Statistics) {
 439     _new_multi_array_slowcase_cnt++;
 440   }
 441 #endif
 442   assert(klass->is_klass(), "not a class");
 443   assert(rank >= 1, "rank must be nonzero");
 444   Handle holder(current, klass->klass_holder()); // keep the klass alive
 445   oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
 446   current->set_vm_result_oop(obj);
 447 JRT_END
 448 
 449 





























































































 450 JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, StubId id))
 451   tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
 452 JRT_END
 453 
 454 
 455 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
 456   ResourceMark rm(current);
 457   const char* klass_name = obj->klass()->external_name();
 458   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
 459 JRT_END
 460 
 461 
 462 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
 463 // associated with the top activation record. The inlinee (that is possibly included in the enclosing
 464 // method) method is passed as an argument. In order to do that it is embedded in the code as
 465 // a constant.
 466 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
 467   nmethod* osr_nm = nullptr;
 468   methodHandle method(current, m);
 469 

 747     _throw_class_cast_exception_count++;
 748   }
 749 #endif
 750   ResourceMark rm(current);
 751   char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
 752   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
 753 JRT_END
 754 
 755 
// Slow path for incompatible class change: bump the non-product statistics
// counter and throw java.lang.IncompatibleClassChangeError via
// SharedRuntime::throw_and_post_jvmti_exception.
JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _throw_incompatible_class_change_error_count++;
  }
#endif
  // ResourceMark scopes any resource-area allocation done while building
  // and posting the exception.
  ResourceMark rm(current);
  SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
JRT_END
 765 
 766 













// Slow-path monitor enter for C1-compiled code. 'lock' is the
// BasicObjectLock slot in the caller's frame; its obj field must already
// refer to 'obj' (asserted below). Delegates to the shared runtime helper.
JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _monitorenter_slowcase_cnt++;
  }
#endif
  assert(obj == lock->obj(), "must match");
  SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
JRT_END
 776 
 777 
 778 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
 779   assert(current == JavaThread::current(), "pre-condition");
 780 #ifndef PRODUCT
 781   if (PrintC1Statistics) {
 782     _monitorexit_slowcase_cnt++;
 783   }
 784 #endif
 785   assert(current->last_Java_sp(), "last_Java_sp must be set");
 786   oop obj = lock->obj();

 949                       RegisterMap::WalkContinuation::skip);
 950   frame runtime_frame = current->last_frame();
 951   frame caller_frame = runtime_frame.sender(&reg_map);
 952 
 953   // last java frame on stack
 954   vframeStream vfst(current, true);
 955   assert(!vfst.at_end(), "Java frame must exist");
 956 
 957   methodHandle caller_method(current, vfst.method());
 958   // Note that caller_method->code() may not be same as caller_code because of OSR's
 959   // Note also that in the presence of inlining it is not guaranteed
 960   // that caller_method() == caller_code->method()
 961 
 962   int bci = vfst.bci();
 963   Bytecodes::Code code = caller_method()->java_code_at(bci);
 964 
 965   // this is used by assertions in the access_field_patching_id
 966   BasicType patch_field_type = T_ILLEGAL;
 967   bool deoptimize_for_volatile = false;
 968   bool deoptimize_for_atomic = false;



 969   int patch_field_offset = -1;
 970   Klass* init_klass = nullptr; // klass needed by load_klass_patching code
 971   Klass* load_klass = nullptr; // klass needed by load_klass_patching code
 972   Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
 973   Handle appendix(current, nullptr); // oop needed by appendix_patching code
 974   bool load_klass_or_mirror_patch_id =
 975     (stub_id == StubId::c1_load_klass_patching_id || stub_id == StubId::c1_load_mirror_patching_id);
 976 
 977   if (stub_id == StubId::c1_access_field_patching_id) {
 978 
 979     Bytecode_field field_access(caller_method, bci);
 980     fieldDescriptor result; // initialize class if needed
 981     Bytecodes::Code code = field_access.code();
 982     constantPoolHandle constants(current, caller_method->constants());
 983     LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
 984     patch_field_offset = result.offset();
 985 
 986     // If we're patching a field which is volatile then at compile it
 987     // must not have been know to be volatile, so the generated code
 988     // isn't correct for a volatile reference.  The nmethod has to be

 992     // used for patching references to oops which don't need special
 993     // handling in the volatile case.
 994 
 995     deoptimize_for_volatile = result.access_flags().is_volatile();
 996 
 997     // If we are patching a field which should be atomic, then
 998     // the generated code is not correct either, force deoptimizing.
 999     // We need to only cover T_LONG and T_DOUBLE fields, as we can
1000     // break access atomicity only for them.
1001 
1002     // Strictly speaking, the deoptimization on 64-bit platforms
1003     // is unnecessary, and T_LONG stores on 32-bit platforms need
1004     // to be handled by special patching code when AlwaysAtomicAccesses
1005     // becomes product feature. At this point, we are still going
1006     // for the deoptimization for consistency against volatile
1007     // accesses.
1008 
1009     patch_field_type = result.field_type();
1010     deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1011 













1012   } else if (load_klass_or_mirror_patch_id) {
1013     Klass* k = nullptr;
1014     switch (code) {
1015       case Bytecodes::_putstatic:
1016       case Bytecodes::_getstatic:
1017         { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1018           init_klass = klass;
1019           mirror = Handle(current, klass->java_mirror());
1020         }
1021         break;
1022       case Bytecodes::_new:
1023         { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1024           k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1025         }
1026         break;
1027       case Bytecodes::_multianewarray:
1028         { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1029           k = caller_method->constants()->klass_at(mna.index(), CHECK);
1030         }
1031         break;
1032       case Bytecodes::_instanceof:
1033         { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1034           k = caller_method->constants()->klass_at(io.index(), CHECK);
1035         }
1036         break;
1037       case Bytecodes::_checkcast:
1038         { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1039           k = caller_method->constants()->klass_at(cc.index(), CHECK);
1040         }
1041         break;
1042       case Bytecodes::_anewarray:
1043         { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1044           Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1045           k = ek->array_klass(CHECK);






1046         }
1047         break;
1048       case Bytecodes::_ldc:
1049       case Bytecodes::_ldc_w:
1050       case Bytecodes::_ldc2_w:
1051         {
1052           Bytecode_loadconstant cc(caller_method, bci);
1053           oop m = cc.resolve_constant(CHECK);
1054           mirror = Handle(current, m);
1055         }
1056         break;
1057       default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1058     }
1059     load_klass = k;
1060   } else if (stub_id == StubId::c1_load_appendix_patching_id) {
1061     Bytecode_invoke bytecode(caller_method, bci);
1062     Bytecodes::Code bc = bytecode.invoke_code();
1063 
1064     CallInfo info;
1065     constantPoolHandle pool(current, caller_method->constants());
1066     int index = bytecode.index();
1067     LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1068     switch (bc) {
1069       case Bytecodes::_invokehandle: {
1070         ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1071         appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1072         break;
1073       }
1074       case Bytecodes::_invokedynamic: {
1075         appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1076         break;
1077       }
1078       default: fatal("unexpected bytecode for load_appendix_patching_id");
1079     }
1080   } else {
1081     ShouldNotReachHere();
1082   }
1083 
1084   if (deoptimize_for_volatile || deoptimize_for_atomic) {




1085     // At compile time we assumed the field wasn't volatile/atomic but after
1086     // loading it turns out it was volatile/atomic so we have to throw the
1087     // compiled code out and let it be regenerated.
1088     if (TracePatching) {
1089       if (deoptimize_for_volatile) {
1090         tty->print_cr("Deoptimizing for patching volatile field reference");
1091       }
1092       if (deoptimize_for_atomic) {
1093         tty->print_cr("Deoptimizing for patching atomic field reference");
1094       }









1095     }
1096 
1097     // It's possible the nmethod was invalidated in the last
1098     // safepoint, but if it's still alive then make it not_entrant.
1099     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1100     if (nm != nullptr) {
1101       nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1102     }
1103 
1104     Deoptimization::deoptimize_frame(current, caller_frame.id());
1105 
1106     // Return to the now deoptimized frame.
1107   }
1108 
1109   // Now copy code back
1110 
1111   {
1112     MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1113     //
1114     // Deoptimization may have happened while we waited for the lock.

1525 #ifndef PRODUCT
1526 void Runtime1::print_statistics() {
1527   tty->print_cr("C1 Runtime statistics:");
1528   tty->print_cr(" _resolve_invoke_virtual_cnt:     %u", SharedRuntime::_resolve_virtual_ctr);
1529   tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
1530   tty->print_cr(" _resolve_invoke_static_cnt:      %u", SharedRuntime::_resolve_static_ctr);
1531   tty->print_cr(" _handle_wrong_method_cnt:        %u", SharedRuntime::_wrong_method_ctr);
1532   tty->print_cr(" _ic_miss_cnt:                    %u", SharedRuntime::_ic_miss_ctr);
1533   tty->print_cr(" _generic_arraycopystub_cnt:      %u", _generic_arraycopystub_cnt);
1534   tty->print_cr(" _byte_arraycopy_cnt:             %u", _byte_arraycopy_stub_cnt);
1535   tty->print_cr(" _short_arraycopy_cnt:            %u", _short_arraycopy_stub_cnt);
1536   tty->print_cr(" _int_arraycopy_cnt:              %u", _int_arraycopy_stub_cnt);
1537   tty->print_cr(" _long_arraycopy_cnt:             %u", _long_arraycopy_stub_cnt);
1538   tty->print_cr(" _oop_arraycopy_cnt:              %u", _oop_arraycopy_stub_cnt);
1539   tty->print_cr(" _arraycopy_slowcase_cnt:         %u", _arraycopy_slowcase_cnt);
1540   tty->print_cr(" _arraycopy_checkcast_cnt:        %u", _arraycopy_checkcast_cnt);
1541   tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);
1542 
1543   tty->print_cr(" _new_type_array_slowcase_cnt:    %u", _new_type_array_slowcase_cnt);
1544   tty->print_cr(" _new_object_array_slowcase_cnt:  %u", _new_object_array_slowcase_cnt);

1545   tty->print_cr(" _new_instance_slowcase_cnt:      %u", _new_instance_slowcase_cnt);
1546   tty->print_cr(" _new_multi_array_slowcase_cnt:   %u", _new_multi_array_slowcase_cnt);






1547   tty->print_cr(" _monitorenter_slowcase_cnt:      %u", _monitorenter_slowcase_cnt);
1548   tty->print_cr(" _monitorexit_slowcase_cnt:       %u", _monitorexit_slowcase_cnt);
1549   tty->print_cr(" _patch_code_slowcase_cnt:        %u", _patch_code_slowcase_cnt);
1550 
1551   tty->print_cr(" _throw_range_check_exception_count:            %u:", _throw_range_check_exception_count);
1552   tty->print_cr(" _throw_index_exception_count:                  %u:", _throw_index_exception_count);
1553   tty->print_cr(" _throw_div0_exception_count:                   %u:", _throw_div0_exception_count);
1554   tty->print_cr(" _throw_null_pointer_exception_count:           %u:", _throw_null_pointer_exception_count);
1555   tty->print_cr(" _throw_class_cast_exception_count:             %u:", _throw_class_cast_exception_count);
1556   tty->print_cr(" _throw_incompatible_class_change_error_count:  %u:", _throw_incompatible_class_change_error_count);


1557   tty->print_cr(" _throw_count:                                  %u:", _throw_count);
1558 
1559   SharedRuntime::print_ic_miss_histogram();
1560   tty->cr();
1561 }
1562 #endif // PRODUCT

   1 /*
   2  * Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeBlob.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/scopeDesc.hpp"
  38 #include "code/vtableStubs.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/c1/barrierSetC1.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/bytecode.hpp"
  46 #include "interpreter/interpreter.hpp"
  47 #include "jfr/support/jfrIntrinsics.hpp"
  48 #include "logging/log.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "oops/access.inline.hpp"
  53 #include "oops/arrayOop.inline.hpp"
  54 #include "oops/arrayProperties.hpp"
  55 #include "oops/flatArrayKlass.hpp"
  56 #include "oops/flatArrayOop.inline.hpp"
  57 #include "oops/objArrayKlass.hpp"
  58 #include "oops/objArrayOop.inline.hpp"
  59 #include "oops/oop.inline.hpp"
  60 #include "oops/oopCast.inline.hpp"
  61 #include "prims/jvmtiExport.hpp"
  62 #include "runtime/atomicAccess.hpp"
  63 #include "runtime/fieldDescriptor.inline.hpp"
  64 #include "runtime/frame.inline.hpp"
  65 #include "runtime/handles.inline.hpp"
  66 #include "runtime/interfaceSupport.inline.hpp"
  67 #include "runtime/javaCalls.hpp"
  68 #include "runtime/sharedRuntime.hpp"
  69 #include "runtime/stackWatermarkSet.hpp"
  70 #include "runtime/stubInfo.hpp"
  71 #include "runtime/stubRoutines.hpp"
  72 #include "runtime/vframe.inline.hpp"
  73 #include "runtime/vframeArray.hpp"
  74 #include "runtime/vm_version.hpp"
  75 #include "utilities/copy.hpp"
  76 #include "utilities/events.hpp"
  77 
  78 
  79 // Implementation of StubAssembler
  80 

 102 
 103 
 104 void StubAssembler::set_num_rt_args(int args) {
 105   if (_num_rt_args == 0) {
 106     _num_rt_args = args;
 107   }
 108   assert(_num_rt_args == args, "can't change the number of args");
 109 }
 110 
 111 // Implementation of Runtime1
 112 CodeBlob* Runtime1::_blobs[StubInfo::C1_STUB_COUNT];
 113 
 114 #ifndef PRODUCT
 115 // statistics
 116 uint Runtime1::_generic_arraycopystub_cnt = 0;
 117 uint Runtime1::_arraycopy_slowcase_cnt = 0;
 118 uint Runtime1::_arraycopy_checkcast_cnt = 0;
 119 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
 120 uint Runtime1::_new_type_array_slowcase_cnt = 0;
 121 uint Runtime1::_new_object_array_slowcase_cnt = 0;
 122 uint Runtime1::_new_null_free_array_slowcase_cnt = 0;
 123 uint Runtime1::_new_instance_slowcase_cnt = 0;
 124 uint Runtime1::_new_multi_array_slowcase_cnt = 0;
 125 uint Runtime1::_load_flat_array_slowcase_cnt = 0;
 126 uint Runtime1::_store_flat_array_slowcase_cnt = 0;
 127 uint Runtime1::_substitutability_check_slowcase_cnt = 0;
 128 uint Runtime1::_buffer_inline_args_slowcase_cnt = 0;
 129 uint Runtime1::_buffer_inline_args_no_receiver_slowcase_cnt = 0;
 130 uint Runtime1::_monitorenter_slowcase_cnt = 0;
 131 uint Runtime1::_monitorexit_slowcase_cnt = 0;
 132 uint Runtime1::_patch_code_slowcase_cnt = 0;
 133 uint Runtime1::_throw_range_check_exception_count = 0;
 134 uint Runtime1::_throw_index_exception_count = 0;
 135 uint Runtime1::_throw_div0_exception_count = 0;
 136 uint Runtime1::_throw_null_pointer_exception_count = 0;
 137 uint Runtime1::_throw_class_cast_exception_count = 0;
 138 uint Runtime1::_throw_incompatible_class_change_error_count = 0;
 139 uint Runtime1::_throw_illegal_monitor_state_exception_count = 0;
 140 uint Runtime1::_throw_identity_exception_count = 0;
 141 uint Runtime1::_throw_count = 0;
 142 
 143 static uint _byte_arraycopy_stub_cnt = 0;
 144 static uint _short_arraycopy_stub_cnt = 0;
 145 static uint _int_arraycopy_stub_cnt = 0;
 146 static uint _long_arraycopy_stub_cnt = 0;
 147 static uint _oop_arraycopy_stub_cnt = 0;
 148 
 149 address Runtime1::arraycopy_count_address(BasicType type) {
 150   switch (type) {
 151   case T_BOOLEAN:
 152   case T_BYTE:   return (address)&_byte_arraycopy_stub_cnt;
 153   case T_CHAR:
 154   case T_SHORT:  return (address)&_short_arraycopy_stub_cnt;
 155   case T_FLOAT:
 156   case T_INT:    return (address)&_int_arraycopy_stub_cnt;
 157   case T_DOUBLE:
 158   case T_LONG:   return (address)&_long_arraycopy_stub_cnt;
 159   case T_ARRAY:
 160   case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;

 366   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
 367   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
 368   FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
 369   FUNCTION_CASE(entry, StubRoutines::dexp());
 370   FUNCTION_CASE(entry, StubRoutines::dlog());
 371   FUNCTION_CASE(entry, StubRoutines::dlog10());
 372   FUNCTION_CASE(entry, StubRoutines::dpow());
 373   FUNCTION_CASE(entry, StubRoutines::dsin());
 374   FUNCTION_CASE(entry, StubRoutines::dcos());
 375   FUNCTION_CASE(entry, StubRoutines::dtan());
 376   FUNCTION_CASE(entry, StubRoutines::dsinh());
 377   FUNCTION_CASE(entry, StubRoutines::dtanh());
 378   FUNCTION_CASE(entry, StubRoutines::dcbrt());
 379 
 380 #undef FUNCTION_CASE
 381 
 382   // Soft float adds more runtime names.
 383   return pd_name_for_address(entry);
 384 }
 385 
 386 static void allocate_instance(JavaThread* current, Klass* klass, TRAPS) {

 387 #ifndef PRODUCT
 388   if (PrintC1Statistics) {
 389     Runtime1::_new_instance_slowcase_cnt++;
 390   }
 391 #endif
 392   assert(klass->is_klass(), "not a class");
 393   Handle holder(current, klass->klass_holder()); // keep the klass alive
 394   InstanceKlass* h = InstanceKlass::cast(klass);
 395   h->check_valid_for_instantiation(true, CHECK);
 396   // make sure klass is initialized
 397   h->initialize(CHECK);
 398   // allocate instance and return via TLS
 399   oop obj = h->allocate_instance(CHECK);
 400   current->set_vm_result_oop(obj);
 401 JRT_END
 402 
// C1 slow-path entry for plain instance allocation: delegates to the shared
// allocate_instance() helper, propagating a pending exception via CHECK.
JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
  allocate_instance(current, klass, CHECK);
JRT_END
 406 
 407 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
 408 #ifndef PRODUCT
 409   if (PrintC1Statistics) {
 410     _new_type_array_slowcase_cnt++;
 411   }
 412 #endif
 413   // Note: no handle for klass needed since they are not used
 414   //       anymore after new_typeArray() and no GC can happen before.
 415   //       (This may have to change if this code changes!)
 416   assert(klass->is_klass(), "not a class");
 417   BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
 418   oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
 419   current->set_vm_result_oop(obj);
 420   // This is pretty rare but this runtime patch is stressful to deoptimization
 421   // if we deoptimize here so force a deopt to stress the path.
 422   if (DeoptimizeALot) {
 423     deopt_caller(current);
 424   }
 425 

 430 #ifndef PRODUCT
 431   if (PrintC1Statistics) {
 432     _new_object_array_slowcase_cnt++;
 433   }
 434 #endif
 435   // Note: no handle for klass needed since they are not used
 436   //       anymore after new_objArray() and no GC can happen before.
 437   //       (This may have to change if this code changes!)
 438   assert(array_klass->is_klass(), "not a class");
 439   Handle holder(current, array_klass->klass_holder()); // keep the klass alive
 440   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 441   objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
 442   current->set_vm_result_oop(obj);
 443   // This is pretty rare but this runtime patch is stressful to deoptimization
 444   // if we deoptimize here so force a deopt to stress the path.
 445   if (DeoptimizeALot) {
 446     deopt_caller(current);
 447   }
 448 JRT_END
 449 
 450 // TODO 8265122 This is currently dead code until the array factory methods are intrinsified
 451 JRT_ENTRY(void, Runtime1::new_null_free_array(JavaThread* current, Klass* array_klass, jint length))
 452   NOT_PRODUCT(_new_null_free_array_slowcase_cnt++;)
 453 
 454   // Note: no handle for klass needed since they are not used
 455   //       anymore after new_objArray() and no GC can happen before.
 456   //       (This may have to change if this code changes!)
 457   assert(array_klass->is_klass(), "not a class");
 458   Handle holder(THREAD, array_klass->klass_holder()); // keep the klass alive
 459   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 460   assert(elem_klass->is_inline_klass(), "must be");
 461   // Logically creates elements, ensure klass init
 462   elem_klass->initialize(CHECK);
 463 
 464   const ArrayProperties props = ArrayProperties::Default().with_null_restricted();
 465   arrayOop obj = oopFactory::new_objArray(elem_klass, length, props, CHECK);
 466 
 467   current->set_vm_result_oop(obj);
 468   // This is pretty rare but this runtime patch is stressful to deoptimization
 469   // if we deoptimize here so force a deopt to stress the path.
 470   if (DeoptimizeALot) {
 471     deopt_caller(current);
 472   }
 473 JRT_END
 474 
 475 
 476 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
 477 #ifndef PRODUCT
 478   if (PrintC1Statistics) {
 479     _new_multi_array_slowcase_cnt++;
 480   }
 481 #endif
 482   assert(klass->is_klass(), "not a class");
 483   assert(rank >= 1, "rank must be nonzero");
 484   Handle holder(current, klass->klass_holder()); // keep the klass alive
 485   oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
 486   current->set_vm_result_oop(obj);
 487 JRT_END
 488 
 489 
 490 static void profile_flat_array(JavaThread* current, bool load, bool null_free) {
 491   ResourceMark rm(current);
 492   vframeStream vfst(current, true);
 493   assert(!vfst.at_end(), "Java frame must exist");
 494   // Check if array access profiling is enabled
 495   if (vfst.nm()->comp_level() != CompLevel_full_profile || !C1UpdateMethodData) {
 496     return;
 497   }
 498   int bci = vfst.bci();
 499   Method* method = vfst.method();
 500   MethodData* md = method->method_data();
 501   if (md != nullptr) {
 502     // Lock to access ProfileData, and ensure lock is not broken by a safepoint
 503     MutexLocker ml(md->extra_data_lock(), Mutex::_no_safepoint_check_flag);
 504 
 505     ProfileData* data = md->bci_to_data(bci);
 506     assert(data != nullptr, "incorrect profiling entry");
 507     if (data->is_ArrayLoadData()) {
 508       assert(load, "should be an array load");
 509       ArrayLoadData* load_data = (ArrayLoadData*) data;
 510       load_data->set_flat_array();
 511       if (null_free) {
 512         load_data->set_null_free_array();
 513       }
 514     } else {
 515       assert(data->is_ArrayStoreData(), "");
 516       assert(!load, "should be an array store");
 517       ArrayStoreData* store_data = (ArrayStoreData*) data;
 518       store_data->set_flat_array();
 519       if (null_free) {
 520         store_data->set_null_free_array();
 521       }
 522     }
 523   }
 524 }
 525 
 526 JRT_ENTRY(void, Runtime1::load_flat_array(JavaThread* current, flatArrayOopDesc* array, int index))
 527   assert(array->klass()->is_flatArray_klass(), "should not be called");
 528   profile_flat_array(current, true, array->is_null_free_array());
 529 
 530   NOT_PRODUCT(_load_flat_array_slowcase_cnt++;)
 531   assert(array->length() > 0 && index < array->length(), "already checked");
 532   flatArrayHandle vah(current, array);
 533   oop obj = array->obj_at(index, CHECK);
 534   current->set_vm_result_oop(obj);
 535 JRT_END
 536 
 537 JRT_ENTRY(void, Runtime1::store_flat_array(JavaThread* current, flatArrayOopDesc* array, int index, oopDesc* value))
 538   assert(array->is_flatArray(), "should not be called");
 539   profile_flat_array(current, false, array->is_null_free_array());
 540 
 541   NOT_PRODUCT(_store_flat_array_slowcase_cnt++;)
 542   if (value == nullptr && array->is_null_free_array()) {
 543     SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_NullPointerException());
 544   } else {
 545     array->obj_at_put(index, value, CHECK);
 546   }
 547 JRT_END
 548 
 549 JRT_ENTRY(int, Runtime1::substitutability_check(JavaThread* current, oopDesc* left, oopDesc* right))
 550   NOT_PRODUCT(_substitutability_check_slowcase_cnt++;)
 551   JavaCallArguments args;
 552   args.push_oop(Handle(THREAD, left));
 553   args.push_oop(Handle(THREAD, right));
 554   JavaValue result(T_BOOLEAN);
 555   JavaCalls::call_static(&result,
 556                          vmClasses::ValueObjectMethods_klass(),
 557                          vmSymbols::isSubstitutable_name(),
 558                          vmSymbols::object_object_boolean_signature(),
 559                          &args, CHECK_0);
 560   return result.get_jboolean() ? 1 : 0;
 561 JRT_END
 562 
 563 
 564 extern "C" void ps();
 565 
 566 void Runtime1::buffer_inline_args_impl(JavaThread* current, Method* m, bool allocate_receiver) {
 567   JavaThread* THREAD = current;
 568   methodHandle method(current, m); // We are inside the verified_entry or verified_inline_ro_entry of this method.
 569   oop obj = SharedRuntime::allocate_inline_types_impl(current, method, allocate_receiver, true, CHECK);
 570   current->set_vm_result_oop(obj);
 571 }
 572 
// Slow-path entry that buffers a method's inline-type arguments including the
// receiver (allocate_receiver == true); cf. buffer_inline_args_no_receiver.
JRT_ENTRY(void, Runtime1::buffer_inline_args(JavaThread* current, Method* method))
  NOT_PRODUCT(_buffer_inline_args_slowcase_cnt++;)
  buffer_inline_args_impl(current, method, true);
JRT_END
 577 
// Slow-path entry that buffers a method's inline-type arguments but skips the
// receiver (allocate_receiver == false); cf. buffer_inline_args.
JRT_ENTRY(void, Runtime1::buffer_inline_args_no_receiver(JavaThread* current, Method* method))
  NOT_PRODUCT(_buffer_inline_args_no_receiver_slowcase_cnt++;)
  buffer_inline_args_impl(current, method, false);
JRT_END
 582 
// Fallback entry installed for stubs without a real implementation; merely
// reports which StubId was reached.
JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, StubId id))
  tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
JRT_END
 586 
 587 
 588 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
 589   ResourceMark rm(current);
 590   const char* klass_name = obj->klass()->external_name();
 591   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
 592 JRT_END
 593 
 594 
// counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
// associated with the top activation record. The inlinee method (which may be inlined into the
// enclosing method) is passed as an argument. In order to do that it is embedded in the code as
// a constant.
 599 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
 600   nmethod* osr_nm = nullptr;
 601   methodHandle method(current, m);
 602 

 880     _throw_class_cast_exception_count++;
 881   }
 882 #endif
 883   ResourceMark rm(current);
 884   char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
 885   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
 886 JRT_END
 887 
 888 
 889 JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
 890 #ifndef PRODUCT
 891   if (PrintC1Statistics) {
 892     _throw_incompatible_class_change_error_count++;
 893   }
 894 #endif
 895   ResourceMark rm(current);
 896   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
 897 JRT_END
 898 
 899 
// Throws java.lang.IllegalMonitorStateException (posting it to JVMTI as well)
// into the current thread.
JRT_ENTRY(void, Runtime1::throw_illegal_monitor_state_exception(JavaThread* current))
  NOT_PRODUCT(_throw_illegal_monitor_state_exception_count++;)
  ResourceMark rm(current);
  SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IllegalMonitorStateException());
JRT_END
 905 
 906 JRT_ENTRY(void, Runtime1::throw_identity_exception(JavaThread* current, oopDesc* object))
 907   NOT_PRODUCT(_throw_identity_exception_count++;)
 908   ResourceMark rm(current);
 909   char* message = SharedRuntime::generate_identity_exception_message(current, object->klass());
 910   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IdentityException(), message);
 911 JRT_END
 912 
 913 JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
 914 #ifndef PRODUCT
 915   if (PrintC1Statistics) {
 916     _monitorenter_slowcase_cnt++;
 917   }
 918 #endif
 919   assert(obj == lock->obj(), "must match");
 920   SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
 921 JRT_END
 922 
 923 
 924 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
 925   assert(current == JavaThread::current(), "pre-condition");
 926 #ifndef PRODUCT
 927   if (PrintC1Statistics) {
 928     _monitorexit_slowcase_cnt++;
 929   }
 930 #endif
 931   assert(current->last_Java_sp(), "last_Java_sp must be set");
 932   oop obj = lock->obj();

1095                       RegisterMap::WalkContinuation::skip);
1096   frame runtime_frame = current->last_frame();
1097   frame caller_frame = runtime_frame.sender(&reg_map);
1098 
1099   // last java frame on stack
1100   vframeStream vfst(current, true);
1101   assert(!vfst.at_end(), "Java frame must exist");
1102 
1103   methodHandle caller_method(current, vfst.method());
1104   // Note that caller_method->code() may not be same as caller_code because of OSR's
1105   // Note also that in the presence of inlining it is not guaranteed
1106   // that caller_method() == caller_code->method()
1107 
1108   int bci = vfst.bci();
1109   Bytecodes::Code code = caller_method()->java_code_at(bci);
1110 
1111   // this is used by assertions in the access_field_patching_id
1112   BasicType patch_field_type = T_ILLEGAL;
1113   bool deoptimize_for_volatile = false;
1114   bool deoptimize_for_atomic = false;
1115   bool deoptimize_for_null_free = false;
1116   bool deoptimize_for_flat = false;
1117   bool deoptimize_for_strict_static = false;
1118   int patch_field_offset = -1;
1119   Klass* init_klass = nullptr; // klass needed by load_klass_patching code
1120   Klass* load_klass = nullptr; // klass needed by load_klass_patching code
1121   Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
1122   Handle appendix(current, nullptr); // oop needed by appendix_patching code
1123   bool load_klass_or_mirror_patch_id =
1124     (stub_id == StubId::c1_load_klass_patching_id || stub_id == StubId::c1_load_mirror_patching_id);
1125 
1126   if (stub_id == StubId::c1_access_field_patching_id) {
1127 
1128     Bytecode_field field_access(caller_method, bci);
1129     fieldDescriptor result; // initialize class if needed
1130     Bytecodes::Code code = field_access.code();
1131     constantPoolHandle constants(current, caller_method->constants());
1132     LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
1133     patch_field_offset = result.offset();
1134 
1135     // If we're patching a field which is volatile then at compile it
1136     // must not have been know to be volatile, so the generated code
1137     // isn't correct for a volatile reference.  The nmethod has to be

1141     // used for patching references to oops which don't need special
1142     // handling in the volatile case.
1143 
1144     deoptimize_for_volatile = result.access_flags().is_volatile();
1145 
1146     // If we are patching a field which should be atomic, then
1147     // the generated code is not correct either, force deoptimizing.
1148     // We need to only cover T_LONG and T_DOUBLE fields, as we can
1149     // break access atomicity only for them.
1150 
1151     // Strictly speaking, the deoptimization on 64-bit platforms
1152     // is unnecessary, and T_LONG stores on 32-bit platforms need
1153     // to be handled by special patching code when AlwaysAtomicAccesses
1154     // becomes product feature. At this point, we are still going
1155     // for the deoptimization for consistency against volatile
1156     // accesses.
1157 
1158     patch_field_type = result.field_type();
1159     deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1160 
1161     // The field we are patching is null-free. Deoptimize and regenerate
1162     // the compiled code if we patch a putfield/putstatic because it
1163     // does not contain the required null check.
1164     deoptimize_for_null_free = result.is_null_free_inline_type() && (field_access.is_putfield() || field_access.is_putstatic());
1165 
1166     // The field we are patching is flat. Deoptimize and regenerate
1167     // the compiled code which can't handle the layout of the flat
1168     // field because it was unknown at compile time.
1169     deoptimize_for_flat = result.is_flat();
1170 
1171     // Strict statics may require tracking if their class is not fully initialized.
1172     // For now we can bail out of the compiler and let the interpreter handle it.
1173     deoptimize_for_strict_static = result.is_strict_static_unset();
1174   } else if (load_klass_or_mirror_patch_id) {
1175     Klass* k = nullptr;
1176     switch (code) {
1177       case Bytecodes::_putstatic:
1178       case Bytecodes::_getstatic:
1179         { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1180           init_klass = klass;
1181           mirror = Handle(current, klass->java_mirror());
1182         }
1183         break;
1184       case Bytecodes::_new:
1185         { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1186           k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1187         }
1188         break;
1189       case Bytecodes::_multianewarray:
1190         { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1191           k = caller_method->constants()->klass_at(mna.index(), CHECK);
1192         }
1193         break;
1194       case Bytecodes::_instanceof:
1195         { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1196           k = caller_method->constants()->klass_at(io.index(), CHECK);
1197         }
1198         break;
1199       case Bytecodes::_checkcast:
1200         { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1201           k = caller_method->constants()->klass_at(cc.index(), CHECK);
1202         }
1203         break;
1204       case Bytecodes::_anewarray:
1205         { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1206           Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1207           k = ek->array_klass(CHECK);
1208           if (!k->is_typeArray_klass() && !k->is_refArray_klass() && !k->is_flatArray_klass()) {
1209             k = ObjArrayKlass::cast(k)->klass_with_properties(ArrayProperties::Default(), THREAD);
1210           }
1211           if (k->is_flatArray_klass()) {
1212             deoptimize_for_flat = true;
1213           }
1214         }
1215         break;
1216       case Bytecodes::_ldc:
1217       case Bytecodes::_ldc_w:
1218       case Bytecodes::_ldc2_w:
1219         {
1220           Bytecode_loadconstant cc(caller_method, bci);
1221           oop m = cc.resolve_constant(CHECK);
1222           mirror = Handle(current, m);
1223         }
1224         break;
1225       default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1226     }
1227     load_klass = k;
1228   } else if (stub_id == StubId::c1_load_appendix_patching_id) {
1229     Bytecode_invoke bytecode(caller_method, bci);
1230     Bytecodes::Code bc = bytecode.invoke_code();
1231 
1232     CallInfo info;
1233     constantPoolHandle pool(current, caller_method->constants());
1234     int index = bytecode.index();
1235     LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1236     switch (bc) {
1237       case Bytecodes::_invokehandle: {
1238         ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1239         appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1240         break;
1241       }
1242       case Bytecodes::_invokedynamic: {
1243         appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1244         break;
1245       }
1246       default: fatal("unexpected bytecode for load_appendix_patching_id");
1247     }
1248   } else {
1249     ShouldNotReachHere();
1250   }
1251 
1252   if (deoptimize_for_volatile  ||
1253       deoptimize_for_atomic    ||
1254       deoptimize_for_null_free ||
1255       deoptimize_for_flat      ||
1256       deoptimize_for_strict_static) {
1257     // At compile time we assumed the field wasn't volatile/atomic but after
1258     // loading it turns out it was volatile/atomic so we have to throw the
1259     // compiled code out and let it be regenerated.
1260     if (TracePatching) {
1261       if (deoptimize_for_volatile) {
1262         tty->print_cr("Deoptimizing for patching volatile field reference");
1263       }
1264       if (deoptimize_for_atomic) {
1265         tty->print_cr("Deoptimizing for patching atomic field reference");
1266       }
1267       if (deoptimize_for_null_free) {
1268         tty->print_cr("Deoptimizing for patching null-free field reference");
1269       }
1270       if (deoptimize_for_flat) {
1271         tty->print_cr("Deoptimizing for patching flat field or array reference");
1272       }
1273       if (deoptimize_for_strict_static) {
1274         tty->print_cr("Deoptimizing for patching strict static field reference");
1275       }
1276     }
1277 
1278     // It's possible the nmethod was invalidated in the last
1279     // safepoint, but if it's still alive then make it not_entrant.
1280     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1281     if (nm != nullptr) {
1282       nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1283     }
1284 
1285     Deoptimization::deoptimize_frame(current, caller_frame.id());
1286 
1287     // Return to the now deoptimized frame.
1288   }
1289 
1290   // Now copy code back
1291 
1292   {
1293     MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1294     //
1295     // Deoptimization may have happened while we waited for the lock.

1706 #ifndef PRODUCT
1707 void Runtime1::print_statistics() {
1708   tty->print_cr("C1 Runtime statistics:");
1709   tty->print_cr(" _resolve_invoke_virtual_cnt:     %u", SharedRuntime::_resolve_virtual_ctr);
1710   tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
1711   tty->print_cr(" _resolve_invoke_static_cnt:      %u", SharedRuntime::_resolve_static_ctr);
1712   tty->print_cr(" _handle_wrong_method_cnt:        %u", SharedRuntime::_wrong_method_ctr);
1713   tty->print_cr(" _ic_miss_cnt:                    %u", SharedRuntime::_ic_miss_ctr);
1714   tty->print_cr(" _generic_arraycopystub_cnt:      %u", _generic_arraycopystub_cnt);
1715   tty->print_cr(" _byte_arraycopy_cnt:             %u", _byte_arraycopy_stub_cnt);
1716   tty->print_cr(" _short_arraycopy_cnt:            %u", _short_arraycopy_stub_cnt);
1717   tty->print_cr(" _int_arraycopy_cnt:              %u", _int_arraycopy_stub_cnt);
1718   tty->print_cr(" _long_arraycopy_cnt:             %u", _long_arraycopy_stub_cnt);
1719   tty->print_cr(" _oop_arraycopy_cnt:              %u", _oop_arraycopy_stub_cnt);
1720   tty->print_cr(" _arraycopy_slowcase_cnt:         %u", _arraycopy_slowcase_cnt);
1721   tty->print_cr(" _arraycopy_checkcast_cnt:        %u", _arraycopy_checkcast_cnt);
1722   tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);
1723 
1724   tty->print_cr(" _new_type_array_slowcase_cnt:    %u", _new_type_array_slowcase_cnt);
1725   tty->print_cr(" _new_object_array_slowcase_cnt:  %u", _new_object_array_slowcase_cnt);
1726   tty->print_cr(" _new_null_free_array_slowcase_cnt: %u", _new_null_free_array_slowcase_cnt);
1727   tty->print_cr(" _new_instance_slowcase_cnt:      %u", _new_instance_slowcase_cnt);
1728   tty->print_cr(" _new_multi_array_slowcase_cnt:   %u", _new_multi_array_slowcase_cnt);
1729   tty->print_cr(" _load_flat_array_slowcase_cnt:   %u", _load_flat_array_slowcase_cnt);
1730   tty->print_cr(" _store_flat_array_slowcase_cnt:  %u", _store_flat_array_slowcase_cnt);
1731   tty->print_cr(" _substitutability_check_slowcase_cnt: %u", _substitutability_check_slowcase_cnt);
1732   tty->print_cr(" _buffer_inline_args_slowcase_cnt:%u", _buffer_inline_args_slowcase_cnt);
1733   tty->print_cr(" _buffer_inline_args_no_receiver_slowcase_cnt:%u", _buffer_inline_args_no_receiver_slowcase_cnt);
1734 
1735   tty->print_cr(" _monitorenter_slowcase_cnt:      %u", _monitorenter_slowcase_cnt);
1736   tty->print_cr(" _monitorexit_slowcase_cnt:       %u", _monitorexit_slowcase_cnt);
1737   tty->print_cr(" _patch_code_slowcase_cnt:        %u", _patch_code_slowcase_cnt);
1738 
1739   tty->print_cr(" _throw_range_check_exception_count:            %u:", _throw_range_check_exception_count);
1740   tty->print_cr(" _throw_index_exception_count:                  %u:", _throw_index_exception_count);
1741   tty->print_cr(" _throw_div0_exception_count:                   %u:", _throw_div0_exception_count);
1742   tty->print_cr(" _throw_null_pointer_exception_count:           %u:", _throw_null_pointer_exception_count);
1743   tty->print_cr(" _throw_class_cast_exception_count:             %u:", _throw_class_cast_exception_count);
1744   tty->print_cr(" _throw_incompatible_class_change_error_count:  %u:", _throw_incompatible_class_change_error_count);
1745   tty->print_cr(" _throw_illegal_monitor_state_exception_count:  %u:", _throw_illegal_monitor_state_exception_count);
1746   tty->print_cr(" _throw_identity_exception_count:               %u:", _throw_identity_exception_count);
1747   tty->print_cr(" _throw_count:                                  %u:", _throw_count);
1748 
1749   SharedRuntime::print_ic_miss_histogram();
1750   tty->cr();
1751 }
1752 #endif // PRODUCT
< prev index next >