< prev index next >

src/hotspot/share/c1/c1_Runtime1.cpp

Print this page

   1 /*
   2  * Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeBlob.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/scopeDesc.hpp"
  38 #include "code/vtableStubs.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/c1/barrierSetC1.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/bytecode.hpp"
  46 #include "interpreter/interpreter.hpp"
  47 #include "jfr/support/jfrIntrinsics.hpp"
  48 #include "logging/log.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "oops/access.inline.hpp"



  53 #include "oops/objArrayKlass.hpp"
  54 #include "oops/objArrayOop.inline.hpp"
  55 #include "oops/oop.inline.hpp"

  56 #include "prims/jvmtiExport.hpp"
  57 #include "runtime/atomicAccess.hpp"
  58 #include "runtime/fieldDescriptor.inline.hpp"
  59 #include "runtime/frame.inline.hpp"
  60 #include "runtime/handles.inline.hpp"
  61 #include "runtime/interfaceSupport.inline.hpp"
  62 #include "runtime/javaCalls.hpp"
  63 #include "runtime/sharedRuntime.hpp"
  64 #include "runtime/stackWatermarkSet.hpp"
  65 #include "runtime/stubInfo.hpp"
  66 #include "runtime/stubRoutines.hpp"
  67 #include "runtime/vframe.inline.hpp"
  68 #include "runtime/vframeArray.hpp"
  69 #include "runtime/vm_version.hpp"
  70 #include "utilities/copy.hpp"
  71 #include "utilities/events.hpp"
  72 
  73 
  74 // Implementation of StubAssembler
  75 

  97 
  98 
// Records how many arguments the runtime call made by this stub takes.
// The first call latches the value; any later call must pass the same
// count (enforced by the assert below, which runs on every call).
void StubAssembler::set_num_rt_args(int args) {
  if (_num_rt_args == 0) {
    // First call: remember the argument count.
    _num_rt_args = args;
  }
  assert(_num_rt_args == args, "can't change the number of args");
}
 105 
 106 // Implementation of Runtime1
 107 CodeBlob* Runtime1::_blobs[StubInfo::C1_STUB_COUNT];
 108 
 109 #ifndef PRODUCT
 110 // statistics
 111 uint Runtime1::_generic_arraycopystub_cnt = 0;
 112 uint Runtime1::_arraycopy_slowcase_cnt = 0;
 113 uint Runtime1::_arraycopy_checkcast_cnt = 0;
 114 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
 115 uint Runtime1::_new_type_array_slowcase_cnt = 0;
 116 uint Runtime1::_new_object_array_slowcase_cnt = 0;

 117 uint Runtime1::_new_instance_slowcase_cnt = 0;
 118 uint Runtime1::_new_multi_array_slowcase_cnt = 0;





 119 uint Runtime1::_monitorenter_slowcase_cnt = 0;
 120 uint Runtime1::_monitorexit_slowcase_cnt = 0;
 121 uint Runtime1::_patch_code_slowcase_cnt = 0;
 122 uint Runtime1::_throw_range_check_exception_count = 0;
 123 uint Runtime1::_throw_index_exception_count = 0;
 124 uint Runtime1::_throw_div0_exception_count = 0;
 125 uint Runtime1::_throw_null_pointer_exception_count = 0;
 126 uint Runtime1::_throw_class_cast_exception_count = 0;
 127 uint Runtime1::_throw_incompatible_class_change_error_count = 0;


 128 uint Runtime1::_throw_count = 0;
 129 
 130 static uint _byte_arraycopy_stub_cnt = 0;
 131 static uint _short_arraycopy_stub_cnt = 0;
 132 static uint _int_arraycopy_stub_cnt = 0;
 133 static uint _long_arraycopy_stub_cnt = 0;
 134 static uint _oop_arraycopy_stub_cnt = 0;
 135 
 136 address Runtime1::arraycopy_count_address(BasicType type) {
 137   switch (type) {
 138   case T_BOOLEAN:
 139   case T_BYTE:   return (address)&_byte_arraycopy_stub_cnt;
 140   case T_CHAR:
 141   case T_SHORT:  return (address)&_short_arraycopy_stub_cnt;
 142   case T_FLOAT:
 143   case T_INT:    return (address)&_int_arraycopy_stub_cnt;
 144   case T_DOUBLE:
 145   case T_LONG:   return (address)&_long_arraycopy_stub_cnt;
 146   case T_ARRAY:
 147   case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;

 355   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
 356   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
 357   FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
 358   FUNCTION_CASE(entry, StubRoutines::dexp());
 359   FUNCTION_CASE(entry, StubRoutines::dlog());
 360   FUNCTION_CASE(entry, StubRoutines::dlog10());
 361   FUNCTION_CASE(entry, StubRoutines::dpow());
 362   FUNCTION_CASE(entry, StubRoutines::dsin());
 363   FUNCTION_CASE(entry, StubRoutines::dcos());
 364   FUNCTION_CASE(entry, StubRoutines::dtan());
 365   FUNCTION_CASE(entry, StubRoutines::dsinh());
 366   FUNCTION_CASE(entry, StubRoutines::dtanh());
 367   FUNCTION_CASE(entry, StubRoutines::dcbrt());
 368 
 369 #undef FUNCTION_CASE
 370 
 371   // Soft float adds more runtime names.
 372   return pd_name_for_address(entry);
 373 }
 374 
 375 
 376 JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
 377 #ifndef PRODUCT
 378   if (PrintC1Statistics) {
 379     _new_instance_slowcase_cnt++;
 380   }
 381 #endif
 382   assert(klass->is_klass(), "not a class");
 383   Handle holder(current, klass->klass_holder()); // keep the klass alive
 384   InstanceKlass* h = InstanceKlass::cast(klass);
 385   h->check_valid_for_instantiation(true, CHECK);
 386   // make sure klass is initialized
 387   h->initialize(CHECK);
 388   // allocate instance and return via TLS
 389   oop obj = h->allocate_instance(CHECK);
 390   current->set_vm_result_oop(obj);
 391 JRT_END
 392 



 393 
 394 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
 395 #ifndef PRODUCT
 396   if (PrintC1Statistics) {
 397     _new_type_array_slowcase_cnt++;
 398   }
 399 #endif
 400   // Note: no handle for klass needed since they are not used
 401   //       anymore after new_typeArray() and no GC can happen before.
 402   //       (This may have to change if this code changes!)
 403   assert(klass->is_klass(), "not a class");
 404   BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
 405   oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
 406   current->set_vm_result_oop(obj);
 407   // This is pretty rare but this runtime patch is stressful to deoptimization
 408   // if we deoptimize here so force a deopt to stress the path.
 409   if (DeoptimizeALot) {
 410     deopt_caller(current);
 411   }
 412 

 418   if (PrintC1Statistics) {
 419     _new_object_array_slowcase_cnt++;
 420   }
 421 #endif
 422   // Note: no handle for klass needed since they are not used
 423   //       anymore after new_objArray() and no GC can happen before.
 424   //       (This may have to change if this code changes!)
 425   assert(array_klass->is_klass(), "not a class");
 426   Handle holder(current, array_klass->klass_holder()); // keep the klass alive
 427   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 428   objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
 429   current->set_vm_result_oop(obj);
 430   // This is pretty rare but this runtime patch is stressful to deoptimization
 431   // if we deoptimize here so force a deopt to stress the path.
 432   if (DeoptimizeALot) {
 433     deopt_caller(current);
 434   }
 435 JRT_END
 436 
 437 


























 438 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
 439 #ifndef PRODUCT
 440   if (PrintC1Statistics) {
 441     _new_multi_array_slowcase_cnt++;
 442   }
 443 #endif
 444   assert(klass->is_klass(), "not a class");
 445   assert(rank >= 1, "rank must be nonzero");
 446   Handle holder(current, klass->klass_holder()); // keep the klass alive
 447   oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
 448   current->set_vm_result_oop(obj);
 449 JRT_END
 450 
 451 
































































































 452 JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, StubId id))
 453   tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
 454 JRT_END
 455 
 456 
 457 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
 458   ResourceMark rm(current);
 459   const char* klass_name = obj->klass()->external_name();
 460   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
 461 JRT_END
 462 
 463 
 464 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
 465 // associated with the top activation record. The inlinee (that is possibly included in the enclosing
 466 // method) method is passed as an argument. In order to do that it is embedded in the code as
 467 // a constant.
 468 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
 469   nmethod* osr_nm = nullptr;
 470   methodHandle method(current, m);
 471 

 749     _throw_class_cast_exception_count++;
 750   }
 751 #endif
 752   ResourceMark rm(current);
 753   char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
 754   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
 755 JRT_END
 756 
 757 
// Throws java.lang.IncompatibleClassChangeError via the shared runtime,
// which also posts the corresponding JVMTI exception event.
JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _throw_incompatible_class_change_error_count++;
  }
#endif
  // ResourceMark scopes any resource-area allocation done while
  // constructing/posting the exception.
  ResourceMark rm(current);
  SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
JRT_END
 767 
 768 













// Slow-path monitorenter for C1-compiled code. 'obj' is the object being
// locked and 'lock' is the BasicObjectLock slot in the caller's frame;
// the actual locking is delegated to SharedRuntime::monitor_enter_helper.
JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _monitorenter_slowcase_cnt++;
  }
#endif
  // The compiled code must have stored 'obj' into the lock slot already.
  assert(obj == lock->obj(), "must match");
  SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
JRT_END
 778 
 779 
 780 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
 781   assert(current == JavaThread::current(), "pre-condition");
 782 #ifndef PRODUCT
 783   if (PrintC1Statistics) {
 784     _monitorexit_slowcase_cnt++;
 785   }
 786 #endif
 787   assert(current->last_Java_sp(), "last_Java_sp must be set");
 788   oop obj = lock->obj();

 951                       RegisterMap::WalkContinuation::skip);
 952   frame runtime_frame = current->last_frame();
 953   frame caller_frame = runtime_frame.sender(&reg_map);
 954 
 955   // last java frame on stack
 956   vframeStream vfst(current, true);
 957   assert(!vfst.at_end(), "Java frame must exist");
 958 
 959   methodHandle caller_method(current, vfst.method());
 960   // Note that caller_method->code() may not be same as caller_code because of OSR's
 961   // Note also that in the presence of inlining it is not guaranteed
 962   // that caller_method() == caller_code->method()
 963 
 964   int bci = vfst.bci();
 965   Bytecodes::Code code = caller_method()->java_code_at(bci);
 966 
 967   // this is used by assertions in the access_field_patching_id
 968   BasicType patch_field_type = T_ILLEGAL;
 969   bool deoptimize_for_volatile = false;
 970   bool deoptimize_for_atomic = false;



 971   int patch_field_offset = -1;
 972   Klass* init_klass = nullptr; // klass needed by load_klass_patching code
 973   Klass* load_klass = nullptr; // klass needed by load_klass_patching code
 974   Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
 975   Handle appendix(current, nullptr); // oop needed by appendix_patching code
 976   bool load_klass_or_mirror_patch_id =
 977     (stub_id == StubId::c1_load_klass_patching_id || stub_id == StubId::c1_load_mirror_patching_id);
 978 
 979   if (stub_id == StubId::c1_access_field_patching_id) {
 980 
 981     Bytecode_field field_access(caller_method, bci);
 982     fieldDescriptor result; // initialize class if needed
 983     Bytecodes::Code code = field_access.code();
 984     constantPoolHandle constants(current, caller_method->constants());
 985     LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
 986     patch_field_offset = result.offset();
 987 
 988     // If we're patching a field which is volatile then at compile it
 989     // must not have been know to be volatile, so the generated code
 990     // isn't correct for a volatile reference.  The nmethod has to be

 994     // used for patching references to oops which don't need special
 995     // handling in the volatile case.
 996 
 997     deoptimize_for_volatile = result.access_flags().is_volatile();
 998 
 999     // If we are patching a field which should be atomic, then
1000     // the generated code is not correct either, force deoptimizing.
1001     // We need to only cover T_LONG and T_DOUBLE fields, as we can
1002     // break access atomicity only for them.
1003 
1004     // Strictly speaking, the deoptimization on 64-bit platforms
1005     // is unnecessary, and T_LONG stores on 32-bit platforms need
1006     // to be handled by special patching code when AlwaysAtomicAccesses
1007     // becomes product feature. At this point, we are still going
1008     // for the deoptimization for consistency against volatile
1009     // accesses.
1010 
1011     patch_field_type = result.field_type();
1012     deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1013 













1014   } else if (load_klass_or_mirror_patch_id) {
1015     Klass* k = nullptr;
1016     switch (code) {
1017       case Bytecodes::_putstatic:
1018       case Bytecodes::_getstatic:
1019         { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1020           init_klass = klass;
1021           mirror = Handle(current, klass->java_mirror());
1022         }
1023         break;
1024       case Bytecodes::_new:
1025         { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1026           k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1027         }
1028         break;
1029       case Bytecodes::_multianewarray:
1030         { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1031           k = caller_method->constants()->klass_at(mna.index(), CHECK);
1032         }
1033         break;
1034       case Bytecodes::_instanceof:
1035         { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1036           k = caller_method->constants()->klass_at(io.index(), CHECK);
1037         }
1038         break;
1039       case Bytecodes::_checkcast:
1040         { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1041           k = caller_method->constants()->klass_at(cc.index(), CHECK);
1042         }
1043         break;
1044       case Bytecodes::_anewarray:
1045         { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1046           Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1047           k = ek->array_klass(CHECK);






1048         }
1049         break;
1050       case Bytecodes::_ldc:
1051       case Bytecodes::_ldc_w:
1052       case Bytecodes::_ldc2_w:
1053         {
1054           Bytecode_loadconstant cc(caller_method, bci);
1055           oop m = cc.resolve_constant(CHECK);
1056           mirror = Handle(current, m);
1057         }
1058         break;
1059       default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1060     }
1061     load_klass = k;
1062   } else if (stub_id == StubId::c1_load_appendix_patching_id) {
1063     Bytecode_invoke bytecode(caller_method, bci);
1064     Bytecodes::Code bc = bytecode.invoke_code();
1065 
1066     CallInfo info;
1067     constantPoolHandle pool(current, caller_method->constants());
1068     int index = bytecode.index();
1069     LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1070     switch (bc) {
1071       case Bytecodes::_invokehandle: {
1072         ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1073         appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1074         break;
1075       }
1076       case Bytecodes::_invokedynamic: {
1077         appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1078         break;
1079       }
1080       default: fatal("unexpected bytecode for load_appendix_patching_id");
1081     }
1082   } else {
1083     ShouldNotReachHere();
1084   }
1085 
1086   if (deoptimize_for_volatile || deoptimize_for_atomic) {




1087     // At compile time we assumed the field wasn't volatile/atomic but after
1088     // loading it turns out it was volatile/atomic so we have to throw the
1089     // compiled code out and let it be regenerated.
1090     if (TracePatching) {
1091       if (deoptimize_for_volatile) {
1092         tty->print_cr("Deoptimizing for patching volatile field reference");
1093       }
1094       if (deoptimize_for_atomic) {
1095         tty->print_cr("Deoptimizing for patching atomic field reference");
1096       }









1097     }
1098 
1099     // It's possible the nmethod was invalidated in the last
1100     // safepoint, but if it's still alive then make it not_entrant.
1101     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1102     if (nm != nullptr) {
1103       nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1104     }
1105 
1106     Deoptimization::deoptimize_frame(current, caller_frame.id());
1107 
1108     // Return to the now deoptimized frame.
1109   }
1110 
1111   // Now copy code back
1112 
1113   {
1114     MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1115     //
1116     // Deoptimization may have happened while we waited for the lock.

1527 #ifndef PRODUCT
// Dumps all C1 runtime counters to tty. Only compiled into non-product
// builds (guarded by the surrounding #ifndef PRODUCT); the counters are
// only incremented when PrintC1Statistics is enabled.
void Runtime1::print_statistics() {
  tty->print_cr("C1 Runtime statistics:");
  // Call-resolution and inline-cache counters maintained by SharedRuntime.
  tty->print_cr(" _resolve_invoke_virtual_cnt:     %u", SharedRuntime::_resolve_virtual_ctr);
  tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
  tty->print_cr(" _resolve_invoke_static_cnt:      %u", SharedRuntime::_resolve_static_ctr);
  tty->print_cr(" _handle_wrong_method_cnt:        %u", SharedRuntime::_wrong_method_ctr);
  tty->print_cr(" _ic_miss_cnt:                    %u", SharedRuntime::_ic_miss_ctr);
  // Arraycopy stub counters (per element type, plus slow/checkcast paths).
  tty->print_cr(" _generic_arraycopystub_cnt:      %u", _generic_arraycopystub_cnt);
  tty->print_cr(" _byte_arraycopy_cnt:             %u", _byte_arraycopy_stub_cnt);
  tty->print_cr(" _short_arraycopy_cnt:            %u", _short_arraycopy_stub_cnt);
  tty->print_cr(" _int_arraycopy_cnt:              %u", _int_arraycopy_stub_cnt);
  tty->print_cr(" _long_arraycopy_cnt:             %u", _long_arraycopy_stub_cnt);
  tty->print_cr(" _oop_arraycopy_cnt:              %u", _oop_arraycopy_stub_cnt);
  tty->print_cr(" _arraycopy_slowcase_cnt:         %u", _arraycopy_slowcase_cnt);
  tty->print_cr(" _arraycopy_checkcast_cnt:        %u", _arraycopy_checkcast_cnt);
  tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);

  // Allocation and locking slow-case counters.
  tty->print_cr(" _new_type_array_slowcase_cnt:    %u", _new_type_array_slowcase_cnt);
  tty->print_cr(" _new_object_array_slowcase_cnt:  %u", _new_object_array_slowcase_cnt);
  tty->print_cr(" _new_instance_slowcase_cnt:      %u", _new_instance_slowcase_cnt);
  tty->print_cr(" _new_multi_array_slowcase_cnt:   %u", _new_multi_array_slowcase_cnt);
  tty->print_cr(" _monitorenter_slowcase_cnt:      %u", _monitorenter_slowcase_cnt);
  tty->print_cr(" _monitorexit_slowcase_cnt:       %u", _monitorexit_slowcase_cnt);
  tty->print_cr(" _patch_code_slowcase_cnt:        %u", _patch_code_slowcase_cnt);

  // Exception-throw counters.
  tty->print_cr(" _throw_range_check_exception_count:            %u:", _throw_range_check_exception_count);
  tty->print_cr(" _throw_index_exception_count:                  %u:", _throw_index_exception_count);
  tty->print_cr(" _throw_div0_exception_count:                   %u:", _throw_div0_exception_count);
  tty->print_cr(" _throw_null_pointer_exception_count:           %u:", _throw_null_pointer_exception_count);
  tty->print_cr(" _throw_class_cast_exception_count:             %u:", _throw_class_cast_exception_count);
  tty->print_cr(" _throw_incompatible_class_change_error_count:  %u:", _throw_incompatible_class_change_error_count);
  tty->print_cr(" _throw_count:                                  %u:", _throw_count);

  SharedRuntime::print_ic_miss_histogram();
  tty->cr();
}
1564 #endif // PRODUCT

   1 /*
   2  * Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

  33 #include "classfile/vmSymbols.hpp"
  34 #include "code/aotCodeCache.hpp"
  35 #include "code/codeBlob.hpp"
  36 #include "code/compiledIC.hpp"
  37 #include "code/scopeDesc.hpp"
  38 #include "code/vtableStubs.hpp"
  39 #include "compiler/compilationPolicy.hpp"
  40 #include "compiler/disassembler.hpp"
  41 #include "compiler/oopMap.hpp"
  42 #include "gc/shared/barrierSet.hpp"
  43 #include "gc/shared/c1/barrierSetC1.hpp"
  44 #include "gc/shared/collectedHeap.hpp"
  45 #include "interpreter/bytecode.hpp"
  46 #include "interpreter/interpreter.hpp"
  47 #include "jfr/support/jfrIntrinsics.hpp"
  48 #include "logging/log.hpp"
  49 #include "memory/oopFactory.hpp"
  50 #include "memory/resourceArea.hpp"
  51 #include "memory/universe.hpp"
  52 #include "oops/access.inline.hpp"
  53 #include "oops/arrayProperties.hpp"
  54 #include "oops/flatArrayKlass.hpp"
  55 #include "oops/flatArrayOop.inline.hpp"
  56 #include "oops/objArrayKlass.hpp"
  57 #include "oops/objArrayOop.inline.hpp"
  58 #include "oops/oop.inline.hpp"
  59 #include "oops/oopCast.inline.hpp"
  60 #include "prims/jvmtiExport.hpp"
  61 #include "runtime/atomicAccess.hpp"
  62 #include "runtime/fieldDescriptor.inline.hpp"
  63 #include "runtime/frame.inline.hpp"
  64 #include "runtime/handles.inline.hpp"
  65 #include "runtime/interfaceSupport.inline.hpp"
  66 #include "runtime/javaCalls.hpp"
  67 #include "runtime/sharedRuntime.hpp"
  68 #include "runtime/stackWatermarkSet.hpp"
  69 #include "runtime/stubInfo.hpp"
  70 #include "runtime/stubRoutines.hpp"
  71 #include "runtime/vframe.inline.hpp"
  72 #include "runtime/vframeArray.hpp"
  73 #include "runtime/vm_version.hpp"
  74 #include "utilities/copy.hpp"
  75 #include "utilities/events.hpp"
  76 
  77 
  78 // Implementation of StubAssembler
  79 

 101 
 102 
// Records how many arguments the runtime call made by this stub takes.
// The first call latches the value; any later call must pass the same
// count (enforced by the assert below, which runs on every call).
void StubAssembler::set_num_rt_args(int args) {
  if (_num_rt_args == 0) {
    // First call: remember the argument count.
    _num_rt_args = args;
  }
  assert(_num_rt_args == args, "can't change the number of args");
}
 109 
 110 // Implementation of Runtime1
 111 CodeBlob* Runtime1::_blobs[StubInfo::C1_STUB_COUNT];
 112 
 113 #ifndef PRODUCT
 114 // statistics
 115 uint Runtime1::_generic_arraycopystub_cnt = 0;
 116 uint Runtime1::_arraycopy_slowcase_cnt = 0;
 117 uint Runtime1::_arraycopy_checkcast_cnt = 0;
 118 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
 119 uint Runtime1::_new_type_array_slowcase_cnt = 0;
 120 uint Runtime1::_new_object_array_slowcase_cnt = 0;
 121 uint Runtime1::_new_null_free_array_slowcase_cnt = 0;
 122 uint Runtime1::_new_instance_slowcase_cnt = 0;
 123 uint Runtime1::_new_multi_array_slowcase_cnt = 0;
 124 uint Runtime1::_load_flat_array_slowcase_cnt = 0;
 125 uint Runtime1::_store_flat_array_slowcase_cnt = 0;
 126 uint Runtime1::_substitutability_check_slowcase_cnt = 0;
 127 uint Runtime1::_buffer_inline_args_slowcase_cnt = 0;
 128 uint Runtime1::_buffer_inline_args_no_receiver_slowcase_cnt = 0;
 129 uint Runtime1::_monitorenter_slowcase_cnt = 0;
 130 uint Runtime1::_monitorexit_slowcase_cnt = 0;
 131 uint Runtime1::_patch_code_slowcase_cnt = 0;
 132 uint Runtime1::_throw_range_check_exception_count = 0;
 133 uint Runtime1::_throw_index_exception_count = 0;
 134 uint Runtime1::_throw_div0_exception_count = 0;
 135 uint Runtime1::_throw_null_pointer_exception_count = 0;
 136 uint Runtime1::_throw_class_cast_exception_count = 0;
 137 uint Runtime1::_throw_incompatible_class_change_error_count = 0;
 138 uint Runtime1::_throw_illegal_monitor_state_exception_count = 0;
 139 uint Runtime1::_throw_identity_exception_count = 0;
 140 uint Runtime1::_throw_count = 0;
 141 
 142 static uint _byte_arraycopy_stub_cnt = 0;
 143 static uint _short_arraycopy_stub_cnt = 0;
 144 static uint _int_arraycopy_stub_cnt = 0;
 145 static uint _long_arraycopy_stub_cnt = 0;
 146 static uint _oop_arraycopy_stub_cnt = 0;
 147 
 148 address Runtime1::arraycopy_count_address(BasicType type) {
 149   switch (type) {
 150   case T_BOOLEAN:
 151   case T_BYTE:   return (address)&_byte_arraycopy_stub_cnt;
 152   case T_CHAR:
 153   case T_SHORT:  return (address)&_short_arraycopy_stub_cnt;
 154   case T_FLOAT:
 155   case T_INT:    return (address)&_int_arraycopy_stub_cnt;
 156   case T_DOUBLE:
 157   case T_LONG:   return (address)&_long_arraycopy_stub_cnt;
 158   case T_ARRAY:
 159   case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;

 367   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
 368   FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
 369   FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
 370   FUNCTION_CASE(entry, StubRoutines::dexp());
 371   FUNCTION_CASE(entry, StubRoutines::dlog());
 372   FUNCTION_CASE(entry, StubRoutines::dlog10());
 373   FUNCTION_CASE(entry, StubRoutines::dpow());
 374   FUNCTION_CASE(entry, StubRoutines::dsin());
 375   FUNCTION_CASE(entry, StubRoutines::dcos());
 376   FUNCTION_CASE(entry, StubRoutines::dtan());
 377   FUNCTION_CASE(entry, StubRoutines::dsinh());
 378   FUNCTION_CASE(entry, StubRoutines::dtanh());
 379   FUNCTION_CASE(entry, StubRoutines::dcbrt());
 380 
 381 #undef FUNCTION_CASE
 382 
 383   // Soft float adds more runtime names.
 384   return pd_name_for_address(entry);
 385 }
 386 
 387 static void allocate_instance(JavaThread* current, Klass* klass, TRAPS) {

 388 #ifndef PRODUCT
 389   if (PrintC1Statistics) {
 390     Runtime1::_new_instance_slowcase_cnt++;
 391   }
 392 #endif
 393   assert(klass->is_klass(), "not a class");
 394   Handle holder(current, klass->klass_holder()); // keep the klass alive
 395   InstanceKlass* h = InstanceKlass::cast(klass);
 396   h->check_valid_for_instantiation(true, CHECK);
 397   // make sure klass is initialized
 398   h->initialize(CHECK);
 399   // allocate instance and return via TLS
 400   oop obj = h->allocate_instance(CHECK);
 401   current->set_vm_result_oop(obj);
 402 JRT_END
 403 
// C1 slow-path entry: allocate a new instance of 'klass'. All the work
// is done by the static allocate_instance() helper; CHECK propagates any
// pending exception back to the compiled caller.
JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
  allocate_instance(current, klass, CHECK);
JRT_END
 407 
 408 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
 409 #ifndef PRODUCT
 410   if (PrintC1Statistics) {
 411     _new_type_array_slowcase_cnt++;
 412   }
 413 #endif
 414   // Note: no handle for klass needed since they are not used
 415   //       anymore after new_typeArray() and no GC can happen before.
 416   //       (This may have to change if this code changes!)
 417   assert(klass->is_klass(), "not a class");
 418   BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
 419   oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
 420   current->set_vm_result_oop(obj);
 421   // This is pretty rare but this runtime patch is stressful to deoptimization
 422   // if we deoptimize here so force a deopt to stress the path.
 423   if (DeoptimizeALot) {
 424     deopt_caller(current);
 425   }
 426 

 432   if (PrintC1Statistics) {
 433     _new_object_array_slowcase_cnt++;
 434   }
 435 #endif
 436   // Note: no handle for klass needed since they are not used
 437   //       anymore after new_objArray() and no GC can happen before.
 438   //       (This may have to change if this code changes!)
 439   assert(array_klass->is_klass(), "not a class");
 440   Handle holder(current, array_klass->klass_holder()); // keep the klass alive
 441   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 442   objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
 443   current->set_vm_result_oop(obj);
 444   // This is pretty rare but this runtime patch is stressful to deoptimization
 445   // if we deoptimize here so force a deopt to stress the path.
 446   if (DeoptimizeALot) {
 447     deopt_caller(current);
 448   }
 449 JRT_END
 450 
 451 
 452 JRT_ENTRY(void, Runtime1::new_null_free_array(JavaThread* current, Klass* array_klass, jint length))
 453   NOT_PRODUCT(_new_null_free_array_slowcase_cnt++;)
 454   // TODO 8350865 This is dead code since 8325660 because null-free arrays can only be created via the factory methods that are not yet implemented in C1. Should probably be fixed by 8265122.
 455 
 456   // Note: no handle for klass needed since they are not used
 457   //       anymore after new_objArray() and no GC can happen before.
 458   //       (This may have to change if this code changes!)
 459   assert(array_klass->is_klass(), "not a class");
 460   Handle holder(THREAD, array_klass->klass_holder()); // keep the klass alive
 461   Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
 462   assert(elem_klass->is_inline_klass(), "must be");
 463   // Logically creates elements, ensure klass init
 464   elem_klass->initialize(CHECK);
 465 
 466   const ArrayProperties props = ArrayProperties::Default().with_null_restricted();
 467   arrayOop obj = oopFactory::new_objArray(elem_klass, length, props, CHECK);
 468 
 469   current->set_vm_result_oop(obj);
 470   // This is pretty rare but this runtime patch is stressful to deoptimization
 471   // if we deoptimize here so force a deopt to stress the path.
 472   if (DeoptimizeALot) {
 473     deopt_caller(current);
 474   }
 475 JRT_END
 476 
 477 
 478 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
 479 #ifndef PRODUCT
 480   if (PrintC1Statistics) {
 481     _new_multi_array_slowcase_cnt++;
 482   }
 483 #endif
 484   assert(klass->is_klass(), "not a class");
 485   assert(rank >= 1, "rank must be nonzero");
 486   Handle holder(current, klass->klass_holder()); // keep the klass alive
 487   oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
 488   current->set_vm_result_oop(obj);
 489 JRT_END
 490 
 491 
// Record in the profiling MDO of the C1 caller on top of stack that the array
// accessed at the current bci is flat (and, optionally, null-free). 'load'
// selects which profile entry kind is expected: ArrayLoadData for loads,
// ArrayStoreData for stores. No-op unless the caller was compiled at the
// full-profile level and C1UpdateMethodData is enabled, or if no MDO exists.
static void profile_flat_array(JavaThread* current, bool load, bool null_free) {
  ResourceMark rm(current);
  vframeStream vfst(current, true);
  assert(!vfst.at_end(), "Java frame must exist");
  // Check if array access profiling is enabled
  if (vfst.nm()->comp_level() != CompLevel_full_profile || !C1UpdateMethodData) {
    return;
  }
  int bci = vfst.bci();
  Method* method = vfst.method();
  MethodData* md = method->method_data();
  if (md != nullptr) {
    // Lock to access ProfileData, and ensure lock is not broken by a safepoint
    MutexLocker ml(md->extra_data_lock(), Mutex::_no_safepoint_check_flag);

    ProfileData* data = md->bci_to_data(bci);
    assert(data != nullptr, "incorrect profiling entry");
    if (data->is_ArrayLoadData()) {
      assert(load, "should be an array load");
      ArrayLoadData* load_data = (ArrayLoadData*) data;
      load_data->set_flat_array();
      if (null_free) {
        load_data->set_null_free_array();
      }
    } else {
      // The only other entry kind expected here is an array-store profile.
      assert(data->is_ArrayStoreData(), "");
      assert(!load, "should be an array store");
      ArrayStoreData* store_data = (ArrayStoreData*) data;
      store_data->set_flat_array();
      if (null_free) {
        store_data->set_null_free_array();
      }
    }
  }
}
 527 
// Slow path for loading an element from a flat array: buffers the element as
// a heap object and returns it via the thread's vm_result_oop. Bounds have
// already been checked by the compiled caller (see assert below).
JRT_ENTRY(void, Runtime1::load_flat_array(JavaThread* current, flatArrayOopDesc* array, int index))
  assert(array->klass()->is_flatArray_klass(), "should not be called");
  profile_flat_array(current, true, array->is_null_free_array());

  NOT_PRODUCT(_load_flat_array_slowcase_cnt++;)
  assert(array->length() > 0 && index < array->length(), "already checked");
  // Handleize the array before obj_at(): it takes TRAPS and may allocate,
  // so the array must be kept alive across a potential safepoint.
  flatArrayHandle vah(current, array);
  oop obj = array->obj_at(index, CHECK);
  current->set_vm_result_oop(obj);
JRT_END
 538 
// Slow path for storing an element into a flat array. Throws NPE when a null
// is written to a null-free array; otherwise delegates to obj_at_put().
JRT_ENTRY(void, Runtime1::store_flat_array(JavaThread* current, arrayOopDesc* array, int index, oopDesc* value))
  // TODO 8350865 We can call here with a non-flat array because of LIR_Assembler::emit_opFlattenedArrayCheck
  if (array->is_flatArray()) {
    profile_flat_array(current, false, array->is_null_free_array());
  }

  NOT_PRODUCT(_store_flat_array_slowcase_cnt++;)
  if (value == nullptr && array->is_null_free_array()) {
    // Null-free arrays reject null stores with a NullPointerException.
    SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_NullPointerException());
  } else {
    // Here we know that we have a flat array
    oop_cast<flatArrayOop>(array)->obj_at_put(index, value, CHECK);
  }
JRT_END
 553 
 554 JRT_ENTRY(int, Runtime1::substitutability_check(JavaThread* current, oopDesc* left, oopDesc* right))
 555   NOT_PRODUCT(_substitutability_check_slowcase_cnt++;)
 556   JavaCallArguments args;
 557   args.push_oop(Handle(THREAD, left));
 558   args.push_oop(Handle(THREAD, right));
 559   JavaValue result(T_BOOLEAN);
 560   JavaCalls::call_static(&result,
 561                          vmClasses::ValueObjectMethods_klass(),
 562                          vmSymbols::isSubstitutable_name(),
 563                          vmSymbols::object_object_boolean_signature(),
 564                          &args, CHECK_0);
 565   return result.get_jboolean() ? 1 : 0;
 566 JRT_END
 567 
 568 
 569 extern "C" void ps();
 570 
// Common implementation for the buffer_inline_args entries: heap-allocates
// (buffers) the value-class arguments of 'm', optionally including the
// receiver, and hands the result back through the thread's vm_result_oop.
void Runtime1::buffer_inline_args_impl(JavaThread* current, Method* m, bool allocate_receiver) {
  JavaThread* THREAD = current; // Needed by the CHECK macro below.
  methodHandle method(current, m); // We are inside the verified_entry or verified_inline_ro_entry of this method.
  oop obj = SharedRuntime::allocate_inline_types_impl(current, method, allocate_receiver, CHECK);
  current->set_vm_result_oop(obj);
}
 577 
 578 JRT_ENTRY(void, Runtime1::buffer_inline_args(JavaThread* current, Method* method))
 579   NOT_PRODUCT(_buffer_inline_args_slowcase_cnt++;)
 580   buffer_inline_args_impl(current, method, true);
 581 JRT_END
 582 
 583 JRT_ENTRY(void, Runtime1::buffer_inline_args_no_receiver(JavaThread* current, Method* method))
 584   NOT_PRODUCT(_buffer_inline_args_no_receiver_slowcase_cnt++;)
 585   buffer_inline_args_impl(current, method, false);
 586 JRT_END
 587 
 588 JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, StubId id))
 589   tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
 590 JRT_END
 591 
 592 
 593 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
 594   ResourceMark rm(current);
 595   const char* klass_name = obj->klass()->external_name();
 596   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
 597 JRT_END
 598 
 599 
 600 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
 601 // associated with the top activation record. The inlinee (that is possibly included in the enclosing
 602 // method) method is passed as an argument. In order to do that it is embedded in the code as
 603 // a constant.
 604 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
 605   nmethod* osr_nm = nullptr;
 606   methodHandle method(current, m);
 607 

 885     _throw_class_cast_exception_count++;
 886   }
 887 #endif
 888   ResourceMark rm(current);
 889   char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
 890   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
 891 JRT_END
 892 
 893 
 894 JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
 895 #ifndef PRODUCT
 896   if (PrintC1Statistics) {
 897     _throw_incompatible_class_change_error_count++;
 898   }
 899 #endif
 900   ResourceMark rm(current);
 901   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
 902 JRT_END
 903 
 904 
 905 JRT_ENTRY(void, Runtime1::throw_illegal_monitor_state_exception(JavaThread* current))
 906   NOT_PRODUCT(_throw_illegal_monitor_state_exception_count++;)
 907   ResourceMark rm(current);
 908   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IllegalMonitorStateException());
 909 JRT_END
 910 
 911 JRT_ENTRY(void, Runtime1::throw_identity_exception(JavaThread* current, oopDesc* object))
 912   NOT_PRODUCT(_throw_identity_exception_count++;)
 913   ResourceMark rm(current);
 914   char* message = SharedRuntime::generate_identity_exception_message(current, object->klass());
 915   SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IdentityException(), message);
 916 JRT_END
 917 
// Slow-path monitor enter for C1-compiled code: delegates to the shared
// runtime helper using the BasicObjectLock embedded in the caller's frame.
JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
#ifndef PRODUCT
  if (PrintC1Statistics) {
    _monitorenter_slowcase_cnt++;
  }
#endif
  // The compiled caller stored the object into the lock slot already.
  assert(obj == lock->obj(), "must match");
  SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
JRT_END
 927 
 928 
 929 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
 930   assert(current == JavaThread::current(), "pre-condition");
 931 #ifndef PRODUCT
 932   if (PrintC1Statistics) {
 933     _monitorexit_slowcase_cnt++;
 934   }
 935 #endif
 936   assert(current->last_Java_sp(), "last_Java_sp must be set");
 937   oop obj = lock->obj();

1100                       RegisterMap::WalkContinuation::skip);
1101   frame runtime_frame = current->last_frame();
1102   frame caller_frame = runtime_frame.sender(&reg_map);
1103 
1104   // last java frame on stack
1105   vframeStream vfst(current, true);
1106   assert(!vfst.at_end(), "Java frame must exist");
1107 
1108   methodHandle caller_method(current, vfst.method());
1109   // Note that caller_method->code() may not be same as caller_code because of OSR's
1110   // Note also that in the presence of inlining it is not guaranteed
1111   // that caller_method() == caller_code->method()
1112 
1113   int bci = vfst.bci();
1114   Bytecodes::Code code = caller_method()->java_code_at(bci);
1115 
1116   // this is used by assertions in the access_field_patching_id
1117   BasicType patch_field_type = T_ILLEGAL;
1118   bool deoptimize_for_volatile = false;
1119   bool deoptimize_for_atomic = false;
1120   bool deoptimize_for_null_free = false;
1121   bool deoptimize_for_flat = false;
1122   bool deoptimize_for_strict_static = false;
1123   int patch_field_offset = -1;
1124   Klass* init_klass = nullptr; // klass needed by load_klass_patching code
1125   Klass* load_klass = nullptr; // klass needed by load_klass_patching code
1126   Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
1127   Handle appendix(current, nullptr); // oop needed by appendix_patching code
1128   bool load_klass_or_mirror_patch_id =
1129     (stub_id == StubId::c1_load_klass_patching_id || stub_id == StubId::c1_load_mirror_patching_id);
1130 
1131   if (stub_id == StubId::c1_access_field_patching_id) {
1132 
1133     Bytecode_field field_access(caller_method, bci);
1134     fieldDescriptor result; // initialize class if needed
1135     Bytecodes::Code code = field_access.code();
1136     constantPoolHandle constants(current, caller_method->constants());
1137     LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
1138     patch_field_offset = result.offset();
1139 
1140     // If we're patching a field which is volatile then at compile it
1141     // must not have been know to be volatile, so the generated code
1142     // isn't correct for a volatile reference.  The nmethod has to be

1146     // used for patching references to oops which don't need special
1147     // handling in the volatile case.
1148 
1149     deoptimize_for_volatile = result.access_flags().is_volatile();
1150 
1151     // If we are patching a field which should be atomic, then
1152     // the generated code is not correct either, force deoptimizing.
1153     // We need to only cover T_LONG and T_DOUBLE fields, as we can
1154     // break access atomicity only for them.
1155 
1156     // Strictly speaking, the deoptimization on 64-bit platforms
1157     // is unnecessary, and T_LONG stores on 32-bit platforms need
1158     // to be handled by special patching code when AlwaysAtomicAccesses
1159     // becomes product feature. At this point, we are still going
1160     // for the deoptimization for consistency against volatile
1161     // accesses.
1162 
1163     patch_field_type = result.field_type();
1164     deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1165 
1166     // The field we are patching is null-free. Deoptimize and regenerate
1167     // the compiled code if we patch a putfield/putstatic because it
1168     // does not contain the required null check.
1169     deoptimize_for_null_free = result.is_null_free_inline_type() && (field_access.is_putfield() || field_access.is_putstatic());
1170 
1171     // The field we are patching is flat. Deoptimize and regenerate
1172     // the compiled code which can't handle the layout of the flat
1173     // field because it was unknown at compile time.
1174     deoptimize_for_flat = result.is_flat();
1175 
1176     // Strict statics may require tracking if their class is not fully initialized.
1177     // For now we can bail out of the compiler and let the interpreter handle it.
1178     deoptimize_for_strict_static = result.is_strict_static_unset();
1179   } else if (load_klass_or_mirror_patch_id) {
1180     Klass* k = nullptr;
1181     switch (code) {
1182       case Bytecodes::_putstatic:
1183       case Bytecodes::_getstatic:
1184         { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1185           init_klass = klass;
1186           mirror = Handle(current, klass->java_mirror());
1187         }
1188         break;
1189       case Bytecodes::_new:
1190         { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1191           k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1192         }
1193         break;
1194       case Bytecodes::_multianewarray:
1195         { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1196           k = caller_method->constants()->klass_at(mna.index(), CHECK);
1197         }
1198         break;
1199       case Bytecodes::_instanceof:
1200         { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1201           k = caller_method->constants()->klass_at(io.index(), CHECK);
1202         }
1203         break;
1204       case Bytecodes::_checkcast:
1205         { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1206           k = caller_method->constants()->klass_at(cc.index(), CHECK);
1207         }
1208         break;
1209       case Bytecodes::_anewarray:
1210         { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1211           Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1212           k = ek->array_klass(CHECK);
1213           if (!k->is_typeArray_klass() && !k->is_refArray_klass() && !k->is_flatArray_klass()) {
1214             k = ObjArrayKlass::cast(k)->klass_with_properties(ArrayProperties::Default(), THREAD);
1215           }
1216           if (k->is_flatArray_klass()) {
1217             deoptimize_for_flat = true;
1218           }
1219         }
1220         break;
1221       case Bytecodes::_ldc:
1222       case Bytecodes::_ldc_w:
1223       case Bytecodes::_ldc2_w:
1224         {
1225           Bytecode_loadconstant cc(caller_method, bci);
1226           oop m = cc.resolve_constant(CHECK);
1227           mirror = Handle(current, m);
1228         }
1229         break;
1230       default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1231     }
1232     load_klass = k;
1233   } else if (stub_id == StubId::c1_load_appendix_patching_id) {
1234     Bytecode_invoke bytecode(caller_method, bci);
1235     Bytecodes::Code bc = bytecode.invoke_code();
1236 
1237     CallInfo info;
1238     constantPoolHandle pool(current, caller_method->constants());
1239     int index = bytecode.index();
1240     LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1241     switch (bc) {
1242       case Bytecodes::_invokehandle: {
1243         ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1244         appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1245         break;
1246       }
1247       case Bytecodes::_invokedynamic: {
1248         appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1249         break;
1250       }
1251       default: fatal("unexpected bytecode for load_appendix_patching_id");
1252     }
1253   } else {
1254     ShouldNotReachHere();
1255   }
1256 
1257   if (deoptimize_for_volatile  ||
1258       deoptimize_for_atomic    ||
1259       deoptimize_for_null_free ||
1260       deoptimize_for_flat      ||
1261       deoptimize_for_strict_static) {
1262     // At compile time we assumed the field wasn't volatile/atomic but after
1263     // loading it turns out it was volatile/atomic so we have to throw the
1264     // compiled code out and let it be regenerated.
1265     if (TracePatching) {
1266       if (deoptimize_for_volatile) {
1267         tty->print_cr("Deoptimizing for patching volatile field reference");
1268       }
1269       if (deoptimize_for_atomic) {
1270         tty->print_cr("Deoptimizing for patching atomic field reference");
1271       }
1272       if (deoptimize_for_null_free) {
1273         tty->print_cr("Deoptimizing for patching null-free field reference");
1274       }
1275       if (deoptimize_for_flat) {
1276         tty->print_cr("Deoptimizing for patching flat field or array reference");
1277       }
1278       if (deoptimize_for_strict_static) {
1279         tty->print_cr("Deoptimizing for patching strict static field reference");
1280       }
1281     }
1282 
1283     // It's possible the nmethod was invalidated in the last
1284     // safepoint, but if it's still alive then make it not_entrant.
1285     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1286     if (nm != nullptr) {
1287       nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1288     }
1289 
1290     Deoptimization::deoptimize_frame(current, caller_frame.id());
1291 
1292     // Return to the now deoptimized frame.
1293   }
1294 
1295   // Now copy code back
1296 
1297   {
1298     MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1299     //
1300     // Deoptimization may have happened while we waited for the lock.

#ifndef PRODUCT
// Dump all C1 runtime slow-case/exception counters (plus the shared resolve
// and IC-miss counters) to tty. Non-product builds only; typically triggered
// at VM exit when statistics printing is enabled.
void Runtime1::print_statistics() {
  tty->print_cr("C1 Runtime statistics:");
  tty->print_cr(" _resolve_invoke_virtual_cnt:     %u", SharedRuntime::_resolve_virtual_ctr);
  tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
  tty->print_cr(" _resolve_invoke_static_cnt:      %u", SharedRuntime::_resolve_static_ctr);
  tty->print_cr(" _handle_wrong_method_cnt:        %u", SharedRuntime::_wrong_method_ctr);
  tty->print_cr(" _ic_miss_cnt:                    %u", SharedRuntime::_ic_miss_ctr);
  tty->print_cr(" _generic_arraycopystub_cnt:      %u", _generic_arraycopystub_cnt);
  tty->print_cr(" _byte_arraycopy_cnt:             %u", _byte_arraycopy_stub_cnt);
  tty->print_cr(" _short_arraycopy_cnt:            %u", _short_arraycopy_stub_cnt);
  tty->print_cr(" _int_arraycopy_cnt:              %u", _int_arraycopy_stub_cnt);
  tty->print_cr(" _long_arraycopy_cnt:             %u", _long_arraycopy_stub_cnt);
  tty->print_cr(" _oop_arraycopy_cnt:              %u", _oop_arraycopy_stub_cnt);
  tty->print_cr(" _arraycopy_slowcase_cnt:         %u", _arraycopy_slowcase_cnt);
  tty->print_cr(" _arraycopy_checkcast_cnt:        %u", _arraycopy_checkcast_cnt);
  tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);

  // Allocation slow cases.
  tty->print_cr(" _new_type_array_slowcase_cnt:    %u", _new_type_array_slowcase_cnt);
  tty->print_cr(" _new_object_array_slowcase_cnt:  %u", _new_object_array_slowcase_cnt);
  tty->print_cr(" _new_null_free_array_slowcase_cnt: %u", _new_null_free_array_slowcase_cnt);
  tty->print_cr(" _new_instance_slowcase_cnt:      %u", _new_instance_slowcase_cnt);
  tty->print_cr(" _new_multi_array_slowcase_cnt:   %u", _new_multi_array_slowcase_cnt);
  tty->print_cr(" _load_flat_array_slowcase_cnt:   %u", _load_flat_array_slowcase_cnt);
  tty->print_cr(" _store_flat_array_slowcase_cnt:  %u", _store_flat_array_slowcase_cnt);
  tty->print_cr(" _substitutability_check_slowcase_cnt: %u", _substitutability_check_slowcase_cnt);
  tty->print_cr(" _buffer_inline_args_slowcase_cnt:%u", _buffer_inline_args_slowcase_cnt);
  tty->print_cr(" _buffer_inline_args_no_receiver_slowcase_cnt:%u", _buffer_inline_args_no_receiver_slowcase_cnt);

  // Locking and code patching slow cases.
  tty->print_cr(" _monitorenter_slowcase_cnt:      %u", _monitorenter_slowcase_cnt);
  tty->print_cr(" _monitorexit_slowcase_cnt:       %u", _monitorexit_slowcase_cnt);
  tty->print_cr(" _patch_code_slowcase_cnt:        %u", _patch_code_slowcase_cnt);

  // Exception-throw entry counters.
  tty->print_cr(" _throw_range_check_exception_count:            %u:", _throw_range_check_exception_count);
  tty->print_cr(" _throw_index_exception_count:                  %u:", _throw_index_exception_count);
  tty->print_cr(" _throw_div0_exception_count:                   %u:", _throw_div0_exception_count);
  tty->print_cr(" _throw_null_pointer_exception_count:           %u:", _throw_null_pointer_exception_count);
  tty->print_cr(" _throw_class_cast_exception_count:             %u:", _throw_class_cast_exception_count);
  tty->print_cr(" _throw_incompatible_class_change_error_count:  %u:", _throw_incompatible_class_change_error_count);
  tty->print_cr(" _throw_illegal_monitor_state_exception_count:  %u:", _throw_illegal_monitor_state_exception_count);
  tty->print_cr(" _throw_identity_exception_count:               %u:", _throw_identity_exception_count);
  tty->print_cr(" _throw_count:                                  %u:", _throw_count);

  SharedRuntime::print_ic_miss_histogram();
  tty->cr();
}
#endif // PRODUCT
< prev index next >