33 #include "classfile/vmSymbols.hpp"
34 #include "code/aotCodeCache.hpp"
35 #include "code/codeBlob.hpp"
36 #include "code/compiledIC.hpp"
37 #include "code/scopeDesc.hpp"
38 #include "code/vtableStubs.hpp"
39 #include "compiler/compilationPolicy.hpp"
40 #include "compiler/disassembler.hpp"
41 #include "compiler/oopMap.hpp"
42 #include "gc/shared/barrierSet.hpp"
43 #include "gc/shared/c1/barrierSetC1.hpp"
44 #include "gc/shared/collectedHeap.hpp"
45 #include "interpreter/bytecode.hpp"
46 #include "interpreter/interpreter.hpp"
47 #include "jfr/support/jfrIntrinsics.hpp"
48 #include "logging/log.hpp"
49 #include "memory/oopFactory.hpp"
50 #include "memory/resourceArea.hpp"
51 #include "memory/universe.hpp"
52 #include "oops/access.inline.hpp"
53 #include "oops/objArrayKlass.hpp"
54 #include "oops/objArrayOop.inline.hpp"
55 #include "oops/oop.inline.hpp"
56 #include "prims/jvmtiExport.hpp"
57 #include "runtime/atomic.hpp"
58 #include "runtime/fieldDescriptor.inline.hpp"
59 #include "runtime/frame.inline.hpp"
60 #include "runtime/handles.inline.hpp"
61 #include "runtime/interfaceSupport.inline.hpp"
62 #include "runtime/javaCalls.hpp"
63 #include "runtime/sharedRuntime.hpp"
64 #include "runtime/stackWatermarkSet.hpp"
65 #include "runtime/stubRoutines.hpp"
66 #include "runtime/vframe.inline.hpp"
67 #include "runtime/vframeArray.hpp"
68 #include "runtime/vm_version.hpp"
69 #include "utilities/copy.hpp"
70 #include "utilities/events.hpp"
71
72
103 }
104
105 // Implementation of Runtime1
106
107 CodeBlob* Runtime1::_blobs[(int)C1StubId::NUM_STUBIDS];
108
109 #define C1_BLOB_NAME_DEFINE(name) "C1 Runtime " # name "_blob",
110 const char *Runtime1::_blob_names[] = {
111 C1_STUBS_DO(C1_BLOB_NAME_DEFINE)
112 };
113 #undef C1_BLOB_NAME_DEFINE
114
115 #ifndef PRODUCT
116 // statistics
117 uint Runtime1::_generic_arraycopystub_cnt = 0;
118 uint Runtime1::_arraycopy_slowcase_cnt = 0;
119 uint Runtime1::_arraycopy_checkcast_cnt = 0;
120 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
121 uint Runtime1::_new_type_array_slowcase_cnt = 0;
122 uint Runtime1::_new_object_array_slowcase_cnt = 0;
123 uint Runtime1::_new_instance_slowcase_cnt = 0;
124 uint Runtime1::_new_multi_array_slowcase_cnt = 0;
125 uint Runtime1::_monitorenter_slowcase_cnt = 0;
126 uint Runtime1::_monitorexit_slowcase_cnt = 0;
127 uint Runtime1::_patch_code_slowcase_cnt = 0;
128 uint Runtime1::_throw_range_check_exception_count = 0;
129 uint Runtime1::_throw_index_exception_count = 0;
130 uint Runtime1::_throw_div0_exception_count = 0;
131 uint Runtime1::_throw_null_pointer_exception_count = 0;
132 uint Runtime1::_throw_class_cast_exception_count = 0;
133 uint Runtime1::_throw_incompatible_class_change_error_count = 0;
134 uint Runtime1::_throw_count = 0;
135
136 static uint _byte_arraycopy_stub_cnt = 0;
137 static uint _short_arraycopy_stub_cnt = 0;
138 static uint _int_arraycopy_stub_cnt = 0;
139 static uint _long_arraycopy_stub_cnt = 0;
140 static uint _oop_arraycopy_stub_cnt = 0;
141
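// Returns the address of the arraycopy stub counter for the given element type,
// so generated code can update the statistic directly.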
142 address Runtime1::arraycopy_count_address(BasicType type) {
143 switch (type) {
144 case T_BOOLEAN:
145 case T_BYTE: return (address)&_byte_arraycopy_stub_cnt;
146 case T_CHAR:
147 case T_SHORT: return (address)&_short_arraycopy_stub_cnt;
148 case T_FLOAT:
149 case T_INT: return (address)&_int_arraycopy_stub_cnt;
150 case T_DOUBLE:
151 case T_LONG: return (address)&_long_arraycopy_stub_cnt;
152 case T_ARRAY:
153 case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;
356 #endif
357 FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
358 FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
359 FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
360 FUNCTION_CASE(entry, StubRoutines::dexp());
361 FUNCTION_CASE(entry, StubRoutines::dlog());
362 FUNCTION_CASE(entry, StubRoutines::dlog10());
363 FUNCTION_CASE(entry, StubRoutines::dpow());
364 FUNCTION_CASE(entry, StubRoutines::dsin());
365 FUNCTION_CASE(entry, StubRoutines::dcos());
366 FUNCTION_CASE(entry, StubRoutines::dtan());
367 FUNCTION_CASE(entry, StubRoutines::dtanh());
368 FUNCTION_CASE(entry, StubRoutines::dcbrt());
369
370 #undef FUNCTION_CASE
371
372 // Soft float adds more runtime names.
373 return pd_name_for_address(entry);
374 }
375
376
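// Slow-path allocation of a new instance, called from C1-compiled code.
// The new object is returned to the caller via the thread's vm_result_oop.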
377 JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
378 #ifndef PRODUCT
379 if (PrintC1Statistics) {
380 _new_instance_slowcase_cnt++;
381 }
382 #endif
383 assert(klass->is_klass(), "not a class");
384 Handle holder(current, klass->klass_holder()); // keep the klass alive
385 InstanceKlass* h = InstanceKlass::cast(klass);
386 h->check_valid_for_instantiation(true, CHECK);
387 // make sure klass is initialized
388 h->initialize(CHECK);
389 // allocate instance and return via TLS
390 oop obj = h->allocate_instance(CHECK);
391 current->set_vm_result_oop(obj);
392 JRT_END
393
394
395 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
396 #ifndef PRODUCT
397 if (PrintC1Statistics) {
398 _new_type_array_slowcase_cnt++;
399 }
400 #endif
401 // Note: no handle for klass needed since they are not used
402 // anymore after new_typeArray() and no GC can happen before.
403 // (This may have to change if this code changes!)
404 assert(klass->is_klass(), "not a class");
405 BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
406 oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
407 current->set_vm_result_oop(obj);
408   // This is pretty rare, but this runtime patch is stressful to deoptimization
409   // if we deoptimize here, so force a deopt to stress the path.
410 if (DeoptimizeALot) {
411 deopt_caller(current);
412 }
413
419 if (PrintC1Statistics) {
420 _new_object_array_slowcase_cnt++;
421 }
422 #endif
423 // Note: no handle for klass needed since they are not used
424 // anymore after new_objArray() and no GC can happen before.
425 // (This may have to change if this code changes!)
426 assert(array_klass->is_klass(), "not a class");
427 Handle holder(current, array_klass->klass_holder()); // keep the klass alive
428 Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
429 objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
430 current->set_vm_result_oop(obj);
431   // This is pretty rare, but this runtime patch is stressful to deoptimization
432   // if we deoptimize here, so force a deopt to stress the path.
433 if (DeoptimizeALot) {
434 deopt_caller(current);
435 }
436 JRT_END
437
438
439 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
440 #ifndef PRODUCT
441 if (PrintC1Statistics) {
442 _new_multi_array_slowcase_cnt++;
443 }
444 #endif
445 assert(klass->is_klass(), "not a class");
446 assert(rank >= 1, "rank must be nonzero");
447 Handle holder(current, klass->klass_holder()); // keep the klass alive
448 oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
449 current->set_vm_result_oop(obj);
450 JRT_END
451
452
453 JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, C1StubId id))
454 tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
455 JRT_END
456
457
458 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
459 ResourceMark rm(current);
460 const char* klass_name = obj->klass()->external_name();
461 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
462 JRT_END
463
464
465 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
466 // associated with the top activation record. The inlinee method (which may be inlined into the
467 // enclosing method) is passed as an argument. In order to do that, it is embedded in the code as
468 // a constant.
469 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
470 nmethod* osr_nm = nullptr;
471 methodHandle method(current, m);
472
756 _throw_class_cast_exception_count++;
757 }
758 #endif
759 ResourceMark rm(current);
760 char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
761 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
762 JRT_END
763
764
765 JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
766 #ifndef PRODUCT
767 if (PrintC1Statistics) {
768 _throw_incompatible_class_change_error_count++;
769 }
770 #endif
771 ResourceMark rm(current);
772 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
773 JRT_END
774
775
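// Slow-path monitor enter for C1-compiled code. With LockingMode == LM_MONITOR compiled code
// does not set the lock's object, so it is stored here before entering the monitor.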
776 JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
777 #ifndef PRODUCT
778 if (PrintC1Statistics) {
779 _monitorenter_slowcase_cnt++;
780 }
781 #endif
782 if (LockingMode == LM_MONITOR) {
783 lock->set_obj(obj);
784 }
785 assert(obj == lock->obj(), "must match");
786 SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
787 JRT_END
788
789
790 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
791 assert(current == JavaThread::current(), "pre-condition");
792 #ifndef PRODUCT
793 if (PrintC1Statistics) {
794 _monitorexit_slowcase_cnt++;
795 }
961 RegisterMap::WalkContinuation::skip);
962 frame runtime_frame = current->last_frame();
963   frame caller_frame = runtime_frame.sender(&reg_map);
964
965 // last java frame on stack
966 vframeStream vfst(current, true);
967 assert(!vfst.at_end(), "Java frame must exist");
968
969 methodHandle caller_method(current, vfst.method());
970   // Note that caller_method->code() may not be the same as caller_code because of OSR.
971 // Note also that in the presence of inlining it is not guaranteed
972 // that caller_method() == caller_code->method()
973
974 int bci = vfst.bci();
975 Bytecodes::Code code = caller_method()->java_code_at(bci);
976
977 // this is used by assertions in the access_field_patching_id
978 BasicType patch_field_type = T_ILLEGAL;
979 bool deoptimize_for_volatile = false;
980 bool deoptimize_for_atomic = false;
981 int patch_field_offset = -1;
982 Klass* init_klass = nullptr; // klass needed by load_klass_patching code
983 Klass* load_klass = nullptr; // klass needed by load_klass_patching code
984 Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
985 Handle appendix(current, nullptr); // oop needed by appendix_patching code
986 bool load_klass_or_mirror_patch_id =
987 (stub_id == C1StubId::load_klass_patching_id || stub_id == C1StubId::load_mirror_patching_id);
988
989 if (stub_id == C1StubId::access_field_patching_id) {
990
991 Bytecode_field field_access(caller_method, bci);
992 fieldDescriptor result; // initialize class if needed
993 Bytecodes::Code code = field_access.code();
994 constantPoolHandle constants(current, caller_method->constants());
995 LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
996 patch_field_offset = result.offset();
997
998   // If we're patching a field which is volatile then at compile time it
999   // must not have been known to be volatile, so the generated code
1000 // isn't correct for a volatile reference. The nmethod has to be
1004 // used for patching references to oops which don't need special
1005 // handling in the volatile case.
1006
1007 deoptimize_for_volatile = result.access_flags().is_volatile();
1008
1009 // If we are patching a field which should be atomic, then
1010 // the generated code is not correct either, force deoptimizing.
1011 // We need to only cover T_LONG and T_DOUBLE fields, as we can
1012 // break access atomicity only for them.
1013
1014 // Strictly speaking, the deoptimization on 64-bit platforms
1015 // is unnecessary, and T_LONG stores on 32-bit platforms need
1016 // to be handled by special patching code when AlwaysAtomicAccesses
1017   // becomes a product feature. At this point, we are still going
1018   // for the deoptimization for consistency with volatile
1019 // accesses.
1020
1021 patch_field_type = result.field_type();
1022 deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1023
1024 } else if (load_klass_or_mirror_patch_id) {
1025 Klass* k = nullptr;
1026 switch (code) {
1027 case Bytecodes::_putstatic:
1028 case Bytecodes::_getstatic:
1029 { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1030 init_klass = klass;
1031 mirror = Handle(current, klass->java_mirror());
1032 }
1033 break;
1034 case Bytecodes::_new:
1035 { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1036 k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1037 }
1038 break;
1039 case Bytecodes::_multianewarray:
1040 { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1041 k = caller_method->constants()->klass_at(mna.index(), CHECK);
1042 }
1043 break;
1044 case Bytecodes::_instanceof:
1045 { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1046 k = caller_method->constants()->klass_at(io.index(), CHECK);
1047 }
1048 break;
1049 case Bytecodes::_checkcast:
1050 { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1051 k = caller_method->constants()->klass_at(cc.index(), CHECK);
1052 }
1053 break;
1054 case Bytecodes::_anewarray:
1055 { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1056 Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1057 k = ek->array_klass(CHECK);
1058 }
1059 break;
1060 case Bytecodes::_ldc:
1061 case Bytecodes::_ldc_w:
1062 case Bytecodes::_ldc2_w:
1063 {
1064 Bytecode_loadconstant cc(caller_method, bci);
1065 oop m = cc.resolve_constant(CHECK);
1066 mirror = Handle(current, m);
1067 }
1068 break;
1069 default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1070 }
1071 load_klass = k;
1072 } else if (stub_id == C1StubId::load_appendix_patching_id) {
1073 Bytecode_invoke bytecode(caller_method, bci);
1074 Bytecodes::Code bc = bytecode.invoke_code();
1075
1076 CallInfo info;
1077 constantPoolHandle pool(current, caller_method->constants());
1078 int index = bytecode.index();
1079 LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1080 switch (bc) {
1081 case Bytecodes::_invokehandle: {
1082 ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1083 appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1084 break;
1085 }
1086 case Bytecodes::_invokedynamic: {
1087 appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1088 break;
1089 }
1090 default: fatal("unexpected bytecode for load_appendix_patching_id");
1091 }
1092 } else {
1093 ShouldNotReachHere();
1094 }
1095
1096 if (deoptimize_for_volatile || deoptimize_for_atomic) {
1097 // At compile time we assumed the field wasn't volatile/atomic but after
1098 // loading it turns out it was volatile/atomic so we have to throw the
1099 // compiled code out and let it be regenerated.
1100 if (TracePatching) {
1101 if (deoptimize_for_volatile) {
1102 tty->print_cr("Deoptimizing for patching volatile field reference");
1103 }
1104 if (deoptimize_for_atomic) {
1105 tty->print_cr("Deoptimizing for patching atomic field reference");
1106 }
1107 }
1108
1109 // It's possible the nmethod was invalidated in the last
1110 // safepoint, but if it's still alive then make it not_entrant.
1111 nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1112 if (nm != nullptr) {
1113 nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1114 }
1115
1116 Deoptimization::deoptimize_frame(current, caller_frame.id());
1117
1118 // Return to the now deoptimized frame.
1119 }
1120
1121 // Now copy code back
1122
1123 {
1124 MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1125 //
1126 // Deoptimization may have happened while we waited for the lock.
1537 #ifndef PRODUCT
1538 void Runtime1::print_statistics() {
1539 tty->print_cr("C1 Runtime statistics:");
1540 tty->print_cr(" _resolve_invoke_virtual_cnt: %u", SharedRuntime::_resolve_virtual_ctr);
1541 tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
1542 tty->print_cr(" _resolve_invoke_static_cnt: %u", SharedRuntime::_resolve_static_ctr);
1543 tty->print_cr(" _handle_wrong_method_cnt: %u", SharedRuntime::_wrong_method_ctr);
1544 tty->print_cr(" _ic_miss_cnt: %u", SharedRuntime::_ic_miss_ctr);
1545 tty->print_cr(" _generic_arraycopystub_cnt: %u", _generic_arraycopystub_cnt);
1546 tty->print_cr(" _byte_arraycopy_cnt: %u", _byte_arraycopy_stub_cnt);
1547 tty->print_cr(" _short_arraycopy_cnt: %u", _short_arraycopy_stub_cnt);
1548 tty->print_cr(" _int_arraycopy_cnt: %u", _int_arraycopy_stub_cnt);
1549 tty->print_cr(" _long_arraycopy_cnt: %u", _long_arraycopy_stub_cnt);
1550 tty->print_cr(" _oop_arraycopy_cnt: %u", _oop_arraycopy_stub_cnt);
1551 tty->print_cr(" _arraycopy_slowcase_cnt: %u", _arraycopy_slowcase_cnt);
1552 tty->print_cr(" _arraycopy_checkcast_cnt: %u", _arraycopy_checkcast_cnt);
1553 tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);
1554
1555 tty->print_cr(" _new_type_array_slowcase_cnt: %u", _new_type_array_slowcase_cnt);
1556 tty->print_cr(" _new_object_array_slowcase_cnt: %u", _new_object_array_slowcase_cnt);
1557 tty->print_cr(" _new_instance_slowcase_cnt: %u", _new_instance_slowcase_cnt);
1558 tty->print_cr(" _new_multi_array_slowcase_cnt: %u", _new_multi_array_slowcase_cnt);
1559 tty->print_cr(" _monitorenter_slowcase_cnt: %u", _monitorenter_slowcase_cnt);
1560 tty->print_cr(" _monitorexit_slowcase_cnt: %u", _monitorexit_slowcase_cnt);
1561 tty->print_cr(" _patch_code_slowcase_cnt: %u", _patch_code_slowcase_cnt);
1562
1563 tty->print_cr(" _throw_range_check_exception_count: %u:", _throw_range_check_exception_count);
1564 tty->print_cr(" _throw_index_exception_count: %u:", _throw_index_exception_count);
1565 tty->print_cr(" _throw_div0_exception_count: %u:", _throw_div0_exception_count);
1566 tty->print_cr(" _throw_null_pointer_exception_count: %u:", _throw_null_pointer_exception_count);
1567 tty->print_cr(" _throw_class_cast_exception_count: %u:", _throw_class_cast_exception_count);
1568 tty->print_cr(" _throw_incompatible_class_change_error_count: %u:", _throw_incompatible_class_change_error_count);
1569 tty->print_cr(" _throw_count: %u:", _throw_count);
1570
1571 SharedRuntime::print_ic_miss_histogram();
1572 tty->cr();
1573 }
1574 #endif // PRODUCT
|
33 #include "classfile/vmSymbols.hpp"
34 #include "code/aotCodeCache.hpp"
35 #include "code/codeBlob.hpp"
36 #include "code/compiledIC.hpp"
37 #include "code/scopeDesc.hpp"
38 #include "code/vtableStubs.hpp"
39 #include "compiler/compilationPolicy.hpp"
40 #include "compiler/disassembler.hpp"
41 #include "compiler/oopMap.hpp"
42 #include "gc/shared/barrierSet.hpp"
43 #include "gc/shared/c1/barrierSetC1.hpp"
44 #include "gc/shared/collectedHeap.hpp"
45 #include "interpreter/bytecode.hpp"
46 #include "interpreter/interpreter.hpp"
47 #include "jfr/support/jfrIntrinsics.hpp"
48 #include "logging/log.hpp"
49 #include "memory/oopFactory.hpp"
50 #include "memory/resourceArea.hpp"
51 #include "memory/universe.hpp"
52 #include "oops/access.inline.hpp"
53 #include "oops/flatArrayKlass.hpp"
54 #include "oops/flatArrayOop.inline.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/objArrayOop.inline.hpp"
57 #include "oops/oop.inline.hpp"
58 #include "prims/jvmtiExport.hpp"
59 #include "runtime/atomic.hpp"
60 #include "runtime/fieldDescriptor.inline.hpp"
61 #include "runtime/frame.inline.hpp"
62 #include "runtime/handles.inline.hpp"
63 #include "runtime/interfaceSupport.inline.hpp"
64 #include "runtime/javaCalls.hpp"
65 #include "runtime/sharedRuntime.hpp"
66 #include "runtime/stackWatermarkSet.hpp"
67 #include "runtime/stubRoutines.hpp"
68 #include "runtime/vframe.inline.hpp"
69 #include "runtime/vframeArray.hpp"
70 #include "runtime/vm_version.hpp"
71 #include "utilities/copy.hpp"
72 #include "utilities/events.hpp"
73
74
105 }
106
107 // Implementation of Runtime1
108
109 CodeBlob* Runtime1::_blobs[(int)C1StubId::NUM_STUBIDS];
110
111 #define C1_BLOB_NAME_DEFINE(name) "C1 Runtime " # name "_blob",
112 const char *Runtime1::_blob_names[] = {
113 C1_STUBS_DO(C1_BLOB_NAME_DEFINE)
114 };
115 #undef C1_BLOB_NAME_DEFINE
116
117 #ifndef PRODUCT
118 // statistics
119 uint Runtime1::_generic_arraycopystub_cnt = 0;
120 uint Runtime1::_arraycopy_slowcase_cnt = 0;
121 uint Runtime1::_arraycopy_checkcast_cnt = 0;
122 uint Runtime1::_arraycopy_checkcast_attempt_cnt = 0;
123 uint Runtime1::_new_type_array_slowcase_cnt = 0;
124 uint Runtime1::_new_object_array_slowcase_cnt = 0;
125 uint Runtime1::_new_null_free_array_slowcase_cnt = 0;
126 uint Runtime1::_new_instance_slowcase_cnt = 0;
127 uint Runtime1::_new_multi_array_slowcase_cnt = 0;
128 uint Runtime1::_load_flat_array_slowcase_cnt = 0;
129 uint Runtime1::_store_flat_array_slowcase_cnt = 0;
130 uint Runtime1::_substitutability_check_slowcase_cnt = 0;
131 uint Runtime1::_buffer_inline_args_slowcase_cnt = 0;
132 uint Runtime1::_buffer_inline_args_no_receiver_slowcase_cnt = 0;
133 uint Runtime1::_monitorenter_slowcase_cnt = 0;
134 uint Runtime1::_monitorexit_slowcase_cnt = 0;
135 uint Runtime1::_patch_code_slowcase_cnt = 0;
136 uint Runtime1::_throw_range_check_exception_count = 0;
137 uint Runtime1::_throw_index_exception_count = 0;
138 uint Runtime1::_throw_div0_exception_count = 0;
139 uint Runtime1::_throw_null_pointer_exception_count = 0;
140 uint Runtime1::_throw_class_cast_exception_count = 0;
141 uint Runtime1::_throw_incompatible_class_change_error_count = 0;
142 uint Runtime1::_throw_illegal_monitor_state_exception_count = 0;
143 uint Runtime1::_throw_identity_exception_count = 0;
144 uint Runtime1::_throw_count = 0;
145
146 static uint _byte_arraycopy_stub_cnt = 0;
147 static uint _short_arraycopy_stub_cnt = 0;
148 static uint _int_arraycopy_stub_cnt = 0;
149 static uint _long_arraycopy_stub_cnt = 0;
150 static uint _oop_arraycopy_stub_cnt = 0;
151
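// Returns the address of the arraycopy stub counter for the given element type,
// so generated code can update the statistic directly.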
152 address Runtime1::arraycopy_count_address(BasicType type) {
153 switch (type) {
154 case T_BOOLEAN:
155 case T_BYTE: return (address)&_byte_arraycopy_stub_cnt;
156 case T_CHAR:
157 case T_SHORT: return (address)&_short_arraycopy_stub_cnt;
158 case T_FLOAT:
159 case T_INT: return (address)&_int_arraycopy_stub_cnt;
160 case T_DOUBLE:
161 case T_LONG: return (address)&_long_arraycopy_stub_cnt;
162 case T_ARRAY:
163 case T_OBJECT: return (address)&_oop_arraycopy_stub_cnt;
366 #endif
367 FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
368 FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32C());
369 FUNCTION_CASE(entry, StubRoutines::vectorizedMismatch());
370 FUNCTION_CASE(entry, StubRoutines::dexp());
371 FUNCTION_CASE(entry, StubRoutines::dlog());
372 FUNCTION_CASE(entry, StubRoutines::dlog10());
373 FUNCTION_CASE(entry, StubRoutines::dpow());
374 FUNCTION_CASE(entry, StubRoutines::dsin());
375 FUNCTION_CASE(entry, StubRoutines::dcos());
376 FUNCTION_CASE(entry, StubRoutines::dtan());
377 FUNCTION_CASE(entry, StubRoutines::dtanh());
378 FUNCTION_CASE(entry, StubRoutines::dcbrt());
379
380 #undef FUNCTION_CASE
381
382 // Soft float adds more runtime names.
383 return pd_name_for_address(entry);
384 }
385
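// Shared slow-path instance allocation helper: checks that the klass may be instantiated,
// ensures it is initialized, allocates the instance, and returns it via the thread's vm_result_oop.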
386 static void allocate_instance(JavaThread* current, Klass* klass, TRAPS) {
387 #ifndef PRODUCT
388 if (PrintC1Statistics) {
389 Runtime1::_new_instance_slowcase_cnt++;
390 }
391 #endif
392 assert(klass->is_klass(), "not a class");
393 Handle holder(current, klass->klass_holder()); // keep the klass alive
394 InstanceKlass* h = InstanceKlass::cast(klass);
395 h->check_valid_for_instantiation(true, CHECK);
396 // make sure klass is initialized
397 h->initialize(CHECK);
398 // allocate instance and return via TLS
399 oop obj = h->allocate_instance(CHECK);
400 current->set_vm_result_oop(obj);
401 }
402
403 JRT_ENTRY(void, Runtime1::new_instance(JavaThread* current, Klass* klass))
404 allocate_instance(current, klass, CHECK);
405 JRT_END
406
407 JRT_ENTRY(void, Runtime1::new_type_array(JavaThread* current, Klass* klass, jint length))
408 #ifndef PRODUCT
409 if (PrintC1Statistics) {
410 _new_type_array_slowcase_cnt++;
411 }
412 #endif
413 // Note: no handle for klass needed since they are not used
414 // anymore after new_typeArray() and no GC can happen before.
415 // (This may have to change if this code changes!)
416 assert(klass->is_klass(), "not a class");
417 BasicType elt_type = TypeArrayKlass::cast(klass)->element_type();
418 oop obj = oopFactory::new_typeArray(elt_type, length, CHECK);
419 current->set_vm_result_oop(obj);
420   // This is pretty rare, but this runtime patch is stressful to deoptimization
421   // if we deoptimize here, so force a deopt to stress the path.
422 if (DeoptimizeALot) {
423 deopt_caller(current);
424 }
425
431 if (PrintC1Statistics) {
432 _new_object_array_slowcase_cnt++;
433 }
434 #endif
435 // Note: no handle for klass needed since they are not used
436 // anymore after new_objArray() and no GC can happen before.
437 // (This may have to change if this code changes!)
438 assert(array_klass->is_klass(), "not a class");
439 Handle holder(current, array_klass->klass_holder()); // keep the klass alive
440 Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
441 objArrayOop obj = oopFactory::new_objArray(elem_klass, length, CHECK);
442 current->set_vm_result_oop(obj);
443   // This is pretty rare, but this runtime patch is stressful to deoptimization
444   // if we deoptimize here, so force a deopt to stress the path.
445 if (DeoptimizeALot) {
446 deopt_caller(current);
447 }
448 JRT_END
449
450
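// Slow-path allocation of a null-restricted (null-free) array whose element klass is an inline klass.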
451 JRT_ENTRY(void, Runtime1::new_null_free_array(JavaThread* current, Klass* array_klass, jint length))
452 NOT_PRODUCT(_new_null_free_array_slowcase_cnt++;)
453 // TODO 8350865 This is dead code since 8325660 because null-free arrays can only be created via the factory methods that are not yet implemented in C1. Should probably be fixed by 8265122.
454
455 // Note: no handle for klass needed since they are not used
456 // anymore after new_objArray() and no GC can happen before.
457 // (This may have to change if this code changes!)
458 assert(array_klass->is_klass(), "not a class");
459 Handle holder(THREAD, array_klass->klass_holder()); // keep the klass alive
460 Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
461 assert(elem_klass->is_inline_klass(), "must be");
462 InlineKlass* vk = InlineKlass::cast(elem_klass);
463 // Logically creates elements, ensure klass init
464 elem_klass->initialize(CHECK);
465   arrayOop obj = oopFactory::new_objArray(elem_klass, length, ArrayKlass::ArrayProperties::NULL_RESTRICTED, CHECK);
466 current->set_vm_result_oop(obj);
467   // This is pretty rare, but this runtime patch is stressful to deoptimization
468   // if we deoptimize here, so force a deopt to stress the path.
469 if (DeoptimizeALot) {
470 deopt_caller(current);
471 }
472 JRT_END
473
474
475 JRT_ENTRY(void, Runtime1::new_multi_array(JavaThread* current, Klass* klass, int rank, jint* dims))
476 #ifndef PRODUCT
477 if (PrintC1Statistics) {
478 _new_multi_array_slowcase_cnt++;
479 }
480 #endif
481 assert(klass->is_klass(), "not a class");
482 assert(rank >= 1, "rank must be nonzero");
483 Handle holder(current, klass->klass_holder()); // keep the klass alive
484 oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
485 current->set_vm_result_oop(obj);
486 JRT_END
487
488
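// Record in the caller's MethodData that a flat (and possibly null-free) array was observed at the
// current bci. Only done when the caller is profiling (CompLevel_full_profile) and C1UpdateMethodData is set.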
489 static void profile_flat_array(JavaThread* current, bool load, bool null_free) {
490 ResourceMark rm(current);
491 vframeStream vfst(current, true);
492 assert(!vfst.at_end(), "Java frame must exist");
493 // Check if array access profiling is enabled
494 if (vfst.nm()->comp_level() != CompLevel_full_profile || !C1UpdateMethodData) {
495 return;
496 }
497 int bci = vfst.bci();
498 Method* method = vfst.method();
499 MethodData* md = method->method_data();
500 if (md != nullptr) {
501 // Lock to access ProfileData, and ensure lock is not broken by a safepoint
502 MutexLocker ml(md->extra_data_lock(), Mutex::_no_safepoint_check_flag);
503
504 ProfileData* data = md->bci_to_data(bci);
505 assert(data != nullptr, "incorrect profiling entry");
506 if (data->is_ArrayLoadData()) {
507 assert(load, "should be an array load");
508 ArrayLoadData* load_data = (ArrayLoadData*) data;
509 load_data->set_flat_array();
510 if (null_free) {
511 load_data->set_null_free_array();
512 }
513 } else {
514 assert(data->is_ArrayStoreData(), "");
515 assert(!load, "should be an array store");
516 ArrayStoreData* store_data = (ArrayStoreData*) data;
517 store_data->set_flat_array();
518 if (null_free) {
519 store_data->set_null_free_array();
520 }
521 }
522 }
523 }
524
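// Slow-path load of an element from a flat array; the loaded value is returned via the thread's vm_result_oop.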
525 JRT_ENTRY(void, Runtime1::load_flat_array(JavaThread* current, flatArrayOopDesc* array, int index))
526 assert(array->klass()->is_flatArray_klass(), "should not be called");
527 profile_flat_array(current, true, array->is_null_free_array());
528
529 NOT_PRODUCT(_load_flat_array_slowcase_cnt++;)
530 assert(array->length() > 0 && index < array->length(), "already checked");
531 flatArrayHandle vah(current, array);
532 oop obj = array->obj_at(index, CHECK);
533 current->set_vm_result_oop(obj);
534 JRT_END
535
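// Slow-path store of an element into a flat array. A null store into a null-free array throws NullPointerException.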
536 JRT_ENTRY(void, Runtime1::store_flat_array(JavaThread* current, flatArrayOopDesc* array, int index, oopDesc* value))
537   // TODO 8350865 We can call here with a non-flat array because of LIR_Assembler::emit_opFlattenedArrayCheck
538 if (array->klass()->is_flatArray_klass()) {
539 profile_flat_array(current, false, array->is_null_free_array());
540 }
541
542 NOT_PRODUCT(_store_flat_array_slowcase_cnt++;)
543 if (value == nullptr && array->is_null_free_array()) {
544 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_NullPointerException());
545 } else {
546 assert(array->klass()->is_flatArray_klass(), "should not be called");
547 array->obj_at_put(index, value, CHECK);
548 }
549 JRT_END
550
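// Calls into Java code (ValueObjectMethods.isSubstitutable) to compare the two operands;
// returns 1 if they are substitutable, 0 otherwise.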
551 JRT_ENTRY(int, Runtime1::substitutability_check(JavaThread* current, oopDesc* left, oopDesc* right))
552 NOT_PRODUCT(_substitutability_check_slowcase_cnt++;)
553 JavaCallArguments args;
554 args.push_oop(Handle(THREAD, left));
555 args.push_oop(Handle(THREAD, right));
556 JavaValue result(T_BOOLEAN);
557 JavaCalls::call_static(&result,
558 vmClasses::ValueObjectMethods_klass(),
559 vmSymbols::isSubstitutable_name(),
560 vmSymbols::object_object_boolean_signature(),
561 &args, CHECK_0);
562 return result.get_jboolean() ? 1 : 0;
563 JRT_END
564
565
566 extern "C" void ps();
567
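// Common helper for the buffer_inline_args entry points: allocates buffers for the incoming
// inline-type arguments (optionally including the receiver) and returns the result via the
// thread's vm_result_oop.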
568 void Runtime1::buffer_inline_args_impl(JavaThread* current, Method* m, bool allocate_receiver) {
569 JavaThread* THREAD = current;
570 methodHandle method(current, m); // We are inside the verified_entry or verified_inline_ro_entry of this method.
571 oop obj = SharedRuntime::allocate_inline_types_impl(current, method, allocate_receiver, CHECK);
572 current->set_vm_result_oop(obj);
573 }
574
575 JRT_ENTRY(void, Runtime1::buffer_inline_args(JavaThread* current, Method* method))
576 NOT_PRODUCT(_buffer_inline_args_slowcase_cnt++;)
577 buffer_inline_args_impl(current, method, true);
578 JRT_END
579
580 JRT_ENTRY(void, Runtime1::buffer_inline_args_no_receiver(JavaThread* current, Method* method))
581 NOT_PRODUCT(_buffer_inline_args_no_receiver_slowcase_cnt++;)
582 buffer_inline_args_impl(current, method, false);
583 JRT_END
584
585 JRT_ENTRY(void, Runtime1::unimplemented_entry(JavaThread* current, C1StubId id))
586 tty->print_cr("Runtime1::entry_for(%d) returned unimplemented entry point", (int)id);
587 JRT_END
588
589
590 JRT_ENTRY(void, Runtime1::throw_array_store_exception(JavaThread* current, oopDesc* obj))
591 ResourceMark rm(current);
592 const char* klass_name = obj->klass()->external_name();
593 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ArrayStoreException(), klass_name);
594 JRT_END
595
596
597 // counter_overflow() is called from within C1-compiled methods. The enclosing method is the method
598 // associated with the top activation record. The inlinee method (which may be inlined into the
599 // enclosing method) is passed as an argument. In order to do that, it is embedded in the code as
600 // a constant.
601 static nmethod* counter_overflow_helper(JavaThread* current, int branch_bci, Method* m) {
602 nmethod* osr_nm = nullptr;
603 methodHandle method(current, m);
604
888 _throw_class_cast_exception_count++;
889 }
890 #endif
891 ResourceMark rm(current);
892 char* message = SharedRuntime::generate_class_cast_message(current, object->klass());
893 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_ClassCastException(), message);
894 JRT_END
895
896
897 JRT_ENTRY(void, Runtime1::throw_incompatible_class_change_error(JavaThread* current))
898 #ifndef PRODUCT
899 if (PrintC1Statistics) {
900 _throw_incompatible_class_change_error_count++;
901 }
902 #endif
903 ResourceMark rm(current);
904 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IncompatibleClassChangeError());
905 JRT_END
906
907
908 JRT_ENTRY(void, Runtime1::throw_illegal_monitor_state_exception(JavaThread* current))
909 NOT_PRODUCT(_throw_illegal_monitor_state_exception_count++;)
910 ResourceMark rm(current);
911 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IllegalMonitorStateException());
912 JRT_END
913
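// Throws java.lang.IdentityException with a message generated from the offending object's class.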
914 JRT_ENTRY(void, Runtime1::throw_identity_exception(JavaThread* current, oopDesc* object))
915 NOT_PRODUCT(_throw_identity_exception_count++;)
916 ResourceMark rm(current);
917 char* message = SharedRuntime::generate_identity_exception_message(current, object->klass());
918 SharedRuntime::throw_and_post_jvmti_exception(current, vmSymbols::java_lang_IdentityException(), message);
919 JRT_END
920
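// Slow-path monitor enter for C1-compiled code. With LockingMode == LM_MONITOR compiled code
// does not set the lock's object, so it is stored here before entering the monitor.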
921 JRT_BLOCK_ENTRY(void, Runtime1::monitorenter(JavaThread* current, oopDesc* obj, BasicObjectLock* lock))
922 #ifndef PRODUCT
923 if (PrintC1Statistics) {
924 _monitorenter_slowcase_cnt++;
925 }
926 #endif
927 if (LockingMode == LM_MONITOR) {
928 lock->set_obj(obj);
929 }
930 assert(obj == lock->obj(), "must match");
931 SharedRuntime::monitor_enter_helper(obj, lock->lock(), current);
932 JRT_END
933
934
935 JRT_LEAF(void, Runtime1::monitorexit(JavaThread* current, BasicObjectLock* lock))
936 assert(current == JavaThread::current(), "pre-condition");
937 #ifndef PRODUCT
938 if (PrintC1Statistics) {
939 _monitorexit_slowcase_cnt++;
940 }
1106 RegisterMap::WalkContinuation::skip);
1107 frame runtime_frame = current->last_frame();
1108   frame caller_frame = runtime_frame.sender(&reg_map);
1109
1110 // last java frame on stack
1111 vframeStream vfst(current, true);
1112 assert(!vfst.at_end(), "Java frame must exist");
1113
1114 methodHandle caller_method(current, vfst.method());
1114   // Note that caller_method->code() may not be the same as caller_code because of OSR.
1116 // Note also that in the presence of inlining it is not guaranteed
1117 // that caller_method() == caller_code->method()
1118
1119 int bci = vfst.bci();
1120 Bytecodes::Code code = caller_method()->java_code_at(bci);
1121
1122 // this is used by assertions in the access_field_patching_id
1123 BasicType patch_field_type = T_ILLEGAL;
1124 bool deoptimize_for_volatile = false;
1125 bool deoptimize_for_atomic = false;
1126 bool deoptimize_for_null_free = false;
1127 bool deoptimize_for_flat = false;
1128 bool deoptimize_for_strict_static = false;
1129 int patch_field_offset = -1;
1130 Klass* init_klass = nullptr; // klass needed by load_klass_patching code
1131 Klass* load_klass = nullptr; // klass needed by load_klass_patching code
1132 Handle mirror(current, nullptr); // oop needed by load_mirror_patching code
1133 Handle appendix(current, nullptr); // oop needed by appendix_patching code
1134 bool load_klass_or_mirror_patch_id =
1135 (stub_id == C1StubId::load_klass_patching_id || stub_id == C1StubId::load_mirror_patching_id);
1136
1137 if (stub_id == C1StubId::access_field_patching_id) {
1138
1139 Bytecode_field field_access(caller_method, bci);
1140 fieldDescriptor result; // initialize class if needed
1141 Bytecodes::Code code = field_access.code();
1142 constantPoolHandle constants(current, caller_method->constants());
1143 LinkResolver::resolve_field_access(result, constants, field_access.index(), caller_method, Bytecodes::java_code(code), CHECK);
1144 patch_field_offset = result.offset();
1145
1146   // If we're patching a field which is volatile then at compile time it
1147   // must not have been known to be volatile, so the generated code
1148 // isn't correct for a volatile reference. The nmethod has to be
1152 // used for patching references to oops which don't need special
1153 // handling in the volatile case.
1154
1155 deoptimize_for_volatile = result.access_flags().is_volatile();
1156
1157 // If we are patching a field which should be atomic, then
1158 // the generated code is not correct either, force deoptimizing.
1159 // We need to only cover T_LONG and T_DOUBLE fields, as we can
1160 // break access atomicity only for them.
1161
1162 // Strictly speaking, the deoptimization on 64-bit platforms
1163 // is unnecessary, and T_LONG stores on 32-bit platforms need
1164 // to be handled by special patching code when AlwaysAtomicAccesses
1165   // becomes a product feature. At this point, we are still going
1166   // for the deoptimization for consistency with volatile
1167 // accesses.
1168
1169 patch_field_type = result.field_type();
1170 deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
1171
1172 // The field we are patching is null-free. Deoptimize and regenerate
1173 // the compiled code if we patch a putfield/putstatic because it
1174 // does not contain the required null check.
1175 deoptimize_for_null_free = result.is_null_free_inline_type() && (field_access.is_putfield() || field_access.is_putstatic());
1176
1177 // The field we are patching is flat. Deoptimize and regenerate
1178 // the compiled code which can't handle the layout of the flat
1179 // field because it was unknown at compile time.
1180 deoptimize_for_flat = result.is_flat();
1181
1182 // Strict statics may require tracking if their class is not fully initialized.
1183 // For now we can bail out of the compiler and let the interpreter handle it.
1184 deoptimize_for_strict_static = result.is_strict_static_unset();
1185 } else if (load_klass_or_mirror_patch_id) {
1186 Klass* k = nullptr;
1187 switch (code) {
1188 case Bytecodes::_putstatic:
1189 case Bytecodes::_getstatic:
1190 { Klass* klass = resolve_field_return_klass(caller_method, bci, CHECK);
1191 init_klass = klass;
1192 mirror = Handle(current, klass->java_mirror());
1193 }
1194 break;
1195 case Bytecodes::_new:
1196 { Bytecode_new bnew(caller_method(), caller_method->bcp_from(bci));
1197 k = caller_method->constants()->klass_at(bnew.index(), CHECK);
1198 }
1199 break;
1200 case Bytecodes::_multianewarray:
1201 { Bytecode_multianewarray mna(caller_method(), caller_method->bcp_from(bci));
1202 k = caller_method->constants()->klass_at(mna.index(), CHECK);
1203 }
1204 break;
1205 case Bytecodes::_instanceof:
1206 { Bytecode_instanceof io(caller_method(), caller_method->bcp_from(bci));
1207 k = caller_method->constants()->klass_at(io.index(), CHECK);
1208 }
1209 break;
1210 case Bytecodes::_checkcast:
1211 { Bytecode_checkcast cc(caller_method(), caller_method->bcp_from(bci));
1212 k = caller_method->constants()->klass_at(cc.index(), CHECK);
1213 }
1214 break;
1215 case Bytecodes::_anewarray:
1216 { Bytecode_anewarray anew(caller_method(), caller_method->bcp_from(bci));
1217 Klass* ek = caller_method->constants()->klass_at(anew.index(), CHECK);
1218 k = ek->array_klass(CHECK);
1219 if (!k->is_typeArray_klass() && !k->is_refArray_klass() && !k->is_flatArray_klass()) {
1220 k = ObjArrayKlass::cast(k)->klass_with_properties(ArrayKlass::ArrayProperties::DEFAULT, THREAD);
1221 }
1222 if (k->is_flatArray_klass()) {
1223 deoptimize_for_flat = true;
1224 }
1225 }
1226 break;
1227 case Bytecodes::_ldc:
1228 case Bytecodes::_ldc_w:
1229 case Bytecodes::_ldc2_w:
1230 {
1231 Bytecode_loadconstant cc(caller_method, bci);
1232 oop m = cc.resolve_constant(CHECK);
1233 mirror = Handle(current, m);
1234 }
1235 break;
1236 default: fatal("unexpected bytecode for load_klass_or_mirror_patch_id");
1237 }
1238 load_klass = k;
1239 } else if (stub_id == C1StubId::load_appendix_patching_id) {
1240 Bytecode_invoke bytecode(caller_method, bci);
1241 Bytecodes::Code bc = bytecode.invoke_code();
1242
1243 CallInfo info;
1244 constantPoolHandle pool(current, caller_method->constants());
1245 int index = bytecode.index();
1246 LinkResolver::resolve_invoke(info, Handle(), pool, index, bc, CHECK);
1247 switch (bc) {
1248 case Bytecodes::_invokehandle: {
1249 ResolvedMethodEntry* entry = pool->cache()->set_method_handle(index, info);
1250 appendix = Handle(current, pool->cache()->appendix_if_resolved(entry));
1251 break;
1252 }
1253 case Bytecodes::_invokedynamic: {
1254 appendix = Handle(current, pool->cache()->set_dynamic_call(info, index));
1255 break;
1256 }
1257 default: fatal("unexpected bytecode for load_appendix_patching_id");
1258 }
1259 } else {
1260 ShouldNotReachHere();
1261 }
1262
1263 if (deoptimize_for_volatile ||
1264 deoptimize_for_atomic ||
1265 deoptimize_for_null_free ||
1266 deoptimize_for_flat ||
1267 deoptimize_for_strict_static) {
1268 // At compile time we assumed the field wasn't volatile/atomic but after
1269 // loading it turns out it was volatile/atomic so we have to throw the
1270 // compiled code out and let it be regenerated.
1271 if (TracePatching) {
1272 if (deoptimize_for_volatile) {
1273 tty->print_cr("Deoptimizing for patching volatile field reference");
1274 }
1275 if (deoptimize_for_atomic) {
1276 tty->print_cr("Deoptimizing for patching atomic field reference");
1277 }
1278 if (deoptimize_for_null_free) {
1279 tty->print_cr("Deoptimizing for patching null-free field reference");
1280 }
1281 if (deoptimize_for_flat) {
1282 tty->print_cr("Deoptimizing for patching flat field or array reference");
1283 }
1284 if (deoptimize_for_strict_static) {
1285 tty->print_cr("Deoptimizing for patching strict static field reference");
1286 }
1287 }
1288
1289 // It's possible the nmethod was invalidated in the last
1290 // safepoint, but if it's still alive then make it not_entrant.
1291 nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
1292 if (nm != nullptr) {
1293 nm->make_not_entrant(nmethod::InvalidationReason::C1_CODEPATCH);
1294 }
1295
1296 Deoptimization::deoptimize_frame(current, caller_frame.id());
1297
1298 // Return to the now deoptimized frame.
1299 }
1300
1301 // Now copy code back
1302
1303 {
1304 MutexLocker ml_code (current, CodeCache_lock, Mutex::_no_safepoint_check_flag);
1305 //
1306 // Deoptimization may have happened while we waited for the lock.
1717 #ifndef PRODUCT
1718 void Runtime1::print_statistics() {
1719 tty->print_cr("C1 Runtime statistics:");
1720 tty->print_cr(" _resolve_invoke_virtual_cnt: %u", SharedRuntime::_resolve_virtual_ctr);
1721 tty->print_cr(" _resolve_invoke_opt_virtual_cnt: %u", SharedRuntime::_resolve_opt_virtual_ctr);
1722 tty->print_cr(" _resolve_invoke_static_cnt: %u", SharedRuntime::_resolve_static_ctr);
1723 tty->print_cr(" _handle_wrong_method_cnt: %u", SharedRuntime::_wrong_method_ctr);
1724 tty->print_cr(" _ic_miss_cnt: %u", SharedRuntime::_ic_miss_ctr);
1725 tty->print_cr(" _generic_arraycopystub_cnt: %u", _generic_arraycopystub_cnt);
1726 tty->print_cr(" _byte_arraycopy_cnt: %u", _byte_arraycopy_stub_cnt);
1727 tty->print_cr(" _short_arraycopy_cnt: %u", _short_arraycopy_stub_cnt);
1728 tty->print_cr(" _int_arraycopy_cnt: %u", _int_arraycopy_stub_cnt);
1729 tty->print_cr(" _long_arraycopy_cnt: %u", _long_arraycopy_stub_cnt);
1730 tty->print_cr(" _oop_arraycopy_cnt: %u", _oop_arraycopy_stub_cnt);
1731 tty->print_cr(" _arraycopy_slowcase_cnt: %u", _arraycopy_slowcase_cnt);
1732 tty->print_cr(" _arraycopy_checkcast_cnt: %u", _arraycopy_checkcast_cnt);
1733 tty->print_cr(" _arraycopy_checkcast_attempt_cnt:%u", _arraycopy_checkcast_attempt_cnt);
1734
1735 tty->print_cr(" _new_type_array_slowcase_cnt: %u", _new_type_array_slowcase_cnt);
1736 tty->print_cr(" _new_object_array_slowcase_cnt: %u", _new_object_array_slowcase_cnt);
1737 tty->print_cr(" _new_null_free_array_slowcase_cnt: %u", _new_null_free_array_slowcase_cnt);
1738 tty->print_cr(" _new_instance_slowcase_cnt: %u", _new_instance_slowcase_cnt);
1739 tty->print_cr(" _new_multi_array_slowcase_cnt: %u", _new_multi_array_slowcase_cnt);
1740 tty->print_cr(" _load_flat_array_slowcase_cnt: %u", _load_flat_array_slowcase_cnt);
1741 tty->print_cr(" _store_flat_array_slowcase_cnt: %u", _store_flat_array_slowcase_cnt);
1742 tty->print_cr(" _substitutability_check_slowcase_cnt: %u", _substitutability_check_slowcase_cnt);
1743 tty->print_cr(" _buffer_inline_args_slowcase_cnt:%u", _buffer_inline_args_slowcase_cnt);
1744 tty->print_cr(" _buffer_inline_args_no_receiver_slowcase_cnt:%u", _buffer_inline_args_no_receiver_slowcase_cnt);
1745
1746 tty->print_cr(" _monitorenter_slowcase_cnt: %u", _monitorenter_slowcase_cnt);
1747 tty->print_cr(" _monitorexit_slowcase_cnt: %u", _monitorexit_slowcase_cnt);
1748 tty->print_cr(" _patch_code_slowcase_cnt: %u", _patch_code_slowcase_cnt);
1749
1750 tty->print_cr(" _throw_range_check_exception_count: %u:", _throw_range_check_exception_count);
1751 tty->print_cr(" _throw_index_exception_count: %u:", _throw_index_exception_count);
1752 tty->print_cr(" _throw_div0_exception_count: %u:", _throw_div0_exception_count);
1753 tty->print_cr(" _throw_null_pointer_exception_count: %u:", _throw_null_pointer_exception_count);
1754 tty->print_cr(" _throw_class_cast_exception_count: %u:", _throw_class_cast_exception_count);
1755 tty->print_cr(" _throw_incompatible_class_change_error_count: %u:", _throw_incompatible_class_change_error_count);
1756 tty->print_cr(" _throw_illegal_monitor_state_exception_count: %u:", _throw_illegal_monitor_state_exception_count);
1757 tty->print_cr(" _throw_identity_exception_count: %u:", _throw_identity_exception_count);
1758 tty->print_cr(" _throw_count: %u:", _throw_count);
1759
1760 SharedRuntime::print_ic_miss_histogram();
1761 tty->cr();
1762 }
1763 #endif // PRODUCT
|