461 case vmIntrinsics::_getAndAddShort: return inline_unsafe_load_store(T_SHORT, LS_get_add, Volatile);
462 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_get_add, Volatile);
463 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
464
465 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
466 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
467 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
468 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
469 case vmIntrinsics::_getAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
470
471 case vmIntrinsics::_loadFence:
472 case vmIntrinsics::_storeFence:
473 case vmIntrinsics::_storeStoreFence:
474 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
475
476 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
477
478 case vmIntrinsics::_currentCarrierThread: return inline_native_currentCarrierThread();
479 case vmIntrinsics::_currentThread: return inline_native_currentThread();
480 case vmIntrinsics::_setCurrentThread: return inline_native_setCurrentThread();
481
482 case vmIntrinsics::_scopedValueCache: return inline_native_scopedValueCache();
483 case vmIntrinsics::_setScopedValueCache: return inline_native_setScopedValueCache();
484
485 #if INCLUDE_JVMTI
486 case vmIntrinsics::_notifyJvmtiVThreadStart: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_start()),
487 "notifyJvmtiStart", true, false);
488 case vmIntrinsics::_notifyJvmtiVThreadEnd: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_end()),
489 "notifyJvmtiEnd", false, true);
490 case vmIntrinsics::_notifyJvmtiVThreadMount: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_mount()),
491 "notifyJvmtiMount", false, false);
492 case vmIntrinsics::_notifyJvmtiVThreadUnmount: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_unmount()),
493 "notifyJvmtiUnmount", false, false);
494 case vmIntrinsics::_notifyJvmtiVThreadHideFrames: return inline_native_notify_jvmti_hide();
495 case vmIntrinsics::_notifyJvmtiVThreadDisableSuspend: return inline_native_notify_jvmti_sync();
496 #endif
497
498 #ifdef JFR_HAVE_INTRINSICS
499 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JfrTime::time_function()), "counterTime");
500 case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter();
3650
3651 //------------------------inline_native_currentThread------------------
3652 bool LibraryCallKit::inline_native_currentThread() { // intrinsic for Thread.currentThread()
3653 Node* junk = nullptr; // out-parameter of generate_virtual_thread; the written value is not used here
3654 set_result(generate_virtual_thread(junk)); // result is the current virtual-thread oop (helper defined elsewhere in this file)
3655 return true; // intrinsic successfully expanded
3656 }
3657
3658 //------------------------inline_native_setVthread------------------
3659 bool LibraryCallKit::inline_native_setCurrentThread() { // intrinsic for Thread.setCurrentThread(Thread): rebind JavaThread::_vthread
3660 assert(C->method()->changes_current_thread(), // only methods annotated @ChangesCurrentThread may use this intrinsic
3661 "method changes current Thread but is not annotated ChangesCurrentThread");
3662 Node* arr = argument(1); // the new thread oop (argument 0 is the receiver)
3663 Node* thread = _gvn.transform(new ThreadLocalNode()); // raw pointer to the current JavaThread
3664 Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::vthread_offset())); // address of the _vthread slot inside JavaThread
3665 Node* thread_obj_handle
3666 = make_load(nullptr, p, p->bottom_type()->is_ptr(), T_OBJECT, MemNode::unordered); // load the handle referring to the current vthread
3667 thread_obj_handle = _gvn.transform(thread_obj_handle);
3668 const TypePtr *adr_type = _gvn.type(thread_obj_handle)->isa_ptr();
3669 access_store_at(nullptr, thread_obj_handle, adr_type, arr, _gvn.type(arr), T_OBJECT, IN_NATIVE | MO_UNORDERED); // store the new thread through the handle; IN_NATIVE: target is outside the Java heap
3670 JFR_ONLY(extend_setCurrentThread(thread, arr);)  // notify JFR of the thread switch (JFR builds only)
3671 return true;
3672 }
3673
3674 const Type* LibraryCallKit::scopedValueCache_type() { // compile-time type of the scoped-value cache: Object[] with BotPTR
3675 ciKlass* objects_klass = ciObjArrayKlass::make(env()->Object_klass()); // klass for Object[]
3676 const TypeOopPtr* etype = TypeOopPtr::make_from_klass(env()->Object_klass()); // element type: java.lang.Object
3677 const TypeAry* arr0 = TypeAry::make(etype, TypeInt::POS); // array of Objects with non-negative length
3678 
3679 // Because we create the scopedValue cache lazily we have to make the
3680 // type of the result BotPTR.
3681 bool xk = etype->klass_is_exact();
3682 const Type* objects_type = TypeAryPtr::make(TypePtr::BotPTR, arr0, objects_klass, xk, 0); // BotPTR: may be null until lazily created
3683 return objects_type;
3684 }
3685
3686 Node* LibraryCallKit::scopedValueCache_helper() {
3687 Node* thread = _gvn.transform(new ThreadLocalNode());
3688 Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::scopedValueCache_offset()));
3689 // We cannot use immutable_memory() because we might flip onto a
3690 // different carrier thread, at which point we'll need to use that
3691 // carrier thread's cache.
3692 // return _gvn.transform(LoadNode::make(_gvn, nullptr, immutable_memory(), p, p->bottom_type()->is_ptr(),
3693 // TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered));
|
461 case vmIntrinsics::_getAndAddShort: return inline_unsafe_load_store(T_SHORT, LS_get_add, Volatile);
462 case vmIntrinsics::_getAndAddInt: return inline_unsafe_load_store(T_INT, LS_get_add, Volatile);
463 case vmIntrinsics::_getAndAddLong: return inline_unsafe_load_store(T_LONG, LS_get_add, Volatile);
464
465 case vmIntrinsics::_getAndSetByte: return inline_unsafe_load_store(T_BYTE, LS_get_set, Volatile);
466 case vmIntrinsics::_getAndSetShort: return inline_unsafe_load_store(T_SHORT, LS_get_set, Volatile);
467 case vmIntrinsics::_getAndSetInt: return inline_unsafe_load_store(T_INT, LS_get_set, Volatile);
468 case vmIntrinsics::_getAndSetLong: return inline_unsafe_load_store(T_LONG, LS_get_set, Volatile);
469 case vmIntrinsics::_getAndSetReference: return inline_unsafe_load_store(T_OBJECT, LS_get_set, Volatile);
470
471 case vmIntrinsics::_loadFence:
472 case vmIntrinsics::_storeFence:
473 case vmIntrinsics::_storeStoreFence:
474 case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());
475
476 case vmIntrinsics::_onSpinWait: return inline_onspinwait();
477
478 case vmIntrinsics::_currentCarrierThread: return inline_native_currentCarrierThread();
479 case vmIntrinsics::_currentThread: return inline_native_currentThread();
480 case vmIntrinsics::_setCurrentThread: return inline_native_setCurrentThread();
481 case vmIntrinsics::_setLockId: return inline_native_setLockId();
482
483 case vmIntrinsics::_scopedValueCache: return inline_native_scopedValueCache();
484 case vmIntrinsics::_setScopedValueCache: return inline_native_setScopedValueCache();
485
486 #if INCLUDE_JVMTI
487 case vmIntrinsics::_notifyJvmtiVThreadStart: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_start()),
488 "notifyJvmtiStart", true, false);
489 case vmIntrinsics::_notifyJvmtiVThreadEnd: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_end()),
490 "notifyJvmtiEnd", false, true);
491 case vmIntrinsics::_notifyJvmtiVThreadMount: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_mount()),
492 "notifyJvmtiMount", false, false);
493 case vmIntrinsics::_notifyJvmtiVThreadUnmount: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_unmount()),
494 "notifyJvmtiUnmount", false, false);
495 case vmIntrinsics::_notifyJvmtiVThreadHideFrames: return inline_native_notify_jvmti_hide();
496 case vmIntrinsics::_notifyJvmtiVThreadDisableSuspend: return inline_native_notify_jvmti_sync();
497 #endif
498
499 #ifdef JFR_HAVE_INTRINSICS
500 case vmIntrinsics::_counterTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JfrTime::time_function()), "counterTime");
501 case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter();
3651
3652 //------------------------inline_native_currentThread------------------
3653 bool LibraryCallKit::inline_native_currentThread() { // intrinsic for Thread.currentThread()
3654 Node* junk = nullptr; // out-parameter of generate_virtual_thread; the written value is not used here
3655 set_result(generate_virtual_thread(junk)); // result is the current virtual-thread oop (helper defined elsewhere in this file)
3656 return true; // intrinsic successfully expanded
3657 }
3658
3659 //------------------------inline_native_setVthread------------------
3660 bool LibraryCallKit::inline_native_setCurrentThread() { // intrinsic for Thread.setCurrentThread(Thread): rebind JavaThread::_vthread and update lock_id
3661 assert(C->method()->changes_current_thread(), // only methods annotated @ChangesCurrentThread may use this intrinsic
3662 "method changes current Thread but is not annotated ChangesCurrentThread");
3663 Node* arr = argument(1); // the new thread oop (argument 0 is the receiver)
3664 Node* thread = _gvn.transform(new ThreadLocalNode()); // raw pointer to the current JavaThread
3665 Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::vthread_offset())); // address of the _vthread slot inside JavaThread
3666 Node* thread_obj_handle
3667 = make_load(nullptr, p, p->bottom_type()->is_ptr(), T_OBJECT, MemNode::unordered); // load the handle referring to the current vthread
3668 thread_obj_handle = _gvn.transform(thread_obj_handle);
3669 const TypePtr *adr_type = _gvn.type(thread_obj_handle)->isa_ptr();
3670 access_store_at(nullptr, thread_obj_handle, adr_type, arr, _gvn.type(arr), T_OBJECT, IN_NATIVE | MO_UNORDERED); // store the new thread through the handle; IN_NATIVE: target is outside the Java heap
3671 
3672 // Change the lock_id of the JavaThread
3673 Node* tid = load_field_from_object(arr, "tid", "J"); // read the new thread's Java-level id (long field "tid")
3674 Node* thread_id_offset = basic_plus_adr(thread, in_bytes(JavaThread::lock_id_offset())); // address of JavaThread::_lock_id
3675 Node* tid_memory = store_to_memory(control(), thread_id_offset, tid, T_LONG, Compile::AliasIdxRaw, MemNode::unordered, true); // raw unordered store; NOTE(review): tid_memory is unused — store_to_memory already wires the memory graph
3676 
3677 JFR_ONLY(extend_setCurrentThread(thread, arr);)  // notify JFR of the thread switch (JFR builds only)
3678 return true;
3679 }
3680
3681 bool LibraryCallKit::inline_native_setLockId() { // intrinsic: store a caller-supplied long id into JavaThread::_lock_id
3682 Node* thread = _gvn.transform(new ThreadLocalNode()); // raw pointer to the current JavaThread
3683 Node* thread_id_offset = basic_plus_adr(thread, in_bytes(JavaThread::lock_id_offset())); // address of JavaThread::_lock_id
3684 Node* tid_memory = store_to_memory(control(), thread_id_offset, ConvL2X(argument(0)), T_LONG, Compile::AliasIdxRaw, MemNode::unordered, true); // argument(0) is the long id (static intrinsic — no receiver); NOTE(review): tid_memory is unused
3685 return true;
3686 }
3687
3688 const Type* LibraryCallKit::scopedValueCache_type() { // compile-time type of the scoped-value cache: Object[] with BotPTR
3689 ciKlass* objects_klass = ciObjArrayKlass::make(env()->Object_klass()); // klass for Object[]
3690 const TypeOopPtr* etype = TypeOopPtr::make_from_klass(env()->Object_klass()); // element type: java.lang.Object
3691 const TypeAry* arr0 = TypeAry::make(etype, TypeInt::POS); // array of Objects with non-negative length
3692 
3693 // Because we create the scopedValue cache lazily we have to make the
3694 // type of the result BotPTR.
3695 bool xk = etype->klass_is_exact();
3696 const Type* objects_type = TypeAryPtr::make(TypePtr::BotPTR, arr0, objects_klass, xk, 0); // BotPTR: may be null until lazily created
3697 return objects_type;
3698 }
3699
3700 Node* LibraryCallKit::scopedValueCache_helper() {
3701 Node* thread = _gvn.transform(new ThreadLocalNode());
3702 Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::scopedValueCache_offset()));
3703 // We cannot use immutable_memory() because we might flip onto a
3704 // different carrier thread, at which point we'll need to use that
3705 // carrier thread's cache.
3706 // return _gvn.transform(LoadNode::make(_gvn, nullptr, immutable_memory(), p, p->bottom_type()->is_ptr(),
3707 // TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered));
|