
src/hotspot/share/opto/library_call.cpp

Old version:

 461   case vmIntrinsics::_getAndAddShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_add,       Volatile);
 462   case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
 463   case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
 464 
 465   case vmIntrinsics::_getAndSetByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_set,       Volatile);
 466   case vmIntrinsics::_getAndSetShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_set,       Volatile);
 467   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 468   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 469   case vmIntrinsics::_getAndSetReference:               return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 470 
 471   case vmIntrinsics::_loadFence:
 472   case vmIntrinsics::_storeFence:
 473   case vmIntrinsics::_storeStoreFence:
 474   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 475 
 476   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 477 
 478   case vmIntrinsics::_currentCarrierThread:     return inline_native_currentCarrierThread();
 479   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 480   case vmIntrinsics::_setCurrentThread:         return inline_native_setCurrentThread();

 481 
 482   case vmIntrinsics::_scopedValueCache:          return inline_native_scopedValueCache();
 483   case vmIntrinsics::_setScopedValueCache:       return inline_native_setScopedValueCache();
 484 
 485   case vmIntrinsics::_Continuation_pin:          return inline_native_Continuation_pinning(false);
 486   case vmIntrinsics::_Continuation_unpin:        return inline_native_Continuation_pinning(true);
 487 
 488 #if INCLUDE_JVMTI
 489   case vmIntrinsics::_notifyJvmtiVThreadStart:   return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_start()),
 490                                                                                          "notifyJvmtiStart", true, false);
 491   case vmIntrinsics::_notifyJvmtiVThreadEnd:     return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_end()),
 492                                                                                          "notifyJvmtiEnd", false, true);
 493   case vmIntrinsics::_notifyJvmtiVThreadMount:   return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_mount()),
 494                                                                                          "notifyJvmtiMount", false, false);
 495   case vmIntrinsics::_notifyJvmtiVThreadUnmount: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_unmount()),
 496                                                                                          "notifyJvmtiUnmount", false, false);
 497   case vmIntrinsics::_notifyJvmtiVThreadHideFrames:     return inline_native_notify_jvmti_hide();
 498   case vmIntrinsics::_notifyJvmtiVThreadDisableSuspend: return inline_native_notify_jvmti_sync();
 499 #endif
 500 

3671 
3672 //------------------------inline_native_currentThread------------------
3673 bool LibraryCallKit::inline_native_currentThread() {
3674   Node* junk = nullptr;
3675   set_result(generate_virtual_thread(junk));
3676   return true;
3677 }
3678 
3679 //------------------------inline_native_setCurrentThread------------------
3680 bool LibraryCallKit::inline_native_setCurrentThread() {
3681   assert(C->method()->changes_current_thread(),
3682          "method changes current Thread but is not annotated ChangesCurrentThread");
3683   Node* arr = argument(1);
3684   Node* thread = _gvn.transform(new ThreadLocalNode());
3685   Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::vthread_offset()));
3686   Node* thread_obj_handle
3687     = make_load(nullptr, p, p->bottom_type()->is_ptr(), T_OBJECT, MemNode::unordered);
3688   thread_obj_handle = _gvn.transform(thread_obj_handle);
3689   const TypePtr *adr_type = _gvn.type(thread_obj_handle)->isa_ptr();
3690   access_store_at(nullptr, thread_obj_handle, adr_type, arr, _gvn.type(arr), T_OBJECT, IN_NATIVE | MO_UNORDERED);
3691   JFR_ONLY(extend_setCurrentThread(thread, arr);)
3692   return true;
3693 }
3694 
3695 const Type* LibraryCallKit::scopedValueCache_type() {
3696   ciKlass* objects_klass = ciObjArrayKlass::make(env()->Object_klass());
3697   const TypeOopPtr* etype = TypeOopPtr::make_from_klass(env()->Object_klass());
3698   const TypeAry* arr0 = TypeAry::make(etype, TypeInt::POS);
3699 
3700   // Because we create the scopedValue cache lazily we have to make the
3701   // type of the result BotPTR.
3702   bool xk = etype->klass_is_exact();
3703   const Type* objects_type = TypeAryPtr::make(TypePtr::BotPTR, arr0, objects_klass, xk, 0);
3704   return objects_type;
3705 }
3706 
3707 Node* LibraryCallKit::scopedValueCache_helper() {
3708   Node* thread = _gvn.transform(new ThreadLocalNode());
3709   Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::scopedValueCache_offset()));
3710   // We cannot use immutable_memory() because we might flip onto a
3711   // different carrier thread, at which point we'll need to use that
3712   // carrier thread's cache.
3713   // return _gvn.transform(LoadNode::make(_gvn, nullptr, immutable_memory(), p, p->bottom_type()->is_ptr(),
3714   //       TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered));

New version:

 461   case vmIntrinsics::_getAndAddShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_add,       Volatile);
 462   case vmIntrinsics::_getAndAddInt:                     return inline_unsafe_load_store(T_INT,    LS_get_add,       Volatile);
 463   case vmIntrinsics::_getAndAddLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_add,       Volatile);
 464 
 465   case vmIntrinsics::_getAndSetByte:                    return inline_unsafe_load_store(T_BYTE,   LS_get_set,       Volatile);
 466   case vmIntrinsics::_getAndSetShort:                   return inline_unsafe_load_store(T_SHORT,  LS_get_set,       Volatile);
 467   case vmIntrinsics::_getAndSetInt:                     return inline_unsafe_load_store(T_INT,    LS_get_set,       Volatile);
 468   case vmIntrinsics::_getAndSetLong:                    return inline_unsafe_load_store(T_LONG,   LS_get_set,       Volatile);
 469   case vmIntrinsics::_getAndSetReference:               return inline_unsafe_load_store(T_OBJECT, LS_get_set,       Volatile);
 470 
 471   case vmIntrinsics::_loadFence:
 472   case vmIntrinsics::_storeFence:
 473   case vmIntrinsics::_storeStoreFence:
 474   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 475 
 476   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 477 
 478   case vmIntrinsics::_currentCarrierThread:     return inline_native_currentCarrierThread();
 479   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 480   case vmIntrinsics::_setCurrentThread:         return inline_native_setCurrentThread();
 481   case vmIntrinsics::_setCurrentLockId:         return inline_native_setCurrentLockId();
 482 
 483   case vmIntrinsics::_scopedValueCache:          return inline_native_scopedValueCache();
 484   case vmIntrinsics::_setScopedValueCache:       return inline_native_setScopedValueCache();
 485 
 486   case vmIntrinsics::_Continuation_pin:          return inline_native_Continuation_pinning(false);
 487   case vmIntrinsics::_Continuation_unpin:        return inline_native_Continuation_pinning(true);
 488 
 489 #if INCLUDE_JVMTI
 490   case vmIntrinsics::_notifyJvmtiVThreadStart:   return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_start()),
 491                                                                                          "notifyJvmtiStart", true, false);
 492   case vmIntrinsics::_notifyJvmtiVThreadEnd:     return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_end()),
 493                                                                                          "notifyJvmtiEnd", false, true);
 494   case vmIntrinsics::_notifyJvmtiVThreadMount:   return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_mount()),
 495                                                                                          "notifyJvmtiMount", false, false);
 496   case vmIntrinsics::_notifyJvmtiVThreadUnmount: return inline_native_notify_jvmti_funcs(CAST_FROM_FN_PTR(address, OptoRuntime::notify_jvmti_vthread_unmount()),
 497                                                                                          "notifyJvmtiUnmount", false, false);
 498   case vmIntrinsics::_notifyJvmtiVThreadHideFrames:     return inline_native_notify_jvmti_hide();
 499   case vmIntrinsics::_notifyJvmtiVThreadDisableSuspend: return inline_native_notify_jvmti_sync();
 500 #endif
 501 

3672 
3673 //------------------------inline_native_currentThread------------------
3674 bool LibraryCallKit::inline_native_currentThread() {
3675   Node* junk = nullptr;
3676   set_result(generate_virtual_thread(junk));
3677   return true;
3678 }
3679 
3680 //------------------------inline_native_setCurrentThread------------------
3681 bool LibraryCallKit::inline_native_setCurrentThread() {
3682   assert(C->method()->changes_current_thread(),
3683          "method changes current Thread but is not annotated ChangesCurrentThread");
3684   Node* arr = argument(1);
3685   Node* thread = _gvn.transform(new ThreadLocalNode());
3686   Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::vthread_offset()));
3687   Node* thread_obj_handle
3688     = make_load(nullptr, p, p->bottom_type()->is_ptr(), T_OBJECT, MemNode::unordered);
3689   thread_obj_handle = _gvn.transform(thread_obj_handle);
3690   const TypePtr *adr_type = _gvn.type(thread_obj_handle)->isa_ptr();
3691   access_store_at(nullptr, thread_obj_handle, adr_type, arr, _gvn.type(arr), T_OBJECT, IN_NATIVE | MO_UNORDERED);
3692 
3693   // Change the lock_id of the JavaThread
3694   Node* tid = load_field_from_object(arr, "tid", "J");
3695   Node* thread_id_offset = basic_plus_adr(thread, in_bytes(JavaThread::lock_id_offset()));
3696   Node* tid_memory = store_to_memory(control(), thread_id_offset, tid, T_LONG, Compile::AliasIdxRaw, MemNode::unordered, true);
3697 
3698   JFR_ONLY(extend_setCurrentThread(thread, arr);)
3699   return true;
3700 }
3701 
3702 bool LibraryCallKit::inline_native_setCurrentLockId() {
3703   Node* thread = _gvn.transform(new ThreadLocalNode());
3704   Node* thread_id_offset = basic_plus_adr(thread, in_bytes(JavaThread::lock_id_offset()));
3705   Node* tid_memory = store_to_memory(control(), thread_id_offset, ConvL2X(argument(0)), T_LONG, Compile::AliasIdxRaw, MemNode::unordered, true);
3706   return true;
3707 }
3708 
3709 const Type* LibraryCallKit::scopedValueCache_type() {
3710   ciKlass* objects_klass = ciObjArrayKlass::make(env()->Object_klass());
3711   const TypeOopPtr* etype = TypeOopPtr::make_from_klass(env()->Object_klass());
3712   const TypeAry* arr0 = TypeAry::make(etype, TypeInt::POS);
3713 
3714   // Because we create the scopedValue cache lazily we have to make the
3715   // type of the result BotPTR.
3716   bool xk = etype->klass_is_exact();
3717   const Type* objects_type = TypeAryPtr::make(TypePtr::BotPTR, arr0, objects_klass, xk, 0);
3718   return objects_type;
3719 }
3720 
3721 Node* LibraryCallKit::scopedValueCache_helper() {
3722   Node* thread = _gvn.transform(new ThreadLocalNode());
3723   Node* p = basic_plus_adr(top()/*!oop*/, thread, in_bytes(JavaThread::scopedValueCache_offset()));
3724   // We cannot use immutable_memory() because we might flip onto a
3725   // different carrier thread, at which point we'll need to use that
3726   // carrier thread's cache.
3727   // return _gvn.transform(LoadNode::make(_gvn, nullptr, immutable_memory(), p, p->bottom_type()->is_ptr(),
3728   //       TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered));
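
For orientation, here is a self-contained C++ sketch of the net runtime effect of the two additions in this hunk: inline_native_setCurrentThread now also copies the incoming thread object's tid field into the carrier JavaThread's lock_id slot, and the new inline_native_setCurrentLockId intrinsic writes its long argument into that same slot with a plain unordered store. The struct and function names below (FakeJavaThread, set_current_thread_effect, and so on) are invented for illustration only; the real methods emit C2 IR (raw loads and stores relative to ThreadLocalNode offsets), not direct field writes.

#include <cstdint>

// Illustrative stand-ins only; these are not HotSpot declarations.
struct FakeVThreadOop  { int64_t tid; };   // models the Thread object's tid field
struct FakeJavaThread {
  FakeVThreadOop* vthread;                 // models the slot at JavaThread::vthread_offset()
  int64_t         lock_id;                 // models the slot at JavaThread::lock_id_offset()
};

// Net effect of the IR built by inline_native_setCurrentThread:
// publish the new thread oop through the vthread handle (simplified to a
// plain pointer here) and adopt its tid as the carrier thread's lock_id.
static void set_current_thread_effect(FakeJavaThread* carrier, FakeVThreadOop* thr) {
  carrier->vthread = thr;       // access_store_at(..., IN_NATIVE | MO_UNORDERED)
  carrier->lock_id = thr->tid;  // store_to_memory(..., T_LONG, ..., MemNode::unordered)
}

// Net effect of the IR built by inline_native_setCurrentLockId:
// an unordered raw store of the argument into the lock_id slot.
static void set_current_lock_id_effect(FakeJavaThread* carrier, int64_t id) {
  carrier->lock_id = id;
}

int main() {
  FakeVThreadOop vt{42};
  FakeJavaThread jt{nullptr, 0};
  set_current_thread_effect(&jt, &vt);
  set_current_lock_id_effect(&jt, 7);
  return (jt.vthread == &vt && jt.lock_id == 7) ? 0 : 1;
}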