221 bool inline_notify(vmIntrinsics::ID id);
222 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
223 // This returns Type::AnyPtr, RawPtr, or OopPtr.
224 int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
// NOTE(review): 'base' is passed by reference and may be rewritten in place;
// T_ILLEGAL default presumably means "type not known" -- confirm at the definition.
225 Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
226
// Memory-ordering flavor of an Unsafe access; translated into a DecoratorSet
// by mo_decorator_for_access_kind() below.
227 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
228 DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
// Unsafe access intrinsics (names suggest jdk.internal.misc.Unsafe -- verify
// against the vmIntrinsics table in the corresponding .cpp).
229 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
// static: does not depend on per-kit state, only on the given klass node.
230 static bool klass_needs_init_guard(Node* kls);
231 bool inline_unsafe_allocate();
232 bool inline_unsafe_newArray(bool uninitialized);
233 bool inline_unsafe_writeback0();
234 bool inline_unsafe_writebackSync0(bool is_pre);
235 bool inline_unsafe_copyMemory();
236 bool inline_unsafe_setMemory();
237
// Thread-related native intrinsics.
238 bool inline_native_currentCarrierThread();
239 bool inline_native_currentThread();
240 bool inline_native_setCurrentThread();
241
// ScopedValue cache intrinsics plus their shared type/helper routines.
242 bool inline_native_scopedValueCache();
243 const Type* scopedValueCache_type();
244 Node* scopedValueCache_helper();
245 bool inline_native_setScopedValueCache();
246 bool inline_native_Continuation_pinning(bool unpin);
247
248 bool inline_native_time_funcs(address method, const char* funcName);
// JVMTI notification intrinsics -- declared only when JVMTI support is built in.
249 #if INCLUDE_JVMTI
250 bool inline_native_notify_jvmti_funcs(address funcAddr, const char* funcName, bool is_start, bool is_end);
251 bool inline_native_notify_jvmti_hide();
252 bool inline_native_notify_jvmti_sync();
253 #endif
254
// JFR intrinsics -- declared only when the platform provides JFR intrinsic support.
255 #ifdef JFR_HAVE_INTRINSICS
256 bool inline_native_classID();
257 bool inline_native_getEventWriter();
258 bool inline_native_jvm_commit();
259 void extend_setCurrentThread(Node* jt, Node* thread);
260 #endif
|
221 bool inline_notify(vmIntrinsics::ID id);
222 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
223 // This returns Type::AnyPtr, RawPtr, or OopPtr.
224 int classify_unsafe_addr(Node* &base, Node* &offset, BasicType type);
// NOTE(review): 'base' is passed by reference and may be rewritten in place;
// T_ILLEGAL default presumably means "type not known" -- confirm at the definition.
225 Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);
226
// Memory-ordering flavor of an Unsafe access; translated into a DecoratorSet
// by mo_decorator_for_access_kind() below.
227 typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
228 DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
// Unsafe access intrinsics (names suggest jdk.internal.misc.Unsafe -- verify
// against the vmIntrinsics table in the corresponding .cpp).
229 bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
// static: does not depend on per-kit state, only on the given klass node.
230 static bool klass_needs_init_guard(Node* kls);
231 bool inline_unsafe_allocate();
232 bool inline_unsafe_newArray(bool uninitialized);
233 bool inline_unsafe_writeback0();
234 bool inline_unsafe_writebackSync0(bool is_pre);
235 bool inline_unsafe_copyMemory();
236 bool inline_unsafe_setMemory();
237
// Thread-related native intrinsics.
238 bool inline_native_currentCarrierThread();
239 bool inline_native_currentThread();
240 bool inline_native_setCurrentThread();
241 bool inline_native_setCurrentLockId();
242
// ScopedValue cache intrinsics plus their shared type/helper routines.
243 bool inline_native_scopedValueCache();
244 const Type* scopedValueCache_type();
245 Node* scopedValueCache_helper();
246 bool inline_native_setScopedValueCache();
247 bool inline_native_Continuation_pinning(bool unpin);
248
249 bool inline_native_time_funcs(address method, const char* funcName);
// JVMTI notification intrinsics -- declared only when JVMTI support is built in.
250 #if INCLUDE_JVMTI
251 bool inline_native_notify_jvmti_funcs(address funcAddr, const char* funcName, bool is_start, bool is_end);
252 bool inline_native_notify_jvmti_hide();
253 bool inline_native_notify_jvmti_sync();
254 #endif
255
// JFR intrinsics -- declared only when the platform provides JFR intrinsic support.
256 #ifdef JFR_HAVE_INTRINSICS
257 bool inline_native_classID();
258 bool inline_native_getEventWriter();
259 bool inline_native_jvm_commit();
260 void extend_setCurrentThread(Node* jt, Node* thread);
261 #endif
|