1 /*
   2  * Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileStream.hpp"
  26 #include "classfile/classLoader.hpp"
  27 #include "classfile/classLoadInfo.hpp"
  28 #include "classfile/javaClasses.inline.hpp"
  29 #include "classfile/systemDictionary.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "jfr/jfrEvents.hpp"
  32 #include "jni.h"
  33 #include "jvm.h"
  34 #include "logging/log.hpp"
  35 #include "logging/logStream.hpp"
  36 #include "memory/allocation.inline.hpp"
  37 #include "memory/oopFactory.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "oops/access.inline.hpp"
  40 #include "oops/fieldStreams.inline.hpp"
  41 #include "oops/flatArrayKlass.hpp"
  42 #include "oops/flatArrayOop.inline.hpp"
  43 #include "oops/inlineKlass.inline.hpp"
  44 #include "oops/instanceKlass.inline.hpp"
  45 #include "oops/klass.inline.hpp"
  46 #include "oops/objArrayOop.inline.hpp"
  47 #include "oops/oop.inline.hpp"
  48 #include "oops/oopCast.inline.hpp"
  49 #include "oops/typeArrayOop.inline.hpp"
  50 #include "oops/valuePayload.hpp"
  51 #include "prims/jvmtiExport.hpp"
  52 #include "prims/unsafe.hpp"
  53 #include "runtime/fieldDescriptor.inline.hpp"
  54 #include "runtime/globals.hpp"
  55 #include "runtime/handles.inline.hpp"
  56 #include "runtime/interfaceSupport.inline.hpp"
  57 #include "runtime/javaThread.inline.hpp"
  58 #include "runtime/jniHandles.inline.hpp"
  59 #include "runtime/orderAccess.hpp"
  60 #include "runtime/reflection.hpp"
  61 #include "runtime/sharedRuntime.hpp"
  62 #include "runtime/stubRoutines.hpp"
  63 #include "runtime/threadSMR.hpp"
  64 #include "runtime/vm_version.hpp"
  65 #include "runtime/vmOperations.hpp"
  66 #include "sanitizers/ub.hpp"
  67 #include "services/threadService.hpp"
  68 #include "utilities/align.hpp"
  69 #include "utilities/copy.hpp"
  70 #include "utilities/dtrace.hpp"
  71 #include "utilities/macros.hpp"
  72 
  73 /**
  74  * Implementation of the jdk.internal.misc.Unsafe class
  75  */
  76 
  77 
// Largest possible heap object size in bytes: a jdouble array of max_jint
// elements plus its header. Used (in debug builds) to sanity-check Unsafe
// field offsets against a plausible upper bound.
#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::base_offset_in_bytes(T_DOUBLE) \
    + ((julong)max_jint * sizeof(double)) )

// Standard VM-state transition wrapper for Unsafe entry points; the body
// runs in the _thread_in_vm state and may safepoint and throw.
#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

// Leaf wrapper: the body stays in the native state and must neither
// safepoint nor throw. Cannot be used for scoped memory access (see below).
#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

// All memory access methods (e.g. getInt, copyMemory) must use this macro.
// We call these methods "scoped" methods, as access to these methods is
// typically governed by a "scope" (a MemorySessionImpl object), and no
// access is allowed when the scope is no longer alive.
//
// Closing a scope object (cf. scopedMemoryAccess.cpp) can install
// an async exception during a safepoint. When that happens,
// scoped methods are not allowed to touch the underlying memory (as that
// memory might have been released). Therefore, when entering a scoped method
// we check if an async exception has been installed, and return immediately
// if that is the case.
//
// As a rule, we disallow safepoints in the middle of a scoped method.
// If an async exception handshake were installed in such a safepoint,
// memory access might still occur before the handshake is honored by
// the accessing thread.
//
// Corollary: as threads in native state are considered to be at a safepoint,
// scoped methods must NOT be executed while in the native thread state.
// Because of this, there can be no UNSAFE_LEAF_SCOPED.
#define UNSAFE_ENTRY_SCOPED(result_type, header) \
  JVM_ENTRY(static result_type, header) \
  if (thread->has_async_exception_condition()) {return (result_type)0;}

// Closes the scope opened by any of the entry macros above.
#define UNSAFE_END JVM_END
 113 
 114 
 115 static inline void* addr_from_java(jlong addr) {
 116   // This assert fails in a variety of ways on 32-bit systems.
 117   // It is impossible to predict whether native code that converts
 118   // pointers to longs will sign-extend or zero-extend the addresses.
 119   //assert(addr == (uintptr_t)addr, "must not be odd high bits");
 120   return (void*)(uintptr_t)addr;
 121 }
 122 
 123 static inline jlong addr_to_java(void* p) {
 124   assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
 125   return (uintptr_t)p;
 126 }
 127 
 128 
 129 // Note: The VM's obj_field and related accessors use byte-scaled
 130 // ("unscaled") offsets, just as the unsafe methods do.
 131 
 132 // However, the method Unsafe.fieldOffset explicitly declines to
 133 // guarantee this.  The field offset values manipulated by the Java user
 134 // through the Unsafe API are opaque cookies that just happen to be byte
 135 // offsets.  We represent this state of affairs by passing the cookies
 136 // through conversion functions when going between the VM and the Unsafe API.
 137 // The conversion functions just happen to be no-ops at present.
 138 
 139 static inline jlong field_offset_to_byte_offset(jlong field_offset) {
 140   return field_offset;
 141 }
 142 
 143 static inline int field_offset_from_byte_offset(int byte_offset) {
 144   return byte_offset;
 145 }
 146 
// Debug-only sanity checks for an Unsafe (base, offset) pair. A null base is
// always allowed (raw address access); with a non-null base the offset must
// lie within the object. No-op in product builds.
static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != nullptr) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      // Cross-check raw pointer arithmetic against oopDesc::field_addr.
      void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
      assert(p->field_addr<void>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr");
    }
    // The offset must fall inside the object's actual extent.
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}
 163 
 164 static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
 165   assert_field_offset_sane(p, field_offset);
 166   uintptr_t base_address = cast_from_oop<uintptr_t>(p);
 167   uintptr_t byte_offset  = (uintptr_t)field_offset_to_byte_offset(field_offset);
 168   return (void*)(base_address + byte_offset);
 169 }
 170 
// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
// Both are identity mappings today; they exist so intrinsics share the same
// cookie<->offset conversion point as the static helpers above.
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}
 179 
 180 
 181 ///// Data read/writes on the Java heap and in native (off-heap) memory
 182 
 183 /**
 184  * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 185  */
 186 class GuardUnsafeAccess {
 187   JavaThread* _thread;
 188 
 189 public:
 190   GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
 191     // native/off-heap access which may raise SIGBUS if accessing
 192     // memory mapped file data in a region of the file which has
 193     // been truncated and is now invalid.
 194     _thread->set_doing_unsafe_access(true);
 195   }
 196 
 197   ~GuardUnsafeAccess() {
 198     _thread->set_doing_unsafe_access(false);
 199   }
 200 };
 201 
 202 /**
 203  * Helper class for accessing memory.
 204  *
 205  * Normalizes values and wraps accesses in
 206  * JavaThread::doing_unsafe_access() if needed.
 207  */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;        // resolved base object, or null for raw address access
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // can not be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  // Default: most primitive types need no normalization on write.
  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  // Booleans are stored as 0 or 1 only; mask off any other bits.
  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  // Default: most primitive types need no normalization on read.
  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  // Any non-zero stored byte reads back as canonical true (1).
  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

public:
  // Resolves the JNI handle up front; the offset is validated in debug builds.
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  // Plain (non-ordered) load, guarded against SIGBUS on mapped-file access.
  T get() {
    GuardUnsafeAccess guard(_thread);
    return normalize_for_read(*addr());
  }

  // we use this method at some places for writing to 0 e.g. to cause a crash;
  // ubsan does not know that this is the desired behavior
  ATTRIBUTE_NO_UBSAN
  void put(T x) {
    GuardUnsafeAccess guard(_thread);
    assert(_obj == nullptr || !_obj->is_inline_type(), "receiver cannot be an instance of a value class because they are immutable");
    *addr() = normalize_for_write(x);
  }


  // Sequentially-consistent load.
  T get_volatile() {
    GuardUnsafeAccess guard(_thread);
    volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
    return normalize_for_read(ret);
  }

  // Sequentially-consistent store.
  void put_volatile(T x) {
    GuardUnsafeAccess guard(_thread);
    RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
  }
};
 275 
 276 static void log_unsafe_value_access(oop p, jlong offset, InlineKlass* vk) {
 277   Klass* k = p->klass();
 278   if (log_is_enabled(Trace, valuetypes)) {
 279     ResourceMark rm;
 280     if (k->is_flatArray_klass()) {
 281       FlatArrayKlass* vak = FlatArrayKlass::cast(k);
 282       int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
 283       flatArrayOop array = oop_cast<flatArrayOop>(p);
 284       if (index >= 0 && index < array->length()) {
 285         address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
 286         log_trace(valuetypes)("%s array type %s index %d element size %d offset " UINT64_FORMAT_X " at " INTPTR_FORMAT,
 287                               p->klass()->external_name(), vak->external_name(),
 288                               index, vak->element_byte_size(), (uint64_t)offset, p2i(dest));
 289       } else {
 290          log_trace(valuetypes)("%s array type %s out-of-bounds index %d element size %d offset " UINT64_FORMAT_X,
 291                               p->klass()->external_name(), vak->external_name(), index, vak->element_byte_size(), (uint64_t)offset);
 292       }
 293     } else {
 294       log_trace(valuetypes)("%s field type %s at offset " UINT64_FORMAT_X,
 295                             p->klass()->external_name(), vk->external_name(), (uint64_t)offset);
 296     }
 297   }
 298 }
 299 
 300 // These functions allow a null base pointer with an arbitrary address.
 301 // But if the base pointer is non-null, the offset should make some sense.
 302 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
// Reads an object reference at (obj, offset) with GC-barrier-aware access;
// ON_UNKNOWN_OOP_REF lets the access layer discover the reference strength.
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 309 
// Stores an object reference at (obj, offset) with GC-barrier-aware access.
// Value-class receivers are rejected: their instances are immutable.
UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  assert(!p->is_inline_type(), "receiver cannot be an instance of a value class because they are immutable");
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END
 317 
// Returns the byte offset at which the flat payload of the given value
// class begins (i.e. the size of its object header area).
UNSAFE_ENTRY(jlong, Unsafe_ValueHeaderSize(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  InlineKlass* vk = InlineKlass::cast(k);
  return vk->payload_offset();
} UNSAFE_END
 323 
// The next three entries query flat-field metadata for a
// java.lang.reflect.Field: each resolves the Field's declaring class and
// slot, then asks the InstanceKlass about that field.

// True if the field is flattened into its holder's layout.
UNSAFE_ENTRY(jboolean, Unsafe_IsFlatField(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_is_flat(slot);
} UNSAFE_END

// True if the (flat) field carries an out-of-band null marker byte.
UNSAFE_ENTRY(jboolean, Unsafe_HasNullMarker(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_has_null_marker(slot);
} UNSAFE_END

// Byte offset of the field's null marker within the holder.
UNSAFE_ENTRY(jint, Unsafe_NullMarkerOffset(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_null_marker_offset(slot);
} UNSAFE_END
 344 
// Returns the LayoutKind of the given array instance: REFERENCE for
// reference arrays, or the flat layout kind for flat value arrays.
UNSAFE_ENTRY(jint, Unsafe_ArrayLayout(JNIEnv *env, jobject unsafe, jarray array)) {
  oop ar = JNIHandles::resolve_non_null(array);
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    return (jint)LayoutKind::REFERENCE;
  } else if (ak->is_flatArray_klass()) {
    return (jint)FlatArrayKlass::cast(ak)->layout_kind();
  } else {
    // Primitive arrays are not expected here.
    ShouldNotReachHere();
    return -1;
  }
} UNSAFE_END
 357 
// Returns the LayoutKind of the given java.lang.reflect.Field:
// the flat layout kind if the field is flattened, REFERENCE otherwise.
UNSAFE_ENTRY(jint, Unsafe_FieldLayout(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  Klass* k        = java_lang_Class::as_Klass(mirror);
  int slot        = java_lang_reflect_Field::slot(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) != 0) {
    return (jint)LayoutKind::REFERENCE; // static fields are never flat
  } else {
    InstanceKlass* ik = InstanceKlass::cast(k);
    if (ik->field_is_flat(slot)) {
      return (jint)ik->inline_layout_info(slot).kind();
    } else {
      return (jint)LayoutKind::REFERENCE;
    }
  }
} UNSAFE_END
 378 
// Allocates a flat array of 'len' elements of the concrete value class
// 'elmClass' with the requested flat layout. Throws IllegalArgumentException
// for bad arguments and UnsupportedOperationException when the layout is not
// available on this platform/configuration.
UNSAFE_ENTRY(jarray, Unsafe_NewSpecialArray(JNIEnv *env, jobject unsafe, jclass elmClass, jint len, jint layoutKind)) {
  oop mirror = JNIHandles::resolve_non_null(elmClass);
  Klass* klass = java_lang_Class::as_Klass(mirror);
  klass->initialize(CHECK_NULL);
  if (len < 0) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Array length is negative");
  }
  if (klass->is_array_klass() || klass->is_identity_class()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Element class is not a value class");
  }
  if (klass->is_abstract()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Element class is abstract");
  }
  // Only genuinely flat layout kinds are accepted here.
  LayoutKind lk = static_cast<LayoutKind>(layoutKind);
  if (lk <= LayoutKind::REFERENCE || lk == LayoutKind::NULLABLE_NON_ATOMIC_FLAT || lk >= LayoutKind::UNKNOWN) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Invalid layout kind");
  }
  InlineKlass* vk = InlineKlass::cast(klass);
  // WARNING: test below will need modifications when flat layouts supported for fields
  // but not for arrays are introduced (NULLABLE_NON_ATOMIC_FLAT for instance)
  if (!UseArrayFlattening || !vk->is_layout_supported(lk)) {
    THROW_MSG_NULL(vmSymbols::java_lang_UnsupportedOperationException(), "Layout not supported");
  }
  ArrayProperties props = ArrayKlass::array_properties_from_layout(lk);
  oop array = oopFactory::new_flatArray(vk, len, props, lk, CHECK_NULL);
  return (jarray) JNIHandles::make_local(THREAD, array);
} UNSAFE_END
 406 
// Reads a flat value of class 'vc' at (obj, offset) using the given flat
// layout, boxing it into a heap instance. Throws NPE for a null base.
UNSAFE_ENTRY(jobject, Unsafe_GetFlatValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint layoutKind, jclass vc)) {
  assert(layoutKind != (int)LayoutKind::UNKNOWN, "Sanity");
  assert(layoutKind != (int)LayoutKind::REFERENCE, "This method handles only flat layouts");
  oop base = JNIHandles::resolve(obj);
  if (base == nullptr) {
    THROW_NULL(vmSymbols::java_lang_NullPointerException());
  }
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  log_unsafe_value_access(base, offset, vk);
  LayoutKind lk = (LayoutKind)layoutKind;
  // The payload helper encapsulates layout-specific read logic (atomicity,
  // null markers, etc.).
  FlatValuePayload payload = FlatValuePayload::construct_from_parts(base, offset, vk, lk);
  oop v = payload.read(CHECK_NULL);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 422 
// Writes the given boxed value instance as a flat value of class 'vc' at
// (obj, offset) using the given flat layout. Throws NPE for a null base.
UNSAFE_ENTRY(void, Unsafe_PutFlatValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint layoutKind, jclass vc, jobject value)) {
  assert(layoutKind != (int)LayoutKind::UNKNOWN, "Sanity");
  assert(layoutKind != (int)LayoutKind::REFERENCE, "This method handles only flat layouts");
  oop base = JNIHandles::resolve(obj);
  if (base == nullptr) {
    THROW(vmSymbols::java_lang_NullPointerException());
  }

  InlineKlass* vk = InlineKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc)));
  log_unsafe_value_access(base, offset, vk);
  LayoutKind lk = (LayoutKind)layoutKind;
  // The payload helper encapsulates layout-specific write logic (atomicity,
  // null markers, etc.).
  FlatValuePayload payload = FlatValuePayload::construct_from_parts(base, offset, vk, lk);
  payload.write(inlineOop(JNIHandles::resolve(value)), CHECK);
} UNSAFE_END
 437 
// Volatile (seq-cst) variant of Unsafe_GetReference.
UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

// Volatile (seq-cst) variant of Unsafe_PutReference.
// NOTE(review): unlike Unsafe_PutReference, there is no assert rejecting an
// inline-type receiver here — presumably callers guarantee this earlier;
// confirm against the Java-side Unsafe wrappers.
UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

// Loads an uncompressed oop directly from a raw native address (no base
// object, no barriers on the address computation).
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 456 
// Stamps out the plain (non-volatile) Unsafe get/put accessor pair for one
// primitive Java type, e.g. Unsafe_GetInt / Unsafe_PutInt. These are scoped
// entries: they return 0 immediately if an async exception (closed memory
// session) is pending.
#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP
 479 
// Stamps out the volatile (MO_SEQ_CST) Unsafe get/put accessor pair for one
// primitive Java type, e.g. Unsafe_GetIntVolatile / Unsafe_PutIntVolatile.
#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE
 502 
// Full bidirectional memory fence (Unsafe.fullFence).
UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END
 506 
 507 ////// Allocation requests
 508 
// Allocates an instance of 'cls' without running any constructor
// (Unsafe.allocateInstance). JVMTI allocation events are still posted.
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  JvmtiVMObjectAllocEventCollector oam;
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(THREAD, i);
} UNSAFE_END
 514 
// Native memory management trio backing Unsafe.allocateMemory /
// reallocateMemory / freeMemory. Size/alignment argument checking happens on
// the Java side; a failed allocation returns 0 to the caller.

UNSAFE_LEAF(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END
 541 
// Fills 'size' bytes at (obj, offset) with 'value'. Prefers the generated
// setmemory stub when available, otherwise falls back to an atomic fill.
UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  {
    // Guarded: the fill may SIGBUS on truncated memory-mapped file regions.
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_setmemory() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeSetMemory_stub()(p, sz, value);
    } else {
      Copy::fill_to_memory_atomic(p, sz, value);
    }
  }
} UNSAFE_END
 558 
// Copies 'size' bytes from (srcObj, srcOffset) to (dstObj, dstOffset).
// Either base may be null for raw off-heap addresses. Uses the generated
// arraycopy stub when available, otherwise an atomic conjoint copy.
UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  {
    // Guarded: the copy may SIGBUS on truncated memory-mapped file regions.
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_arraycopy() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
    } else {
      Copy::conjoint_memory_atomic(src, dst, sz);
    }
  }
} UNSAFE_END
 577 
// Like Unsafe_CopyMemory0, but byte-swaps each 'elemSize'-byte element
// during the copy (endianness conversion).
UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
  address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

  {
    // Guarded: the copy may SIGBUS on truncated memory-mapped file regions.
    GuardUnsafeAccess guard(thread);
    Copy::conjoint_swap(src, dst, sz, esz);
  }
} UNSAFE_END
 593 
// Writes back (flushes) the data cache line containing the given address via
// the generated platform stub. Only reachable on platforms that support it.
UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback 0x%p", addr_from_java(line));
  }
#endif

  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback() != nullptr, "sanity");
  (StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
} UNSAFE_END
 606 
 607 static void doWriteBackSync0(bool is_pre)
 608 {
 609   MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
 610   assert(StubRoutines::data_cache_writeback_sync() != nullptr, "sanity");
 611   (StubRoutines::DataCacheWritebackSync_stub())(is_pre);
 612 }
 613 
// Barrier executed before a sequence of cache-line writebacks.
UNSAFE_LEAF (void, Unsafe_WriteBackPreSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
      tty->print_cr("Unsafe: writeback pre-sync");
  }
#endif

  doWriteBackSync0(true);
} UNSAFE_END
 624 
 625 UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
 626   assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
 627 #ifdef ASSERT
 628   if (TraceMemoryWriteback) {
 629     tty->print_cr("Unsafe: writeback pre-sync");
 630   }
 631 #endif
 632 
 633   doWriteBackSync0(false);
 634 } UNSAFE_END
 635 
 636 ////// Random queries
 637 
 638 // Finds the object field offset of a field with the matching name, or an error code
 639 // Error code -1 is not found, -2 is static field
 640 static jlong find_known_instance_field_offset(jclass clazz, jstring name, TRAPS) {
 641   assert(clazz != nullptr, "clazz must not be null");
 642   assert(name != nullptr, "name must not be null");
 643 
 644   ResourceMark rm(THREAD);
 645   char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));
 646 
 647   InstanceKlass* k = java_lang_Class::as_InstanceKlass(JNIHandles::resolve_non_null(clazz));
 648 
 649   jint offset = -1; // Not found
 650   for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
 651     Symbol *name = fs.name();
 652     if (name->equals(utf_name)) {
 653       if (!fs.access_flags().is_static()) {
 654         offset = fs.offset();
 655       } else {
 656         offset = -2; // A static field
 657       }
 658       break;
 659     }
 660   }
 661   if (offset < 0) {
 662     return offset; // Error code
 663   }
 664   return field_offset_from_byte_offset(offset);
 665 }
 666 
// Resolves a java.lang.reflect.Field to its Unsafe field-offset cookie.
// must_be_static: 1 = require static, 0 = require instance, negative = don't
// care. Throws IllegalArgumentException on a staticness mismatch.
static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != nullptr, "field must not be null");

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  Klass* k        = java_lang_Class::as_Klass(mirror);
  int slot        = java_lang_reflect_Field::slot(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}
 686 
// Offset cookie for an instance field (rejects static fields).
UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

// Offset cookie for an instance field looked up by name on class 'c';
// returns -1 (not found) or -2 (static) as error codes.
UNSAFE_ENTRY(jlong, Unsafe_KnownObjectFieldOffset0(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_known_instance_field_offset(c, name, THREAD);
} UNSAFE_END

// Offset cookie for a static field (rejects instance fields).
UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END
 698 
// Returns the base object against which a static field's offset cookie is
// interpreted: in this VM, the declaring class's java.lang.Class mirror.
UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return null, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_NULL(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(THREAD, mirror);
} UNSAFE_END
 720 
// Runs static initialization of the mirrored class if it has not completed
// yet; no-op for primitives (null Klass) and already-initialized classes.
UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != nullptr && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END
 733 
 734 UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 735   assert(clazz != nullptr, "clazz must not be null");
 736 
 737   oop mirror = JNIHandles::resolve_non_null(clazz);
 738   Klass* klass = java_lang_Class::as_Klass(mirror);
 739 
 740   if (klass != nullptr && klass->should_be_initialized()) {
 741     return true;
 742   }
 743 
 744   return false;
 745 }
 746 UNSAFE_END
 747 
// Notifies the VM of a read (writing=false) or write (writing=true) to a
// strict static field identified by its offset cookie. Throws InternalError
// if the offset does not resolve to a static field of the class.
UNSAFE_ENTRY(void, Unsafe_NotifyStrictStaticAccess0(JNIEnv *env, jobject unsafe, jobject clazz,
                                                    jlong sfoffset, jboolean writing)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != nullptr && klass->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);
    fieldDescriptor fd;
    if (ik->find_local_field_from_offset((int)sfoffset, true, &fd)) {
      // Note: The Unsafe API takes an OFFSET, but the InstanceKlass wants the INDEX.
      // We could surface field indexes into Unsafe, but that's too much churn.
      ik->notify_strict_static_access(fd.index(), writing, CHECK);
      return;
    }
  }
  THROW(vmSymbols::java_lang_InternalError());
}
UNSAFE_END
 768 
 769 static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
 770   assert(clazz != nullptr, "clazz must not be null");
 771 
 772   oop mirror = JNIHandles::resolve_non_null(clazz);
 773   Klass* k = java_lang_Class::as_Klass(mirror);
 774 
 775   if (k == nullptr || !k->is_array_klass()) {
 776     THROW(vmSymbols::java_lang_InvalidClassException());
 777   } else if (k->is_typeArray_klass()) {
 778     TypeArrayKlass* tak = TypeArrayKlass::cast(k);
 779     base  = tak->array_header_in_bytes();
 780     assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
 781     scale = (1 << tak->log2_element_size());
 782   } else if (k->is_objArray_klass()) {
 783     Klass* ek = ObjArrayKlass::cast(k)->element_klass();
 784     if (!ek->is_identity_class() && !ek->is_abstract()) {
 785       // Arrays of a concrete value class type can have multiple layouts
 786       // There's no good value to return, so throwing an exception is the way out
 787       THROW_MSG(vmSymbols::java_lang_IllegalArgumentException(), "Arrays of a concrete value class don't have a single base and offset");
 788     }
 789     base  = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
 790     scale = heapOopSize;
 791   } else {
 792     ShouldNotReachHere();
 793   }
 794 }
 795 
 796 UNSAFE_ENTRY(jint, Unsafe_ArrayInstanceBaseOffset0(JNIEnv *env, jobject unsafe, jarray array)) {
 797   assert(array != nullptr, "array must not be null");
 798   oop ar = JNIHandles::resolve_non_null(array);
 799   assert(ar->is_array(), "Must be an array");
 800   ArrayKlass* ak = ArrayKlass::cast(ar->klass());
 801   if (ak->is_refArray_klass()) {
 802     return arrayOopDesc::base_offset_in_bytes(T_OBJECT);
 803   } else if (ak->is_flatArray_klass()) {
 804     FlatArrayKlass* fak = FlatArrayKlass::cast(ak);
 805     return fak->array_header_in_bytes();
 806   } else {
 807     ShouldNotReachHere();
 808   }
 809 } UNSAFE_END
 810 
 811 UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
 812   int base = 0, scale = 0;
 813   getBaseAndScale(base, scale, clazz, CHECK_0);
 814 
 815   return field_offset_from_byte_offset(base);
 816 } UNSAFE_END
 817 
 818 
 819 UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
 820   int base = 0, scale = 0;
 821   getBaseAndScale(base, scale, clazz, CHECK_0);
 822 
 823   // This VM packs both fields and array elements down to the byte.
 824   // But watch out:  If this changes, so that array references for
 825   // a given primitive type (say, T_BOOLEAN) use different memory units
 826   // than fields, this method MUST return zero for such arrays.
 827   // For example, the VM used to store sub-word sized fields in full
 828   // words in the object layout, so that accessors like getByte(Object,int)
 829   // did not really do what one might expect for arrays.  Therefore,
 830   // this function used to report a zero scale factor, so that the user
 831   // would know not to attempt to access sub-word array elements.
 832   // // Code for unpacked fields:
 833   // if (scale < wordSize)  return 0;
 834 
 835   // The following allows for a pretty general fieldOffset cookie scheme,
 836   // but requires it to be linear in byte offset.
 837   return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
 838 } UNSAFE_END
 839 
 840 UNSAFE_ENTRY(jint, Unsafe_ArrayInstanceIndexScale0(JNIEnv *env, jobject unsafe, jarray array)) {
 841   assert(array != nullptr, "array must not be null");
 842   oop ar = JNIHandles::resolve_non_null(array);
 843   assert(ar->is_array(), "Must be an array");
 844   ArrayKlass* ak = ArrayKlass::cast(ar->klass());
 845   if (ak->is_refArray_klass()) {
 846     return heapOopSize;
 847   } else if (ak->is_flatArray_klass()) {
 848     FlatArrayKlass* fak = FlatArrayKlass::cast(ak);
 849     return fak->element_byte_size();
 850   } else {
 851     ShouldNotReachHere();
 852   }
 853 } UNSAFE_END
 854 
 855 UNSAFE_ENTRY(jarray, Unsafe_GetFieldMap0(JNIEnv* env, jobject unsafe, jclass clazz)) {
 856   oop mirror = JNIHandles::resolve_non_null(clazz);
 857   Klass* k = java_lang_Class::as_Klass(mirror);
 858 
 859   if (!k->is_inline_klass()) {
 860     THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Argument is not a concrete value class");
 861   }
 862   InlineKlass* vk = InlineKlass::cast(k);
 863   oop map = mirror->obj_field(vk->acmp_maps_offset());
 864   return (jarray) JNIHandles::make_local(THREAD, map);
 865 } UNSAFE_END
 866 
 867 UNSAFE_ENTRY(jlong, Unsafe_GetObjectSize0(JNIEnv* env, jobject o, jobject obj))
 868   oop p = JNIHandles::resolve(obj);
 869   return p->size() * HeapWordSize;
 870 UNSAFE_END
 871 
 872 static inline void throw_new(JNIEnv *env, const char *ename) {
 873   jclass cls = env->FindClass(ename);
 874   if (env->ExceptionCheck()) {
 875     env->ExceptionClear();
 876     tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
 877     return;
 878   }
 879 
 880   env->ThrowNew(cls, nullptr);
 881 }
 882 
// Define a class from the bytes data[offset..offset+length) under 'loader'
// with protection domain 'pd'. 'name' may be null; if given, it may use '.'
// separators, which are rewritten to '/' before defining. Returns the new
// class, or null with a JNI exception pending. Must be called in the native
// thread state (see Unsafe_DefineClass0). Uses goto-based cleanup so 'body'
// (and a heap-allocated 'utfName') are freed on every exit path.
static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = nullptr;
  jclass result = nullptr;
  char buf[128];    // stack buffer for short class names; heap for longer ones

  assert(data != nullptr, "Class bytes must not be null");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  // Copy the class bytes out of the Java array into C heap memory.
  body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
  if (body == nullptr) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return nullptr;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionCheck()) {
    // GetByteArrayRegion threw (e.g. bad offset/length); exception stays pending.
    goto free_body;
  }

  if (name != nullptr) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    // Use the stack buffer when the UTF name fits, otherwise allocate.
    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
      if (utfName == nullptr) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    // JVM_DefineClass expects an internal (slash-separated) name.
    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.')   utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}
 940 
 941 
 942 UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
 943   ThreadToNativeFromVM ttnfv(thread);
 944 
 945   return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
 946 } UNSAFE_END
 947 
 948 
 949 UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
 950   ThreadToNativeFromVM ttnfv(thread);
 951   env->Throw(thr);
 952 } UNSAFE_END
 953 
 954 // JSR166 ------------------------------------------------------------------
 955 
 956 UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 957   oop x = JNIHandles::resolve(x_h);
 958   oop e = JNIHandles::resolve(e_h);
 959   oop p = JNIHandles::resolve(obj);
 960   assert_field_offset_sane(p, offset);
 961   oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 962   return JNIHandles::make_local(THREAD, res);
 963 } UNSAFE_END
 964 
 965 UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
 966   oop p = JNIHandles::resolve(obj);
 967   volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
 968   return AtomicAccess::cmpxchg(addr, e, x);
 969 } UNSAFE_END
 970 
 971 UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 972   oop p = JNIHandles::resolve(obj);
 973   volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
 974   return AtomicAccess::cmpxchg(addr, e, x);
 975 } UNSAFE_END
 976 
 977 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 978   oop x = JNIHandles::resolve(x_h);
 979   oop e = JNIHandles::resolve(e_h);
 980   oop p = JNIHandles::resolve(obj);
 981   assert_field_offset_sane(p, offset);
 982   oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 983   return ret == e;
 984 } UNSAFE_END
 985 
 986 UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
 987   oop p = JNIHandles::resolve(obj);
 988   volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
 989   return AtomicAccess::cmpxchg(addr, e, x) == e;
 990 } UNSAFE_END
 991 
 992 UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 993   oop p = JNIHandles::resolve(obj);
 994   volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
 995   return AtomicAccess::cmpxchg(addr, e, x) == e;
 996 } UNSAFE_END
 997 
 998 static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
 999   assert(event != nullptr, "invariant");
1000   event->set_parkedClass((obj != nullptr) ? obj->klass() : nullptr);
1001   event->set_timeout(timeout_nanos);
1002   event->set_until(until_epoch_millis);
1003   event->set_address((obj != nullptr) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
1004   event->commit();
1005 }
1006 
1007 UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
1008   HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
1009   EventThreadPark event;
1010 
1011   JavaThreadParkedState jtps(thread, time != 0);
1012   thread->parker()->park(isAbsolute != 0, time);
1013   if (event.should_commit()) {
1014     const oop obj = thread->current_park_blocker();
1015     if (time == 0) {
1016       post_thread_park_event(&event, obj, min_jlong, min_jlong);
1017     } else {
1018       if (isAbsolute != 0) {
1019         post_thread_park_event(&event, obj, min_jlong, time);
1020       } else {
1021         post_thread_park_event(&event, obj, time, min_jlong);
1022       }
1023     }
1024   }
1025   HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
1026 } UNSAFE_END
1027 
// Unblock the given thread's Parker, if the thread is still alive.
// A null 'jthread' is silently ignored. The statement order below is
// deliberate: the JavaThread* must be read before the ThreadsListHandle is
// constructed so liveness can be established safely.
UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  if (jthread != nullptr) {
    oop thread_oop = JNIHandles::resolve_non_null(jthread);
    // Get the JavaThread* stored in the java.lang.Thread object _before_
    // the embedded ThreadsListHandle is constructed so we know if the
    // early life stage of the JavaThread* is protected. We use acquire
    // here to ensure that if we see a non-nullptr value, then we also
    // see the main ThreadsList updates from the JavaThread* being added.
    FastThreadsListHandle ftlh(thread_oop, java_lang_Thread::thread_acquire(thread_oop));
    JavaThread* thr = ftlh.protected_java_thread();
    if (thr != nullptr) {
      // The still live JavaThread* is protected by the FastThreadsListHandle
      // so it is safe to access.
      Parker* p = thr->parker();
      HOTSPOT_THREAD_UNPARK((uintptr_t) p);
      p->unpark();
    }
  } // FastThreadsListHandle is destroyed here.
} UNSAFE_END
1047 
1048 UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
1049   const int max_nelem = 3;
1050   double la[max_nelem];
1051   jint ret;
1052 
1053   typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
1054   assert(a->is_typeArray(), "must be type array");
1055 
1056   ret = os::loadavg(la, nelem);
1057   if (ret == -1) {
1058     return -1;
1059   }
1060 
1061   // if successful, ret is the number of samples actually retrieved.
1062   assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
1063   switch(ret) {
1064     case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
1065     case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
1066     case 1: a->double_at_put(0, (jdouble)la[0]); break;
1067   }
1068 
1069   return ret;
1070 } UNSAFE_END
1071 
1072 
/// JVM_RegisterUnsafeMethods

// Shorthand fragments for the JNI type-descriptor strings in the method
// table below. ADR is a raw native address, carried as a Java long.
#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define OBJ_ARR "[" OBJ

// Argument lists for defineClass0 and defineAnonymousClass-style entries.
#define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

// Expands to the four table entries (plain and volatile get/put) for one
// primitive type; Desc is its one-character JNI descriptor.
#define DECLARE_GETPUTOOP(Type, Desc) \
    {CC "get"  #Type,      CC "(" OBJ "J)" #Desc,                 FN_PTR(Unsafe_Get##Type)}, \
    {CC "put"  #Type,      CC "(" OBJ "J" #Desc ")V",             FN_PTR(Unsafe_Put##Type)}, \
    {CC "get"  #Type "Volatile",      CC "(" OBJ "J)" #Desc,      FN_PTR(Unsafe_Get##Type##Volatile)}, \
    {CC "put"  #Type "Volatile",      CC "(" OBJ "J" #Desc ")V",  FN_PTR(Unsafe_Put##Type##Volatile)}
1098 
// Native-method registration table for jdk.internal.misc.Unsafe. Each entry
// is {Java name, JNI signature, C++ implementation}. The signatures must
// match the Java declarations exactly or RegisterNatives fails.
static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
    // Reference (oop) accessors.
    {CC "getReference",         CC "(" OBJ "J)" OBJ "",   FN_PTR(Unsafe_GetReference)},
    {CC "putReference",         CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReference)},
    {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ,      FN_PTR(Unsafe_GetReferenceVolatile)},
    {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReferenceVolatile)},

    // Valhalla flat-field / flat-array support.
    {CC "isFlatField0",         CC "(" OBJ ")Z",          FN_PTR(Unsafe_IsFlatField)},
    {CC "hasNullMarker0",       CC "(" OBJ ")Z",          FN_PTR(Unsafe_HasNullMarker)},
    {CC "nullMarkerOffset0",    CC "(" OBJ ")I",          FN_PTR(Unsafe_NullMarkerOffset)},
    {CC "arrayLayout0",         CC "(" OBJ_ARR ")I",      FN_PTR(Unsafe_ArrayLayout)},
    {CC "fieldLayout0",         CC "(" OBJ ")I",          FN_PTR(Unsafe_FieldLayout)},
    {CC "newSpecialArray",      CC "(" CLS "II)[" OBJ,    FN_PTR(Unsafe_NewSpecialArray)},
    {CC "getFlatValue",         CC "(" OBJ "JI" CLS ")" OBJ, FN_PTR(Unsafe_GetFlatValue)},
    {CC "putFlatValue",         CC "(" OBJ "JI" CLS OBJ ")V", FN_PTR(Unsafe_PutFlatValue)},
    {CC "valueHeaderSize",       CC "(" CLS ")J",         FN_PTR(Unsafe_ValueHeaderSize)},
    {CC "getUncompressedObject", CC "(" ADR ")" OBJ,  FN_PTR(Unsafe_GetUncompressedObject)},

    // Primitive accessors (plain + volatile), one macro expansion per type.
    DECLARE_GETPUTOOP(Boolean, Z),
    DECLARE_GETPUTOOP(Byte, B),
    DECLARE_GETPUTOOP(Short, S),
    DECLARE_GETPUTOOP(Char, C),
    DECLARE_GETPUTOOP(Int, I),
    DECLARE_GETPUTOOP(Long, J),
    DECLARE_GETPUTOOP(Float, F),
    DECLARE_GETPUTOOP(Double, D),

    // Raw native-memory management.
    {CC "allocateMemory0",    CC "(J)" ADR,              FN_PTR(Unsafe_AllocateMemory0)},
    {CC "reallocateMemory0",  CC "(" ADR "J)" ADR,       FN_PTR(Unsafe_ReallocateMemory0)},
    {CC "freeMemory0",        CC "(" ADR ")V",           FN_PTR(Unsafe_FreeMemory0)},

    // Field/array offset and layout queries.
    {CC "objectFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_ObjectFieldOffset0)},
    {CC "knownObjectFieldOffset0", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_KnownObjectFieldOffset0)},
    {CC "staticFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_StaticFieldOffset0)},
    {CC "staticFieldBase0",   CC "(" FLD ")" OBJ,        FN_PTR(Unsafe_StaticFieldBase0)},
    {CC "ensureClassInitialized0", CC "(" CLS ")V",      FN_PTR(Unsafe_EnsureClassInitialized0)},
    {CC "arrayBaseOffset0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayBaseOffset0)},
    {CC "arrayInstanceBaseOffset0",   CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayInstanceBaseOffset0)},
    {CC "arrayIndexScale0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayIndexScale0)},
    {CC "arrayInstanceIndexScale0",   CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayInstanceIndexScale0)},
    {CC "getFieldMap0",       CC "(Ljava/lang/Class;)[I", FN_PTR(Unsafe_GetFieldMap0)},
    {CC "getObjectSize0",     CC "(Ljava/lang/Object;)J", FN_PTR(Unsafe_GetObjectSize0)},

    // Class definition, allocation, and exception throwing.
    {CC "defineClass0",       CC "(" DC_Args ")" CLS,    FN_PTR(Unsafe_DefineClass0)},
    {CC "allocateInstance",   CC "(" CLS ")" OBJ,        FN_PTR(Unsafe_AllocateInstance)},
    {CC "throwException",     CC "(" THR ")V",           FN_PTR(Unsafe_ThrowException)},
    // Atomic compare-and-set / compare-and-exchange (JSR166 support).
    {CC "compareAndSetReference",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
    {CC "compareAndSetInt",   CC "(" OBJ "J""I""I"")Z",  FN_PTR(Unsafe_CompareAndSetInt)},
    {CC "compareAndSetLong",  CC "(" OBJ "J""J""J"")Z",  FN_PTR(Unsafe_CompareAndSetLong)},
    {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
    {CC "compareAndExchangeInt",  CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
    {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

    // Thread parking (LockSupport).
    {CC "park",               CC "(ZJ)V",                FN_PTR(Unsafe_Park)},
    {CC "unpark",             CC "(" OBJ ")V",           FN_PTR(Unsafe_Unpark)},

    {CC "getLoadAverage0",    CC "([DI)I",               FN_PTR(Unsafe_GetLoadAverage0)},

    // Bulk memory operations and cache writeback.
    {CC "copyMemory0",        CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
    {CC "copySwapMemory0",    CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
    {CC "writeback0",         CC "(" "J" ")V",           FN_PTR(Unsafe_WriteBack0)},
    {CC "writebackPreSync0",  CC "()V",                  FN_PTR(Unsafe_WriteBackPreSync0)},
    {CC "writebackPostSync0", CC "()V",                  FN_PTR(Unsafe_WriteBackPostSync0)},
    {CC "setMemory0",         CC "(" OBJ "JJB)V",        FN_PTR(Unsafe_SetMemory0)},

    // Class-initialization queries and strict-static access notification.
    {CC "shouldBeInitialized0", CC "(" CLS ")Z",         FN_PTR(Unsafe_ShouldBeInitialized0)},
    {CC "notifyStrictStaticAccess0", CC "(" CLS "JZ)V",  FN_PTR(Unsafe_NotifyStrictStaticAccess0)},

    {CC "fullFence",          CC "()V",                  FN_PTR(Unsafe_FullFence)},
};
1168 
1169 #undef CC
1170 #undef FN_PTR
1171 
1172 #undef ADR
1173 #undef LANG
1174 #undef OBJ
1175 #undef CLS
1176 #undef FLD
1177 #undef THR
1178 #undef DC_Args
1179 #undef DAC_Args
1180 
1181 #undef DECLARE_GETPUTOOP
1182 
1183 
1184 // This function is exported, used by NativeLookup.
1185 // The Unsafe_xxx functions above are called only from the interpreter.
1186 // The optimizer looks at names and signatures to recognize
1187 // individual functions.
1188 
1189 JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
1190   ThreadToNativeFromVM ttnfv(thread);
1191 
1192   int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
1193   guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
1194 } JVM_END