1 /*
   2  * Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileStream.hpp"
  26 #include "classfile/classLoader.hpp"
  27 #include "classfile/classLoadInfo.hpp"
  28 #include "classfile/javaClasses.inline.hpp"
  29 #include "classfile/systemDictionary.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "jfr/jfrEvents.hpp"
  32 #include "jni.h"
  33 #include "jvm.h"
  34 #include "logging/log.hpp"
  35 #include "logging/logStream.hpp"
  36 #include "memory/allocation.inline.hpp"
  37 #include "memory/oopFactory.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "oops/access.inline.hpp"
  40 #include "oops/fieldStreams.inline.hpp"
  41 #include "oops/flatArrayKlass.hpp"
  42 #include "oops/flatArrayOop.inline.hpp"
  43 #include "oops/inlineKlass.inline.hpp"
  44 #include "oops/instanceKlass.inline.hpp"
  45 #include "oops/klass.inline.hpp"
  46 #include "oops/objArrayOop.inline.hpp"
  47 #include "oops/oop.inline.hpp"
  48 #include "oops/typeArrayOop.inline.hpp"
  49 #include "oops/valuePayload.hpp"
  50 #include "prims/jvmtiExport.hpp"
  51 #include "prims/unsafe.hpp"
  52 #include "runtime/fieldDescriptor.inline.hpp"
  53 #include "runtime/globals.hpp"
  54 #include "runtime/handles.inline.hpp"
  55 #include "runtime/interfaceSupport.inline.hpp"
  56 #include "runtime/javaThread.inline.hpp"
  57 #include "runtime/jniHandles.inline.hpp"
  58 #include "runtime/orderAccess.hpp"
  59 #include "runtime/reflection.hpp"
  60 #include "runtime/sharedRuntime.hpp"
  61 #include "runtime/stubRoutines.hpp"
  62 #include "runtime/threadSMR.hpp"
  63 #include "runtime/vm_version.hpp"
  64 #include "runtime/vmOperations.hpp"
  65 #include "sanitizers/ub.hpp"
  66 #include "services/threadService.hpp"
  67 #include "utilities/align.hpp"
  68 #include "utilities/copy.hpp"
  69 #include "utilities/dtrace.hpp"
  70 #include "utilities/macros.hpp"
  71 
  72 /**
  73  * Implementation of the jdk.internal.misc.Unsafe class
  74  */
  75 
  76 
// Upper bound on any plausible in-object byte offset: the header of a
// jdouble array plus the largest possible element payload. Used by the
// debug-only offset sanity checks below.
#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::base_offset_in_bytes(T_DOUBLE) \
    + ((julong)max_jint * sizeof(double)) )

// Entry point that performs a full VM transition (may safepoint).
#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

// Leaf entry point that stays in native state (must not safepoint).
#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)
  86 
  87 // All memory access methods (e.g. getInt, copyMemory) must use this macro.
  88 // We call these methods "scoped" methods, as access to these methods is
  89 // typically governed by a "scope" (a MemorySessionImpl object), and no
  90 // access is allowed when the scope is no longer alive.
  91 //
  92 // Closing a scope object (cf. scopedMemoryAccess.cpp) can install
  93 // an async exception during a safepoint. When that happens,
  94 // scoped methods are not allowed to touch the underlying memory (as that
  95 // memory might have been released). Therefore, when entering a scoped method
  96 // we check if an async exception has been installed, and return immediately
  97 // if that is the case.
  98 //
  99 // As a rule, we disallow safepoints in the middle of a scoped method.
 100 // If an async exception handshake were installed in such a safepoint,
 101 // memory access might still occur before the handshake is honored by
 102 // the accessing thread.
 103 //
 104 // Corollary: as threads in native state are considered to be at a safepoint,
 105 // scoped methods must NOT be executed while in the native thread state.
 106 // Because of this, there can be no UNSAFE_LEAF_SCOPED.
#define UNSAFE_ENTRY_SCOPED(result_type, header) \
  JVM_ENTRY(static result_type, header) \
  if (thread->has_async_exception_condition()) {return (result_type)0;}

// Common terminator for all of the UNSAFE_* entry macros above.
#define UNSAFE_END JVM_END
 112 
 113 
 114 static inline void* addr_from_java(jlong addr) {
 115   // This assert fails in a variety of ways on 32-bit systems.
 116   // It is impossible to predict whether native code that converts
 117   // pointers to longs will sign-extend or zero-extend the addresses.
 118   //assert(addr == (uintptr_t)addr, "must not be odd high bits");
 119   return (void*)(uintptr_t)addr;
 120 }
 121 
 122 static inline jlong addr_to_java(void* p) {
 123   assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
 124   return (uintptr_t)p;
 125 }
 126 
 127 
 128 // Note: The VM's obj_field and related accessors use byte-scaled
 129 // ("unscaled") offsets, just as the unsafe methods do.
 130 
 131 // However, the method Unsafe.fieldOffset explicitly declines to
 132 // guarantee this.  The field offset values manipulated by the Java user
 133 // through the Unsafe API are opaque cookies that just happen to be byte
 134 // offsets.  We represent this state of affairs by passing the cookies
 135 // through conversion functions when going between the VM and the Unsafe API.
 136 // The conversion functions just happen to be no-ops at present.
 137 
// Converts an Unsafe field-offset cookie into a byte offset.
// Currently the identity function (the cookies happen to be byte offsets).
static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

// Converts a byte offset into an Unsafe field-offset cookie.
// Currently the identity function.
static inline int field_offset_from_byte_offset(int byte_offset) {
  return byte_offset;
}
 145 
// Debug-only check that 'field_offset' plausibly names a location inside
// object 'p'. A null 'p' (absolute/native addressing) is always accepted.
static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != nullptr) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      // Cross-check raw pointer arithmetic against the oop accessor.
      void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
      assert(p->field_addr<void>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr");
    }
    // The offset must also fall strictly within this object's actual size.
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}
 162 
 163 static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
 164   assert_field_offset_sane(p, field_offset);
 165   uintptr_t base_address = cast_from_oop<uintptr_t>(p);
 166   uintptr_t byte_offset  = (uintptr_t)field_offset_to_byte_offset(field_offset);
 167   return (void*)(base_address + byte_offset);
 168 }
 169 
// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
// Like the static helpers above, both are identity functions at present.
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}
 178 
 179 ///// Data read/writes on the Java heap and in native (off-heap) memory
 180 
 181 /**
 182  * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 183  */
class GuardUnsafeAccess {
  JavaThread* _thread;

public:
  // RAII guard: marks '_thread' as performing an unsafe memory access for
  // the duration of the scope, so a fault can be attributed to the Unsafe
  // call rather than being treated as a VM crash.
  GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
    // native/off-heap access which may raise SIGBUS if accessing
    // memory mapped file data in a region of the file which has
    // been truncated and is now invalid.
    _thread->set_doing_unsafe_access(true);
  }

  ~GuardUnsafeAccess() {
    // Leaving the scope: the guarded access has completed (or faulted).
    _thread->set_doing_unsafe_access(false);
  }
};
 199 
 200 /**
 201  * Helper class for accessing memory.
 202  *
 203  * Normalizes values and wraps accesses in
 204  * JavaThread::doing_unsafe_access() if needed.
 205  */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;   // current thread; scopes the GuardUnsafeAccess
  oop _obj;              // base object; null means _offset is an absolute address
  ptrdiff_t _offset;     // byte offset from _obj (or absolute address)

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // can not be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  // Default: all non-boolean values are stored as-is.
  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  // Booleans are canonicalized to 0/1 before being stored.
  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  // Default: all non-boolean values are read as-is.
  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  // Any non-zero stored byte reads back as true.
  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  // Plain (non-volatile) load.
  T get() {
    GuardUnsafeAccess guard(_thread);
    return normalize_for_read(*addr());
  }

  // we use this method at some places for writing to 0 e.g. to cause a crash;
  // ubsan does not know that this is the desired behavior
  ATTRIBUTE_NO_UBSAN
  void put(T x) {
    GuardUnsafeAccess guard(_thread);
    // Value objects are only writable while still in larval (under
    // construction) state.
    assert(_obj == nullptr || !_obj->is_inline_type() || _obj->mark().is_larval_state(), "must be an object instance or a larval inline type");
    *addr() = normalize_for_write(x);
  }

  // Sequentially-consistent load.
  T get_volatile() {
    GuardUnsafeAccess guard(_thread);
    volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
    return normalize_for_read(ret);
  }

  // Sequentially-consistent store.
  void put_volatile(T x) {
    GuardUnsafeAccess guard(_thread);
    RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
  }
};
 272 
 273 #ifdef ASSERT
 274 /*
 275  * Get the field descriptor of the field of the given object at the given offset.
 276  */
 277 static bool get_field_descriptor(oop p, jlong offset, fieldDescriptor* fd) {
 278   bool found = false;
 279   Klass* k = p->klass();
 280   if (k->is_instance_klass()) {
 281     InstanceKlass* ik = InstanceKlass::cast(k);
 282     found = ik->find_field_from_offset((int)offset, false, fd);
 283     if (!found && ik->is_mirror_instance_klass()) {
 284       Klass* k2 = java_lang_Class::as_Klass(p);
 285       if (k2->is_instance_klass()) {
 286         ik = InstanceKlass::cast(k2);
 287         found = ik->find_field_from_offset((int)offset, true, fd);
 288       }
 289     }
 290   }
 291   return found;
 292 }
 293 #endif // ASSERT
 294 
 295 static void assert_and_log_unsafe_value_access(oop p, jlong offset, InlineKlass* vk) {
 296   Klass* k = p->klass();
 297 #ifdef ASSERT
 298   if (k->is_instance_klass()) {
 299     assert_field_offset_sane(p, offset);
 300     fieldDescriptor fd;
 301     bool found = get_field_descriptor(p, offset, &fd);
 302     if (found) {
 303       assert(found, "value field not found");
 304       assert(fd.is_flat(), "field not flat");
 305     } else {
 306       if (log_is_enabled(Trace, valuetypes)) {
 307         log_trace(valuetypes)("not a field in %s at offset " UINT64_FORMAT_X,
 308                               p->klass()->external_name(), (uint64_t)offset);
 309       }
 310     }
 311   } else if (k->is_flatArray_klass()) {
 312     FlatArrayKlass* vak = FlatArrayKlass::cast(k);
 313     int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
 314     address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
 315     assert(dest == (cast_from_oop<address>(p) + offset), "invalid offset");
 316   } else {
 317     ShouldNotReachHere();
 318   }
 319 #endif // ASSERT
 320   if (log_is_enabled(Trace, valuetypes)) {
 321     if (k->is_flatArray_klass()) {
 322       FlatArrayKlass* vak = FlatArrayKlass::cast(k);
 323       int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
 324       address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
 325       log_trace(valuetypes)("%s array type %s index %d element size %d offset " UINT64_FORMAT_X " at " INTPTR_FORMAT,
 326                             p->klass()->external_name(), vak->external_name(),
 327                             index, vak->element_byte_size(), (uint64_t)offset, p2i(dest));
 328     } else {
 329       log_trace(valuetypes)("%s field type %s at offset " UINT64_FORMAT_X,
 330                             p->klass()->external_name(), vk->external_name(), (uint64_t)offset);
 331     }
 332   }
 333 }
 334 
 335 // These functions allow a null base pointer with an arbitrary address.
 336 // But if the base pointer is non-null, the offset should make some sense.
 337 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
// Plain (non-volatile) load of an object reference from 'obj' at 'offset'.
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  // ON_UNKNOWN_OOP_REF: the reference strength of the field is not
  // statically known here.
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 344 
// Plain (non-volatile) store of object reference 'x_h' into 'obj' at 'offset'.
UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  // Value objects may only be written while still in larval state.
  assert(!p->is_inline_type() || p->mark().is_larval_state(), "must be an object instance or a larval inline type");
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END
 352 
// Returns the payload offset of value class 'c', i.e. the size of its header.
UNSAFE_ENTRY(jlong, Unsafe_ValueHeaderSize(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  InlineKlass* vk = InlineKlass::cast(k);
  return vk->payload_offset();
} UNSAFE_END
 358 
// Returns whether the java.lang.reflect.Field 'o' denotes a flat field.
UNSAFE_ENTRY(jboolean, Unsafe_IsFlatField(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_is_flat(slot);
} UNSAFE_END
 365 
// Returns whether the java.lang.reflect.Field 'o' has an associated
// null marker (used by nullable flat layouts).
UNSAFE_ENTRY(jboolean, Unsafe_HasNullMarker(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_has_null_marker(slot);
} UNSAFE_END
 372 
// Returns the byte offset of the null marker for the reflected field 'o'.
UNSAFE_ENTRY(jint, Unsafe_NullMarkerOffset(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_null_marker_offset(slot);
} UNSAFE_END
 379 
// Returns the LayoutKind of the given array: REFERENCE for arrays of
// references, otherwise the flat layout kind of the element payload.
UNSAFE_ENTRY(jint, Unsafe_ArrayLayout(JNIEnv *env, jobject unsafe, jarray array)) {
  oop ar = JNIHandles::resolve_non_null(array);
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    return (jint)LayoutKind::REFERENCE;
  } else if (ak->is_flatArray_klass()) {
    return (jint)FlatArrayKlass::cast(ak)->layout_kind();
  } else {
    // NOTE(review): presumably primitive arrays never reach this entry
    // point -- confirm against the Java-side caller.
    ShouldNotReachHere();
    return -1;
  }
} UNSAFE_END
 392 
 393 UNSAFE_ENTRY(jint, Unsafe_FieldLayout(JNIEnv *env, jobject unsafe, jobject field)) {
 394   assert(field != nullptr, "field must not be null");
 395 
 396   oop reflected   = JNIHandles::resolve_non_null(field);
 397   oop mirror      = java_lang_reflect_Field::clazz(reflected);
 398   Klass* k        = java_lang_Class::as_Klass(mirror);
 399   int slot        = java_lang_reflect_Field::slot(reflected);
 400   int modifiers   = java_lang_reflect_Field::modifiers(reflected);
 401 
 402   if ((modifiers & JVM_ACC_STATIC) != 0) {
 403     return (jint)LayoutKind::REFERENCE; // static fields are never flat
 404   } else {
 405     InstanceKlass* ik = InstanceKlass::cast(k);
 406     if (ik->field_is_flat(slot)) {
 407       return (jint)ik->inline_layout_info(slot).kind();
 408     } else {
 409       return (jint)LayoutKind::REFERENCE;
 410     }
 411   }
 412 } UNSAFE_END
 413 
// Allocates a flat array of 'len' elements of value class 'elmClass' using
// the requested flat layout. Throws IllegalArgumentException for invalid
// arguments and UnsupportedOperationException for unsupported layouts.
UNSAFE_ENTRY(jarray, Unsafe_NewSpecialArray(JNIEnv *env, jobject unsafe, jclass elmClass, jint len, jint layoutKind)) {
  oop mirror = JNIHandles::resolve_non_null(elmClass);
  Klass* klass = java_lang_Class::as_Klass(mirror);
  klass->initialize(CHECK_NULL);
  if (len < 0) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Array length is negative");
  }
  if (klass->is_array_klass() || klass->is_identity_class()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Element class is not a value class");
  }
  if (klass->is_abstract()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Element class is abstract");
  }
  // Only genuine flat layouts are accepted; REFERENCE and out-of-range
  // values are rejected up front.
  LayoutKind lk = static_cast<LayoutKind>(layoutKind);
  if (lk <= LayoutKind::REFERENCE || lk == LayoutKind::NULLABLE_NON_ATOMIC_FLAT || lk >= LayoutKind::UNKNOWN) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Invalid layout kind");
  }
  InlineKlass* vk = InlineKlass::cast(klass);
  // WARNING: test below will need modifications when flat layouts supported for fields
  // but not for arrays are introduced (NULLABLE_NON_ATOMIC_FLAT for instance)
  if (!UseArrayFlattening || !vk->is_layout_supported(lk)) {
    THROW_MSG_NULL(vmSymbols::java_lang_UnsupportedOperationException(), "Layout not supported");
  }
  ArrayProperties props = ArrayKlass::array_properties_from_layout(lk);
  oop array = oopFactory::new_flatArray(vk, len, props, lk, CHECK_NULL);
  return (jarray) JNIHandles::make_local(THREAD, array);
} UNSAFE_END
 441 
// Reads the flat value stored in 'obj' at 'offset' and returns it as a
// heap-buffered instance of value class 'vc'. Handles only flat layouts;
// REFERENCE layouts go through Unsafe_GetReference instead.
UNSAFE_ENTRY(jobject, Unsafe_GetFlatValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint layoutKind, jclass vc)) {
  assert(layoutKind != (int)LayoutKind::UNKNOWN, "Sanity");
  assert(layoutKind != (int)LayoutKind::REFERENCE, "This method handles only flat layouts");
  oop base = JNIHandles::resolve(obj);
  if (base == nullptr) {
    THROW_NULL(vmSymbols::java_lang_NullPointerException());
  }
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  assert_and_log_unsafe_value_access(base, offset, vk);
  LayoutKind lk = (LayoutKind)layoutKind;
  FlatValuePayload payload = FlatValuePayload::construct_from_parts(base, offset, vk, lk);
  oop v = payload.read(CHECK_NULL);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 457 
// Writes value object 'value' (of value class 'vc') in flat form into 'obj'
// at 'offset'. Handles only flat layouts.
UNSAFE_ENTRY(void, Unsafe_PutFlatValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint layoutKind, jclass vc, jobject value)) {
  assert(layoutKind != (int)LayoutKind::UNKNOWN, "Sanity");
  assert(layoutKind != (int)LayoutKind::REFERENCE, "This method handles only flat layouts");
  oop base = JNIHandles::resolve(obj);
  if (base == nullptr) {
    THROW(vmSymbols::java_lang_NullPointerException());
  }

  InlineKlass* vk = InlineKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc)));
  assert_and_log_unsafe_value_access(base, offset, vk);
  LayoutKind lk = (LayoutKind)layoutKind;
  FlatValuePayload payload = FlatValuePayload::construct_from_parts(base, offset, vk, lk);
  payload.write(inlineOop(JNIHandles::resolve(value)), CHECK);
} UNSAFE_END
 472 
// Sequentially-consistent (volatile) variant of Unsafe_GetReference.
UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 479 
// Sequentially-consistent (volatile) variant of Unsafe_PutReference.
// NOTE(review): unlike Unsafe_PutReference, there is no larval-state assert
// here -- confirm this is intentional.
UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END
 486 
// Loads the (uncompressed) oop stored at the given native address.
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
 491 
 492 #define DEFINE_GETSETOOP(java_type, Type) \
 493  \
 494 UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 495   return MemoryAccess<java_type>(thread, obj, offset).get(); \
 496 } UNSAFE_END \
 497  \
 498 UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 499   MemoryAccess<java_type>(thread, obj, offset).put(x); \
 500 } UNSAFE_END \
 501  \
 502 // END DEFINE_GETSETOOP.
 503 
 504 DEFINE_GETSETOOP(jboolean, Boolean)
 505 DEFINE_GETSETOOP(jbyte, Byte)
 506 DEFINE_GETSETOOP(jshort, Short);
 507 DEFINE_GETSETOOP(jchar, Char);
 508 DEFINE_GETSETOOP(jint, Int);
 509 DEFINE_GETSETOOP(jlong, Long);
 510 DEFINE_GETSETOOP(jfloat, Float);
 511 DEFINE_GETSETOOP(jdouble, Double);
 512 
 513 #undef DEFINE_GETSETOOP
 514 
 515 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 516  \
 517 UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 518   return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
 519 } UNSAFE_END \
 520  \
 521 UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 522   MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
 523 } UNSAFE_END \
 524  \
 525 // END DEFINE_GETSETOOP_VOLATILE.
 526 
 527 DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
 528 DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
 529 DEFINE_GETSETOOP_VOLATILE(jshort, Short);
 530 DEFINE_GETSETOOP_VOLATILE(jchar, Char);
 531 DEFINE_GETSETOOP_VOLATILE(jint, Int);
 532 DEFINE_GETSETOOP_VOLATILE(jlong, Long);
 533 DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
 534 DEFINE_GETSETOOP_VOLATILE(jdouble, Double);
 535 
 536 #undef DEFINE_GETSETOOP_VOLATILE
 537 
// Emits a full bidirectional memory fence.
UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END
 541 
 542 ////// Allocation requests
 543 
// Allocates an instance of 'cls' without invoking any constructor.
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  // Collector posts JVMTI VMObjectAlloc events for this allocation if enabled.
  JvmtiVMObjectAllocEventCollector oam;
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(THREAD, i);
} UNSAFE_END
 549 
// Allocates 'size' bytes of native memory and returns the address as a jlong.
// Argument validation is done by the Java-side caller; 'size' arrives
// already rounded to HeapWordSize.
UNSAFE_LEAF(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  // x may be null on failure; returned as address 0.
  return addr_to_java(x);
} UNSAFE_END
 559 
// Resizes a native allocation previously obtained from AllocateMemory0 /
// ReallocateMemory0; returns the (possibly moved) address.
UNSAFE_LEAF(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::realloc(p, sz, mtOther);

  // x may be null on failure; returned as address 0.
  return addr_to_java(x);
} UNSAFE_END
 570 
 571 UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
 572   void* p = addr_from_java(addr);
 573 
 574   os::free(p);
 575 } UNSAFE_END
 576 
// Fills 'size' bytes at (obj, offset) with byte 'value'; a null 'obj' means
// the offset is an absolute native address.
UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_setmemory() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeSetMemory_stub()(p, sz, value);
    } else {
      // Fallback when no platform stub has been generated.
      Copy::fill_to_memory_atomic(p, sz, value);
    }
  }
} UNSAFE_END
 593 
// Copies 'size' bytes from (srcObj, srcOffset) to (dstObj, dstOffset).
// Either base object may be null, in which case the corresponding offset is
// an absolute native address.
UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_arraycopy() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
    } else {
      // Fallback when no platform stub has been generated.
      Copy::conjoint_memory_atomic(src, dst, sz);
    }
  }
} UNSAFE_END
 612 
// Copies 'size' bytes from src to dst while byte-swapping each
// 'elemSize'-sized element (endianness conversion).
UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
  address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

  {
    GuardUnsafeAccess guard(thread);
    Copy::conjoint_swap(src, dst, sz, esz);
  }
} UNSAFE_END
 628 
// Writes back (flushes) the data cache line containing native address 'line'.
UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback 0x%p", addr_from_java(line));
  }
#endif

  // macOS/AArch64: the thread needs execute permission to call the stub.
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback() != nullptr, "sanity");
  (StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
} UNSAFE_END
 641 
// Shared helper for the pre/post writeback-sync entry points: invokes the
// platform data-cache writeback-sync stub with the given pre/post flag.
static void doWriteBackSync0(bool is_pre)
{
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback_sync() != nullptr, "sanity");
  (StubRoutines::DataCacheWritebackSync_stub())(is_pre);
}
 648 
// Barrier executed before a sequence of cache-line writebacks.
UNSAFE_LEAF (void, Unsafe_WriteBackPreSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
      tty->print_cr("Unsafe: writeback pre-sync");
  }
#endif

  doWriteBackSync0(true);
} UNSAFE_END
 659 
 660 UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
 661   assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
 662 #ifdef ASSERT
 663   if (TraceMemoryWriteback) {
 664     tty->print_cr("Unsafe: writeback pre-sync");
 665   }
 666 #endif
 667 
 668   doWriteBackSync0(false);
 669 } UNSAFE_END
 670 
 671 ////// Random queries
 672 
 673 // Finds the object field offset of a field with the matching name, or an error code
 674 // Error code -1 is not found, -2 is static field
 675 static jlong find_known_instance_field_offset(jclass clazz, jstring name, TRAPS) {
 676   assert(clazz != nullptr, "clazz must not be null");
 677   assert(name != nullptr, "name must not be null");
 678 
 679   ResourceMark rm(THREAD);
 680   char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));
 681 
 682   InstanceKlass* k = java_lang_Class::as_InstanceKlass(JNIHandles::resolve_non_null(clazz));
 683 
 684   jint offset = -1; // Not found
 685   for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
 686     Symbol *name = fs.name();
 687     if (name->equals(utf_name)) {
 688       if (!fs.access_flags().is_static()) {
 689         offset = fs.offset();
 690       } else {
 691         offset = -2; // A static field
 692       }
 693       break;
 694     }
 695   }
 696   if (offset < 0) {
 697     return offset; // Error code
 698   }
 699   return field_offset_from_byte_offset(offset);
 700 }
 701 
// Computes the Unsafe offset cookie for the given reflected Field object.
// must_be_static: 1 = require a static field, 0 = require an instance field,
// negative = accept either. Throws IllegalArgumentException on a mismatch.
static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != nullptr, "field must not be null");

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  Klass* k        = java_lang_Class::as_Klass(mirror);
  int slot        = java_lang_reflect_Field::slot(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}
 721 
// Returns the offset cookie for an instance (non-static) field.
UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END
 725 
// Returns the offset cookie for the named instance field of class 'c', or a
// negative error code (-1 not found, -2 static).
UNSAFE_ENTRY(jlong, Unsafe_KnownObjectFieldOffset0(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_known_instance_field_offset(c, name, THREAD);
} UNSAFE_END
 729 
// Returns the offset cookie for a static field.
UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END
 733 
// Returns the base object to which a static field's offset cookie applies
// (here: the declaring class's java.lang.Class mirror).
UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return null, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_NULL(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(THREAD, mirror);
} UNSAFE_END
 755 
 756 UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 757   assert(clazz != nullptr, "clazz must not be null");
 758 
 759   oop mirror = JNIHandles::resolve_non_null(clazz);
 760 
 761   Klass* klass = java_lang_Class::as_Klass(mirror);
 762   if (klass != nullptr && klass->should_be_initialized()) {
 763     InstanceKlass* k = InstanceKlass::cast(klass);
 764     k->initialize(CHECK);
 765   }
 766 }
 767 UNSAFE_END
 768 
 769 UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 770   assert(clazz != nullptr, "clazz must not be null");
 771 
 772   oop mirror = JNIHandles::resolve_non_null(clazz);
 773   Klass* klass = java_lang_Class::as_Klass(mirror);
 774 
 775   if (klass != nullptr && klass->should_be_initialized()) {
 776     return true;
 777   }
 778 
 779   return false;
 780 }
 781 UNSAFE_END
 782 
// Unsafe.notifyStrictStaticAccess0: notify the holder class that one of
// its static fields, identified by static-field offset, is being read
// (writing == false) or written (writing == true). Throws InternalError
// when the class/offset pair does not resolve to a local static field.
UNSAFE_ENTRY(void, Unsafe_NotifyStrictStaticAccess0(JNIEnv *env, jobject unsafe, jobject clazz,
                                                    jlong sfoffset, jboolean writing)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != nullptr && klass->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);
    fieldDescriptor fd;
    // Second argument selects static fields, matching the static-field
    // offset we were handed.
    if (ik->find_local_field_from_offset((int)sfoffset, true, &fd)) {
      // Note: The Unsafe API takes an OFFSET, but the InstanceKlass wants the INDEX.
      // We could surface field indexes into Unsafe, but that's too much churn.
      ik->notify_strict_static_access(fd.index(), writing, CHECK);
      return;
    }
  }
  THROW(vmSymbols::java_lang_InternalError());
}
UNSAFE_END
 803 
 804 static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
 805   assert(clazz != nullptr, "clazz must not be null");
 806 
 807   oop mirror = JNIHandles::resolve_non_null(clazz);
 808   Klass* k = java_lang_Class::as_Klass(mirror);
 809 
 810   if (k == nullptr || !k->is_array_klass()) {
 811     THROW(vmSymbols::java_lang_InvalidClassException());
 812   } else if (k->is_typeArray_klass()) {
 813     TypeArrayKlass* tak = TypeArrayKlass::cast(k);
 814     base  = tak->array_header_in_bytes();
 815     assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
 816     scale = (1 << tak->log2_element_size());
 817   } else if (k->is_objArray_klass()) {
 818     Klass* ek = ObjArrayKlass::cast(k)->element_klass();
 819     if (!ek->is_identity_class() && !ek->is_abstract()) {
 820       // Arrays of a concrete value class type can have multiple layouts
 821       // There's no good value to return, so throwing an exception is the way out
 822       THROW_MSG(vmSymbols::java_lang_IllegalArgumentException(), "Arrays of a concrete value class don't have a single base and offset");
 823     }
 824     base  = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
 825     scale = heapOopSize;
 826   } else {
 827     ShouldNotReachHere();
 828   }
 829 }
 830 
// Unsafe.arrayInstanceBaseOffset0: base offset of element 0 for a concrete
// array INSTANCE (not a class). Needed because arrays of a concrete value
// class can use multiple layouts (see getBaseAndScale below), so the header
// size must be read from the instance's actual klass.
UNSAFE_ENTRY(jint, Unsafe_ArrayInstanceBaseOffset0(JNIEnv *env, jobject unsafe, jarray array)) {
  assert(array != nullptr, "array must not be null");
  oop ar = JNIHandles::resolve_non_null(array);
  assert(ar->is_array(), "Must be an array");
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    // Reference arrays share the ordinary object-array header.
    return arrayOopDesc::base_offset_in_bytes(T_OBJECT);
  } else if (ak->is_flatArray_klass()) {
    FlatArrayKlass* fak = FlatArrayKlass::cast(ak);
    return fak->array_header_in_bytes();
  } else {
    // NOTE(review): primitive arrays are presumably routed through
    // arrayBaseOffset0 instead — only ref and flat arrays reach here.
    ShouldNotReachHere();
  }
} UNSAFE_END
 845 
// Unsafe.arrayBaseOffset0: offset cookie of element 0 for an array class.
// getBaseAndScale throws for non-array classes and for arrays of concrete
// value classes (CHECK_0 propagates the exception, returning 0).
UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END
 852 
 853 
// Unsafe.arrayIndexScale0: distance between consecutive elements of an
// array of the given class, expressed in field-offset-cookie units.
UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out:  If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays.  Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize)  return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END
 874 
// Unsafe.arrayInstanceIndexScale0: element stride in bytes for a concrete
// array INSTANCE. Like arrayInstanceBaseOffset0 above, this consults the
// instance's actual klass because flat arrays can differ per instance.
UNSAFE_ENTRY(jint, Unsafe_ArrayInstanceIndexScale0(JNIEnv *env, jobject unsafe, jarray array)) {
  assert(array != nullptr, "array must not be null");
  oop ar = JNIHandles::resolve_non_null(array);
  assert(ar->is_array(), "Must be an array");
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    // Reference arrays: one (possibly compressed) oop per slot.
    return heapOopSize;
  } else if (ak->is_flatArray_klass()) {
    FlatArrayKlass* fak = FlatArrayKlass::cast(ak);
    return fak->element_byte_size();
  } else {
    // NOTE(review): only ref and flat arrays are expected here, mirroring
    // Unsafe_ArrayInstanceBaseOffset0.
    ShouldNotReachHere();
  }
} UNSAFE_END
 889 
 890 UNSAFE_ENTRY(jarray, Unsafe_GetFieldMap0(JNIEnv* env, jobject unsafe, jclass clazz)) {
 891   oop mirror = JNIHandles::resolve_non_null(clazz);
 892   Klass* k = java_lang_Class::as_Klass(mirror);
 893 
 894   if (!k->is_inline_klass()) {
 895     THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Argument is not a concrete value class");
 896   }
 897   InlineKlass* vk = InlineKlass::cast(k);
 898   oop map = mirror->obj_field(vk->acmp_maps_offset());
 899   return (jarray) JNIHandles::make_local(THREAD, map);
 900 } UNSAFE_END
 901 
 902 
 903 UNSAFE_ENTRY(jlong, Unsafe_GetObjectSize0(JNIEnv* env, jobject o, jobject obj))
 904   oop p = JNIHandles::resolve(obj);
 905   return p->size() * HeapWordSize;
 906 UNSAFE_END
 907 
 908 
 909 static inline void throw_new(JNIEnv *env, const char *ename) {
 910   jclass cls = env->FindClass(ename);
 911   if (env->ExceptionCheck()) {
 912     env->ExceptionClear();
 913     tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
 914     return;
 915   }
 916 
 917   env->ThrowNew(cls, nullptr);
 918 }
 919 
// Define a class from bytes; JNI-side implementation (caller has already
// transitioned to native state). Copies the class bytes into a C-heap
// buffer and the UTF-8 name into a stack or C-heap buffer, converts '.'
// to '/' in the name, then hands off to JVM_DefineClass. Returns the new
// class, or nullptr with a pending exception on failure.
static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = nullptr;
  jclass result = nullptr;
  char buf[128];  // stack buffer avoids a heap allocation for short names

  assert(data != nullptr, "Class bytes must not be null");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
  if (body == nullptr) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return nullptr;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionCheck()) {
    // Bad offset/length: leave the array exception pending, free and bail.
    goto free_body;
  }

  if (name != nullptr) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
      if (utfName == nullptr) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    // NOTE(review): relies on GetStringUTFRegion NUL-terminating the
    // buffer (HotSpot's implementation does) — confirm if porting.
    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    // JVM_DefineClass expects an internal (slash-separated) name.
    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.')   utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}
 977 
 978 
// Unsafe.defineClass0: transition out of the VM state, then delegate to
// the JNI-based implementation above.
UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END
 984 
 985 
// Unsafe.throwException: throw any Throwable on the current thread,
// bypassing checked-exception rules.
UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);  // env->Throw requires native state
  env->Throw(thr);
} UNSAFE_END
 990 
 991 // JSR166 ------------------------------------------------------------------
 992 
 993 UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 994   oop x = JNIHandles::resolve(x_h);
 995   oop e = JNIHandles::resolve(e_h);
 996   oop p = JNIHandles::resolve(obj);
 997   assert_field_offset_sane(p, offset);
 998   oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 999   return JNIHandles::make_local(THREAD, res);
1000 } UNSAFE_END
1001 
// Unsafe.compareAndExchangeInt: atomic 32-bit compare-and-exchange at
// (obj + offset); returns the value observed in memory (the witness).
UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x);
} UNSAFE_END
1007 
// Unsafe.compareAndExchangeLong: atomic 64-bit compare-and-exchange at
// (obj + offset); returns the value observed in memory (the witness).
UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x);
} UNSAFE_END
1013 
1014 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
1015   oop x = JNIHandles::resolve(x_h);
1016   oop e = JNIHandles::resolve(e_h);
1017   oop p = JNIHandles::resolve(obj);
1018   assert_field_offset_sane(p, offset);
1019   oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
1020   return ret == e;
1021 } UNSAFE_END
1022 
// Unsafe.compareAndSetInt: atomic 32-bit CAS at (obj + offset); true iff
// the slot held 'e' and was replaced by 'x'.
UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x) == e;
} UNSAFE_END
1028 
// Unsafe.compareAndSetLong: atomic 64-bit CAS at (obj + offset); true iff
// the slot held 'e' and was replaced by 'x'.
UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x) == e;
} UNSAFE_END
1034 
1035 static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
1036   assert(event != nullptr, "invariant");
1037   event->set_parkedClass((obj != nullptr) ? obj->klass() : nullptr);
1038   event->set_timeout(timeout_nanos);
1039   event->set_until(until_epoch_millis);
1040   event->set_address((obj != nullptr) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
1041   event->commit();
1042 }
1043 
// Unsafe.park: block the current thread until unparked, interrupted, or
// timed out. time == 0 parks indefinitely; otherwise — per the JFR event
// fields below — isAbsolute means 'time' is an epoch deadline in millis,
// and !isAbsolute means a relative timeout in nanos.
UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  // Publish the parked thread state (timed vs untimed) for the duration.
  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        // Absolute deadline goes into the 'until' field.
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        // Relative timeout goes into the 'timeout' field.
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END
1064 
// Unsafe.unpark: make the given thread's park permit available, waking it
// if currently parked. A null thread, or one that has already terminated
// (no protected JavaThread*), is silently ignored.
UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  if (jthread != nullptr) {
    oop thread_oop = JNIHandles::resolve_non_null(jthread);
    // Get the JavaThread* stored in the java.lang.Thread object _before_
    // the embedded ThreadsListHandle is constructed so we know if the
    // early life stage of the JavaThread* is protected. We use acquire
    // here to ensure that if we see a non-nullptr value, then we also
    // see the main ThreadsList updates from the JavaThread* being added.
    FastThreadsListHandle ftlh(thread_oop, java_lang_Thread::thread_acquire(thread_oop));
    JavaThread* thr = ftlh.protected_java_thread();
    if (thr != nullptr) {
      // The still live JavaThread* is protected by the FastThreadsListHandle
      // so it is safe to access.
      Parker* p = thr->parker();
      HOTSPOT_THREAD_UNPARK((uintptr_t) p);
      p->unpark();
    }
  } // FastThreadsListHandle is destroyed here.
} UNSAFE_END
1084 
// Unsafe.getLoadAverage0: copy up to nelem system load-average samples
// into 'loadavg'; returns the number of samples stored, or -1 when load
// averages are unobtainable.
// NOTE(review): nelem is not range-checked here before being passed to
// os::loadavg alongside the 3-element local buffer — presumably the
// Java-side caller guarantees 0 <= nelem <= 3; confirm against Unsafe.java.
UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END
1108 
1109 
/// JVM_RegisterUnsafeMethods

// Shorthand macros for the JNI signature strings used in the registration
// table below; all are #undef'd again immediately after the table.

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define OBJ_ARR "[" OBJ

#define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

// Expands to the four get/put entries (plain and Volatile) for one
// primitive type; Desc is that type's JNI descriptor letter.
#define DECLARE_GETPUTOOP(Type, Desc) \
    {CC "get"  #Type,      CC "(" OBJ "J)" #Desc,                 FN_PTR(Unsafe_Get##Type)}, \
    {CC "put"  #Type,      CC "(" OBJ "J" #Desc ")V",             FN_PTR(Unsafe_Put##Type)}, \
    {CC "get"  #Type "Volatile",      CC "(" OBJ "J)" #Desc,      FN_PTR(Unsafe_Get##Type##Volatile)}, \
    {CC "put"  #Type "Volatile",      CC "(" OBJ "J" #Desc ")V",  FN_PTR(Unsafe_Put##Type##Volatile)}
1134 
1135 
1136 static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
1137     {CC "getReference",         CC "(" OBJ "J)" OBJ "",   FN_PTR(Unsafe_GetReference)},
1138     {CC "putReference",         CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReference)},
1139     {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ,      FN_PTR(Unsafe_GetReferenceVolatile)},
1140     {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReferenceVolatile)},
1141 
1142     {CC "isFlatField0",         CC "(" OBJ ")Z",          FN_PTR(Unsafe_IsFlatField)},
1143     {CC "hasNullMarker0",       CC "(" OBJ ")Z",          FN_PTR(Unsafe_HasNullMarker)},
1144     {CC "nullMarkerOffset0",    CC "(" OBJ ")I",          FN_PTR(Unsafe_NullMarkerOffset)},
1145     {CC "arrayLayout0",         CC "(" OBJ_ARR ")I",      FN_PTR(Unsafe_ArrayLayout)},
1146     {CC "fieldLayout0",         CC "(" OBJ ")I",          FN_PTR(Unsafe_FieldLayout)},
1147     {CC "newSpecialArray",      CC "(" CLS "II)[" OBJ,    FN_PTR(Unsafe_NewSpecialArray)},
1148     {CC "getFlatValue",         CC "(" OBJ "JI" CLS ")" OBJ, FN_PTR(Unsafe_GetFlatValue)},
1149     {CC "putFlatValue",         CC "(" OBJ "JI" CLS OBJ ")V", FN_PTR(Unsafe_PutFlatValue)},
1150     {CC "valueHeaderSize",       CC "(" CLS ")J",         FN_PTR(Unsafe_ValueHeaderSize)},
1151 
1152     {CC "getUncompressedObject", CC "(" ADR ")" OBJ,  FN_PTR(Unsafe_GetUncompressedObject)},
1153 
1154     DECLARE_GETPUTOOP(Boolean, Z),
1155     DECLARE_GETPUTOOP(Byte, B),
1156     DECLARE_GETPUTOOP(Short, S),
1157     DECLARE_GETPUTOOP(Char, C),
1158     DECLARE_GETPUTOOP(Int, I),
1159     DECLARE_GETPUTOOP(Long, J),
1160     DECLARE_GETPUTOOP(Float, F),
1161     DECLARE_GETPUTOOP(Double, D),
1162 
1163     {CC "allocateMemory0",    CC "(J)" ADR,              FN_PTR(Unsafe_AllocateMemory0)},
1164     {CC "reallocateMemory0",  CC "(" ADR "J)" ADR,       FN_PTR(Unsafe_ReallocateMemory0)},
1165     {CC "freeMemory0",        CC "(" ADR ")V",           FN_PTR(Unsafe_FreeMemory0)},
1166 
1167     {CC "objectFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_ObjectFieldOffset0)},
1168     {CC "knownObjectFieldOffset0", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_KnownObjectFieldOffset0)},
1169     {CC "staticFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_StaticFieldOffset0)},
1170     {CC "staticFieldBase0",   CC "(" FLD ")" OBJ,        FN_PTR(Unsafe_StaticFieldBase0)},
1171     {CC "ensureClassInitialized0", CC "(" CLS ")V",      FN_PTR(Unsafe_EnsureClassInitialized0)},
1172     {CC "arrayBaseOffset0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayBaseOffset0)},
1173     {CC "arrayInstanceBaseOffset0",   CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayInstanceBaseOffset0)},
1174     {CC "arrayIndexScale0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayIndexScale0)},
1175     {CC "arrayInstanceIndexScale0",   CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayInstanceIndexScale0)},
1176     {CC "getFieldMap0",       CC "(Ljava/lang/Class;)[I", FN_PTR(Unsafe_GetFieldMap0)},
1177     {CC "getObjectSize0",     CC "(Ljava/lang/Object;)J", FN_PTR(Unsafe_GetObjectSize0)},
1178 
1179     {CC "defineClass0",       CC "(" DC_Args ")" CLS,    FN_PTR(Unsafe_DefineClass0)},
1180     {CC "allocateInstance",   CC "(" CLS ")" OBJ,        FN_PTR(Unsafe_AllocateInstance)},
1181     {CC "throwException",     CC "(" THR ")V",           FN_PTR(Unsafe_ThrowException)},
1182     {CC "compareAndSetReference",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
1183     {CC "compareAndSetInt",   CC "(" OBJ "J""I""I"")Z",  FN_PTR(Unsafe_CompareAndSetInt)},
1184     {CC "compareAndSetLong",  CC "(" OBJ "J""J""J"")Z",  FN_PTR(Unsafe_CompareAndSetLong)},
1185     {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
1186     {CC "compareAndExchangeInt",  CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
1187     {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},
1188 
1189     {CC "park",               CC "(ZJ)V",                FN_PTR(Unsafe_Park)},
1190     {CC "unpark",             CC "(" OBJ ")V",           FN_PTR(Unsafe_Unpark)},
1191 
1192     {CC "getLoadAverage0",    CC "([DI)I",               FN_PTR(Unsafe_GetLoadAverage0)},
1193 
1194     {CC "copyMemory0",        CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
1195     {CC "copySwapMemory0",    CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
1196     {CC "writeback0",         CC "(" "J" ")V",           FN_PTR(Unsafe_WriteBack0)},
1197     {CC "writebackPreSync0",  CC "()V",                  FN_PTR(Unsafe_WriteBackPreSync0)},
1198     {CC "writebackPostSync0", CC "()V",                  FN_PTR(Unsafe_WriteBackPostSync0)},
1199     {CC "setMemory0",         CC "(" OBJ "JJB)V",        FN_PTR(Unsafe_SetMemory0)},
1200 
1201     {CC "shouldBeInitialized0", CC "(" CLS ")Z",         FN_PTR(Unsafe_ShouldBeInitialized0)},
1202     {CC "notifyStrictStaticAccess0", CC "(" CLS "JZ)V",  FN_PTR(Unsafe_NotifyStrictStaticAccess0)},
1203 
1204     {CC "fullFence",          CC "()V",                  FN_PTR(Unsafe_FullFence)},
1205 };
1206 
1207 #undef CC
1208 #undef FN_PTR
1209 
1210 #undef ADR
1211 #undef LANG
1212 #undef OBJ
1213 #undef CLS
1214 #undef FLD
1215 #undef THR
1216 #undef DC_Args
1217 #undef DAC_Args
1218 
1219 #undef DECLARE_GETPUTOOP
1220 
1221 
1222 // This function is exported, used by NativeLookup.
1223 // The Unsafe_xxx functions above are called only from the interpreter.
1224 // The optimizer looks at names and signatures to recognize
1225 // individual functions.
1226 
1227 JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
1228   ThreadToNativeFromVM ttnfv(thread);
1229 
1230   int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
1231   guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
1232 } JVM_END