/*
 * Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/classLoadInfo.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "jfr/jfrEvents.hpp"
#include "jni.h"
#include "jvm.h"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "oops/access.inline.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/flatArrayKlass.hpp"
#include "oops/flatArrayOop.inline.hpp"
#include "oops/inlineKlass.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/klass.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/unsafe.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/globals.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/reflection.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vmOperations.hpp"
#include "runtime/vm_version.hpp"
#include "sanitizers/ub.hpp"
#include "services/threadService.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"

/**
 * Implementation of the jdk.internal.misc.Unsafe class
 */


#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::base_offset_in_bytes(T_DOUBLE) \
    + ((julong)max_jint * sizeof(double)) )

#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

// All memory access methods (e.g. getInt, copyMemory) must use this macro.
// We call these methods "scoped" methods, as access to these methods is
// typically governed by a "scope" (a MemorySessionImpl object), and no
// access is allowed when the scope is no longer alive.
//
// Closing a scope object (cf. scopedMemoryAccess.cpp) can install
// an async exception during a safepoint. When that happens,
// scoped methods are not allowed to touch the underlying memory (as that
// memory might have been released). Therefore, when entering a scoped method
// we check if an async exception has been installed, and return immediately
// if that is the case.
//
// As a rule, we disallow safepoints in the middle of a scoped method.
// If an async exception handshake were installed in such a safepoint,
// memory access might still occur before the handshake is honored by
// the accessing thread.
//
// Corollary: as threads in native state are considered to be at a safepoint,
// scoped methods must NOT be executed while in the native thread state.
// Because of this, there can be no UNSAFE_LEAF_SCOPED.
#define UNSAFE_ENTRY_SCOPED(result_type, header) \
  JVM_ENTRY(static result_type, header) \
  if (thread->has_async_exception_condition()) {return (result_type)0;}

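// For illustration only (the real declarations appear further down in this
// file): a scoped accessor such as Unsafe_CopyMemory0 is declared as
//
//   UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, ...)) {
//     ... // raw memory access, wrapped in GuardUnsafeAccess, no safepoints
//   } UNSAFE_END
//
// and returns immediately (with a zero result for non-void returns) when an
// async exception condition is already pending on the entering thread.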
#define UNSAFE_END JVM_END


static inline void* addr_from_java(jlong addr) {
  // This assert fails in a variety of ways on 32-bit systems.
  // It is impossible to predict whether native code that converts
  // pointers to longs will sign-extend or zero-extend the addresses.
  //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  return (void*)(uintptr_t)addr;
}

static inline jlong addr_to_java(void* p) {
  assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  return (uintptr_t)p;
}


// Note: The VM's obj_field and related accessors use byte-scaled
// ("unscaled") offsets, just as the unsafe methods do.

// However, the method Unsafe.fieldOffset explicitly declines to
// guarantee this.  The field offset values manipulated by the Java user
// through the Unsafe API are opaque cookies that just happen to be byte
// offsets.  We represent this state of affairs by passing the cookies
// through conversion functions when going between the VM and the Unsafe API.
// The conversion functions just happen to be no-ops at present.

static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

static inline int field_offset_from_byte_offset(int byte_offset) {
  return byte_offset;
}

static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != nullptr) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
      assert(p->field_addr<void>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}

static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  assert_field_offset_sane(p, field_offset);
  uintptr_t base_address = cast_from_oop<uintptr_t>(p);
  uintptr_t byte_offset  = (uintptr_t)field_offset_to_byte_offset(field_offset);
  return (void*)(base_address + byte_offset);
}
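// Descriptive note: for off-heap accesses the base oop is null, so
// cast_from_oop<uintptr_t>(p) is 0 and the value returned above is simply the
// raw address that was passed in from Java (e.g. one obtained from
// Unsafe.allocateMemory); with a non-null base the result is an address
// inside that object, as checked by assert_field_offset_sane().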

// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}

///// Data read/writes on the Java heap and in native (off-heap) memory

/**
 * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 */
class GuardUnsafeAccess {
  JavaThread* _thread;

public:
  GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
    // Flag the thread as performing a native/off-heap access, which may
    // raise SIGBUS if it touches memory-mapped file data in a region of
    // the file that has been truncated and is now invalid.
    _thread->set_doing_unsafe_access(true);
  }

  ~GuardUnsafeAccess() {
    _thread->set_doing_unsafe_access(false);
  }
};
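// Typical use (see the accessors below) is to wrap only the raw access itself:
//
//   {
//     GuardUnsafeAccess guard(thread);
//     ... raw load/store/copy that may fault ...
//   }
//
// so that a fault raised inside the block can be attributed by the VM's
// signal handling to an unsafe memory access rather than being treated as a
// VM crash.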

/**
 * Helper class for accessing memory.
 *
 * Normalizes values and wraps accesses in
 * JavaThread::doing_unsafe_access() if needed.
 */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // can not be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  T get() {
    GuardUnsafeAccess guard(_thread);
    return normalize_for_read(*addr());
  }

  // We use this method in some places to write to address 0, e.g. to
  // deliberately cause a crash; UBSan does not know that this is the
  // desired behavior.
  ATTRIBUTE_NO_UBSAN
  void put(T x) {
    GuardUnsafeAccess guard(_thread);
    assert(_obj == nullptr || !_obj->is_inline_type() || _obj->mark().is_larval_state(), "must be an object instance or a larval inline type");
    *addr() = normalize_for_write(x);
  }

  T get_volatile() {
    GuardUnsafeAccess guard(_thread);
    volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
    return normalize_for_read(ret);
  }

  void put_volatile(T x) {
    GuardUnsafeAccess guard(_thread);
    RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
  }
};
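// Usage sketch, matching the DEFINE_GETSETOOP* macros below (jint shown):
//
//   jint v = MemoryAccess<jint>(thread, obj, offset).get();
//   MemoryAccess<jint>(thread, obj, offset).put(x);
//
// The volatile variants perform the access through RawAccess<MO_SEQ_CST>
// instead of a plain load/store.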

#ifdef ASSERT
/*
 * Get the field descriptor of the field of the given object at the given offset.
 */
static bool get_field_descriptor(oop p, jlong offset, fieldDescriptor* fd) {
  bool found = false;
  Klass* k = p->klass();
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    found = ik->find_field_from_offset((int)offset, false, fd);
    if (!found && ik->is_mirror_instance_klass()) {
      Klass* k2 = java_lang_Class::as_Klass(p);
      if (k2->is_instance_klass()) {
        ik = InstanceKlass::cast(k2);
        found = ik->find_field_from_offset((int)offset, true, fd);
      }
    }
  }
  return found;
}
#endif // ASSERT

static void assert_and_log_unsafe_value_access(oop p, jlong offset, InlineKlass* vk) {
  Klass* k = p->klass();
#ifdef ASSERT
  if (k->is_instance_klass()) {
    assert_field_offset_sane(p, offset);
    fieldDescriptor fd;
    bool found = get_field_descriptor(p, offset, &fd);
    if (found) {
      assert(found, "value field not found");
      assert(fd.is_flat(), "field not flat");
    } else {
      if (log_is_enabled(Trace, valuetypes)) {
        log_trace(valuetypes)("not a field in %s at offset " UINT64_FORMAT_X,
                              p->klass()->external_name(), (uint64_t)offset);
      }
    }
  } else if (k->is_flatArray_klass()) {
    FlatArrayKlass* vak = FlatArrayKlass::cast(k);
    int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
    address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
    assert(dest == (cast_from_oop<address>(p) + offset), "invalid offset");
  } else {
    ShouldNotReachHere();
  }
#endif // ASSERT
  if (log_is_enabled(Trace, valuetypes)) {
    if (k->is_flatArray_klass()) {
      FlatArrayKlass* vak = FlatArrayKlass::cast(k);
      int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
      address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
      log_trace(valuetypes)("%s array type %s index %d element size %d offset " UINT64_FORMAT_X " at " INTPTR_FORMAT,
                            p->klass()->external_name(), vak->external_name(),
                            index, vak->element_byte_size(), (uint64_t)offset, p2i(dest));
    } else {
      log_trace(valuetypes)("%s field type %s at offset " UINT64_FORMAT_X,
                            p->klass()->external_name(), vk->external_name(), (uint64_t)offset);
    }
  }
}

// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  assert(!p->is_inline_type() || p->mark().is_larval_state(), "must be an object instance or a larval inline type");
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ValueHeaderSize(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  InlineKlass* vk = InlineKlass::cast(k);
  return vk->first_field_offset();
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_IsFlatField(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_is_flat(slot);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_HasNullMarker(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_has_null_marker(slot);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_NullMarkerOffset(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->null_marker_offset(slot);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_IsFlatArray(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  return k->is_flatArray_klass();
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_UninitializedDefaultValue(JNIEnv *env, jobject unsafe, jclass vc)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  oop v = vk->default_value();
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jclass vc)) {
  oop base = JNIHandles::resolve(obj);
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  assert_and_log_unsafe_value_access(base, offset, vk);
  Handle base_h(THREAD, base);
  // TODO FIXME Hard coded layout kind to make the code compile, Unsafe must be upgraded to handle correct layout kind
  oop v = vk->read_payload_from_addr(base_h(), offset, LayoutKind::PAYLOAD, CHECK_NULL);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jclass vc, jobject value)) {
  oop base = JNIHandles::resolve(obj);
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  assert(!base->is_inline_type() || base->mark().is_larval_state(), "must be an object instance or a larval inline type");
  assert_and_log_unsafe_value_access(base, offset, vk);
  oop v = JNIHandles::resolve(value);
  // TODO FIXME: problem below, with new APIs, null checking depends on LayoutKind, but Unsafe APIs are not able to communicate the right layout kind yet
  // TODO FIXME Hard coded layout kind to make the code compile, Unsafe must be upgraded to handle correct layout kind
  vk->write_value_to_addr(v, ((char*)(oopDesc*)base) + offset, LayoutKind::PAYLOAD, true, CHECK);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_MakePrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
  oop v = JNIHandles::resolve_non_null(value);
  assert(v->is_inline_type(), "must be an inline type instance");
  Handle vh(THREAD, v);
  InlineKlass* vk = InlineKlass::cast(v->klass());
  instanceOop new_value = vk->allocate_instance_buffer(CHECK_NULL);
  vk->copy_payload_to_addr(vk->data_for_oop(vh()), vk->data_for_oop(new_value), LayoutKind::PAYLOAD, false);
  markWord mark = new_value->mark();
  new_value->set_mark(mark.enter_larval_state());
  return JNIHandles::make_local(THREAD, new_value);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_FinishPrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
  oop v = JNIHandles::resolve(value);
  assert(v->mark().is_larval_state(), "must be a larval value");
  markWord mark = v->mark();
  v->set_mark(mark.exit_larval_state());
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END
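// Illustrative Java-side protocol for the two entry points above, as used by
// value-object field updaters (local variable names are made up):
//
//   Object buf = UNSAFE.makePrivateBuffer(value);  // larval copy of the value
//   UNSAFE.putInt(buf, offset, x);                 // mutate the buffer
//   value = UNSAFE.finishPrivateBuffer(buf);       // exit larval state, publish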

UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE

UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END

////// Allocation requests

UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  JvmtiVMObjectAllocEventCollector oam;
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(THREAD, i);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_setmemory() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeSetMemory_stub()(p, sz, value);
    } else {
      Copy::fill_to_memory_atomic(p, sz, value);
    }
  }
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_arraycopy() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
    } else {
      Copy::conjoint_memory_atomic(src, dst, sz);
    }
  }
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
  address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

  {
    GuardUnsafeAccess guard(thread);
    Copy::conjoint_swap(src, dst, sz, esz);
  }
} UNSAFE_END

UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback 0x%p", addr_from_java(line));
  }
#endif

  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback() != nullptr, "sanity");
  (StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
} UNSAFE_END

static void doWriteBackSync0(bool is_pre)
{
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback_sync() != nullptr, "sanity");
  (StubRoutines::DataCacheWritebackSync_stub())(is_pre);
}

UNSAFE_LEAF (void, Unsafe_WriteBackPreSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback pre-sync");
  }
#endif

  doWriteBackSync0(true);
} UNSAFE_END

UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback post-sync");
  }
#endif

  doWriteBackSync0(false);
} UNSAFE_END

////// Random queries

static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
  assert(clazz != nullptr, "clazz must not be null");
  assert(name != nullptr, "name must not be null");

  ResourceMark rm(THREAD);
  char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));

  InstanceKlass* k = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));

  jint offset = -1;
  for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
    Symbol *name = fs.name();
    if (name->equals(utf_name)) {
      offset = fs.offset();
      break;
    }
  }
  if (offset < 0) {
    THROW_0(vmSymbols::java_lang_InternalError());
  }
  return field_offset_from_byte_offset(offset);
}

static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != nullptr, "field must not be null");

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  Klass* k        = java_lang_Class::as_Klass(mirror);
  int slot        = java_lang_reflect_Field::slot(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset1(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_field_offset(c, name, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
  // range of the return type is never used.  However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large.  In that last case, this function would return null, since
  // the address would operate alone, without any base pointer.

  oop reflected   = JNIHandles::resolve_non_null(field);
  oop mirror      = java_lang_reflect_Field::clazz(reflected);
  int modifiers   = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_NULL(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(THREAD, mirror);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != nullptr && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != nullptr && klass->should_be_initialized()) {
    return true;
  }

  return false;
}
UNSAFE_END

static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (k == nullptr || !k->is_array_klass()) {
    THROW(vmSymbols::java_lang_InvalidClassException());
  } else if (k->is_objArray_klass()) {
    base  = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
    scale = heapOopSize;
  } else if (k->is_typeArray_klass()) {
    TypeArrayKlass* tak = TypeArrayKlass::cast(k);
    base  = tak->array_header_in_bytes();
    assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
    scale = (1 << tak->log2_element_size());
  } else if (k->is_flatArray_klass()) {
    FlatArrayKlass* vak = FlatArrayKlass::cast(k);
    InlineKlass* vklass = vak->element_klass();
    base = vak->array_header_in_bytes();
    scale = vak->element_byte_size();
  } else {
    ShouldNotReachHere();
  }
}

UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END


UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out:  If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays.  Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize)  return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END


UNSAFE_ENTRY(jlong, Unsafe_GetObjectSize0(JNIEnv* env, jobject o, jobject obj))
  oop p = JNIHandles::resolve(obj);
  return p->size() * HeapWordSize;
UNSAFE_END


static inline void throw_new(JNIEnv *env, const char *ename) {
  jclass cls = env->FindClass(ename);
  if (env->ExceptionCheck()) {
    env->ExceptionClear();
    tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
    return;
  }

  env->ThrowNew(cls, nullptr);
}

static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = nullptr;
  jclass result = nullptr;
  char buf[128];

  assert(data != nullptr, "Class bytes must not be null");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
  if (body == nullptr) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return nullptr;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionCheck()) {
    goto free_body;
  }

  if (name != nullptr) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
      if (utfName == nullptr) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.')   utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}


UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END


UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END

// JSR166 ------------------------------------------------------------------

UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return JNIHandles::make_local(THREAD, res);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return Atomic::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return Atomic::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return ret == e;
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return Atomic::cmpxchg(addr, e, x) == e;
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return Atomic::cmpxchg(addr, e, x) == e;
} UNSAFE_END

static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
  assert(event != nullptr, "invariant");
  event->set_parkedClass((obj != nullptr) ? obj->klass() : nullptr);
  event->set_timeout(timeout_nanos);
  event->set_until(until_epoch_millis);
  event->set_address((obj != nullptr) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
  event->commit();
}

UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  if (jthread != nullptr) {
    oop thread_oop = JNIHandles::resolve_non_null(jthread);
    // Get the JavaThread* stored in the java.lang.Thread object _before_
    // the embedded ThreadsListHandle is constructed so we know if the
    // early life stage of the JavaThread* is protected. We use acquire
    // here to ensure that if we see a non-nullptr value, then we also
    // see the main ThreadsList updates from the JavaThread* being added.
    FastThreadsListHandle ftlh(thread_oop, java_lang_Thread::thread_acquire(thread_oop));
    JavaThread* thr = ftlh.protected_java_thread();
    if (thr != nullptr) {
      // The still live JavaThread* is protected by the FastThreadsListHandle
      // so it is safe to access.
      Parker* p = thr->parker();
      HOTSPOT_THREAD_UNPARK((uintptr_t) p);
      p->unpark();
    }
  } // FastThreadsListHandle is destroyed here.
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END


/// JVM_RegisterUnsafeMethods

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

#define DECLARE_GETPUTOOP(Type, Desc) \
    {CC "get"  #Type,      CC "(" OBJ "J)" #Desc,                 FN_PTR(Unsafe_Get##Type)}, \
    {CC "put"  #Type,      CC "(" OBJ "J" #Desc ")V",             FN_PTR(Unsafe_Put##Type)}, \
    {CC "get"  #Type "Volatile",      CC "(" OBJ "J)" #Desc,      FN_PTR(Unsafe_Get##Type##Volatile)}, \
    {CC "put"  #Type "Volatile",      CC "(" OBJ "J" #Desc ")V",  FN_PTR(Unsafe_Put##Type##Volatile)}

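// For illustration, DECLARE_GETPUTOOP(Int, I) above produces table entries
// binding the following names and JNI signatures:
//
//   getInt          (Ljava/lang/Object;J)I    -> Unsafe_GetInt
//   putInt          (Ljava/lang/Object;JI)V   -> Unsafe_PutInt
//   getIntVolatile  (Ljava/lang/Object;J)I    -> Unsafe_GetIntVolatile
//   putIntVolatile  (Ljava/lang/Object;JI)V   -> Unsafe_PutIntVolatile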

static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
    {CC "getReference",         CC "(" OBJ "J)" OBJ "",   FN_PTR(Unsafe_GetReference)},
    {CC "putReference",         CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReference)},
    {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ,      FN_PTR(Unsafe_GetReferenceVolatile)},
    {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReferenceVolatile)},

    {CC "isFlatArray",       CC "(" CLS ")Z",             FN_PTR(Unsafe_IsFlatArray)},
    {CC "isFlatField0",      CC "(" OBJ ")Z",             FN_PTR(Unsafe_IsFlatField)},
    {CC "hasNullMarker0",    CC "(" OBJ ")Z",             FN_PTR(Unsafe_HasNullMarker)},
    {CC "nullMarkerOffset0", CC "(" OBJ ")I",             FN_PTR(Unsafe_NullMarkerOffset)},
    {CC "getValue",         CC "(" OBJ "J" CLS ")" OBJ,   FN_PTR(Unsafe_GetValue)},
    {CC "putValue",         CC "(" OBJ "J" CLS OBJ ")V",  FN_PTR(Unsafe_PutValue)},
    {CC "uninitializedDefaultValue", CC "(" CLS ")" OBJ,  FN_PTR(Unsafe_UninitializedDefaultValue)},
    {CC "makePrivateBuffer",     CC "(" OBJ ")" OBJ,      FN_PTR(Unsafe_MakePrivateBuffer)},
    {CC "finishPrivateBuffer",   CC "(" OBJ ")" OBJ,      FN_PTR(Unsafe_FinishPrivateBuffer)},
    {CC "valueHeaderSize",       CC "(" CLS ")J",         FN_PTR(Unsafe_ValueHeaderSize)},

    {CC "getUncompressedObject", CC "(" ADR ")" OBJ,  FN_PTR(Unsafe_GetUncompressedObject)},

    DECLARE_GETPUTOOP(Boolean, Z),
    DECLARE_GETPUTOOP(Byte, B),
    DECLARE_GETPUTOOP(Short, S),
    DECLARE_GETPUTOOP(Char, C),
    DECLARE_GETPUTOOP(Int, I),
    DECLARE_GETPUTOOP(Long, J),
    DECLARE_GETPUTOOP(Float, F),
    DECLARE_GETPUTOOP(Double, D),

    {CC "allocateMemory0",    CC "(J)" ADR,              FN_PTR(Unsafe_AllocateMemory0)},
    {CC "reallocateMemory0",  CC "(" ADR "J)" ADR,       FN_PTR(Unsafe_ReallocateMemory0)},
    {CC "freeMemory0",        CC "(" ADR ")V",           FN_PTR(Unsafe_FreeMemory0)},

    {CC "objectFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_ObjectFieldOffset0)},
    {CC "objectFieldOffset1", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_ObjectFieldOffset1)},
    {CC "staticFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_StaticFieldOffset0)},
    {CC "staticFieldBase0",   CC "(" FLD ")" OBJ,        FN_PTR(Unsafe_StaticFieldBase0)},
    {CC "ensureClassInitialized0", CC "(" CLS ")V",      FN_PTR(Unsafe_EnsureClassInitialized0)},
    {CC "arrayBaseOffset0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayBaseOffset0)},
    {CC "arrayIndexScale0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayIndexScale0)},
    {CC "getObjectSize0",     CC "(Ljava/lang/Object;)J", FN_PTR(Unsafe_GetObjectSize0)},

    {CC "defineClass0",       CC "(" DC_Args ")" CLS,    FN_PTR(Unsafe_DefineClass0)},
    {CC "allocateInstance",   CC "(" CLS ")" OBJ,        FN_PTR(Unsafe_AllocateInstance)},
    {CC "throwException",     CC "(" THR ")V",           FN_PTR(Unsafe_ThrowException)},
    {CC "compareAndSetReference",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
    {CC "compareAndSetInt",   CC "(" OBJ "J""I""I"")Z",  FN_PTR(Unsafe_CompareAndSetInt)},
    {CC "compareAndSetLong",  CC "(" OBJ "J""J""J"")Z",  FN_PTR(Unsafe_CompareAndSetLong)},
    {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
    {CC "compareAndExchangeInt",  CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
    {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

    {CC "park",               CC "(ZJ)V",                FN_PTR(Unsafe_Park)},
    {CC "unpark",             CC "(" OBJ ")V",           FN_PTR(Unsafe_Unpark)},

    {CC "getLoadAverage0",    CC "([DI)I",               FN_PTR(Unsafe_GetLoadAverage0)},

    {CC "copyMemory0",        CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
    {CC "copySwapMemory0",    CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
    {CC "writeback0",         CC "(" "J" ")V",           FN_PTR(Unsafe_WriteBack0)},
    {CC "writebackPreSync0",  CC "()V",                  FN_PTR(Unsafe_WriteBackPreSync0)},
    {CC "writebackPostSync0", CC "()V",                  FN_PTR(Unsafe_WriteBackPostSync0)},
    {CC "setMemory0",         CC "(" OBJ "JJB)V",        FN_PTR(Unsafe_SetMemory0)},

    {CC "shouldBeInitialized0", CC "(" CLS ")Z",         FN_PTR(Unsafe_ShouldBeInitialized0)},

    {CC "fullFence",          CC "()V",                  FN_PTR(Unsafe_FullFence)},
};

#undef CC
#undef FN_PTR

#undef ADR
#undef LANG
#undef OBJ
#undef CLS
#undef FLD
#undef THR
#undef DC_Args
#undef DAC_Args

#undef DECLARE_GETPUTOOP


// This function is exported, used by NativeLookup.
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.

JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
  ThreadToNativeFromVM ttnfv(thread);

  int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
  guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
} JVM_END