1 /*
   2  * Copyright (c) 2000, 2021, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jni.h"
  27 #include "jvm.h"
  28 #include "classfile/classFileStream.hpp"
  29 #include "classfile/classLoader.hpp"
  30 #include "classfile/classLoadInfo.hpp"
  31 #include "classfile/javaClasses.inline.hpp"
  32 #include "classfile/systemDictionary.hpp"
  33 #include "classfile/vmSymbols.hpp"
  34 #include "jfr/jfrEvents.hpp"
  35 #include "memory/allocation.inline.hpp"
  36 #include "memory/resourceArea.hpp"
  37 #include "logging/log.hpp"
  38 #include "logging/logStream.hpp"
  39 #include "oops/access.inline.hpp"
  40 #include "oops/fieldStreams.inline.hpp"
  41 #include "oops/flatArrayKlass.hpp"
  42 #include "oops/flatArrayOop.inline.hpp"
  43 #include "oops/inlineKlass.inline.hpp"
  44 #include "oops/instanceKlass.inline.hpp"
  45 #include "oops/klass.inline.hpp"
  46 #include "oops/objArrayOop.inline.hpp"
  47 #include "oops/oop.inline.hpp"
  48 #include "oops/typeArrayOop.inline.hpp"
  49 #include "prims/unsafe.hpp"
  50 #include "runtime/fieldDescriptor.inline.hpp"
  51 #include "runtime/globals.hpp"
  52 #include "runtime/handles.inline.hpp"
  53 #include "runtime/interfaceSupport.inline.hpp"
  54 #include "runtime/jniHandles.inline.hpp"
  55 #include "runtime/orderAccess.hpp"
  56 #include "runtime/reflection.hpp"
  57 #include "runtime/sharedRuntime.hpp"
  58 #include "runtime/stubRoutines.hpp"
  59 #include "runtime/thread.hpp"
  60 #include "runtime/threadSMR.hpp"
  61 #include "runtime/vmOperations.hpp"
  62 #include "runtime/vm_version.hpp"
  63 #include "services/threadService.hpp"
  64 #include "utilities/align.hpp"
  65 #include "utilities/copy.hpp"
  66 #include "utilities/dtrace.hpp"
  67 #include "utilities/macros.hpp"
  68 
  69 /**
  70  * Implementation of the jdk.internal.misc.Unsafe class
  71  */
  72 
  73 
  74 #define MAX_OBJECT_SIZE \
  75   ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
  76     + ((julong)max_jint * sizeof(double)) )
  77 
  78 
  79 #define UNSAFE_ENTRY(result_type, header) \
  80   JVM_ENTRY(static result_type, header)
  81 
  82 #define UNSAFE_LEAF(result_type, header) \
  83   JVM_LEAF(static result_type, header)
  84 
  85 #define UNSAFE_END JVM_END
  86 
  87 
  88 static inline void* addr_from_java(jlong addr) {
  89   // This assert fails in a variety of ways on 32-bit systems.
  90   // It is impossible to predict whether native code that converts
  91   // pointers to longs will sign-extend or zero-extend the addresses.
  92   //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  93   return (void*)(uintptr_t)addr;
  94 }
  95 
  96 static inline jlong addr_to_java(void* p) {
  97   assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  98   return (uintptr_t)p;
  99 }
 100 
 101 
 102 // Note: The VM's obj_field and related accessors use byte-scaled
 103 // ("unscaled") offsets, just as the unsafe methods do.
 104 
// However, the method Unsafe.objectFieldOffset explicitly declines to
// guarantee this.  The field offset values manipulated by the Java user
 107 // through the Unsafe API are opaque cookies that just happen to be byte
 108 // offsets.  We represent this state of affairs by passing the cookies
 109 // through conversion functions when going between the VM and the Unsafe API.
 110 // The conversion functions just happen to be no-ops at present.
 111 
 112 static inline jlong field_offset_to_byte_offset(jlong field_offset) {
 113   return field_offset;
 114 }
 115 
 116 static inline jlong field_offset_from_byte_offset(jlong byte_offset) {
 117   return byte_offset;
 118 }
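
// Illustrative Java-level view of these cookies (a sketch, not normative;
// Foo.bar and fooInstance are hypothetical, UNSAFE is the usual
// Unsafe.getUnsafe() singleton):
//
//   long cookie = UNSAFE.objectFieldOffset(Foo.class.getDeclaredField("bar"));
//   int  value  = UNSAFE.getInt(fooInstance, cookie);  // the cookie happens to be a byte offset
//
// Callers must treat the cookie as opaque; only the identity conversions above
// keep the two representations interchangeable inside the VM.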
 119 
 120 static inline void assert_field_offset_sane(oop p, jlong field_offset) {
 121 #ifdef ASSERT
 122   jlong byte_offset = field_offset_to_byte_offset(field_offset);
 123 
 124   if (p != NULL) {
 125     assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
 126     if (byte_offset == (jint)byte_offset) {
 127       void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
 128       assert(p->field_addr((jint)byte_offset) == ptr_plus_disp,
 129              "raw [ptr+disp] must be consistent with oop::field_addr");
 130     }
 131     jlong p_size = HeapWordSize * (jlong)(p->size());
 132     assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
 133   }
 134 #endif
 135 }
 136 
 137 static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
 138   assert_field_offset_sane(p, field_offset);
 139   jlong byte_offset = field_offset_to_byte_offset(field_offset);
 140 
 141   if (sizeof(char*) == sizeof(jint)) {   // (this constant folds!)
 142     return cast_from_oop<address>(p) + (jint) byte_offset;
 143   } else {
 144     return cast_from_oop<address>(p) +        byte_offset;
 145   }
 146 }
 147 
 148 // Externally callable versions:
 149 // (Use these in compiler intrinsics which emulate unsafe primitives.)
 150 jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
 151   return field_offset;
 152 }
 153 jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
 154   return byte_offset;
 155 }
 156 
 157 ///// Data read/writes on the Java heap and in native (off-heap) memory
 158 
 159 /**
 160  * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 161  */
 162 class GuardUnsafeAccess {
 163   JavaThread* _thread;
 164 
 165 public:
 166   GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
    // We are about to perform a native/off-heap access, which may raise SIGBUS
    // if it touches memory-mapped file data in a region of the file that has
    // been truncated and is now invalid.
 170     _thread->set_doing_unsafe_access(true);
 171   }
 172 
 173   ~GuardUnsafeAccess() {
 174     _thread->set_doing_unsafe_access(false);
 175   }
 176 };
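
// Typical scoped use, as in MemoryAccess below:
//
//   {
//     GuardUnsafeAccess guard(thread);   // sets doing_unsafe_access()
//     ... raw load/store that may fault ...
//   }                                    // cleared again on scope exit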
 177 
 178 /**
 179  * Helper class for accessing memory.
 180  *
 181  * Normalizes values and wraps accesses in
 182  * JavaThread::doing_unsafe_access() if needed.
 183  */
 184 template <typename T>
 185 class MemoryAccess : StackObj {
 186   JavaThread* _thread;
 187   oop _obj;
 188   ptrdiff_t _offset;
 189 
 190   // Resolves and returns the address of the memory access.
 191   // This raw memory access may fault, so we make sure it happens within the
 192   // guarded scope by making the access volatile at least. Since the store
 193   // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // cannot be reordered by the compiler. Therefore, if the access triggers
 195   // a fault, we will know that Thread::doing_unsafe_access() returns true.
 196   volatile T* addr() {
 197     void* addr = index_oop_from_field_offset_long(_obj, _offset);
 198     return static_cast<volatile T*>(addr);
 199   }
 200 
 201   template <typename U>
 202   U normalize_for_write(U x) {
 203     return x;
 204   }
 205 
 206   jboolean normalize_for_write(jboolean x) {
 207     return x & 1;
 208   }
 209 
 210   template <typename U>
 211   U normalize_for_read(U x) {
 212     return x;
 213   }
 214 
 215   jboolean normalize_for_read(jboolean x) {
 216     return x != 0;
 217   }
 218 
 219 public:
 220   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
 221     : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
 222     assert_field_offset_sane(_obj, offset);
 223   }
 224 
 225   T get() {
 226     if (_obj == NULL) {
 227       GuardUnsafeAccess guard(_thread);
 228       T ret = RawAccess<>::load(addr());
 229       return normalize_for_read(ret);
 230     } else {
 231       T ret = HeapAccess<>::load_at(_obj, _offset);
 232       return normalize_for_read(ret);
 233     }
 234   }
 235 
 236   void put(T x) {
 237     if (_obj == NULL) {
 238       GuardUnsafeAccess guard(_thread);
 239       RawAccess<>::store(addr(), normalize_for_write(x));
 240     } else {
 241       assert(!_obj->is_inline_type() || _obj->mark().is_larval_state(), "must be an object instance or a larval inline type");
 242       HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
 243     }
 244   }
 245 
 246   T get_volatile() {
 247     if (_obj == NULL) {
 248       GuardUnsafeAccess guard(_thread);
 249       volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
 250       return normalize_for_read(ret);
 251     } else {
 252       T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
 253       return normalize_for_read(ret);
 254     }
 255   }
 256 
 257   void put_volatile(T x) {
 258     if (_obj == NULL) {
 259       GuardUnsafeAccess guard(_thread);
 260       RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
 261     } else {
 262       HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
 263     }
 264   }
 265 };
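
// Hypothetical use, mirroring the DEFINE_GETSETOOP expansions further below:
//
//   jint v = MemoryAccess<jint>(thread, obj, offset).get();
//   MemoryAccess<jint>(thread, obj, offset).put_volatile(v);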
 266 
 267 #ifdef ASSERT
 268 /*
 269  * Get the field descriptor of the field of the given object at the given offset.
 270  */
 271 static bool get_field_descriptor(oop p, jlong offset, fieldDescriptor* fd) {
 272   bool found = false;
 273   Klass* k = p->klass();
 274   if (k->is_instance_klass()) {
 275     InstanceKlass* ik = InstanceKlass::cast(k);
 276     found = ik->find_field_from_offset((int)offset, false, fd);
 277     if (!found && ik->is_mirror_instance_klass()) {
 278       Klass* k2 = java_lang_Class::as_Klass(p);
 279       if (k2->is_instance_klass()) {
 280         ik = InstanceKlass::cast(k2);
 281         found = ik->find_field_from_offset((int)offset, true, fd);
 282       }
 283     }
 284   }
 285   return found;
 286 }
 287 #endif // ASSERT
 288 
 289 static void assert_and_log_unsafe_value_access(oop p, jlong offset, InlineKlass* vk) {
 290   Klass* k = p->klass();
 291 #ifdef ASSERT
 292   if (k->is_instance_klass()) {
 293     assert_field_offset_sane(p, offset);
 294     fieldDescriptor fd;
 295     bool found = get_field_descriptor(p, offset, &fd);
    if (found) {
      assert(fd.is_inlined(), "field not flat");
    } else if (log_is_enabled(Trace, valuetypes)) {
      log_trace(valuetypes)("not a field in %s at offset " UINT64_FORMAT_X,
                            p->klass()->external_name(), (uint64_t)offset);
    }
 305   } else if (k->is_flatArray_klass()) {
 306     FlatArrayKlass* vak = FlatArrayKlass::cast(k);
 307     int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
 308     address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
 309     assert(dest == (cast_from_oop<address>(p) + offset), "invalid offset");
 310   } else {
 311     ShouldNotReachHere();
 312   }
 313 #endif // ASSERT
 314   if (log_is_enabled(Trace, valuetypes)) {
 315     if (k->is_flatArray_klass()) {
 316       FlatArrayKlass* vak = FlatArrayKlass::cast(k);
 317       int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
 318       address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
 319       log_trace(valuetypes)("%s array type %s index %d element size %d offset " UINT64_FORMAT_X " at " INTPTR_FORMAT,
 320                             p->klass()->external_name(), vak->external_name(),
 321                             index, vak->element_byte_size(), (uint64_t)offset, p2i(dest));
 322     } else {
 323       log_trace(valuetypes)("%s field type %s at offset " UINT64_FORMAT_X,
 324                             p->klass()->external_name(), vk->external_name(), (uint64_t)offset);
 325     }
 326   }
 327 }
 328 
 329 // These functions allow a null base pointer with an arbitrary address.
 330 // But if the base pointer is non-null, the offset should make some sense.
 331 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 332 UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 333   oop p = JNIHandles::resolve(obj);
 334   assert_field_offset_sane(p, offset);
 335   oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 336   return JNIHandles::make_local(THREAD, v);
 337 } UNSAFE_END
 338 
 339 UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 340   oop x = JNIHandles::resolve(x_h);
 341   oop p = JNIHandles::resolve(obj);
 342   assert_field_offset_sane(p, offset);
 343   assert(!p->is_inline_type() || p->mark().is_larval_state(), "must be an object instance or a larval inline type");
 344   HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
 345 } UNSAFE_END
 346 
 347 UNSAFE_ENTRY(jlong, Unsafe_ValueHeaderSize(JNIEnv *env, jobject unsafe, jclass c)) {
 348   Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
 349   InlineKlass* vk = InlineKlass::cast(k);
 350   return vk->first_field_offset();
 351 } UNSAFE_END
 352 
 353 UNSAFE_ENTRY(jboolean, Unsafe_IsFlattenedArray(JNIEnv *env, jobject unsafe, jclass c)) {
 354   Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
 355   return k->is_flatArray_klass();
 356 } UNSAFE_END
 357 
 358 UNSAFE_ENTRY(jobject, Unsafe_UninitializedDefaultValue(JNIEnv *env, jobject unsafe, jclass vc)) {
 359   Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
 360   InlineKlass* vk = InlineKlass::cast(k);
 361   oop v = vk->default_value();
 362   return JNIHandles::make_local(THREAD, v);
 363 } UNSAFE_END
 364 
 365 UNSAFE_ENTRY(jobject, Unsafe_GetValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jclass vc)) {
 366   oop base = JNIHandles::resolve(obj);
 367   Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
 368   InlineKlass* vk = InlineKlass::cast(k);
 369   assert_and_log_unsafe_value_access(base, offset, vk);
 370   Handle base_h(THREAD, base);
 371   oop v = vk->read_inlined_field(base_h(), offset, CHECK_NULL);
 372   return JNIHandles::make_local(THREAD, v);
 373 } UNSAFE_END
 374 
 375 UNSAFE_ENTRY(void, Unsafe_PutValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jclass vc, jobject value)) {
 376   oop base = JNIHandles::resolve(obj);
 377   Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
 378   InlineKlass* vk = InlineKlass::cast(k);
 379   assert(!base->is_inline_type() || base->mark().is_larval_state(), "must be an object instance or a larval inline type");
 380   assert_and_log_unsafe_value_access(base, offset, vk);
 381   oop v = JNIHandles::resolve(value);
 382   vk->write_inlined_field(base, offset, v, CHECK);
 383 } UNSAFE_END
 384 
 385 UNSAFE_ENTRY(jobject, Unsafe_MakePrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
 386   oop v = JNIHandles::resolve_non_null(value);
 387   assert(v->is_inline_type(), "must be an inline type instance");
 388   Handle vh(THREAD, v);
 389   InlineKlass* vk = InlineKlass::cast(v->klass());
 390   instanceOop new_value = vk->allocate_instance_buffer(CHECK_NULL);
  vk->inline_copy_oop_to_new_oop(vh(), new_value);
 392   markWord mark = new_value->mark();
 393   new_value->set_mark(mark.enter_larval_state());
 394   return JNIHandles::make_local(THREAD, new_value);
 395 } UNSAFE_END
 396 
 397 UNSAFE_ENTRY(jobject, Unsafe_FinishPrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
 398   oop v = JNIHandles::resolve(value);
 399   assert(v->mark().is_larval_state(), "must be a larval value");
 400   markWord mark = v->mark();
 401   v->set_mark(mark.exit_larval_state());
 402   return JNIHandles::make_local(THREAD, v);
 403 } UNSAFE_END
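
// Sketch of the intended larval-buffer protocol at the Java level (method names
// as exposed by jdk.internal.misc.Unsafe in the Valhalla prototype; the offset
// and written value are illustrative):
//
//   T buf  = UNSAFE.makePrivateBuffer(value);    // buffered copy enters the larval state
//   UNSAFE.putInt(buf, offset, 42);              // field writes are allowed while larval
//   T done = UNSAFE.finishPrivateBuffer(buf);    // exits the larval state; treat as immutable again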
 404 
 405 UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 406   oop p = JNIHandles::resolve(obj);
 407   assert_field_offset_sane(p, offset);
 408   oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
 409   return JNIHandles::make_local(THREAD, v);
 410 } UNSAFE_END
 411 
 412 UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
 413   oop x = JNIHandles::resolve(x_h);
 414   oop p = JNIHandles::resolve(obj);
 415   assert_field_offset_sane(p, offset);
 416   HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
 417 } UNSAFE_END
 418 
 419 UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
 420   oop v = *(oop*) (address) addr;
 421   return JNIHandles::make_local(THREAD, v);
 422 } UNSAFE_END
 423 
 424 #define DEFINE_GETSETOOP(java_type, Type) \
 425  \
 426 UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 427   return MemoryAccess<java_type>(thread, obj, offset).get(); \
 428 } UNSAFE_END \
 429  \
 430 UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 431   MemoryAccess<java_type>(thread, obj, offset).put(x); \
 432 } UNSAFE_END \
 433  \
 434 // END DEFINE_GETSETOOP.
 435 
 436 DEFINE_GETSETOOP(jboolean, Boolean)
 437 DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short)
DEFINE_GETSETOOP(jchar, Char)
DEFINE_GETSETOOP(jint, Int)
DEFINE_GETSETOOP(jlong, Long)
DEFINE_GETSETOOP(jfloat, Float)
DEFINE_GETSETOOP(jdouble, Double)
 444 
 445 #undef DEFINE_GETSETOOP
 446 
 447 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 448  \
 449 UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
 450   return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
 451 } UNSAFE_END \
 452  \
 453 UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
 454   MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
 455 } UNSAFE_END \
 456  \
 457 // END DEFINE_GETSETOOP_VOLATILE.
 458 
 459 DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
 460 DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short)
DEFINE_GETSETOOP_VOLATILE(jchar, Char)
DEFINE_GETSETOOP_VOLATILE(jint, Int)
DEFINE_GETSETOOP_VOLATILE(jlong, Long)
DEFINE_GETSETOOP_VOLATILE(jfloat, Float)
DEFINE_GETSETOOP_VOLATILE(jdouble, Double)
 467 
 468 #undef DEFINE_GETSETOOP_VOLATILE
 469 
 470 UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
 471   OrderAccess::acquire();
 472 } UNSAFE_END
 473 
 474 UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
 475   OrderAccess::release();
 476 } UNSAFE_END
 477 
 478 UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
 479   OrderAccess::fence();
 480 } UNSAFE_END
 481 
 482 ////// Allocation requests
 483 
 484 UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
 485   instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
 486   return JNIHandles::make_local(THREAD, i);
 487 } UNSAFE_END
 488 
 489 UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
 490   size_t sz = (size_t)size;
 491 
 492   assert(is_aligned(sz, HeapWordSize), "sz not aligned");
 493 
 494   void* x = os::malloc(sz, mtOther);
 495 
 496   return addr_to_java(x);
 497 } UNSAFE_END
 498 
 499 UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
 500   void* p = addr_from_java(addr);
 501   size_t sz = (size_t)size;
 502 
 503   assert(is_aligned(sz, HeapWordSize), "sz not aligned");
 504 
 505   void* x = os::realloc(p, sz, mtOther);
 506 
 507   return addr_to_java(x);
 508 } UNSAFE_END
 509 
 510 UNSAFE_ENTRY(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
 511   void* p = addr_from_java(addr);
 512 
 513   os::free(p);
 514 } UNSAFE_END
 515 
 516 UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
 517   size_t sz = (size_t)size;
 518 
 519   oop base = JNIHandles::resolve(obj);
 520   void* p = index_oop_from_field_offset_long(base, offset);
 521 
 522   Copy::fill_to_memory_atomic(p, sz, value);
 523 } UNSAFE_END
 524 
 525 UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
 526   size_t sz = (size_t)size;
 527 
 528   oop srcp = JNIHandles::resolve(srcObj);
 529   oop dstp = JNIHandles::resolve(dstObj);
 530 
 531   void* src = index_oop_from_field_offset_long(srcp, srcOffset);
 532   void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
 533   {
 534     GuardUnsafeAccess guard(thread);
 535     if (StubRoutines::unsafe_arraycopy() != NULL) {
 536       MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
 537       StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
 538     } else {
 539       Copy::conjoint_memory_atomic(src, dst, sz);
 540     }
 541   }
 542 } UNSAFE_END
 543 
// This function is declared as a leaf because, when both the source and the destination
// are in native memory, the copy may potentially be very large and we don't want to
// disable GC if we can avoid it. If either the source or the destination (or both) is on
// the heap, the function enters the VM via JVM_ENTRY_FROM_LEAF.
 548 UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
 549   size_t sz = (size_t)size;
 550   size_t esz = (size_t)elemSize;
 551 
 552   if (srcObj == NULL && dstObj == NULL) {
 553     // Both src & dst are in native memory
 554     address src = (address)srcOffset;
 555     address dst = (address)dstOffset;
 556 
 557     {
 558       JavaThread* thread = JavaThread::thread_from_jni_environment(env);
 559       GuardUnsafeAccess guard(thread);
 560       Copy::conjoint_swap(src, dst, sz, esz);
 561     }
 562   } else {
    // At least one of src/dst is on the heap; transition to the VM to access raw pointers.
 564 
 565     JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
 566       oop srcp = JNIHandles::resolve(srcObj);
 567       oop dstp = JNIHandles::resolve(dstObj);
 568 
 569       address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
 570       address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);
 571 
 572       {
 573         GuardUnsafeAccess guard(thread);
 574         Copy::conjoint_swap(src, dst, sz, esz);
 575       }
 576     } JVM_END
 577   }
 578 } UNSAFE_END
 579 
 580 UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
 581   assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
 582 #ifdef ASSERT
 583   if (TraceMemoryWriteback) {
 584     tty->print_cr("Unsafe: writeback 0x%p", addr_from_java(line));
 585   }
 586 #endif
 587 
 588   MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
 589   assert(StubRoutines::data_cache_writeback() != NULL, "sanity");
 590   (StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
 591 } UNSAFE_END
 592 
 593 static void doWriteBackSync0(bool is_pre)
 594 {
 595   MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
 596   assert(StubRoutines::data_cache_writeback_sync() != NULL, "sanity");
 597   (StubRoutines::DataCacheWritebackSync_stub())(is_pre);
 598 }
 599 
 600 UNSAFE_LEAF (void, Unsafe_WriteBackPreSync0(JNIEnv *env, jobject unsafe)) {
 601   assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
 602 #ifdef ASSERT
 603   if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback pre-sync");
 605   }
 606 #endif
 607 
 608   doWriteBackSync0(true);
 609 } UNSAFE_END
 610 
 611 UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
 612   assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
 613 #ifdef ASSERT
 614   if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback post-sync");
 616   }
 617 #endif
 618 
 619   doWriteBackSync0(false);
 620 } UNSAFE_END
 621 
 622 ////// Random queries
 623 
 624 static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
 625   assert(clazz != NULL, "clazz must not be NULL");
 626   assert(name != NULL, "name must not be NULL");
 627 
 628   ResourceMark rm(THREAD);
 629   char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));
 630 
 631   InstanceKlass* k = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));
 632 
 633   jint offset = -1;
 634   for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
 635     Symbol *name = fs.name();
 636     if (name->equals(utf_name)) {
 637       offset = fs.offset();
 638       break;
 639     }
 640   }
 641   if (offset < 0) {
 642     THROW_0(vmSymbols::java_lang_InternalError());
 643   }
 644   return field_offset_from_byte_offset(offset);
 645 }
 646 
 647 static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
 648   assert(field != NULL, "field must not be NULL");
 649 
 650   oop reflected   = JNIHandles::resolve_non_null(field);
 651   oop mirror      = java_lang_reflect_Field::clazz(reflected);
 652   Klass* k        = java_lang_Class::as_Klass(mirror);
 653   int slot        = java_lang_reflect_Field::slot(reflected);
 654   int modifiers   = java_lang_reflect_Field::modifiers(reflected);
 655 
 656   if (must_be_static >= 0) {
 657     int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
 658     if (must_be_static != really_is_static) {
 659       THROW_0(vmSymbols::java_lang_IllegalArgumentException());
 660     }
 661   }
 662 
 663   int offset = InstanceKlass::cast(k)->field_offset(slot);
 664   return field_offset_from_byte_offset(offset);
 665 }
 666 
 667 UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
 668   return find_field_offset(field, 0, THREAD);
 669 } UNSAFE_END
 670 
 671 UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset1(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
 672   return find_field_offset(c, name, THREAD);
 673 } UNSAFE_END
 674 
 675 UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
 676   return find_field_offset(field, 1, THREAD);
 677 } UNSAFE_END
 678 
 679 UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
 680   assert(field != NULL, "field must not be NULL");
 681 
  // Note:  In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject.  Thus, the full dynamic
 684   // range of the return type is never used.  However, some implementations
 685   // might put the static field inside an array shared by many classes,
 686   // or even at a fixed address, in which case the address could be quite
 687   // large.  In that last case, this function would return NULL, since
 688   // the address would operate alone, without any base pointer.
 689 
 690   oop reflected   = JNIHandles::resolve_non_null(field);
 691   oop mirror      = java_lang_reflect_Field::clazz(reflected);
 692   int modifiers   = java_lang_reflect_Field::modifiers(reflected);
 693 
 694   if ((modifiers & JVM_ACC_STATIC) == 0) {
 695     THROW_0(vmSymbols::java_lang_IllegalArgumentException());
 696   }
 697 
 698   return JNIHandles::make_local(THREAD, mirror);
 699 } UNSAFE_END
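
// Illustrative Java-level pairing of the two static-field queries (a sketch; in
// this VM the base happens to be the class mirror, but callers must not rely on
// that, per the note above):
//
//   Object base = UNSAFE.staticFieldBase(f);
//   long   off  = UNSAFE.staticFieldOffset(f);
//   int    v    = UNSAFE.getInt(base, off);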
 700 
 701 UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 702   assert(clazz != NULL, "clazz must not be NULL");
 703 
 704   oop mirror = JNIHandles::resolve_non_null(clazz);
 705 
 706   Klass* klass = java_lang_Class::as_Klass(mirror);
 707   if (klass != NULL && klass->should_be_initialized()) {
 708     InstanceKlass* k = InstanceKlass::cast(klass);
 709     k->initialize(CHECK);
 710   }
 711 }
 712 UNSAFE_END
 713 
 714 UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
 715   assert(clazz != NULL, "clazz must not be NULL");
 716 
 717   oop mirror = JNIHandles::resolve_non_null(clazz);
 718   Klass* klass = java_lang_Class::as_Klass(mirror);
 719 
 720   if (klass != NULL && klass->should_be_initialized()) {
 721     return true;
 722   }
 723 
 724   return false;
 725 }
 726 UNSAFE_END
 727 
 728 static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
 729   assert(clazz != NULL, "clazz must not be NULL");
 730 
 731   oop mirror = JNIHandles::resolve_non_null(clazz);
 732   Klass* k = java_lang_Class::as_Klass(mirror);
 733 
 734   if (k == NULL || !k->is_array_klass()) {
 735     THROW(vmSymbols::java_lang_InvalidClassException());
 736   } else if (k->is_objArray_klass()) {
 737     base  = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
 738     scale = heapOopSize;
 739   } else if (k->is_typeArray_klass()) {
 740     TypeArrayKlass* tak = TypeArrayKlass::cast(k);
 741     base  = tak->array_header_in_bytes();
 742     assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
 743     scale = (1 << tak->log2_element_size());
 744   } else if (k->is_flatArray_klass()) {
 745     FlatArrayKlass* vak = FlatArrayKlass::cast(k);
 746     InlineKlass* vklass = vak->element_klass();
 747     base = vak->array_header_in_bytes();
 748     scale = vak->element_byte_size();
 749   } else {
 750     ShouldNotReachHere();
 751   }
 752 }
 753 
 754 UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
 755   int base = 0, scale = 0;
 756   getBaseAndScale(base, scale, clazz, CHECK_0);
 757 
 758   return field_offset_from_byte_offset(base);
 759 } UNSAFE_END
 760 
 761 
 762 UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
 763   int base = 0, scale = 0;
 764   getBaseAndScale(base, scale, clazz, CHECK_0);
 765 
 766   // This VM packs both fields and array elements down to the byte.
 767   // But watch out:  If this changes, so that array references for
 768   // a given primitive type (say, T_BOOLEAN) use different memory units
 769   // than fields, this method MUST return zero for such arrays.
 770   // For example, the VM used to store sub-word sized fields in full
 771   // words in the object layout, so that accessors like getByte(Object,int)
 772   // did not really do what one might expect for arrays.  Therefore,
 773   // this function used to report a zero scale factor, so that the user
 774   // would know not to attempt to access sub-word array elements.
 775   // // Code for unpacked fields:
 776   // if (scale < wordSize)  return 0;
 777 
 778   // The following allows for a pretty general fieldOffset cookie scheme,
 779   // but requires it to be linear in byte offset.
 780   return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
 781 } UNSAFE_END
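
// Illustrative Java-level use of the two array queries (a sketch; it relies on
// the linear-cookie assumption noted above):
//
//   long base  = UNSAFE.arrayBaseOffset(int[].class);
//   long scale = UNSAFE.arrayIndexScale(int[].class);
//   int  v     = UNSAFE.getInt(array, base + (long)index * scale);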
 782 
 783 
 784 UNSAFE_ENTRY(jlong, Unsafe_GetObjectSize0(JNIEnv* env, jobject o, jobject obj))
 785   oop p = JNIHandles::resolve(obj);
 786   return p->size() * HeapWordSize;
 787 UNSAFE_END
 788 
 789 
 790 static inline void throw_new(JNIEnv *env, const char *ename) {
 791   jclass cls = env->FindClass(ename);
 792   if (env->ExceptionCheck()) {
 793     env->ExceptionClear();
 794     tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
 795     return;
 796   }
 797 
 798   env->ThrowNew(cls, NULL);
 799 }
 800 
 801 static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
 802   // Code lifted from JDK 1.3 ClassLoader.c
 803 
 804   jbyte *body;
 805   char *utfName = NULL;
 806   jclass result = 0;
 807   char buf[128];
 808 
 809   assert(data != NULL, "Class bytes must not be NULL");
 810   assert(length >= 0, "length must not be negative: %d", length);
 811 
 812   if (UsePerfData) {
 813     ClassLoader::unsafe_defineClassCallCounter()->inc();
 814   }
 815 
 816   body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
 817   if (body == NULL) {
 818     throw_new(env, "java/lang/OutOfMemoryError");
 819     return 0;
 820   }
 821 
 822   env->GetByteArrayRegion(data, offset, length, body);
 823   if (env->ExceptionOccurred()) {
 824     goto free_body;
 825   }
 826 
 827   if (name != NULL) {
 828     uint len = env->GetStringUTFLength(name);
 829     int unicode_len = env->GetStringLength(name);
 830 
 831     if (len >= sizeof(buf)) {
 832       utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
 833       if (utfName == NULL) {
 834         throw_new(env, "java/lang/OutOfMemoryError");
 835         goto free_body;
 836       }
 837     } else {
 838       utfName = buf;
 839     }
 840 
 841     env->GetStringUTFRegion(name, 0, unicode_len, utfName);
 842 
 843     for (uint i = 0; i < len; i++) {
 844       if (utfName[i] == '.')   utfName[i] = '/';
 845     }
 846   }
 847 
 848   result = JVM_DefineClass(env, utfName, loader, body, length, pd);
 849 
 850   if (utfName && utfName != buf) {
 851     FREE_C_HEAP_ARRAY(char, utfName);
 852   }
 853 
 854  free_body:
 855   FREE_C_HEAP_ARRAY(jbyte, body);
 856   return result;
 857 }
 858 
 859 
 860 UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
 861   ThreadToNativeFromVM ttnfv(thread);
 862 
 863   return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
 864 } UNSAFE_END
 865 
 866 
 867 UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
 868   ThreadToNativeFromVM ttnfv(thread);
 869   env->Throw(thr);
 870 } UNSAFE_END
 871 
 872 // JSR166 ------------------------------------------------------------------
 873 
 874 UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 875   oop x = JNIHandles::resolve(x_h);
 876   oop e = JNIHandles::resolve(e_h);
 877   oop p = JNIHandles::resolve(obj);
 878   assert_field_offset_sane(p, offset);
 879   oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 880   return JNIHandles::make_local(THREAD, res);
 881 } UNSAFE_END
 882 
 883 UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
 884   oop p = JNIHandles::resolve(obj);
 885   if (p == NULL) {
 886     volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
 887     return RawAccess<>::atomic_cmpxchg(addr, e, x);
 888   } else {
 889     assert_field_offset_sane(p, offset);
 890     return HeapAccess<>::atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 891   }
 892 } UNSAFE_END
 893 
 894 UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 895   oop p = JNIHandles::resolve(obj);
 896   if (p == NULL) {
 897     volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
 898     return RawAccess<>::atomic_cmpxchg(addr, e, x);
 899   } else {
 900     assert_field_offset_sane(p, offset);
 901     return HeapAccess<>::atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 902   }
 903 } UNSAFE_END
 904 
 905 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
 906   oop x = JNIHandles::resolve(x_h);
 907   oop e = JNIHandles::resolve(e_h);
 908   oop p = JNIHandles::resolve(obj);
 909   assert_field_offset_sane(p, offset);
 910   oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
 911   return ret == e;
 912 } UNSAFE_END
 913 
 914 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
 915   oop p = JNIHandles::resolve(obj);
 916   if (p == NULL) {
 917     volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
 918     return RawAccess<>::atomic_cmpxchg(addr, e, x) == e;
 919   } else {
 920     assert_field_offset_sane(p, offset);
 921     return HeapAccess<>::atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x) == e;
 922   }
 923 } UNSAFE_END
 924 
 925 UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
 926   oop p = JNIHandles::resolve(obj);
 927   if (p == NULL) {
 928     volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
 929     return RawAccess<>::atomic_cmpxchg(addr, e, x) == e;
 930   } else {
 931     assert_field_offset_sane(p, offset);
 932     return HeapAccess<>::atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x) == e;
 933   }
 934 } UNSAFE_END
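
// Java-level shape of these primitives (a sketch): the compareAndSet* methods
// report whether the witnessed value equaled the expected one, while the
// compareAndExchange* methods return the witnessed value itself.
//
//   boolean ok      = UNSAFE.compareAndSetInt(o, off, expected, update);
//   int     witness = UNSAFE.compareAndExchangeInt(o, off, expected, update);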
 935 
 936 static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
 937   assert(event != NULL, "invariant");
 938   assert(event->should_commit(), "invariant");
 939   event->set_parkedClass((obj != NULL) ? obj->klass() : NULL);
 940   event->set_timeout(timeout_nanos);
 941   event->set_until(until_epoch_millis);
 942   event->set_address((obj != NULL) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
 943   event->commit();
 944 }
 945 
 946 UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
 947   HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
 948   EventThreadPark event;
 949 
 950   JavaThreadParkedState jtps(thread, time != 0);
 951   thread->parker()->park(isAbsolute != 0, time);
 952   if (event.should_commit()) {
 953     const oop obj = thread->current_park_blocker();
 954     if (time == 0) {
 955       post_thread_park_event(&event, obj, min_jlong, min_jlong);
 956     } else {
 957       if (isAbsolute != 0) {
 958         post_thread_park_event(&event, obj, min_jlong, time);
 959       } else {
 960         post_thread_park_event(&event, obj, time, min_jlong);
 961       }
 962     }
 963   }
 964   HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
 965 } UNSAFE_END
 966 
 967 UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
 968   if (jthread != NULL) {
 969     ThreadsListHandle tlh;
 970     JavaThread* thr = NULL;
 971     oop java_thread = NULL;
 972     (void) tlh.cv_internal_thread_to_JavaThread(jthread, &thr, &java_thread);
 973     if (java_thread != NULL) {
 974       // This is a valid oop.
 975       if (thr != NULL) {
 976         // The JavaThread is alive.
 977         Parker* p = thr->parker();
 978         HOTSPOT_THREAD_UNPARK((uintptr_t) p);
 979         p->unpark();
 980       }
 981     }
 982   } // ThreadsListHandle is destroyed here.
 983 
 984 } UNSAFE_END
 985 
 986 UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
 987   const int max_nelem = 3;
 988   double la[max_nelem];
 989   jint ret;
 990 
 991   typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
 992   assert(a->is_typeArray(), "must be type array");
 993 
 994   ret = os::loadavg(la, nelem);
 995   if (ret == -1) {
 996     return -1;
 997   }
 998 
 999   // if successful, ret is the number of samples actually retrieved.
1000   assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
1001   switch(ret) {
1002     case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
1003     case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
1004     case 1: a->double_at_put(0, (jdouble)la[0]); break;
1005   }
1006 
1007   return ret;
1008 } UNSAFE_END
1009 
1010 
1011 /// JVM_RegisterUnsafeMethods
1012 
1013 #define ADR "J"
1014 
1015 #define LANG "Ljava/lang/"
1016 
1017 #define OBJ LANG "Object;"
1018 #define CLS LANG "Class;"
1019 #define FLD LANG "reflect/Field;"
1020 #define THR LANG "Throwable;"
1021 
1022 #define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
1023 #define DAC_Args CLS "[B[" OBJ
1024 
1025 #define CC (char*)  /*cast a literal from (const char*)*/
1026 #define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)
1027 
1028 #define DECLARE_GETPUTOOP(Type, Desc) \
1029     {CC "get"  #Type,      CC "(" OBJ "J)" #Desc,                 FN_PTR(Unsafe_Get##Type)}, \
1030     {CC "put"  #Type,      CC "(" OBJ "J" #Desc ")V",             FN_PTR(Unsafe_Put##Type)}, \
1031     {CC "get"  #Type "Volatile",      CC "(" OBJ "J)" #Desc,      FN_PTR(Unsafe_Get##Type##Volatile)}, \
1032     {CC "put"  #Type "Volatile",      CC "(" OBJ "J" #Desc ")V",  FN_PTR(Unsafe_Put##Type##Volatile)}
1033 
1034 
1035 static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
1036     {CC "getReference",         CC "(" OBJ "J)" OBJ "",   FN_PTR(Unsafe_GetReference)},
1037     {CC "putReference",         CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReference)},
1038     {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ,      FN_PTR(Unsafe_GetReferenceVolatile)},
1039     {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V",  FN_PTR(Unsafe_PutReferenceVolatile)},
1040 
1041     {CC "isFlattenedArray", CC "(" CLS ")Z",                     FN_PTR(Unsafe_IsFlattenedArray)},
1042     {CC "getValue",         CC "(" OBJ "J" CLS ")" OBJ,          FN_PTR(Unsafe_GetValue)},
1043     {CC "putValue",         CC "(" OBJ "J" CLS OBJ ")V",         FN_PTR(Unsafe_PutValue)},
1044     {CC "uninitializedDefaultValue", CC "(" CLS ")" OBJ,         FN_PTR(Unsafe_UninitializedDefaultValue)},
1045     {CC "makePrivateBuffer",     CC "(" OBJ ")" OBJ,             FN_PTR(Unsafe_MakePrivateBuffer)},
1046     {CC "finishPrivateBuffer",   CC "(" OBJ ")" OBJ,             FN_PTR(Unsafe_FinishPrivateBuffer)},
1047     {CC "valueHeaderSize",       CC "(" CLS ")J",                FN_PTR(Unsafe_ValueHeaderSize)},
1048 
1049     {CC "getUncompressedObject", CC "(" ADR ")" OBJ,  FN_PTR(Unsafe_GetUncompressedObject)},
1050 
1051     DECLARE_GETPUTOOP(Boolean, Z),
1052     DECLARE_GETPUTOOP(Byte, B),
1053     DECLARE_GETPUTOOP(Short, S),
1054     DECLARE_GETPUTOOP(Char, C),
1055     DECLARE_GETPUTOOP(Int, I),
1056     DECLARE_GETPUTOOP(Long, J),
1057     DECLARE_GETPUTOOP(Float, F),
1058     DECLARE_GETPUTOOP(Double, D),
1059 
1060     {CC "allocateMemory0",    CC "(J)" ADR,              FN_PTR(Unsafe_AllocateMemory0)},
1061     {CC "reallocateMemory0",  CC "(" ADR "J)" ADR,       FN_PTR(Unsafe_ReallocateMemory0)},
1062     {CC "freeMemory0",        CC "(" ADR ")V",           FN_PTR(Unsafe_FreeMemory0)},
1063 
1064     {CC "objectFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_ObjectFieldOffset0)},
1065     {CC "objectFieldOffset1", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_ObjectFieldOffset1)},
1066     {CC "staticFieldOffset0", CC "(" FLD ")J",           FN_PTR(Unsafe_StaticFieldOffset0)},
1067     {CC "staticFieldBase0",   CC "(" FLD ")" OBJ,        FN_PTR(Unsafe_StaticFieldBase0)},
1068     {CC "ensureClassInitialized0", CC "(" CLS ")V",      FN_PTR(Unsafe_EnsureClassInitialized0)},
1069     {CC "arrayBaseOffset0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayBaseOffset0)},
1070     {CC "arrayIndexScale0",   CC "(" CLS ")I",           FN_PTR(Unsafe_ArrayIndexScale0)},
1071     {CC "getObjectSize0",     CC "(Ljava/lang/Object;)J", FN_PTR(Unsafe_GetObjectSize0)},
1072 
1073     {CC "defineClass0",       CC "(" DC_Args ")" CLS,    FN_PTR(Unsafe_DefineClass0)},
1074     {CC "allocateInstance",   CC "(" CLS ")" OBJ,        FN_PTR(Unsafe_AllocateInstance)},
1075     {CC "throwException",     CC "(" THR ")V",           FN_PTR(Unsafe_ThrowException)},
1076     {CC "compareAndSetReference",CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
1077     {CC "compareAndSetInt",   CC "(" OBJ "J""I""I"")Z",  FN_PTR(Unsafe_CompareAndSetInt)},
1078     {CC "compareAndSetLong",  CC "(" OBJ "J""J""J"")Z",  FN_PTR(Unsafe_CompareAndSetLong)},
1079     {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
1080     {CC "compareAndExchangeInt",  CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
1081     {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},
1082 
1083     {CC "park",               CC "(ZJ)V",                FN_PTR(Unsafe_Park)},
1084     {CC "unpark",             CC "(" OBJ ")V",           FN_PTR(Unsafe_Unpark)},
1085 
1086     {CC "getLoadAverage0",    CC "([DI)I",               FN_PTR(Unsafe_GetLoadAverage0)},
1087 
1088     {CC "copyMemory0",        CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
1089     {CC "copySwapMemory0",    CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
1090     {CC "writeback0",         CC "(" "J" ")V",           FN_PTR(Unsafe_WriteBack0)},
1091     {CC "writebackPreSync0",  CC "()V",                  FN_PTR(Unsafe_WriteBackPreSync0)},
1092     {CC "writebackPostSync0", CC "()V",                  FN_PTR(Unsafe_WriteBackPostSync0)},
1093     {CC "setMemory0",         CC "(" OBJ "JJB)V",        FN_PTR(Unsafe_SetMemory0)},
1094 
1095     {CC "shouldBeInitialized0", CC "(" CLS ")Z",         FN_PTR(Unsafe_ShouldBeInitialized0)},
1096 
1097     {CC "loadFence",          CC "()V",                  FN_PTR(Unsafe_LoadFence)},
1098     {CC "storeFence",         CC "()V",                  FN_PTR(Unsafe_StoreFence)},
1099     {CC "fullFence",          CC "()V",                  FN_PTR(Unsafe_FullFence)},
1100 };
1101 
1102 #undef CC
1103 #undef FN_PTR
1104 
1105 #undef ADR
1106 #undef LANG
1107 #undef OBJ
1108 #undef CLS
1109 #undef FLD
1110 #undef THR
1111 #undef DC_Args
1112 #undef DAC_Args
1113 
1114 #undef DECLARE_GETPUTOOP
1115 
1116 
1117 // This function is exported, used by NativeLookup.
1118 // The Unsafe_xxx functions above are called only from the interpreter.
1119 // The optimizer looks at names and signatures to recognize
1120 // individual functions.
1121 
1122 JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
1123   ThreadToNativeFromVM ttnfv(thread);
1124 
1125   int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
1126   guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
1127 } JVM_END