/*
 * Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/classLoadInfo.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "jfr/jfrEvents.hpp"
#include "jni.h"
#include "jvm.h"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/flatArrayKlass.hpp"
#include "oops/flatArrayOop.inline.hpp"
#include "oops/inlineKlass.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/klass.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/unsafe.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/globals.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/reflection.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vm_version.hpp"
#include "runtime/vmOperations.hpp"
#include "sanitizers/ub.hpp"
#include "services/threadService.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"

/**
 * Implementation of the jdk.internal.misc.Unsafe class
 */


#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::base_offset_in_bytes(T_DOUBLE) \
    + ((julong)max_jint * sizeof(double)) )

#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

// All memory access methods (e.g. getInt, copyMemory) must use this macro.
// We call these methods "scoped" methods, as access to these methods is
// typically governed by a "scope" (a MemorySessionImpl object), and no
// access is allowed when the scope is no longer alive.
//
// Closing a scope object (cf. scopedMemoryAccess.cpp) can install
// an async exception during a safepoint. When that happens,
// scoped methods are not allowed to touch the underlying memory (as that
// memory might have been released). Therefore, when entering a scoped method
// we check if an async exception has been installed, and return immediately
// if that is the case.
//
// As a rule, we disallow safepoints in the middle of a scoped method.
// If an async exception handshake were installed in such a safepoint,
// memory access might still occur before the handshake is honored by
// the accessing thread.
//
// Corollary: as threads in native state are considered to be at a safepoint,
// scoped methods must NOT be executed while in the native thread state.
// Because of this, there can be no UNSAFE_LEAF_SCOPED.
#define UNSAFE_ENTRY_SCOPED(result_type, header) \
  JVM_ENTRY(static result_type, header) \
  if (thread->has_async_exception_condition()) {return (result_type)0;}

#define UNSAFE_END JVM_END


static inline void* addr_from_java(jlong addr) {
  // This assert fails in a variety of ways on 32-bit systems.
  // It is impossible to predict whether native code that converts
  // pointers to longs will sign-extend or zero-extend the addresses.
  //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  return (void*)(uintptr_t)addr;
}

static inline jlong addr_to_java(void* p) {
  assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  return (uintptr_t)p;
}


// Note: The VM's obj_field and related accessors use byte-scaled
// ("unscaled") offsets, just as the unsafe methods do.

// However, the method Unsafe.fieldOffset explicitly declines to
// guarantee this. The field offset values manipulated by the Java user
// through the Unsafe API are opaque cookies that just happen to be byte
// offsets. We represent this state of affairs by passing the cookies
// through conversion functions when going between the VM and the Unsafe API.
// The conversion functions just happen to be no-ops at present.

static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

static inline int field_offset_from_byte_offset(int byte_offset) {
  return byte_offset;
}

static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != nullptr) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
      assert(p->field_addr<void>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}

static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  assert_field_offset_sane(p, field_offset);
  uintptr_t base_address = cast_from_oop<uintptr_t>(p);
  uintptr_t byte_offset = (uintptr_t)field_offset_to_byte_offset(field_offset);
  return (void*)(base_address + byte_offset);
}

// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}

///// Data read/writes on the Java heap and in native (off-heap) memory

/**
 * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 */
class GuardUnsafeAccess {
  JavaThread* _thread;

public:
  GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
    // native/off-heap access which may raise SIGBUS if accessing
    // memory mapped file data in a region of the file which has
    // been truncated and is now invalid.
    _thread->set_doing_unsafe_access(true);
  }

  ~GuardUnsafeAccess() {
    _thread->set_doing_unsafe_access(false);
  }
};

/**
 * Helper class for accessing memory.
 *
 * Normalizes values and wraps accesses in
 * JavaThread::doing_unsafe_access() if needed.
 */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // can not be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  T get() {
    GuardUnsafeAccess guard(_thread);
    return normalize_for_read(*addr());
  }

  // we use this method at some places for writing to 0 e.g. to cause a crash;
  // ubsan does not know that this is the desired behavior
  ATTRIBUTE_NO_UBSAN
  void put(T x) {
    GuardUnsafeAccess guard(_thread);
    assert(_obj == nullptr || !_obj->is_inline_type() || _obj->mark().is_larval_state(), "must be an object instance or a larval inline type");
    *addr() = normalize_for_write(x);
  }

  T get_volatile() {
    GuardUnsafeAccess guard(_thread);
    volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
    return normalize_for_read(ret);
  }

  void put_volatile(T x) {
    GuardUnsafeAccess guard(_thread);
    RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
  }
};

#ifdef ASSERT
/*
 * Get the field descriptor of the field of the given object at the given offset.
 */
static bool get_field_descriptor(oop p, jlong offset, fieldDescriptor* fd) {
  bool found = false;
  Klass* k = p->klass();
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    found = ik->find_field_from_offset((int)offset, false, fd);
    if (!found && ik->is_mirror_instance_klass()) {
      Klass* k2 = java_lang_Class::as_Klass(p);
      if (k2->is_instance_klass()) {
        ik = InstanceKlass::cast(k2);
        found = ik->find_field_from_offset((int)offset, true, fd);
      }
    }
  }
  return found;
}
#endif // ASSERT

static void assert_and_log_unsafe_value_access(oop p, jlong offset, InlineKlass* vk) {
  Klass* k = p->klass();
#ifdef ASSERT
  if (k->is_instance_klass()) {
    assert_field_offset_sane(p, offset);
    fieldDescriptor fd;
    bool found = get_field_descriptor(p, offset, &fd);
    if (found) {
      assert(found, "value field not found");
      assert(fd.is_flat(), "field not flat");
    } else {
      if (log_is_enabled(Trace, valuetypes)) {
        log_trace(valuetypes)("not a field in %s at offset " UINT64_FORMAT_X,
                              p->klass()->external_name(), (uint64_t)offset);
      }
    }
  } else if (k->is_flatArray_klass()) {
    FlatArrayKlass* vak = FlatArrayKlass::cast(k);
    int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
    address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
    assert(dest == (cast_from_oop<address>(p) + offset), "invalid offset");
  } else {
    ShouldNotReachHere();
  }
#endif // ASSERT
  if (log_is_enabled(Trace, valuetypes)) {
    if (k->is_flatArray_klass()) {
      FlatArrayKlass* vak = FlatArrayKlass::cast(k);
      int index = (offset - vak->array_header_in_bytes()) / vak->element_byte_size();
      address dest = (address)((flatArrayOop)p)->value_at_addr(index, vak->layout_helper());
      log_trace(valuetypes)("%s array type %s index %d element size %d offset " UINT64_FORMAT_X " at " INTPTR_FORMAT,
                            p->klass()->external_name(), vak->external_name(),
                            index, vak->element_byte_size(), (uint64_t)offset, p2i(dest));
    } else {
      log_trace(valuetypes)("%s field type %s at offset " UINT64_FORMAT_X,
                            p->klass()->external_name(), vk->external_name(), (uint64_t)offset);
    }
  }
}

// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  assert(!p->is_inline_type() || p->mark().is_larval_state(), "must be an object instance or a larval inline type");
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ValueHeaderSize(JNIEnv *env, jobject unsafe, jclass c)) {
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(c));
  InlineKlass* vk = InlineKlass::cast(k);
  return vk->payload_offset();
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_IsFlatField(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_is_flat(slot);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_HasNullMarker(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->field_has_null_marker(slot);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_NullMarkerOffset(JNIEnv *env, jobject unsafe, jobject o)) {
  oop f = JNIHandles::resolve_non_null(o);
  Klass* k = java_lang_Class::as_Klass(java_lang_reflect_Field::clazz(f));
  int slot = java_lang_reflect_Field::slot(f);
  return InstanceKlass::cast(k)->null_marker_offset(slot);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_ArrayLayout(JNIEnv *env, jobject unsafe, jarray array)) {
  oop ar = JNIHandles::resolve_non_null(array);
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    return (jint)LayoutKind::REFERENCE;
  } else if (ak->is_flatArray_klass()) {
    return (jint)FlatArrayKlass::cast(ak)->layout_kind();
  } else {
    ShouldNotReachHere();
    return -1;
  }
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_FieldLayout(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  Klass* k = java_lang_Class::as_Klass(mirror);
  int slot = java_lang_reflect_Field::slot(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) != 0) {
    return (jint)LayoutKind::REFERENCE; // static fields are never flat
  } else {
    InstanceKlass* ik = InstanceKlass::cast(k);
    if (ik->field_is_flat(slot)) {
      return (jint)ik->inline_layout_info(slot).kind();
    } else {
      return (jint)LayoutKind::REFERENCE;
    }
  }
} UNSAFE_END

UNSAFE_ENTRY(jarray, Unsafe_NewSpecialArray(JNIEnv *env, jobject unsafe, jclass elmClass, jint len, jint layoutKind)) {
  oop mirror = JNIHandles::resolve_non_null(elmClass);
  Klass* klass = java_lang_Class::as_Klass(mirror);
  klass->initialize(CHECK_NULL);
  if (len < 0) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Array length is negative");
  }
  if (klass->is_array_klass() || klass->is_identity_class()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Element class is not a value class");
  }
  if (klass->is_abstract()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Element class is abstract");
  }
  LayoutKind lk = static_cast<LayoutKind>(layoutKind);
  if (lk <= LayoutKind::REFERENCE || lk >= LayoutKind::UNKNOWN) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Invalid layout kind");
  }
  InlineKlass* vk = InlineKlass::cast(klass);
  // WARNING: the test below will need to be updated when flat layouts that are supported for fields
  // but not for arrays are introduced (NULLABLE_NON_ATOMIC_FLAT, for instance)
  if (!UseArrayFlattening || !vk->is_layout_supported(lk)) {
    THROW_MSG_NULL(vmSymbols::java_lang_UnsupportedOperationException(), "Layout not supported");
  }
  ArrayKlass::ArrayProperties props = ArrayKlass::array_properties_from_layout(lk);
  oop array = oopFactory::new_flatArray(vk, len, props, lk, CHECK_NULL);
  return (jarray) JNIHandles::make_local(THREAD, array);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetFlatValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint layoutKind, jclass vc)) {
  assert(layoutKind != (int)LayoutKind::REFERENCE, "This method handles only flat layouts");
  oop base = JNIHandles::resolve(obj);
  if (base == nullptr) {
    THROW_NULL(vmSymbols::java_lang_NullPointerException());
  }
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  assert_and_log_unsafe_value_access(base, offset, vk);
  LayoutKind lk = (LayoutKind)layoutKind;
  Handle base_h(THREAD, base);
  oop v = vk->read_payload_from_addr(base_h(), offset, lk, CHECK_NULL);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutFlatValue(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint layoutKind, jclass vc, jobject value)) {
  assert(layoutKind != (int)LayoutKind::REFERENCE, "This method handles only flat layouts");
  oop base = JNIHandles::resolve(obj);
  if (base == nullptr) {
    THROW(vmSymbols::java_lang_NullPointerException());
  }
  Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(vc));
  InlineKlass* vk = InlineKlass::cast(k);
  assert(!base->is_inline_type() || base->mark().is_larval_state(), "must be an object instance or a larval inline type");
  assert_and_log_unsafe_value_access(base, offset, vk);
  LayoutKind lk = (LayoutKind)layoutKind;
  oop v = JNIHandles::resolve(value);
  vk->write_value_to_addr(v, ((char*)(oopDesc*)base) + offset, lk, true, CHECK);
} UNSAFE_END

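// Buffered ("larval") value object support: makePrivateBuffer copies the
// payload of an existing value object into a freshly allocated buffer and
// marks that buffer larval, so subsequent Unsafe put operations are allowed
// to mutate it; finishPrivateBuffer clears the larval bit again before the
// buffer is published as a regular (immutable) value object.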
UNSAFE_ENTRY(jobject, Unsafe_MakePrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
  oop v = JNIHandles::resolve_non_null(value);
  assert(v->is_inline_type(), "must be an inline type instance");
  Handle vh(THREAD, v);
  InlineKlass* vk = InlineKlass::cast(v->klass());
  instanceOop new_value = vk->allocate_instance_buffer(CHECK_NULL);
  vk->copy_payload_to_addr(vk->payload_addr(vh()), vk->payload_addr(new_value), LayoutKind::BUFFERED, false);
  markWord mark = new_value->mark();
  new_value->set_mark(mark.enter_larval_state());
  return JNIHandles::make_local(THREAD, new_value);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_FinishPrivateBuffer(JNIEnv *env, jobject unsafe, jobject value)) {
  oop v = JNIHandles::resolve(value);
  assert(v->mark().is_larval_state(), "must be a larval value");
  markWord mark = v->mark();
  v->set_mark(mark.exit_larval_state());
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

#define DEFINE_GETSETOOP(java_type, Type) \
\
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
\
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP.

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
\
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
\
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE

UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END

////// Allocation requests

UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  JvmtiVMObjectAllocEventCollector oam;
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(THREAD, i);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_setmemory() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeSetMemory_stub()(p, sz, value);
    } else {
      Copy::fill_to_memory_atomic(p, sz, value);
    }
  }
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_arraycopy() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
    } else {
      Copy::conjoint_memory_atomic(src, dst, sz);
    }
  }
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
  address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

  {
    GuardUnsafeAccess guard(thread);
    Copy::conjoint_swap(src, dst, sz, esz);
  }
} UNSAFE_END

UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback 0x%p", addr_from_java(line));
  }
#endif

  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback() != nullptr, "sanity");
  (StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
} UNSAFE_END

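// Shared helper for the pre-/post-sync writeback barriers below; dispatches
// to the platform stub generated for data cache writeback synchronization.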
static void doWriteBackSync0(bool is_pre)
{
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback_sync() != nullptr, "sanity");
  (StubRoutines::DataCacheWritebackSync_stub())(is_pre);
}

UNSAFE_LEAF (void, Unsafe_WriteBackPreSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback pre-sync");
  }
#endif

  doWriteBackSync0(true);
} UNSAFE_END

UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback post-sync");
  }
#endif

  doWriteBackSync0(false);
} UNSAFE_END

////// Random queries

// Finds the instance field offset of the field with the matching name, or an error code.
// Error code -1 means the field was not found, -2 means the field is static.
static jlong find_known_instance_field_offset(jclass clazz, jstring name, TRAPS) {
  assert(clazz != nullptr, "clazz must not be null");
  assert(name != nullptr, "name must not be null");

  ResourceMark rm(THREAD);
  char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));

  InstanceKlass* k = java_lang_Class::as_InstanceKlass(JNIHandles::resolve_non_null(clazz));

  jint offset = -1; // Not found
  for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
    Symbol *name = fs.name();
    if (name->equals(utf_name)) {
      if (!fs.access_flags().is_static()) {
        offset = fs.offset();
      } else {
        offset = -2; // A static field
      }
      break;
    }
  }
  if (offset < 0) {
    return offset; // Error code
  }
  return field_offset_from_byte_offset(offset);
}

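// Computes the field offset cookie for a java.lang.reflect.Field. If
// must_be_static is 0 or 1, the field's staticness must match, otherwise
// IllegalArgumentException is thrown; a negative value accepts either kind.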
static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != nullptr, "field must not be null");

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  Klass* k = java_lang_Class::as_Klass(mirror);
  int slot = java_lang_reflect_Field::slot(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_KnownObjectFieldOffset0(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_known_instance_field_offset(c, name, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  // Note: In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject. Thus, the full dynamic
  // range of the return type is never used. However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large. In that last case, this function would return null, since
  // the address would operate alone, without any base pointer.

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_NULL(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(THREAD, mirror);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != nullptr && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != nullptr && klass->should_be_initialized()) {
    return true;
  }

  return false;
}
UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_NotifyStrictStaticAccess0(JNIEnv *env, jobject unsafe, jobject clazz,
                                                    jlong sfoffset, jboolean writing)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != nullptr && klass->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);
    fieldDescriptor fd;
    if (ik->find_local_field_from_offset((int)sfoffset, true, &fd)) {
      // Note: The Unsafe API takes an OFFSET, but the InstanceKlass wants the INDEX.
      // We could surface field indexes into Unsafe, but that's too much churn.
      ik->notify_strict_static_access(fd.index(), writing, CHECK);
      return;
    }
  }
  THROW(vmSymbols::java_lang_InternalError());
}
UNSAFE_END

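// Reports the header size (base) and element size (scale) for the given
// array class. Throws InvalidClassException if the mirror is not an array
// class, and IllegalArgumentException for arrays of concrete value classes,
// which may use more than one layout.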
static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (k == nullptr || !k->is_array_klass()) {
    THROW(vmSymbols::java_lang_InvalidClassException());
  } else if (k->is_typeArray_klass()) {
    TypeArrayKlass* tak = TypeArrayKlass::cast(k);
    base = tak->array_header_in_bytes();
    assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
    scale = (1 << tak->log2_element_size());
  } else if (k->is_objArray_klass()) {
    Klass* ek = ObjArrayKlass::cast(k)->element_klass();
    if (!ek->is_identity_class() && !ek->is_abstract()) {
      // Arrays of a concrete value class type can have multiple layouts.
      // There's no good value to return, so throwing an exception is the way out.
      THROW_MSG(vmSymbols::java_lang_IllegalArgumentException(), "Arrays of a concrete value class don't have a single base and offset");
    }
    base = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
    scale = heapOopSize;
  } else {
    ShouldNotReachHere();
  }
}

UNSAFE_ENTRY(jint, Unsafe_ArrayInstanceBaseOffset0(JNIEnv *env, jobject unsafe, jarray array)) {
  assert(array != nullptr, "array must not be null");
  oop ar = JNIHandles::resolve_non_null(array);
  assert(ar->is_array(), "Must be an array");
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    return arrayOopDesc::base_offset_in_bytes(T_OBJECT);
  } else if (ak->is_flatArray_klass()) {
    FlatArrayKlass* fak = FlatArrayKlass::cast(ak);
    return fak->array_header_in_bytes();
  } else {
    ShouldNotReachHere();
  }
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END


UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out: If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays. Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize) return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_ArrayInstanceIndexScale0(JNIEnv *env, jobject unsafe, jarray array)) {
  assert(array != nullptr, "array must not be null");
  oop ar = JNIHandles::resolve_non_null(array);
  assert(ar->is_array(), "Must be an array");
  ArrayKlass* ak = ArrayKlass::cast(ar->klass());
  if (ak->is_refArray_klass()) {
    return heapOopSize;
  } else if (ak->is_flatArray_klass()) {
    FlatArrayKlass* fak = FlatArrayKlass::cast(ak);
    return fak->element_byte_size();
  } else {
    ShouldNotReachHere();
  }
} UNSAFE_END

UNSAFE_ENTRY(jarray, Unsafe_GetFieldMap0(JNIEnv* env, jobject unsafe, jclass clazz)) {
  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (!k->is_inline_klass()) {
    THROW_MSG_NULL(vmSymbols::java_lang_IllegalArgumentException(), "Argument is not a concrete value class");
  }
  InlineKlass* vk = InlineKlass::cast(k);
  oop map = mirror->obj_field(vk->acmp_maps_offset());
  return (jarray) JNIHandles::make_local(THREAD, map);
} UNSAFE_END


UNSAFE_ENTRY(jlong, Unsafe_GetObjectSize0(JNIEnv* env, jobject o, jobject obj))
  oop p = JNIHandles::resolve(obj);
  return p->size() * HeapWordSize;
UNSAFE_END


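// Throws an instance of the named exception class from native code; if the
// class cannot be resolved, the pending exception is cleared and a
// diagnostic message is printed instead.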
static inline void throw_new(JNIEnv *env, const char *ename) {
  jclass cls = env->FindClass(ename);
  if (env->ExceptionCheck()) {
    env->ExceptionClear();
    tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
    return;
  }

  env->ThrowNew(cls, nullptr);
}

static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = nullptr;
  jclass result = nullptr;
  char buf[128];

  assert(data != nullptr, "Class bytes must not be null");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
  if (body == nullptr) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return nullptr;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionCheck()) {
    goto free_body;
  }

  if (name != nullptr) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
      if (utfName == nullptr) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.') utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}


UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END


UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END

// JSR166 ------------------------------------------------------------------

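// Atomic compare-and-exchange / compare-and-set operations. The reference
// flavors go through the GC access API so that the proper barriers are
// applied; the int and long flavors operate on the raw resolved address.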
UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return JNIHandles::make_local(THREAD, res);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return ret == e;
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x) == e;
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x) == e;
} UNSAFE_END

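// Populates and commits a JFR ThreadPark event; callers pass min_jlong for
// whichever of the timeout/until fields does not apply.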
static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
  assert(event != nullptr, "invariant");
  event->set_parkedClass((obj != nullptr) ? obj->klass() : nullptr);
  event->set_timeout(timeout_nanos);
  event->set_until(until_epoch_millis);
  event->set_address((obj != nullptr) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
  event->commit();
}

UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  if (jthread != nullptr) {
    oop thread_oop = JNIHandles::resolve_non_null(jthread);
    // Get the JavaThread* stored in the java.lang.Thread object _before_
    // the embedded ThreadsListHandle is constructed so we know if the
    // early life stage of the JavaThread* is protected. We use acquire
    // here to ensure that if we see a non-nullptr value, then we also
    // see the main ThreadsList updates from the JavaThread* being added.
    FastThreadsListHandle ftlh(thread_oop, java_lang_Thread::thread_acquire(thread_oop));
    JavaThread* thr = ftlh.protected_java_thread();
    if (thr != nullptr) {
      // The still live JavaThread* is protected by the FastThreadsListHandle
      // so it is safe to access.
      Parker* p = thr->parker();
      HOTSPOT_THREAD_UNPARK((uintptr_t) p);
      p->unpark();
    }
  } // FastThreadsListHandle is destroyed here.
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END


/// JVM_RegisterUnsafeMethods

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define OBJ_ARR "[" OBJ

#define DC_Args  LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

#define DECLARE_GETPUTOOP(Type, Desc) \
  {CC "get" #Type, CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type)}, \
  {CC "put" #Type, CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type)}, \
  {CC "get" #Type "Volatile", CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type##Volatile)}, \
  {CC "put" #Type "Volatile", CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type##Volatile)}


static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
    {CC "getReference", CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetReference)},
    {CC "putReference", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutReference)},
    {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ, FN_PTR(Unsafe_GetReferenceVolatile)},
    {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutReferenceVolatile)},

    {CC "isFlatField0", CC "(" OBJ ")Z", FN_PTR(Unsafe_IsFlatField)},
    {CC "hasNullMarker0", CC "(" OBJ ")Z", FN_PTR(Unsafe_HasNullMarker)},
    {CC "nullMarkerOffset0", CC "(" OBJ ")I", FN_PTR(Unsafe_NullMarkerOffset)},
    {CC "arrayLayout0", CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayLayout)},
    {CC "fieldLayout0", CC "(" OBJ ")I", FN_PTR(Unsafe_FieldLayout)},
    {CC "newSpecialArray", CC "(" CLS "II)[" OBJ, FN_PTR(Unsafe_NewSpecialArray)},
    {CC "getFlatValue", CC "(" OBJ "JI" CLS ")" OBJ, FN_PTR(Unsafe_GetFlatValue)},
    {CC "putFlatValue", CC "(" OBJ "JI" CLS OBJ ")V", FN_PTR(Unsafe_PutFlatValue)},
    {CC "makePrivateBuffer", CC "(" OBJ ")" OBJ, FN_PTR(Unsafe_MakePrivateBuffer)},
    {CC "finishPrivateBuffer", CC "(" OBJ ")" OBJ, FN_PTR(Unsafe_FinishPrivateBuffer)},
    {CC "valueHeaderSize", CC "(" CLS ")J", FN_PTR(Unsafe_ValueHeaderSize)},

    {CC "getUncompressedObject", CC "(" ADR ")" OBJ, FN_PTR(Unsafe_GetUncompressedObject)},

    DECLARE_GETPUTOOP(Boolean, Z),
    DECLARE_GETPUTOOP(Byte, B),
    DECLARE_GETPUTOOP(Short, S),
    DECLARE_GETPUTOOP(Char, C),
    DECLARE_GETPUTOOP(Int, I),
    DECLARE_GETPUTOOP(Long, J),
    DECLARE_GETPUTOOP(Float, F),
    DECLARE_GETPUTOOP(Double, D),

    {CC "allocateMemory0", CC "(J)" ADR, FN_PTR(Unsafe_AllocateMemory0)},
    {CC "reallocateMemory0", CC "(" ADR "J)" ADR, FN_PTR(Unsafe_ReallocateMemory0)},
    {CC "freeMemory0", CC "(" ADR ")V", FN_PTR(Unsafe_FreeMemory0)},

    {CC "objectFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_ObjectFieldOffset0)},
    {CC "knownObjectFieldOffset0", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_KnownObjectFieldOffset0)},
    {CC "staticFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_StaticFieldOffset0)},
    {CC "staticFieldBase0", CC "(" FLD ")" OBJ, FN_PTR(Unsafe_StaticFieldBase0)},
    {CC "ensureClassInitialized0", CC "(" CLS ")V", FN_PTR(Unsafe_EnsureClassInitialized0)},
    {CC "arrayBaseOffset0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayBaseOffset0)},
    {CC "arrayInstanceBaseOffset0", CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayInstanceBaseOffset0)},
    {CC "arrayIndexScale0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayIndexScale0)},
    {CC "arrayInstanceIndexScale0", CC "(" OBJ_ARR ")I", FN_PTR(Unsafe_ArrayInstanceIndexScale0)},
    {CC "getFieldMap0", CC "(Ljava/lang/Class;)[I", FN_PTR(Unsafe_GetFieldMap0)},
    {CC "getObjectSize0", CC "(Ljava/lang/Object;)J", FN_PTR(Unsafe_GetObjectSize0)},

    {CC "defineClass0", CC "(" DC_Args ")" CLS, FN_PTR(Unsafe_DefineClass0)},
    {CC "allocateInstance", CC "(" CLS ")" OBJ, FN_PTR(Unsafe_AllocateInstance)},
    {CC "throwException", CC "(" THR ")V", FN_PTR(Unsafe_ThrowException)},
    {CC "compareAndSetReference", CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
    {CC "compareAndSetInt", CC "(" OBJ "J""I""I"")Z", FN_PTR(Unsafe_CompareAndSetInt)},
    {CC "compareAndSetLong", CC "(" OBJ "J""J""J"")Z", FN_PTR(Unsafe_CompareAndSetLong)},
    {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
    {CC "compareAndExchangeInt", CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
    {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

    {CC "park", CC "(ZJ)V", FN_PTR(Unsafe_Park)},
    {CC "unpark", CC "(" OBJ ")V", FN_PTR(Unsafe_Unpark)},

    {CC "getLoadAverage0", CC "([DI)I", FN_PTR(Unsafe_GetLoadAverage0)},

    {CC "copyMemory0", CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
    {CC "copySwapMemory0", CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
    {CC "writeback0", CC "(" "J" ")V", FN_PTR(Unsafe_WriteBack0)},
    {CC "writebackPreSync0", CC "()V", FN_PTR(Unsafe_WriteBackPreSync0)},
    {CC "writebackPostSync0", CC "()V", FN_PTR(Unsafe_WriteBackPostSync0)},
    {CC "setMemory0", CC "(" OBJ "JJB)V", FN_PTR(Unsafe_SetMemory0)},

    {CC "shouldBeInitialized0", CC "(" CLS ")Z", FN_PTR(Unsafe_ShouldBeInitialized0)},
    {CC "notifyStrictStaticAccess0", CC "(" CLS "JZ)V", FN_PTR(Unsafe_NotifyStrictStaticAccess0)},

    {CC "fullFence", CC "()V", FN_PTR(Unsafe_FullFence)},
};

#undef CC
#undef FN_PTR

#undef ADR
#undef LANG
#undef OBJ
#undef CLS
#undef FLD
#undef THR
#undef DC_Args
#undef DAC_Args

#undef DECLARE_GETPUTOOP


// This function is exported, used by NativeLookup.
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.

JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
  ThreadToNativeFromVM ttnfv(thread);

  int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
  guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
} JVM_END