/*
 * Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/classLoadInfo.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "jfr/jfrEvents.hpp"
#include "jni.h"
#include "jvm.h"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/klass.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/unsafe.hpp"
#include "runtime/globals.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/javaThread.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/reflection.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vm_version.hpp"
#include "runtime/vmOperations.hpp"
#include "sanitizers/ub.hpp"
#include "services/threadService.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"

/**
 * Implementation of the jdk.internal.misc.Unsafe class
 */


#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::base_offset_in_bytes(T_DOUBLE) \
  + ((julong)max_jint * sizeof(double)) )

#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

// All memory access methods (e.g. getInt, copyMemory) must use this macro.
// We call these methods "scoped" methods, as access to these methods is
// typically governed by a "scope" (a MemorySessionImpl object), and no
// access is allowed when the scope is no longer alive.
//
// Closing a scope object (cf. scopedMemoryAccess.cpp) can install
// an async exception during a safepoint. When that happens,
// scoped methods are not allowed to touch the underlying memory (as that
// memory might have been released). Therefore, when entering a scoped method
// we check if an async exception has been installed, and return immediately
// if that is the case.
//
// As a rule, we disallow safepoints in the middle of a scoped method.
// If an async exception handshake were installed in such a safepoint,
// memory access might still occur before the handshake is honored by
// the accessing thread.
//
// Corollary: as threads in native state are considered to be at a safepoint,
// scoped methods must NOT be executed while in the native thread state.
// Because of this, there can be no UNSAFE_LEAF_SCOPED.
#define UNSAFE_ENTRY_SCOPED(result_type, header) \
  JVM_ENTRY(static result_type, header) \
  if (thread->has_async_exception_condition()) {return (result_type)0;}

#define UNSAFE_END JVM_END
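
// For illustration only: the scoped primitive accessors defined by the
// DEFINE_GETSETOOP* macros further below expand to entries of roughly this
// shape (a sketch, not additional code):
//
//   UNSAFE_ENTRY_SCOPED(jint, Unsafe_GetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
//     return MemoryAccess<jint>(thread, obj, offset).get();
//   } UNSAFE_END
//
// i.e. each scoped entry first performs the async exception check injected by
// UNSAFE_ENTRY_SCOPED and only then touches memory via MemoryAccess.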


static inline void* addr_from_java(jlong addr) {
  // This assert fails in a variety of ways on 32-bit systems.
  // It is impossible to predict whether native code that converts
  // pointers to longs will sign-extend or zero-extend the addresses.
  //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  return (void*)(uintptr_t)addr;
}

static inline jlong addr_to_java(void* p) {
  assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  return (uintptr_t)p;
}


// Note: The VM's obj_field and related accessors use byte-scaled
// ("unscaled") offsets, just as the unsafe methods do.

// However, the method Unsafe.fieldOffset explicitly declines to
// guarantee this. The field offset values manipulated by the Java user
// through the Unsafe API are opaque cookies that just happen to be byte
// offsets. We represent this state of affairs by passing the cookies
// through conversion functions when going between the VM and the Unsafe API.
// The conversion functions just happen to be no-ops at present.

static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

static inline int field_offset_from_byte_offset(int byte_offset) {
  return byte_offset;
}

static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != nullptr) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
      assert(p->field_addr<void>((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}

static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  assert_field_offset_sane(p, field_offset);
  uintptr_t base_address = cast_from_oop<uintptr_t>(p);
  uintptr_t byte_offset = (uintptr_t)field_offset_to_byte_offset(field_offset);
  return (void*)(base_address + byte_offset);
}
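
// Note: when the base oop p is null, cast_from_oop<uintptr_t>(p) is 0, so the
// pointer returned above is simply the absolute native address encoded in
// field_offset. This is what lets the same accessors serve both on-heap
// (base + offset) and off-heap (null base, raw address) accesses; see also
// the comment above Unsafe_GetReference below.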

// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}


///// Data read/writes on the Java heap and in native (off-heap) memory

/**
 * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 */
class GuardUnsafeAccess {
  JavaThread* _thread;

public:
  GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
    // A native/off-heap access may raise SIGBUS if it touches
    // memory-mapped file data in a region of the file which has
    // been truncated and is now invalid.
    _thread->set_doing_unsafe_access(true);
  }

  ~GuardUnsafeAccess() {
    _thread->set_doing_unsafe_access(false);
  }
};
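
// Typical use (see MemoryAccess below and the copy/set entry points further
// down): the raw access is wrapped in a block scope so that
// JavaThread::doing_unsafe_access() is true for exactly the duration of the
// access, e.g.
//
//   {
//     GuardUnsafeAccess guard(thread);
//     ... raw memory access ...
//   }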

/**
 * Helper class for accessing memory.
 *
 * Normalizes values and wraps accesses in
 * JavaThread::doing_unsafe_access() if needed.
 */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // cannot be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  T get() {
    GuardUnsafeAccess guard(_thread);
    return normalize_for_read(*addr());
  }

  // This method is used in some places to write to address 0, e.g. to provoke
  // an intentional crash; ubsan does not know that this is the desired behavior.
  ATTRIBUTE_NO_UBSAN
  void put(T x) {
    GuardUnsafeAccess guard(_thread);
    *addr() = normalize_for_write(x);
  }


  T get_volatile() {
    GuardUnsafeAccess guard(_thread);
    volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
    return normalize_for_read(ret);
  }

  void put_volatile(T x) {
    GuardUnsafeAccess guard(_thread);
    RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
  }
};

// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(THREAD, v);
} UNSAFE_END

#define DEFINE_GETSETOOP(java_type, Type) \
\
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
\
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP.

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
\
UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
\
UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
\
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE

UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END

////// Allocation requests

UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  JvmtiVMObjectAllocEventCollector oam;
  instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(cls), CHECK_NULL);
  return JNIHandles::make_local(THREAD, i);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;

  assert(is_aligned(sz, HeapWordSize), "sz not aligned");

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_setmemory() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeSetMemory_stub()(p, sz, value);
    } else {
      Copy::fill_to_memory_atomic(p, sz, value);
    }
  }
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
  {
    GuardUnsafeAccess guard(thread);
    if (StubRoutines::unsafe_arraycopy() != nullptr) {
      MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
      StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
    } else {
      Copy::conjoint_memory_atomic(src, dst, sz);
    }
  }
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
  address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

  {
    GuardUnsafeAccess guard(thread);
    Copy::conjoint_swap(src, dst, sz, esz);
  }
} UNSAFE_END

UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback 0x%p", addr_from_java(line));
  }
#endif

  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback() != nullptr, "sanity");
  (StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
} UNSAFE_END

static void doWriteBackSync0(bool is_pre)
{
  MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
  assert(StubRoutines::data_cache_writeback_sync() != nullptr, "sanity");
  (StubRoutines::DataCacheWritebackSync_stub())(is_pre);
}

UNSAFE_LEAF (void, Unsafe_WriteBackPreSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback pre-sync");
  }
#endif

  doWriteBackSync0(true);
} UNSAFE_END

UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
  assert(VM_Version::supports_data_cache_line_flush(), "should not get here");
#ifdef ASSERT
  if (TraceMemoryWriteback) {
    tty->print_cr("Unsafe: writeback post-sync");
  }
#endif

  doWriteBackSync0(false);
} UNSAFE_END

////// Random queries

// Finds the instance field offset of the field with the matching name, or
// returns an error code: -1 if the field is not found, -2 if it is a static field.
static jlong find_known_instance_field_offset(jclass clazz, jstring name, TRAPS) {
  assert(clazz != nullptr, "clazz must not be null");
  assert(name != nullptr, "name must not be null");

  ResourceMark rm(THREAD);
  char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));

  InstanceKlass* k = java_lang_Class::as_InstanceKlass(JNIHandles::resolve_non_null(clazz));

  jint offset = -1; // Not found
  for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
    Symbol *name = fs.name();
    if (name->equals(utf_name)) {
      if (!fs.access_flags().is_static()) {
        offset = fs.offset();
      } else {
        offset = -2; // A static field
      }
      break;
    }
  }
  if (offset < 0) {
    return offset; // Error code
  }
  return field_offset_from_byte_offset(offset);
}

static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != nullptr, "field must not be null");

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  Klass* k = java_lang_Class::as_Klass(mirror);
  int slot = java_lang_reflect_Field::slot(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_KnownObjectFieldOffset0(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_known_instance_field_offset(c, name, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != nullptr, "field must not be null");

  // Note: In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject. Thus, the full dynamic
  // range of the return type is never used. However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large. In that last case, this function would return null, since
  // the address would operate alone, without any base pointer.

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_NULL(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(THREAD, mirror);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != nullptr && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != nullptr && klass->should_be_initialized()) {
    return true;
  }

  return false;
}
UNSAFE_END

static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
  assert(clazz != nullptr, "clazz must not be null");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (k == nullptr || !k->is_array_klass()) {
    THROW(vmSymbols::java_lang_InvalidClassException());
  } else if (k->is_objArray_klass()) {
    base = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
    scale = heapOopSize;
  } else if (k->is_typeArray_klass()) {
    TypeArrayKlass* tak = TypeArrayKlass::cast(k);
    base = tak->array_header_in_bytes();
    assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
    scale = (1 << tak->log2_element_size());
  } else {
    ShouldNotReachHere();
  }
}

UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END


UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out: If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays. Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize) return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END
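
// Worked example (illustrative only; the concrete values depend on the
// platform and on settings such as compressed oops/headers): for a jlong[]
// the base offset is arrayOopDesc::base_offset_in_bytes(T_LONG) -- commonly
// 16 bytes -- and the scale is sizeof(jlong) == 8, so element i of the array
// lives at base + i * scale bytes from the start of the array object. Java
// callers are expected to combine arrayBaseOffset0/arrayIndexScale0 in
// exactly this way when addressing elements via the get*/put* methods above.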


static inline void throw_new(JNIEnv *env, const char *ename) {
  jclass cls = env->FindClass(ename);
  if (env->ExceptionCheck()) {
    env->ExceptionClear();
    tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
    return;
  }

  env->ThrowNew(cls, nullptr);
}

static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = nullptr;
  jclass result = nullptr;
  char buf[128];

  assert(data != nullptr, "Class bytes must not be null");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
  if (body == nullptr) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return nullptr;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionCheck()) {
    goto free_body;
  }

  if (name != nullptr) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
      if (utfName == nullptr) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.') utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}


UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END


UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END

// JSR166 ------------------------------------------------------------------

UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return JNIHandles::make_local(THREAD, res);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(p, (ptrdiff_t)offset, e, x);
  return ret == e;
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x) == e;
} UNSAFE_END

UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
  return AtomicAccess::cmpxchg(addr, e, x) == e;
} UNSAFE_END

static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
  assert(event != nullptr, "invariant");
  event->set_parkedClass((obj != nullptr) ? obj->klass() : nullptr);
  event->set_timeout(timeout_nanos);
  event->set_until(until_epoch_millis);
  event->set_address((obj != nullptr) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
  event->commit();
}
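
// In the park events posted below, min_jlong marks a field as "not
// applicable": an indefinite park (time == 0) reports neither a timeout nor
// a deadline, an absolute park reports only the epoch-millis deadline, and a
// relative park reports only the timeout in nanoseconds.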

UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  if (jthread != nullptr) {
    oop thread_oop = JNIHandles::resolve_non_null(jthread);
    // Get the JavaThread* stored in the java.lang.Thread object _before_
    // the embedded ThreadsListHandle is constructed so we know if the
    // early life stage of the JavaThread* is protected. We use acquire
    // here to ensure that if we see a non-nullptr value, then we also
    // see the main ThreadsList updates from the JavaThread* being added.
    FastThreadsListHandle ftlh(thread_oop, java_lang_Thread::thread_acquire(thread_oop));
    JavaThread* thr = ftlh.protected_java_thread();
    if (thr != nullptr) {
      // The still live JavaThread* is protected by the FastThreadsListHandle
      // so it is safe to access.
      Parker* p = thr->parker();
      HOTSPOT_THREAD_UNPARK((uintptr_t) p);
      p->unpark();
    }
  } // FastThreadsListHandle is destroyed here.
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END


/// JVM_RegisterUnsafeMethods

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define DC_Args LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*) /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

#define DECLARE_GETPUTOOP(Type, Desc) \
    {CC "get" #Type, CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type)}, \
    {CC "put" #Type, CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type)}, \
    {CC "get" #Type "Volatile", CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type##Volatile)}, \
    {CC "put" #Type "Volatile", CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type##Volatile)}


static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
    {CC "getReference", CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetReference)},
    {CC "putReference", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutReference)},
    {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ, FN_PTR(Unsafe_GetReferenceVolatile)},
    {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutReferenceVolatile)},

    {CC "getUncompressedObject", CC "(" ADR ")" OBJ, FN_PTR(Unsafe_GetUncompressedObject)},

    DECLARE_GETPUTOOP(Boolean, Z),
    DECLARE_GETPUTOOP(Byte, B),
    DECLARE_GETPUTOOP(Short, S),
    DECLARE_GETPUTOOP(Char, C),
    DECLARE_GETPUTOOP(Int, I),
    DECLARE_GETPUTOOP(Long, J),
    DECLARE_GETPUTOOP(Float, F),
    DECLARE_GETPUTOOP(Double, D),

    {CC "allocateMemory0", CC "(J)" ADR, FN_PTR(Unsafe_AllocateMemory0)},
    {CC "reallocateMemory0", CC "(" ADR "J)" ADR, FN_PTR(Unsafe_ReallocateMemory0)},
    {CC "freeMemory0", CC "(" ADR ")V", FN_PTR(Unsafe_FreeMemory0)},

    {CC "objectFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_ObjectFieldOffset0)},
    {CC "knownObjectFieldOffset0", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_KnownObjectFieldOffset0)},
    {CC "staticFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_StaticFieldOffset0)},
    {CC "staticFieldBase0", CC "(" FLD ")" OBJ, FN_PTR(Unsafe_StaticFieldBase0)},
    {CC "ensureClassInitialized0", CC "(" CLS ")V", FN_PTR(Unsafe_EnsureClassInitialized0)},
    {CC "arrayBaseOffset0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayBaseOffset0)},
    {CC "arrayIndexScale0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayIndexScale0)},

    {CC "defineClass0", CC "(" DC_Args ")" CLS, FN_PTR(Unsafe_DefineClass0)},
    {CC "allocateInstance", CC "(" CLS ")" OBJ, FN_PTR(Unsafe_AllocateInstance)},
    {CC "throwException", CC "(" THR ")V", FN_PTR(Unsafe_ThrowException)},
    {CC "compareAndSetReference", CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
    {CC "compareAndSetInt", CC "(" OBJ "J""I""I"")Z", FN_PTR(Unsafe_CompareAndSetInt)},
    {CC "compareAndSetLong", CC "(" OBJ "J""J""J"")Z", FN_PTR(Unsafe_CompareAndSetLong)},
    {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
    {CC "compareAndExchangeInt", CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
    {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

    {CC "park", CC "(ZJ)V", FN_PTR(Unsafe_Park)},
    {CC "unpark", CC "(" OBJ ")V", FN_PTR(Unsafe_Unpark)},

    {CC "getLoadAverage0", CC "([DI)I", FN_PTR(Unsafe_GetLoadAverage0)},

    {CC "copyMemory0", CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
    {CC "copySwapMemory0", CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
    {CC "writeback0", CC "(" "J" ")V", FN_PTR(Unsafe_WriteBack0)},
    {CC "writebackPreSync0", CC "()V", FN_PTR(Unsafe_WriteBackPreSync0)},
    {CC "writebackPostSync0", CC "()V", FN_PTR(Unsafe_WriteBackPostSync0)},
    {CC "setMemory0", CC "(" OBJ "JJB)V", FN_PTR(Unsafe_SetMemory0)},

    {CC "shouldBeInitialized0", CC "(" CLS ")Z", FN_PTR(Unsafe_ShouldBeInitialized0)},

    {CC "fullFence", CC "()V", FN_PTR(Unsafe_FullFence)},
};

#undef CC
#undef FN_PTR

#undef ADR
#undef LANG
#undef OBJ
#undef CLS
#undef FLD
#undef THR
#undef DC_Args
#undef DAC_Args

#undef DECLARE_GETPUTOOP


// This function is exported, used by NativeLookup.
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.
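
// (The mapping from these names and signatures to compiler intrinsics lives
// in the shared intrinsic tables -- cf. vmIntrinsics -- while the table below
// is only the JNI registration used for interpreted calls.)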

JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
  ThreadToNativeFromVM ttnfv(thread);

  int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
  guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
} JVM_END