1 /*
  2  * Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.  Oracle designates this
  8  * particular file as subject to the "Classpath" exception as provided
  9  * by Oracle in the LICENSE file that accompanied this code.
 10  *
 11  * This code is distributed in the hope that it will be useful, but WITHOUT
 12  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 13  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  */
 25 
 26 package jdk.internal.misc;
 27 
 28 import java.lang.annotation.ElementType;
 29 import java.lang.annotation.Retention;
 30 import java.lang.annotation.RetentionPolicy;
 31 import java.lang.annotation.Target;
 32 import java.lang.ref.Reference;
 33 import java.io.FileDescriptor;


 34 
 35 import jdk.internal.access.JavaNioAccess;
 36 import jdk.internal.access.SharedSecrets;
 37 import jdk.internal.access.foreign.MemorySegmentProxy;
 38 import jdk.internal.util.ArraysSupport;
 39 import jdk.internal.vm.annotation.ForceInline;
 40 import jdk.internal.vm.vector.VectorSupport;
 41 
 42 
 43 /**
 44  * This class defines low-level methods to access on-heap and off-heap memory. The methods in this class
 45  * can be thought of as thin wrappers around methods provided in the {@link Unsafe} class. All the methods in this
 * class accept one or more {@link Scope} parameters, which are used to validate whether access to memory
 47  * can be performed in a safe fashion - more specifically, to ensure that the memory being accessed has not
 48  * already been released (which would result in a hard VM crash).
 49  * <p>
 50  * Accessing and releasing memory from a single thread is not problematic - after all, a given thread cannot,
 51  * at the same time, access a memory region <em>and</em> free it. But ensuring correctness of memory access
 52  * when multiple threads are involved is much trickier, as there can be cases where a thread is accessing
 53  * a memory region while another thread is releasing it.
 54  * <p>
 55  * This class provides tools to manage races when multiple threads are accessing and/or releasing the same memory
 56  * region concurrently. More specifically, when a thread wants to release a memory region, it should call the
 57  * {@link #closeScope(jdk.internal.misc.ScopedMemoryAccess.Scope)} method provided by this class. This method initiates
 58  * thread-local handshakes with all the other VM threads, which are then stopped one by one. If any thread is found
 59  * accessing memory that is associated to the very scope object being closed, that thread execution is asynchronously
 60  * interrupted with a {@link Scope.ScopedAccessError}.
 61  * <p>
 62  * This synchronization strategy relies on the idea that accessing memory is atomic with respect to checking the
 63  * validity of the scope associated with that memory region - that is, a thread that wants to perform memory access will be
 64  * suspended either <em>before</em> a scope check or <em>after</em> the memory access. To ensure this atomicity,
 65  * all methods in this class are marked with the special {@link Scoped} annotation, which is recognized by the VM,
 66  * and used during the thread-local handshake to detect (and stop) threads performing potentially problematic memory access
 67  * operations. Additionally, to make sure that the scope object(s) of the memory being accessed is always
 68  * reachable during an access operation, all the methods in this class add reachability fences around the underlying
 69  * unsafe access.
 70  * <p>
 71  * This form of synchronization allows APIs to use plain memory access without any other form of synchronization
 * which might be deemed too expensive; in other words, this approach prioritizes the performance of memory access over
 73  * that of releasing a shared memory resource.
 74  */
 75 public class ScopedMemoryAccess {
 76 
    // Single Unsafe instance used for all raw memory operations in this class.
    private static final Unsafe UNSAFE = Unsafe.getUnsafe();

    // Registers this class's native methods (closeScope0) with the VM.
    private static native void registerNatives();
    static {
        registerNatives();
    }

    /**
     * Closes the given scope. This initiates thread-local handshakes with the other
     * VM threads; a thread found executing a {@link Scoped} method on memory
     * associated with {@code scope} is asynchronously interrupted with the shared
     * {@link Scope.ScopedAccessError} instance (see class javadoc).
     *
     * @param scope the scope to be closed
     * @return whether the close succeeded (exact semantics are defined by the
     *         native implementation — not visible here)
     */
    public boolean closeScope(Scope scope) {
        return closeScope0(scope, Scope.ScopedAccessError.INSTANCE);
    }

    native boolean closeScope0(Scope scope, Scope.ScopedAccessError exception);

    // Singleton: the only instance is obtained via getScopedMemoryAccess().
    private ScopedMemoryAccess() {}

    private static final ScopedMemoryAccess theScopedMemoryAccess = new ScopedMemoryAccess();

    /**
     * Returns the unique {@code ScopedMemoryAccess} instance.
     */
    public static ScopedMemoryAccess getScopedMemoryAccess() {
        return theScopedMemoryAccess;
    }
 97 
    /**
     * Scope interface used during scoped memory access operations. A scope can be thought of as an object
     * which embodies the temporal checks associated with a given memory region.
     */
    public interface Scope {

        /**
         * Checks that this scope is still in a valid state (i.e. the memory it
         * guards has not been released). Callers in this class expect a failed
         * check to surface as a {@link ScopedAccessError}; other failure modes
         * are defined by the implementation.
         */
        void checkValidState();

        /**
         * Returns the thread owning this scope — presumably {@code null} for a
         * shared scope; ownership semantics are defined by implementations
         * (TODO confirm against implementors).
         */
        Thread ownerThread();

        // Acquire/release hooks used to keep a scope alive across an access
        // operation; the pairing discipline is defined by implementations.
        void acquire0();

        void release0();

        /**
         * Error thrown when memory access fails because the memory has already been released.
         * Note: for performance reasons, this exception is never created by client; instead a shared instance
         * is thrown (sometimes, this instance can be thrown asynchronously inside VM code). For this reason,
         * it is important for clients to always catch this exception and throw a regular exception instead
         * (which contains full stack information).
         */
        final class ScopedAccessError extends Error {
            private ScopedAccessError() {
                // No message-localized cause, no suppression, no writable stack
                // trace: the shared INSTANCE is cheap and safe to rethrow.
                super("Attempt to access an already released memory resource", null, false, false);
            }
            static final long serialVersionUID = 1L;

            // Shared singleton thrown for every failed scope check.
            public static final ScopedAccessError INSTANCE = new ScopedAccessError();
        }
    }
128 
    /**
     * Marker recognized by the VM: during the thread-local handshake initiated by
     * {@link #closeScope(Scope)}, threads found executing a method carrying this
     * annotation can be detected (and stopped) so that the scope-validity check
     * and the memory access appear atomic (see class javadoc).
     */
    @Target({ElementType.METHOD, ElementType.CONSTRUCTOR})
    @Retention(RetentionPolicy.RUNTIME)
    @interface Scoped { }
132 
133     // bulk ops
134 
135     @ForceInline
136     public void copyMemory(Scope srcScope, Scope dstScope,
137                                    Object srcBase, long srcOffset,
138                                    Object destBase, long destOffset,
139                                    long bytes) {
140           try {
141               copyMemoryInternal(srcScope, dstScope, srcBase, srcOffset, destBase, destOffset, bytes);
142           } catch (Scope.ScopedAccessError ex) {
143               throw new IllegalStateException("This segment is already closed");
144           }
145     }
146 
    // Performs the scope liveness checks followed by the raw copy. Marked @Scoped
    // so the VM can detect threads in this method during a closeScope handshake;
    // the check and the access must appear atomic (see class javadoc).
    @ForceInline @Scoped
    private void copyMemoryInternal(Scope srcScope, Scope dstScope,
                               Object srcBase, long srcOffset,
                               Object destBase, long destOffset,
                               long bytes) {
        try {
            // A null scope means that side of the copy needs no liveness check.
            if (srcScope != null) {
                srcScope.checkValidState();
            }
            if (dstScope != null) {
                dstScope.checkValidState();
            }
            UNSAFE.copyMemory(srcBase, srcOffset, destBase, destOffset, bytes);
        } finally {
            // Keep both scope objects reachable for the whole access (class javadoc).
            Reference.reachabilityFence(srcScope);
            Reference.reachabilityFence(dstScope);
        }
    }
165 
166     @ForceInline
167     public void copySwapMemory(Scope srcScope, Scope dstScope,
168                                    Object srcBase, long srcOffset,
169                                    Object destBase, long destOffset,
170                                    long bytes, long elemSize) {
171           try {
172               copySwapMemoryInternal(srcScope, dstScope, srcBase, srcOffset, destBase, destOffset, bytes, elemSize);
173           } catch (Scope.ScopedAccessError ex) {
174               throw new IllegalStateException("This segment is already closed");
175           }
176     }
177 
    // Scope checks plus the raw byte-swapping copy. @Scoped: detectable by the VM
    // during a closeScope handshake (see class javadoc).
    @ForceInline @Scoped
    private void copySwapMemoryInternal(Scope srcScope, Scope dstScope,
                               Object srcBase, long srcOffset,
                               Object destBase, long destOffset,
                               long bytes, long elemSize) {
        try {
            // A null scope means that side of the copy needs no liveness check.
            if (srcScope != null) {
                srcScope.checkValidState();
            }
            if (dstScope != null) {
                dstScope.checkValidState();
            }
            UNSAFE.copySwapMemory(srcBase, srcOffset, destBase, destOffset, bytes, elemSize);
        } finally {
            // Keep both scope objects reachable for the whole access (class javadoc).
            Reference.reachabilityFence(srcScope);
            Reference.reachabilityFence(dstScope);
        }
    }
196 
197     @ForceInline
198     public void setMemory(Scope scope, Object o, long offset, long bytes, byte value) {
199         try {
200             setMemoryInternal(scope, o, offset, bytes, value);
201         } catch (Scope.ScopedAccessError ex) {
202             throw new IllegalStateException("This segment is already closed");
203         }
204     }
205 
    // Scope check plus the raw fill. @Scoped: detectable by the VM during a
    // closeScope handshake (see class javadoc).
    @ForceInline @Scoped
    private void setMemoryInternal(Scope scope, Object o, long offset, long bytes, byte value) {
        try {
            // A null scope means no liveness check is required.
            if (scope != null) {
                scope.checkValidState();
            }
            UNSAFE.setMemory(o, offset, bytes, value);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
217 
218     @ForceInline
219     public int vectorizedMismatch(Scope aScope, Scope bScope,
220                                              Object a, long aOffset,
221                                              Object b, long bOffset,
222                                              int length,
223                                              int log2ArrayIndexScale) {
224         try {
225             return vectorizedMismatchInternal(aScope, bScope, a, aOffset, b, bOffset, length, log2ArrayIndexScale);
226         } catch (Scope.ScopedAccessError ex) {
227             throw new IllegalStateException("This segment is already closed");
228         }
229     }
230 
    // Scope checks plus the vectorized comparison. @Scoped: detectable by the VM
    // during a closeScope handshake (see class javadoc).
    @ForceInline @Scoped
    private int vectorizedMismatchInternal(Scope aScope, Scope bScope,
                                             Object a, long aOffset,
                                             Object b, long bOffset,
                                             int length,
                                             int log2ArrayIndexScale) {
        try {
            // A null scope means that side needs no liveness check.
            if (aScope != null) {
                aScope.checkValidState();
            }
            if (bScope != null) {
                bScope.checkValidState();
            }
            return ArraysSupport.vectorizedMismatch(a, aOffset, b, bOffset, length, log2ArrayIndexScale);
        } finally {
            // Keep both scope objects reachable for the whole access (class javadoc).
            Reference.reachabilityFence(aScope);
            Reference.reachabilityFence(bScope);
        }
    }
250 
251     @ForceInline
252     public boolean isLoaded(Scope scope, long address, boolean isSync, long size) {
253         try {
254             return isLoadedInternal(scope, address, isSync, size);
255         } catch (Scope.ScopedAccessError ex) {
256             throw new IllegalStateException("This segment is already closed");
257         }
258     }
259 
    // Scope check plus delegation to JavaNioAccess.isLoaded.
    // NOTE(review): declared public, unlike the private *Internal siblings above —
    // looks like an oversight; confirm callers before narrowing visibility.
    @ForceInline @Scoped
    public boolean isLoadedInternal(Scope scope, long address, boolean isSync, long size) {
        try {
            // A null scope means no liveness check is required.
            if (scope != null) {
                scope.checkValidState();
            }
            return SharedSecrets.getJavaNioAccess().isLoaded(address, isSync, size);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
271 
272     @ForceInline
273     public void load(Scope scope, long address, boolean isSync, long size) {
274         try {
275             loadInternal(scope, address, isSync, size);
276         } catch (Scope.ScopedAccessError ex) {
277             throw new IllegalStateException("This segment is already closed");
278         }
279     }
280 
    // Scope check plus delegation to JavaNioAccess.load.
    // NOTE(review): public unlike the private *Internal siblings — confirm before narrowing.
    @ForceInline @Scoped
    public void loadInternal(Scope scope, long address, boolean isSync, long size) {
        try {
            // A null scope means no liveness check is required.
            if (scope != null) {
                scope.checkValidState();
            }
            SharedSecrets.getJavaNioAccess().load(address, isSync, size);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
292 
293     @ForceInline
294     public void unload(Scope scope, long address, boolean isSync, long size) {
295         try {
296             unloadInternal(scope, address, isSync, size);
297         } catch (Scope.ScopedAccessError ex) {
298             throw new IllegalStateException("This segment is already closed");
299         }
300     }
301 
    // Scope check plus delegation to JavaNioAccess.unload.
    // NOTE(review): public unlike the private *Internal siblings — confirm before narrowing.
    @ForceInline @Scoped
    public void unloadInternal(Scope scope, long address, boolean isSync, long size) {
        try {
            // A null scope means no liveness check is required.
            if (scope != null) {
                scope.checkValidState();
            }
            SharedSecrets.getJavaNioAccess().unload(address, isSync, size);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
313 
314     @ForceInline
315     public void force(Scope scope, FileDescriptor fd, long address, boolean isSync, long index, long length) {
316         try {
317             forceInternal(scope, fd, address, isSync, index, length);
318         } catch (Scope.ScopedAccessError ex) {
319             throw new IllegalStateException("This segment is already closed");
320         }
321     }
322 
    // Scope check plus delegation to JavaNioAccess.force.
    // NOTE(review): public unlike the private *Internal siblings — confirm before narrowing.
    @ForceInline @Scoped
    public void forceInternal(Scope scope, FileDescriptor fd, long address, boolean isSync, long index, long length) {
        try {
            // A null scope means no liveness check is required.
            if (scope != null) {
                scope.checkValidState();
            }
            SharedSecrets.getJavaNioAccess().force(fd, address, isSync, index, length);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
334 
335     // MemorySegment vector access ops







































336 
337     @ForceInline
338     public static
339     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>>
340     V loadFromMemorySegment(Class<? extends V> vmClass, Class<E> e, int length,
341                          MemorySegmentProxy msp, long offset,
342                          S s,
343                          VectorSupport.LoadOperation<MemorySegmentProxy, V, S> defaultImpl) {
344         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
345         if (msp.maxAlignMask() > 1) {
346             throw new IllegalArgumentException();
347         }
348 
349         try {
350             return loadFromMemorySegmentScopedInternal(
351                     msp.scope(),
352                     vmClass, e, length,
353                     msp, offset,
354                     s,
355                     defaultImpl);
356         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
357             throw new IllegalStateException("This segment is already closed");
358         }
359     }
360 
    // Scoped counterpart of loadFromMemorySegment: checks the scope, then loads
    // through the VectorSupport intrinsic. @Scoped: detectable by the VM during a
    // closeScope handshake (see class javadoc).
    @Scoped
    @ForceInline
    private static
    <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>>
    V loadFromMemorySegmentScopedInternal(ScopedMemoryAccess.Scope scope,
                                          Class<? extends V> vmClass, Class<E> e, int length,
                                          MemorySegmentProxy msp, long offset,
                                          S s,
                                          VectorSupport.LoadOperation<MemorySegmentProxy, V, S> defaultImpl) {
        try {
            scope.checkValidState();

            return VectorSupport.load(vmClass, e, length,
                    msp.unsafeGetBase(), msp.unsafeGetOffset() + offset,
                    msp, offset, s,
                    defaultImpl);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
381 
382     @ForceInline
383     public static
384     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>,
385      M extends VectorSupport.VectorMask<E>>
386     V loadFromMemorySegmentMasked(Class<? extends V> vmClass, Class<M> maskClass, Class<E> e,
387                                   int length, MemorySegmentProxy msp, long offset, M m, S s,
388                                   VectorSupport.LoadVectorMaskedOperation<MemorySegmentProxy, V, S, M> defaultImpl) {
389         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
390         if (msp.maxAlignMask() > 1) {
391             throw new IllegalArgumentException();
392         }
393 
394         try {
395             return loadFromMemorySegmentMaskedScopedInternal(
396                     msp.scope(),
397                     vmClass, maskClass, e, length,
398                     msp, offset, m,
399                     s,
400                     defaultImpl);
401         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
402             throw new IllegalStateException("This segment is already closed");
403         }
404     }
405 
    // Scoped counterpart of loadFromMemorySegmentMasked: checks the scope, then
    // loads through the VectorSupport masked intrinsic. @Scoped: detectable by
    // the VM during a closeScope handshake (see class javadoc).
    @Scoped
    @ForceInline
    private static
    <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>,
     M extends VectorSupport.VectorMask<E>>
    V loadFromMemorySegmentMaskedScopedInternal(ScopedMemoryAccess.Scope scope, Class<? extends V> vmClass,
                                                Class<M> maskClass, Class<E> e, int length,
                                                MemorySegmentProxy msp, long offset, M m,
                                                S s,
                                                VectorSupport.LoadVectorMaskedOperation<MemorySegmentProxy, V, S, M> defaultImpl) {
        try {
            scope.checkValidState();

            return VectorSupport.loadMasked(vmClass, maskClass, e, length,
                    msp.unsafeGetBase(), msp.unsafeGetOffset() + offset, m,
                    msp, offset, s,
                    defaultImpl);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
427 
428     @ForceInline
429     public static
430     <V extends VectorSupport.Vector<E>, E>
431     void storeIntoMemorySegment(Class<? extends V> vmClass, Class<E> e, int length,
432                                 V v,
433                                 MemorySegmentProxy msp, long offset,
434                                 VectorSupport.StoreVectorOperation<MemorySegmentProxy, V> defaultImpl) {
435         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
436         if (msp.maxAlignMask() > 1) {
437             throw new IllegalArgumentException();
438         }
439 
440         try {
441             storeIntoMemorySegmentScopedInternal(
442                     msp.scope(),
443                     vmClass, e, length,
444                     v,
445                     msp, offset,
446                     defaultImpl);
447         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
448             throw new IllegalStateException("This segment is already closed");
449         }
450     }
451 
    // Scoped counterpart of storeIntoMemorySegment: checks the scope, then stores
    // through the VectorSupport intrinsic. @Scoped: detectable by the VM during a
    // closeScope handshake (see class javadoc).
    @Scoped
    @ForceInline
    private static
    <V extends VectorSupport.Vector<E>, E>
    void storeIntoMemorySegmentScopedInternal(ScopedMemoryAccess.Scope scope,
                                              Class<? extends V> vmClass, Class<E> e, int length,
                                              V v,
                                              MemorySegmentProxy msp, long offset,
                                              VectorSupport.StoreVectorOperation<MemorySegmentProxy, V> defaultImpl) {
        try {
            scope.checkValidState();

            VectorSupport.store(vmClass, e, length,
                    msp.unsafeGetBase(), msp.unsafeGetOffset() + offset,
                    v,
                    msp, offset,
                    defaultImpl);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
473 
474     @ForceInline
475     public static
476     <V extends VectorSupport.Vector<E>, E, M extends VectorSupport.VectorMask<E>>
477     void storeIntoMemorySegmentMasked(Class<? extends V> vmClass, Class<M> maskClass, Class<E> e,
478                                       int length, V v, M m,
479                                       MemorySegmentProxy msp, long offset,
480                                       VectorSupport.StoreVectorMaskedOperation<MemorySegmentProxy, V, M> defaultImpl) {
481         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
482         if (msp.maxAlignMask() > 1) {
483             throw new IllegalArgumentException();
484         }
485 
486         try {
487             storeIntoMemorySegmentMaskedScopedInternal(
488                     msp.scope(),
489                     vmClass, maskClass, e, length,
490                     v, m,
491                     msp, offset,
492                     defaultImpl);
493         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
494             throw new IllegalStateException("This segment is already closed");
495         }
496     }
497 
    // Scoped counterpart of storeIntoMemorySegmentMasked: checks the scope, then
    // stores through the VectorSupport masked intrinsic. @Scoped: detectable by
    // the VM during a closeScope handshake (see class javadoc).
    @Scoped
    @ForceInline
    private static
    <V extends VectorSupport.Vector<E>, E, M extends VectorSupport.VectorMask<E>>
    void storeIntoMemorySegmentMaskedScopedInternal(ScopedMemoryAccess.Scope scope,
                                                    Class<? extends V> vmClass, Class<M> maskClass,
                                                    Class<E> e, int length, V v, M m,
                                                    MemorySegmentProxy msp, long offset,
                                                    VectorSupport.StoreVectorMaskedOperation<MemorySegmentProxy, V, M> defaultImpl) {
        try {
            scope.checkValidState();

            VectorSupport.storeMasked(vmClass, maskClass, e, length,
                    msp.unsafeGetBase(), msp.unsafeGetOffset() + offset,
                    v, m,
                    msp, offset,
                    defaultImpl);
        } finally {
            // Keep the scope reachable for the whole access (class javadoc).
            Reference.reachabilityFence(scope);
        }
    }
519 
520     // typed-ops here
521 
522     // Note: all the accessor methods defined below take advantage of argument type profiling
523     // (see src/hotspot/share/oops/methodData.cpp) which greatly enhances performance when the same accessor
524     // method is used repeatedly with different 'base' objects.
--- EOF ---