< prev index next >

src/java.base/share/classes/jdk/internal/misc/X-ScopedMemoryAccess.java.template

Print this page

 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  */
 25 
 26 package jdk.internal.misc;
 27 
 28 import java.lang.annotation.ElementType;
 29 import java.lang.annotation.Retention;
 30 import java.lang.annotation.RetentionPolicy;
 31 import java.lang.annotation.Target;
 32 import java.lang.ref.Reference;
 33 import java.io.FileDescriptor;
 34 import java.nio.Buffer;
 35 import java.nio.ByteBuffer;
 36 
 37 import jdk.internal.access.JavaNioAccess;
 38 import jdk.internal.access.SharedSecrets;
 39 import jdk.internal.access.foreign.MemorySegmentProxy;
 40 import jdk.internal.util.ArraysSupport;
 41 import jdk.internal.vm.annotation.ForceInline;
 42 import jdk.internal.vm.vector.VectorSupport;
 43 
 44 
 45 /**
 46  * This class defines low-level methods to access on-heap and off-heap memory. The methods in this class
 47  * can be thought of as thin wrappers around methods provided in the {@link Unsafe} class. All the methods in this
 48  * class, accept one or more {@link Scope} parameter, which is used to validate as to whether access to memory
 49  * can be performed in a safe fashion - more specifically, to ensure that the memory being accessed has not
 50  * already been released (which would result in a hard VM crash).
 51  * <p>
 52  * Accessing and releasing memory from a single thread is not problematic - after all, a given thread cannot,
 53  * at the same time, access a memory region <em>and</em> free it. But ensuring correctness of memory access
 54  * when multiple threads are involved is much trickier, as there can be cases where a thread is accessing
 55  * a memory region while another thread is releasing it.

// Public entry point: forces (syncs) a mapped-file region to its backing storage.
// Delegates to the @Scoped internal method; a ScopedAccessError raised because the
// segment's scope was already closed is translated into IllegalStateException.
317     public void force(Scope scope, FileDescriptor fd, long address, boolean isSync, long index, long length) {
318         try {
319             forceInternal(scope, fd, address, isSync, index, length);
320         } catch (Scope.ScopedAccessError ex) {
321             throw new IllegalStateException("This segment is already closed");
322         }
323     }
324 
// Scoped worker for force(): validates the scope (null scope means no liveness
// check is needed), then performs the actual msync via JavaNioAccess.
// reachabilityFence keeps 'scope' strongly reachable for the duration of the
// native operation so it cannot be reclaimed (and its memory freed) mid-access.
325     @ForceInline @Scoped
326     public void forceInternal(Scope scope, FileDescriptor fd, long address, boolean isSync, long index, long length) {
327         try {
328             if (scope != null) {
329                 scope.checkValidState();
330             }
331             SharedSecrets.getJavaNioAccess().force(fd, address, isSync, index, length);
332         } finally {
333             Reference.reachabilityFence(scope);
334         }
335     }
336 
337     // ByteBuffer vector access ops
338 
339     // Buffer access constants, to be initialized when required.
340     // Avoids a null value for NIO_ACCESS, due to class initialization dependencies
// Holder class: delays Unsafe field-offset lookup and JavaNioAccess retrieval
// until first use, avoiding class-initialization ordering problems with NIO.
341     static final class BufferAccess {
342         // Buffer.address
// Offset of the long 'address' field declared on java.nio.Buffer.
343         static final long BUFFER_ADDRESS
344                 = UNSAFE.objectFieldOffset(Buffer.class, "address");
345 
346         // ByteBuffer.hb
// Offset of the byte[] backing-array field ('hb') on ByteBuffer; null for direct buffers.
347         static final long BYTE_BUFFER_HB
348                 = UNSAFE.objectFieldOffset(ByteBuffer.class, "hb");
349 
350         static final long BYTE_BUFFER_IS_READ_ONLY
351                 = UNSAFE.objectFieldOffset(ByteBuffer.class, "isReadOnly");
352 
// Returns the heap base object for Unsafe access (the byte[] for heap buffers;
// presumably null for direct buffers, where the address alone is used — matches Unsafe conventions).
353         @ForceInline
354         static Object bufferBase(ByteBuffer bb) {
355             return UNSAFE.getReference(bb, BYTE_BUFFER_HB);
356         }
357 
// Absolute address (or array-relative offset for heap buffers) of 'offset' within the buffer.
358         @ForceInline
359         static long bufferAddress(ByteBuffer bb, long offset) {
360             return UNSAFE.getLong(bb, BUFFER_ADDRESS) + offset;
361         }
362 
363         static final JavaNioAccess NIO_ACCESS = SharedSecrets.getJavaNioAccess();
364 
// Scope guarding this buffer's memory, or null when the buffer is not backed
// by a memory segment (no liveness check required in that case).
365         @ForceInline
366         static ScopedMemoryAccess.Scope scope(ByteBuffer bb) {
367             MemorySegmentProxy segmentProxy = NIO_ACCESS.bufferSegment(bb);
368             return segmentProxy != null ?
369                     segmentProxy.scope() : null;
370         }
371     }
372 
// Fast read-only check that reads ByteBuffer.isReadOnly directly via Unsafe,
// bypassing the virtual isReadOnly() call.
373     @ForceInline
374     public static boolean isReadOnly(ByteBuffer bb) {
375         return UNSAFE.getBoolean(bb, BufferAccess.BYTE_BUFFER_IS_READ_ONLY);
376     }
377 
// Public vector load from a ByteBuffer. Resolves the buffer's scope (null for
// plain heap buffers) and delegates to the @Scoped variant; a closed-scope
// error surfaces as IllegalStateException to the caller.
378     @ForceInline
379     public static
380     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>>
381     V loadFromByteBuffer(Class<? extends V> vmClass, Class<E> e, int length,
382                           ByteBuffer bb, int offset,
383                           S s,
384                           VectorSupport.LoadOperation<ByteBuffer, V, S> defaultImpl) {




385         try {
386             return loadFromByteBufferScoped(
387                     BufferAccess.scope(bb),
388                     vmClass, e, length,
389                     bb, offset,
390                     s,
391                     defaultImpl);
392         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
393             throw new IllegalStateException("This segment is already closed");
394         }
395     }
396 
// Scoped worker: validates the (possibly null) scope, then hands off to the
// VectorSupport.load intrinsic with the buffer's base object and raw address.
// reachabilityFence pins the scope until the load completes.
397     @Scoped
398     @ForceInline
399     private static
400     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>>
401     V loadFromByteBufferScoped(ScopedMemoryAccess.Scope scope,
402                           Class<? extends V> vmClass, Class<E> e, int length,
403                           ByteBuffer bb, int offset,
404                           S s,
405                           VectorSupport.LoadOperation<ByteBuffer, V, S> defaultImpl) {
406         try {
407             if (scope != null) {
408                 scope.checkValidState();
409             }
410 
// Heap buffers: base is the backing byte[]; direct buffers: presumably null base
// with an absolute address — TODO confirm against BufferAccess.bufferBase.
411             final byte[] base = (byte[]) BufferAccess.bufferBase(bb);
412 
413             return VectorSupport.load(vmClass, e, length,
414                       base, BufferAccess.bufferAddress(bb, offset),
415                       bb, offset, s,
416                       defaultImpl);
417         } finally {
418             Reference.reachabilityFence(scope);
419         }
420     }
421 
// Public masked vector load from a ByteBuffer: only lanes set in mask 'm' are
// loaded. Same scope-resolution and error-translation pattern as the unmasked load.
422     @ForceInline
423     public static
424     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>,
425      M extends VectorSupport.VectorMask<E>>
426     V loadFromByteBufferMasked(Class<? extends V> vmClass, Class<M> maskClass, Class<E> e,
427                                int length, ByteBuffer bb, int offset, M m, S s,
428                                VectorSupport.LoadVectorMaskedOperation<ByteBuffer, V, S, M> defaultImpl) {




429         try {
430             return loadFromByteBufferMaskedScoped(
431                     BufferAccess.scope(bb),
432                     vmClass, maskClass, e, length,
433                     bb, offset, m,
434                     s,
435                     defaultImpl);
436         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
437             throw new IllegalStateException("This segment is already closed");
438         }
439     }
440 
// Scoped worker for the masked load: validates the (possibly null) scope, then
// invokes the VectorSupport.loadMasked intrinsic; scope is fenced until done.
441     @Scoped
442     @ForceInline
443     private static
444     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>,
445      M extends VectorSupport.VectorMask<E>>
446     V loadFromByteBufferMaskedScoped(ScopedMemoryAccess.Scope scope, Class<? extends V> vmClass,
447                                      Class<M> maskClass, Class<E> e, int length,
448                                      ByteBuffer bb, int offset, M m,
449                                      S s,
450                                      VectorSupport.LoadVectorMaskedOperation<ByteBuffer, V, S, M> defaultImpl) {
451         try {
452             if (scope != null) {
453                 scope.checkValidState();
454             }
455 
456             return VectorSupport.loadMasked(vmClass, maskClass, e, length,
457                     BufferAccess.bufferBase(bb), BufferAccess.bufferAddress(bb, offset), m,
458                     bb, offset, s,
459                     defaultImpl);
460         } finally {
461             Reference.reachabilityFence(scope);
462         }
463     }
464 
// Public vector store into a ByteBuffer. Mirrors loadFromByteBuffer: resolve
// scope, delegate to the @Scoped variant, translate closed-scope errors.
465     @ForceInline
466     public static
467     <V extends VectorSupport.Vector<E>, E>
468     void storeIntoByteBuffer(Class<? extends V> vmClass, Class<E> e, int length,
469                              V v,
470                              ByteBuffer bb, int offset,
471                              VectorSupport.StoreVectorOperation<ByteBuffer, V> defaultImpl) {




472         try {
473             storeIntoByteBufferScoped(
474                     BufferAccess.scope(bb),
475                     vmClass, e, length,
476                     v,
477                     bb, offset,
478                     defaultImpl);
479         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
480             throw new IllegalStateException("This segment is already closed");
481         }
482     }
483 
// Scoped worker: validates the (possibly null) scope, then performs the store
// via the VectorSupport.store intrinsic; scope is fenced until the store completes.
484     @Scoped
485     @ForceInline
486     private static
487     <V extends VectorSupport.Vector<E>, E>
488     void storeIntoByteBufferScoped(ScopedMemoryAccess.Scope scope,
489                                    Class<? extends V> vmClass, Class<E> e, int length,
490                                    V v,
491                                    ByteBuffer bb, int offset,
492                                    VectorSupport.StoreVectorOperation<ByteBuffer, V> defaultImpl) {
493         try {
494             if (scope != null) {
495                 scope.checkValidState();
496             }
497 
// Backing byte[] for heap buffers; presumably null for direct buffers — TODO confirm.
498             final byte[] base = (byte[]) BufferAccess.bufferBase(bb);
499 
500             VectorSupport.store(vmClass, e, length,
501                                 base, BufferAccess.bufferAddress(bb, offset),
502                                 v,
503                                 bb, offset,
504                                 defaultImpl);
505         } finally {
506             Reference.reachabilityFence(scope);
507         }
508     }
509 
// Public masked vector store: only lanes set in mask 'm' are written.
// Same scope-resolution and error-translation pattern as the unmasked store.
510     @ForceInline
511     public static
512     <V extends VectorSupport.Vector<E>, E, M extends VectorSupport.VectorMask<E>>
513     void storeIntoByteBufferMasked(Class<? extends V> vmClass, Class<M> maskClass, Class<E> e,
514                                    int length, V v, M m,
515                                    ByteBuffer bb, int offset,
516                                    VectorSupport.StoreVectorMaskedOperation<ByteBuffer, V, M> defaultImpl) {




517         try {
518             storeIntoByteBufferMaskedScoped(
519                     BufferAccess.scope(bb),
520                     vmClass, maskClass, e, length,
521                     v, m,
522                     bb, offset,
523                     defaultImpl);
524         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
525             throw new IllegalStateException("This segment is already closed");
526         }
527     }
528 
// Scoped worker for the masked store: validates the (possibly null) scope, then
// invokes the VectorSupport.storeMasked intrinsic; scope is fenced until done.
529     @Scoped
530     @ForceInline
531     private static
532     <V extends VectorSupport.Vector<E>, E, M extends VectorSupport.VectorMask<E>>
533     void storeIntoByteBufferMaskedScoped(ScopedMemoryAccess.Scope scope,
534                                          Class<? extends V> vmClass, Class<M> maskClass,
535                                          Class<E> e, int length, V v, M m,
536                                          ByteBuffer bb, int offset,
537                                          VectorSupport.StoreVectorMaskedOperation<ByteBuffer, V, M> defaultImpl) {
538         try {
539             if (scope != null) {
540                 scope.checkValidState();
541             }
542 
543             VectorSupport.storeMasked(vmClass, maskClass, e, length,
544                     BufferAccess.bufferBase(bb), BufferAccess.bufferAddress(bb, offset),
545                     v, m,
546                     bb, offset,
547                     defaultImpl);
548         } finally {
549             Reference.reachabilityFence(scope);
550         }
551     }
552 
553     // typed-ops here
554 
555     // Note: all the accessor methods defined below take advantage of argument type profiling
556     // (see src/hotspot/share/oops/methodData.cpp) which greatly enhances performance when the same accessor
557     // method is used repeatedly with different 'base' objects.

 14  * version 2 for more details (a copy is included in the LICENSE file that
 15  * accompanied this code).
 16  *
 17  * You should have received a copy of the GNU General Public License version
 18  * 2 along with this work; if not, write to the Free Software Foundation,
 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  */
 25 
 26 package jdk.internal.misc;
 27 
 28 import java.lang.annotation.ElementType;
 29 import java.lang.annotation.Retention;
 30 import java.lang.annotation.RetentionPolicy;
 31 import java.lang.annotation.Target;
 32 import java.lang.ref.Reference;
 33 import java.io.FileDescriptor;


 34 
 35 import jdk.internal.access.JavaNioAccess;
 36 import jdk.internal.access.SharedSecrets;
 37 import jdk.internal.access.foreign.MemorySegmentProxy;
 38 import jdk.internal.util.ArraysSupport;
 39 import jdk.internal.vm.annotation.ForceInline;
 40 import jdk.internal.vm.vector.VectorSupport;
 41 
 42 
 43 /**
 44  * This class defines low-level methods to access on-heap and off-heap memory. The methods in this class
 45  * can be thought of as thin wrappers around methods provided in the {@link Unsafe} class. All the methods in this
 46  * class, accept one or more {@link Scope} parameter, which is used to validate as to whether access to memory
 47  * can be performed in a safe fashion - more specifically, to ensure that the memory being accessed has not
 48  * already been released (which would result in a hard VM crash).
 49  * <p>
 50  * Accessing and releasing memory from a single thread is not problematic - after all, a given thread cannot,
 51  * at the same time, access a memory region <em>and</em> free it. But ensuring correctness of memory access
 52  * when multiple threads are involved is much trickier, as there can be cases where a thread is accessing
 53  * a memory region while another thread is releasing it.

// (New side of the diff.) Public entry point: forces a mapped-file region to
// storage; a closed-scope ScopedAccessError becomes IllegalStateException.
315     public void force(Scope scope, FileDescriptor fd, long address, boolean isSync, long index, long length) {
316         try {
317             forceInternal(scope, fd, address, isSync, index, length);
318         } catch (Scope.ScopedAccessError ex) {
319             throw new IllegalStateException("This segment is already closed");
320         }
321     }
322 
// (New side of the diff.) Scoped worker for force(): null scope skips the
// liveness check; reachabilityFence keeps the scope alive during the native sync.
323     @ForceInline @Scoped
324     public void forceInternal(Scope scope, FileDescriptor fd, long address, boolean isSync, long index, long length) {
325         try {
326             if (scope != null) {
327                 scope.checkValidState();
328             }
329             SharedSecrets.getJavaNioAccess().force(fd, address, isSync, index, length);
330         } finally {
331             Reference.reachabilityFence(scope);
332         }
333     }
334 
335     // MemorySegment vector access ops







































336 
// Public vector load from a MemorySegment. Rejects segments whose element
// alignment mask exceeds 1 (i.e. non-byte[] heap backing) before delegating to
// the @Scoped variant; a closed-scope error becomes IllegalStateException.
337     @ForceInline
338     public static
339     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>>
340     V loadFromMemorySegment(Class<? extends V> vmClass, Class<E> e, int length,
341                          MemorySegmentProxy msp, long offset,
342                          S s,
343                          VectorSupport.LoadOperation<MemorySegmentProxy, V, S> defaultImpl) {
344         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
345         if (msp.maxAlignMask() > 1) {
346             throw new IllegalArgumentException();
347         }
348 
349         try {
350             return loadFromMemorySegmentScopedInternal(
351                     msp.scope(),
352                     vmClass, e, length,
353                     msp, offset,
354                     s,
355                     defaultImpl);
356         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
357             throw new IllegalStateException("This segment is already closed");
358         }
359     }
360 
// Scoped worker: checks scope liveness unconditionally (a segment's scope is
// non-null here, unlike the ByteBuffer path — note the absent null check), then
// loads via the VectorSupport.load intrinsic using the segment's unsafe
// base/offset. reachabilityFence pins the scope until the load completes.
361     @Scoped
362     @ForceInline
363     private static
364     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>>
365     V loadFromMemorySegmentScopedInternal(ScopedMemoryAccess.Scope scope,
366                                           Class<? extends V> vmClass, Class<E> e, int length,
367                                           MemorySegmentProxy msp, long offset,
368                                           S s,
369                                           VectorSupport.LoadOperation<MemorySegmentProxy, V, S> defaultImpl) {
370         try {
371             scope.checkValidState();




372 
373             return VectorSupport.load(vmClass, e, length,
374                     msp.unsafeGetBase(), msp.unsafeGetOffset() + offset,
375                     msp, offset, s,
376                     defaultImpl);
377         } finally {
378             Reference.reachabilityFence(scope);
379         }
380     }
381 
// Public masked vector load from a MemorySegment: only lanes set in mask 'm'
// are loaded. Same alignment guard and error translation as the unmasked load.
382     @ForceInline
383     public static
384     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>,
385      M extends VectorSupport.VectorMask<E>>
386     V loadFromMemorySegmentMasked(Class<? extends V> vmClass, Class<M> maskClass, Class<E> e,
387                                   int length, MemorySegmentProxy msp, long offset, M m, S s,
388                                   VectorSupport.LoadVectorMaskedOperation<MemorySegmentProxy, V, S, M> defaultImpl) {
389         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
390         if (msp.maxAlignMask() > 1) {
391             throw new IllegalArgumentException();
392         }
393 
394         try {
395             return loadFromMemorySegmentMaskedScopedInternal(
396                     msp.scope(),
397                     vmClass, maskClass, e, length,
398                     msp, offset, m,
399                     s,
400                     defaultImpl);
401         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
402             throw new IllegalStateException("This segment is already closed");
403         }
404     }
405 
// Scoped worker for the masked segment load: unconditional liveness check,
// then the VectorSupport.loadMasked intrinsic; scope fenced until completion.
406     @Scoped
407     @ForceInline
408     private static
409     <V extends VectorSupport.Vector<E>, E, S extends VectorSupport.VectorSpecies<E>,
410      M extends VectorSupport.VectorMask<E>>
411     V loadFromMemorySegmentMaskedScopedInternal(ScopedMemoryAccess.Scope scope, Class<? extends V> vmClass,
412                                                 Class<M> maskClass, Class<E> e, int length,
413                                                 MemorySegmentProxy msp, long offset, M m,
414                                                 S s,
415                                                 VectorSupport.LoadVectorMaskedOperation<MemorySegmentProxy, V, S, M> defaultImpl) {
416         try {
417             scope.checkValidState();


418 
419             return VectorSupport.loadMasked(vmClass, maskClass, e, length,
420                     msp.unsafeGetBase(), msp.unsafeGetOffset() + offset, m,
421                     msp, offset, s,
422                     defaultImpl);
423         } finally {
424             Reference.reachabilityFence(scope);
425         }
426     }
427 
// Public vector store into a MemorySegment. Mirrors loadFromMemorySegment:
// alignment guard, delegate to the @Scoped variant, translate closed-scope errors.
428     @ForceInline
429     public static
430     <V extends VectorSupport.Vector<E>, E>
431     void storeIntoMemorySegment(Class<? extends V> vmClass, Class<E> e, int length,
432                                 V v,
433                                 MemorySegmentProxy msp, long offset,
434                                 VectorSupport.StoreVectorOperation<MemorySegmentProxy, V> defaultImpl) {
435         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
436         if (msp.maxAlignMask() > 1) {
437             throw new IllegalArgumentException();
438         }
439 
440         try {
441             storeIntoMemorySegmentScopedInternal(
442                     msp.scope(),
443                     vmClass, e, length,
444                     v,
445                     msp, offset,
446                     defaultImpl);
447         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
448             throw new IllegalStateException("This segment is already closed");
449         }
450     }
451 
// Scoped worker: unconditional liveness check, then the VectorSupport.store
// intrinsic against the segment's unsafe base/offset; scope fenced until done.
452     @Scoped
453     @ForceInline
454     private static
455     <V extends VectorSupport.Vector<E>, E>
456     void storeIntoMemorySegmentScopedInternal(ScopedMemoryAccess.Scope scope,
457                                               Class<? extends V> vmClass, Class<E> e, int length,
458                                               V v,
459                                               MemorySegmentProxy msp, long offset,
460                                               VectorSupport.StoreVectorOperation<MemorySegmentProxy, V> defaultImpl) {
461         try {
462             scope.checkValidState();




463 
464             VectorSupport.store(vmClass, e, length,
465                     msp.unsafeGetBase(), msp.unsafeGetOffset() + offset,
466                     v,
467                     msp, offset,
468                     defaultImpl);
469         } finally {
470             Reference.reachabilityFence(scope);
471         }
472     }
473 
// Public masked vector store into a MemorySegment: only lanes set in mask 'm'
// are written. Same alignment guard and error translation as the unmasked store.
474     @ForceInline
475     public static
476     <V extends VectorSupport.Vector<E>, E, M extends VectorSupport.VectorMask<E>>
477     void storeIntoMemorySegmentMasked(Class<? extends V> vmClass, Class<M> maskClass, Class<E> e,
478                                       int length, V v, M m,
479                                       MemorySegmentProxy msp, long offset,
480                                       VectorSupport.StoreVectorMaskedOperation<MemorySegmentProxy, V, M> defaultImpl) {
481         // @@@ Smarter alignment checking if accessing heap segment backing non-byte[] array
482         if (msp.maxAlignMask() > 1) {
483             throw new IllegalArgumentException();
484         }
485 
486         try {
487             storeIntoMemorySegmentMaskedScopedInternal(
488                     msp.scope(),
489                     vmClass, maskClass, e, length,
490                     v, m,
491                     msp, offset,
492                     defaultImpl);
493         } catch (ScopedMemoryAccess.Scope.ScopedAccessError ex) {
494             throw new IllegalStateException("This segment is already closed");
495         }
496     }
497 
// Scoped worker for the masked segment store: unconditional liveness check,
// then the VectorSupport.storeMasked intrinsic; scope fenced until completion.
498     @Scoped
499     @ForceInline
500     private static
501     <V extends VectorSupport.Vector<E>, E, M extends VectorSupport.VectorMask<E>>
502     void storeIntoMemorySegmentMaskedScopedInternal(ScopedMemoryAccess.Scope scope,
503                                                     Class<? extends V> vmClass, Class<M> maskClass,
504                                                     Class<E> e, int length, V v, M m,
505                                                     MemorySegmentProxy msp, long offset,
506                                                     VectorSupport.StoreVectorMaskedOperation<MemorySegmentProxy, V, M> defaultImpl) {
507         try {
508             scope.checkValidState();


509 
510             VectorSupport.storeMasked(vmClass, maskClass, e, length,
511                     msp.unsafeGetBase(), msp.unsafeGetOffset() + offset,
512                     v, m,
513                     msp, offset,
514                     defaultImpl);
515         } finally {
516             Reference.reachabilityFence(scope);
517         }
518     }
519 
520     // typed-ops here
521 
522     // Note: all the accessor methods defined below take advantage of argument type profiling
523     // (see src/hotspot/share/oops/methodData.cpp) which greatly enhances performance when the same accessor
524     // method is used repeatedly with different 'base' objects.
< prev index next >