1 /*
   2  * Copyright (c) 2017, 2025, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_OOPS_ACCESSBACKEND_HPP
  26 #define SHARE_OOPS_ACCESSBACKEND_HPP
  27 
  28 #include "cppstdlib/type_traits.hpp"
  29 #include "gc/shared/barrierSetConfig.hpp"
  30 #include "memory/allocation.hpp"
  31 #include "metaprogramming/enableIf.hpp"
  32 #include "oops/accessDecorators.hpp"
  33 #include "oops/inlineKlass.hpp"
  34 #include "oops/oopsHierarchy.hpp"
  35 #include "runtime/globals.hpp"
  36 #include "utilities/debug.hpp"
  37 #include "utilities/globalDefinitions.hpp"
  38 
// This metafunction returns either oop or narrowOop depending on whether
// an access needs to use compressed oops or not. narrowOop is selected only
// when both INTERNAL_CONVERT_COMPRESSED_OOP (the access may need conversion)
// and INTERNAL_RT_USE_COMPRESSED_OOPS (the runtime uses compressed oops) are set.
template <DecoratorSet decorators>
struct HeapOopType: AllStatic {
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value &&
                                         HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  using type = std::conditional_t<needs_oop_compress, narrowOop, oop>;
};
  47 
// This metafunction returns either oop or narrowOop depending on whether
// a back-end needs to consider compressed oops types or not. Unlike HeapOopType,
// only INTERNAL_RT_USE_COMPRESSED_OOPS is consulted.
template <DecoratorSet decorators>
struct ValueOopType: AllStatic {
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  using type = std::conditional_t<needs_oop_compress, narrowOop, oop>;
};
  55 
namespace AccessInternal {
  // The distinct access primitives that can be runtime-dispatched. The *_AT
  // variants address a field at (base oop + byte offset); the other variants
  // operate on a raw address.
  enum BarrierType {
    BARRIER_STORE,
    BARRIER_STORE_AT,
    BARRIER_LOAD,
    BARRIER_LOAD_AT,
    BARRIER_ATOMIC_CMPXCHG,
    BARRIER_ATOMIC_CMPXCHG_AT,
    BARRIER_ATOMIC_XCHG,
    BARRIER_ATOMIC_XCHG_AT,
    BARRIER_ARRAYCOPY,
    BARRIER_CLONE,
    BARRIER_VALUE_COPY
  };

  // True iff the access passes oop values (INTERNAL_VALUE_IS_OOP), the heap
  // representation for this access is narrowOop, and the value type seen by
  // the caller is oop - i.e. the backend must convert between oop and narrowOop.
  template <DecoratorSet decorators, typename T>
  struct MustConvertCompressedOop: public std::integral_constant<bool,
    HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value &&
    std::is_same<typename HeapOopType<decorators>::type, narrowOop>::value &&
    std::is_same<T, oop>::value> {};

  // This metafunction returns an appropriate oop type if the value is oop-like
  // and otherwise returns the same type T.
  template <DecoratorSet decorators, typename T>
  struct EncodedType: AllStatic {
    using type = std::conditional_t<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                    typename HeapOopType<decorators>::type,
                                    T>;
  };

  // Computes the address of the oop field at byte_offset in base, typed as
  // oop* or narrowOop* according to the decorators.
  template <DecoratorSet decorators>
  inline typename HeapOopType<decorators>::type*
  oop_field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<typename HeapOopType<decorators>::type*>(
             reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }

  // Function pointer signatures for each runtime-dispatched access primitive.
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef T (*load_at_func_t)(oop base, ptrdiff_t offset);
    typedef void (*store_at_func_t)(oop base, ptrdiff_t offset, T value);
    typedef T (*atomic_cmpxchg_at_func_t)(oop base, ptrdiff_t offset, T compare_value, T new_value);
    typedef T (*atomic_xchg_at_func_t)(oop base, ptrdiff_t offset, T new_value);

    typedef T (*load_func_t)(void* addr);
    typedef void (*store_func_t)(void* addr, T value);
    typedef T (*atomic_cmpxchg_func_t)(void* addr, T compare_value, T new_value);
    typedef T (*atomic_xchg_func_t)(void* addr, T new_value);

    typedef void (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length);
    typedef void (*clone_func_t)(oop src, oop dst, size_t size);
    typedef void (*value_copy_func_t)(void* src, void* dst, InlineKlass* md, LayoutKind lk);
  };

  // With a type-erased element type (T = void) only arraycopy is expressible.
  template <DecoratorSet decorators>
  struct AccessFunctionTypes<decorators, void> {
    typedef void (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, void* src,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, void* dst,
                                     size_t length);
  };

  // Selects the function pointer type matching a (decorators, T, barrier) triple;
  // the specializations are generated by the macro below.
  template <DecoratorSet decorators, typename T, BarrierType barrier> struct AccessFunction {};

#define ACCESS_GENERATE_ACCESS_FUNCTION(bt, func)                   \
  template <DecoratorSet decorators, typename T>                    \
  struct AccessFunction<decorators, T, bt>: AllStatic{              \
    typedef typename AccessFunctionTypes<decorators, T>::func type; \
  }
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE, store_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE_AT, store_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD, load_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD_AT, load_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG, atomic_cmpxchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG_AT, atomic_cmpxchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG, atomic_xchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG_AT, atomic_xchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ARRAYCOPY, arraycopy_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_CLONE, clone_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_VALUE_COPY, value_copy_func_t);
#undef ACCESS_GENERATE_ACCESS_FUNCTION

  // Resolve the accessor function for a given access; the oop variant handles
  // oop-valued accesses. Declared here, defined out of line.
  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_barrier();

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_oop_barrier();

  void* field_addr(oop base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently access is included.
  void arraycopy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void arraycopy_conjoint_oops(oop* src, oop* dst, size_t length);
  void arraycopy_conjoint_oops(narrowOop* src, narrowOop* dst, size_t length);

  void arraycopy_disjoint_words(void* src, void* dst, size_t length);
  void arraycopy_disjoint_words_atomic(void* src, void* dst, size_t length);

  template<typename T>
  void arraycopy_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_arrayof_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_conjoint_atomic(T* src, T* dst, size_t length);

  void value_copy_internal(void* src, void* dst, size_t length);
}
 165 
// This mask specifies what decorators are relevant for raw accesses. When passing
// accesses to the raw layer, irrelevant decorators are removed by intersecting
// the decorator set with this mask (decorators & RAW_DECORATOR_MASK).
const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
                                        ARRAYCOPY_DECORATOR_MASK | IS_NOT_NULL;
 170 
// The RawAccessBarrier performs raw accesses with additional knowledge of
// memory ordering, so that OrderAccess/Atomic is called when necessary.
// It additionally handles compressed oops, and hence is not completely "raw"
// strictly speaking.
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  // Resolves (base, byte_offset) to the raw address of the field.
  static inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  // Only encode if INTERNAL_VALUE_IS_OOP and compression is needed
  // (oop -> narrowOop); declared here, defined out of line.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value,
    typename HeapOopType<idecorators>::type>::type
  encode_internal(T value);

  // No conversion needed: the value is passed through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  encode_internal(T value) {
    return value;
  }

  // Encode a value into its in-heap representation for this decorator set.
  template <typename T>
  static inline typename AccessInternal::EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  // Only decode if INTERNAL_VALUE_IS_OOP and decompression is needed
  // (narrowOop -> oop); declared here, defined out of line.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(typename HeapOopType<idecorators>::type value);

  // No conversion needed: the value is passed through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(T value) {
    return value;
  }

  // Decode a value from its in-heap representation for this decorator set.
  template <typename T>
  static inline T decode(typename AccessInternal::EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

protected:
  // Memory-ordered load variants, selected via EnableIf on the MO_* decorator.
  // Only the MO_UNORDERED variant is defined inline; the ordered variants are
  // declared here and defined out of line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_ACQUIRE>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<T*>(addr);
  }

  // Memory-ordered store variants; mirror the load variants above.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELEASE>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value>::type
  store_internal(void* addr, T value) {
    *reinterpret_cast<T*>(addr) = value;
  }

  // Compare-and-exchange; only MO_SEQ_CST and MO_RELAXED variants exist here.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  // Exchange; only an MO_SEQ_CST variant exists here.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_xchg_internal(void* addr, T new_value);

public:
  // Primitive accesses on a raw address; memory ordering comes from the
  // decorator set of this barrier.
  template <typename T>
  static inline void store(void* addr, T value) {
    store_internal<decorators>(addr, value);
  }

  template <typename T>
  static inline T load(void* addr) {
    return load_internal<decorators, T>(addr);
  }

  template <typename T>
  static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
    return atomic_cmpxchg_internal<decorators>(addr, compare_value, new_value);
  }

  template <typename T>
  static inline T atomic_xchg(void* addr, T new_value) {
    return atomic_xchg_internal<decorators>(addr, new_value);
  }

  template <typename T>
  static void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length);

  // oop accesses; these additionally encode/decode compressed oops as needed.
  // Declared here, defined out of line.
  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(oop base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(oop base, ptrdiff_t offset);

  template <typename T>
  static T oop_atomic_cmpxchg(void* addr, T compare_value, T new_value);
  template <typename T>
  static T oop_atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value);

  template <typename T>
  static T oop_atomic_xchg(void* addr, T new_value);
  template <typename T>
  static T oop_atomic_xchg_at(oop base, ptrdiff_t offset, T new_value);

  // *_at variants of the primitive accesses: resolve the field address and
  // delegate to the raw-address accesses above.
  template <typename T>
  static void store_at(oop base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load_at(oop base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    return atomic_cmpxchg(field_addr(base, offset), compare_value, new_value);
  }

  template <typename T>
  static T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    return atomic_xchg(field_addr(base, offset), new_value);
  }

  template <typename T>
  static void oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                            arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                            size_t length);

  static void clone(oop src, oop dst, size_t size);
  static void value_copy(void* src, void* dst, InlineKlass* md, LayoutKind lk);

};
 356 
namespace AccessInternal {
  // Debug-only (DEBUG_ONLY) check of the current thread's state before an
  // access; the assert_access_thread_state() macro wraps it so call sites
  // compile away outside debug builds.
  DEBUG_ONLY(void check_access_thread_state());
#define assert_access_thread_state() DEBUG_ONLY(check_access_thread_state())
}
 361 
 362 // Below is the implementation of the first 4 steps of the template pipeline:
 363 // * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
 364 //           and sets default decorators to sensible values.
 365 // * Step 2: Reduce types. This step makes sure there is only a single T type and not
 366 //           multiple types. The P type of the address and T type of the value must
 367 //           match.
 368 // * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
 369 //           avoided, and in that case avoids it (calling raw accesses or
 370 //           primitive accesses in a build that does not require primitive GC barriers)
 371 // * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
 372 //           BarrierSet::AccessBarrier accessor that attaches GC-required barriers
 373 //           to the access.
 374 
 375 namespace AccessInternal {
  // Canonicalization helper: any type maps to oop by default...
  template <typename T>
  struct OopOrNarrowOopInternal: AllStatic {
    typedef oop type;
  };

  // ...except narrowOop, which maps to itself.
  template <>
  struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
    typedef narrowOop type;
  };
 385 
  // This metafunction returns a canonicalized oop/narrowOop type for oop-like
  // types passed in from oop_* overloads where the user has sworn
  // that the passed in values should be oop-like (e.g. oop, oopDesc*, arrayOop,
  // narrowOop, instanceOopDesc*, and random other things).
  // In the oop_* overloads, it must hold that if the passed in type T is not
  // narrowOop, then it by contract has to be one of many oop-like types implicitly
  // convertible to oop, and hence returns oop as the canonical oop type.
  // If it turns out it was not, then the implicit conversion to oop will fail
  // to compile, as desired.
  template <typename T>
  struct OopOrNarrowOop: AllStatic {
    typedef typename OopOrNarrowOopInternal<std::decay_t<T>>::type type;
  };
 399 
 400   inline void* field_addr(oop base, ptrdiff_t byte_offset) {
 401     return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
 402   }
  // Step 4: Runtime dispatch
  // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
  // accessor. This is required when the access either depends on whether compressed oops
  // is being used, or it depends on which GC implementation was chosen (e.g. requires GC
  // barriers). The way it works is that a function pointer initially pointing to an
  // accessor resolution function gets called for each access. Upon first invocation,
  // it resolves which accessor to be used in future invocations and patches the
  // function pointer to this new accessor.

  // Primary template; only the per-BarrierType specializations below are used.
  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};
 414 
  // Runtime dispatch point for raw-address stores. _store_func initially
  // points to store_init, which resolves the proper accessor and patches the
  // pointer so later calls go directly to the resolved accessor.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    static func_t _store_func;

    static void store_init(void* addr, T value);

    static inline void store(void* addr, T value) {
      assert_access_thread_state();
      _store_func(addr, value);
    }
  };
 427 
  // Runtime dispatch point for stores at (base oop + offset). _store_at_func
  // initially points to store_at_init, which resolves the proper accessor and
  // patches the pointer so later calls go directly to the resolved accessor.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    static func_t _store_at_func;

    static void store_at_init(oop base, ptrdiff_t offset, T value);

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      assert_access_thread_state();
      _store_at_func(base, offset, value);
    }
  };
 440 
  // Runtime dispatch point for raw-address loads. _load_func initially points
  // to load_init, which resolves the proper accessor and patches the pointer
  // so later calls go directly to the resolved accessor.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    static func_t _load_func;

    static T load_init(void* addr);

    static inline T load(void* addr) {
      assert_access_thread_state();
      return _load_func(addr);
    }
  };
 453 
  // Runtime dispatch point for loads at (base oop + offset). _load_at_func
  // initially points to load_at_init, which resolves the proper accessor and
  // patches the pointer so later calls go directly to the resolved accessor.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    static func_t _load_at_func;

    static T load_at_init(oop base, ptrdiff_t offset);

    static inline T load_at(oop base, ptrdiff_t offset) {
      assert_access_thread_state();
      return _load_at_func(base, offset);
    }
  };
 466 
  // Runtime dispatch point for raw-address compare-and-exchange.
  // _atomic_cmpxchg_func initially points to atomic_cmpxchg_init, which
  // resolves the proper accessor and patches the pointer.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    static func_t _atomic_cmpxchg_func;

    static T atomic_cmpxchg_init(void* addr, T compare_value, T new_value);

    static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      assert_access_thread_state();
      return _atomic_cmpxchg_func(addr, compare_value, new_value);
    }
  };
 479 
  // Runtime dispatch point for compare-and-exchange at (base oop + offset).
  // _atomic_cmpxchg_at_func initially points to atomic_cmpxchg_at_init, which
  // resolves the proper accessor and patches the pointer.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    static func_t _atomic_cmpxchg_at_func;

    static T atomic_cmpxchg_at_init(oop base, ptrdiff_t offset, T compare_value, T new_value);

    static inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      assert_access_thread_state();
      return _atomic_cmpxchg_at_func(base, offset, compare_value, new_value);
    }
  };
 492 
  // Runtime dispatch point for raw-address atomic exchange. _atomic_xchg_func
  // initially points to atomic_xchg_init, which resolves the proper accessor
  // and patches the pointer.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    static func_t _atomic_xchg_func;

    static T atomic_xchg_init(void* addr, T new_value);

    static inline T atomic_xchg(void* addr, T new_value) {
      assert_access_thread_state();
      return _atomic_xchg_func(addr, new_value);
    }
  };
 505 
  // Runtime dispatch point for atomic exchange at (base oop + offset).
  // _atomic_xchg_at_func initially points to atomic_xchg_at_init, which
  // resolves the proper accessor and patches the pointer.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    static func_t _atomic_xchg_at_func;

    static T atomic_xchg_at_init(oop base, ptrdiff_t offset, T new_value);

    static inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      assert_access_thread_state();
      return _atomic_xchg_at_func(base, offset, new_value);
    }
  };
 518 
  // Runtime dispatch point for arraycopy. _arraycopy_func initially points to
  // arraycopy_init, which resolves the proper accessor and patches the pointer.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    static func_t _arraycopy_func;

    static void arraycopy_init(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                               arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                               size_t length);

    static inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                 size_t length) {
      assert_access_thread_state();
      return _arraycopy_func(src_obj, src_offset_in_bytes, src_raw,
                             dst_obj, dst_offset_in_bytes, dst_raw,
                             length);
    }
  };
 537 
  // Runtime dispatch point for object cloning. _clone_func initially points to
  // clone_init, which resolves the proper accessor and patches the pointer.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    static func_t _clone_func;

    static void clone_init(oop src, oop dst, size_t size);

    static inline void clone(oop src, oop dst, size_t size) {
      assert_access_thread_state();
      _clone_func(src, dst, size);
    }
  };
 550 
 551   template <DecoratorSet decorators, typename T>
 552   struct RuntimeDispatch<decorators, T, BARRIER_VALUE_COPY>: AllStatic {
 553     typedef typename AccessFunction<decorators, T, BARRIER_VALUE_COPY>::type func_t;
 554     static func_t _value_copy_func;
 555 
 556     static void value_copy_init(void* src, void* dst, InlineKlass* md, LayoutKind lk);
 557 
 558     static inline void value_copy(void* src, void* dst, InlineKlass* md, LayoutKind lk) {
 559       _value_copy_func(src, dst, md, lk);
 560     }
 561   };
 562 
  // Initialize the function pointers to point to the resolving function, so
  // the first access through each dispatch point resolves the real accessor
  // and patches the pointer for subsequent calls.
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_VALUE_COPY>::type
  RuntimeDispatch<decorators, T, BARRIER_VALUE_COPY>::_value_copy_func = &value_copy_init;
 607 
 608   // Step 3: Pre-runtime dispatching.
 609   // The PreRuntimeDispatch class is responsible for filtering the barrier strength
 610   // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
 611   // dispatch point. Otherwise it goes through a runtime check if hardwiring was
 612   // not possible.
 613   struct PreRuntimeDispatch: AllStatic {
    // A raw access can be hardwired (no runtime compressed-oops check needed)
    // unless it is an oop access that may need oop/narrowOop conversion and it
    // is not yet known whether compressed oops is in use.
    template<DecoratorSet decorators>
    struct CanHardwireRaw: public std::integral_constant<
      bool,
      !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
      !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
      HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
    {};

    // Decorators toggled when expanding an access after the runtime
    // UseCompressedOops check.
    static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;
 623 
    // Primitive (non-oop) accesses can always be hardwired to raw accesses.
    template<DecoratorSet decorators>
    static bool is_hardwired_primitive() {
      return !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
    }
 628 
    // AS_RAW store that can be hardwired: go straight to RawAccessBarrier,
    // with decorators narrowed to those relevant for raw accesses.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_store(addr, value);
      } else {
        Raw::store(addr, value);
      }
    }
 640 
    // AS_RAW store that cannot be hardwired yet: check UseCompressedOops at
    // runtime and re-dispatch with the compressed-oops decorators fixed up.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      }
    }
 653 
    // Non-raw store: hardwire primitive accesses to raw; otherwise go through
    // the runtime dispatch point (GC barriers may be required).
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store(void* addr, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
      }
    }
 665 
    // AS_RAW store_at: resolve the field address and delegate to store().
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      store<decorators>(field_addr(base, offset), value);
    }
 672 
    // Non-raw store_at: hardwire primitive accesses to raw; otherwise go
    // through the runtime dispatch point.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
      }
    }
 684 
    // AS_RAW load that can be hardwired: go straight to RawAccessBarrier,
    // with decorators narrowed to those relevant for raw accesses.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::template oop_load<T>(addr);
      } else {
        return Raw::template load<T>(addr);
      }
    }
 696 
 697     template <DecoratorSet decorators, typename T>
 698     inline static typename EnableIf<
 699       HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
 700     load(void* addr) {
 701       if (UseCompressedOops) {
 702         const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
 703         return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
 704       } else {
 705         const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
 706         return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
 707       }
 708     }
 709 
    // Non-raw load: hardwired primitives retry with AS_RAW; otherwise
    // dispatch to the runtime BARRIER_LOAD barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load(void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
      }
    }
 721 
    // Raw load_at: resolve the field address from base + offset and
    // delegate to the address-based raw load.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }
 728 
    // Non-raw load_at: hardwired primitives retry with AS_RAW; otherwise
    // dispatch to the runtime BARRIER_LOAD_AT barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }
 740 
    // Raw CAS, compressed-oop mode statically known: select the raw barrier
    // directly; oop values take the oop-aware variant.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(addr, compare_value, new_value);
      } else {
        return Raw::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }
 752 
 753     template <DecoratorSet decorators, typename T>
 754     inline static typename EnableIf<
 755       HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
 756     atomic_cmpxchg(void* addr, T compare_value, T new_value) {
 757       if (UseCompressedOops) {
 758         const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
 759         return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
 760       } else {
 761         const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
 762         return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
 763       }
 764     }
 765 
    // Non-raw CAS: hardwired primitives retry with AS_RAW; otherwise
    // dispatch to the runtime BARRIER_ATOMIC_CMPXCHG barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }
 777 
    // Raw CAS-at: resolve the field address from base + offset and delegate
    // to the address-based raw CAS.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      return atomic_cmpxchg<decorators>(field_addr(base, offset), compare_value, new_value);
    }
 784 
    // Non-raw CAS-at: hardwired primitives retry with AS_RAW; otherwise
    // dispatch to the runtime BARRIER_ATOMIC_CMPXCHG_AT barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(base, offset, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(base, offset, compare_value, new_value);
      }
    }
 796 
    // Raw exchange, compressed-oop mode statically known: select the raw
    // barrier directly; oop values take the oop-aware variant.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(addr, new_value);
      } else {
        return Raw::atomic_xchg(addr, new_value);
      }
    }
 808 
 809     template <DecoratorSet decorators, typename T>
 810     inline static typename EnableIf<
 811       HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
 812     atomic_xchg(void* addr, T new_value) {
 813       if (UseCompressedOops) {
 814         const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
 815         return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
 816       } else {
 817         const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
 818         return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
 819       }
 820     }
 821 
    // Non-raw exchange: hardwired primitives retry with AS_RAW; otherwise
    // dispatch to the runtime BARRIER_ATOMIC_XCHG barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(addr, new_value);
      }
    }
 833 
    // Raw exchange-at: resolve the field address from base + offset and
    // delegate to the address-based raw exchange.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      return atomic_xchg<decorators>(field_addr(base, offset), new_value);
    }
 840 
 841     template <DecoratorSet decorators, typename T>
 842     inline static typename EnableIf<
 843       !HasDecorator<decorators, AS_RAW>::value, T>::type
 844     atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
 845       if (is_hardwired_primitive<decorators>()) {
 846         const DecoratorSet expanded_decorators = decorators | AS_RAW;
 847         return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(base, offset, new_value);
 848       } else {
 849         return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(base, offset, new_value);
 850       }
 851     }
 852 
    // Raw arraycopy, compressed-oop mode statically known: select the raw
    // barrier directly; oop elements take the oop-aware variant.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_arraycopy(src_obj, src_offset_in_bytes, src_raw,
                           dst_obj, dst_offset_in_bytes, dst_raw,
                           length);
      } else {
        Raw::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                       dst_obj, dst_offset_in_bytes, dst_raw,
                       length);
      }
    }
 870 
    // Raw arraycopy when compressed-oop mode is only known at runtime:
    // consult UseCompressedOops and re-dispatch with the conversion
    // decorator pinned down one way or the other.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      }
    }
 889 
    // Non-raw arraycopy: hardwired primitives retry with AS_RAW; otherwise
    // dispatch to the runtime BARRIER_ARRAYCOPY barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                                                     dst_obj, dst_offset_in_bytes, dst_raw,
                                                                     length);
      }
    }
 907 
    // Raw clone: go straight to the raw barrier's clone.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }
 915 
    // Non-raw clone: always dispatch to the runtime BARRIER_CLONE barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
    }
 922 
    // Raw value_copy (inline/value types): go straight to the raw barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    value_copy(void* src, void* dst, InlineKlass* md, LayoutKind lk) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::value_copy(src, dst, md, lk);
    }
 930 
 931     template <DecoratorSet decorators>
 932     inline static typename EnableIf<
 933       !HasDecorator<decorators, AS_RAW>::value>::type
 934       value_copy(void* src, void* dst, InlineKlass* md, LayoutKind lk) {
 935       const DecoratorSet expanded_decorators = decorators;
 936       RuntimeDispatch<expanded_decorators, void*, BARRIER_VALUE_COPY>::value_copy(src, dst, md, lk);
 937     }
 938   };
 939 
 940   // Step 2: Reduce types.
 941   // Enforce that for non-oop types, T and P have to be strictly the same.
 942   // P is the type of the address and T is the type of the values.
  // As for oop types, it is allowed to send T in {narrowOop, oop} and
 944   // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
 945   // the subsequent table. (columns are P, rows are T)
 946   // |           | HeapWord  |   oop   | narrowOop |
 947   // |   oop     |  rt-comp  | hw-none |  hw-comp  |
 948   // | narrowOop |     x     |    x    |  hw-none  |
 949   //
 950   // x means not allowed
 951   // rt-comp means it must be checked at runtime whether the oop is compressed.
 952   // hw-none means it is statically known the oop will not be compressed.
 953   // hw-comp means it is statically known the oop will be compressed.
 954 
  // Identity reduction: address and value types already match, so no
  // compressed-oop conversion decorators are needed.
  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }
 959 
  // oop value into a narrowOop slot ("hw-comp"): statically known to be
  // compressed, so hardwire both conversion decorators.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
 966 
  // narrowOop value into a narrowOop slot: same hardwired compressed-oop
  // decorators as the oop-value overload.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
 973 
  // oop value at a HeapWord* address ("rt-comp"): whether the slot is
  // compressed must be resolved at runtime, so only mark the conversion.
  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
 979 
  // Identity reduction: matching address/value types need no conversion.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T* addr, T compare_value, T new_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(addr, compare_value, new_value);
  }
 984 
  // oop CAS on a narrowOop slot ("hw-comp"): hardwire compression decorators.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(narrowOop* addr, oop compare_value, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
 991 
  // narrowOop CAS on a narrowOop slot: hardwire compression decorators.
  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop* addr, narrowOop compare_value, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
 998 
  // oop CAS at a HeapWord* address ("rt-comp"): compression resolved at
  // runtime, so only the conversion decorator is added.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(HeapWord* addr,
                                         oop compare_value,
                                         oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
1006 
1007   template <DecoratorSet decorators, typename T>
1008   inline T atomic_xchg_reduce_types(T* addr, T new_value) {
1009     const DecoratorSet expanded_decorators = decorators;
1010     return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
1011   }
1012 
  // oop exchange on a narrowOop slot ("hw-comp"): hardwire compression.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(narrowOop* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1019 
  // narrowOop exchange on a narrowOop slot: hardwire compression.
  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop* addr, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1026 
  // oop exchange at a HeapWord* address ("rt-comp"): compression resolved
  // at runtime, so only the conversion decorator is added.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(HeapWord* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1032 
  // Identity reduction: matching address/value types need no conversion.
  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }
1037 
  // Load from a narrowOop slot: hardwire compression; the result type is
  // oop or narrowOop depending on what the caller requested (OopOrNarrowOop).
  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }
1044 
  // oop load at a HeapWord* address ("rt-comp"): compression resolved at
  // runtime, so only the conversion decorator is added.
  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }
1050 
  // Identity reduction: element type already concrete, no conversion needed.
  template <DecoratorSet decorators, typename T>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length) {
    PreRuntimeDispatch::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw,
                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                              length);
  }
1059 
  // HeapWord* element pointers ("rt-comp"): compression resolved at runtime,
  // so only the conversion decorator is added.
  template <DecoratorSet decorators>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, HeapWord* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, HeapWord* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                       dst_obj, dst_offset_in_bytes, dst_raw,
                                                       length);
  }
1069 
  // narrowOop element pointers ("hw-comp"): statically known compressed,
  // so both compression decorators are hardwired.
  template <DecoratorSet decorators>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, narrowOop* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, narrowOop* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                       dst_obj, dst_offset_in_bytes, dst_raw,
                                                       length);
  }
1080 
1081   // Step 1: Set default decorators. This step remembers if a type was volatile
1082   // and then sets the MO_RELAXED decorator by default. Otherwise, a default
1083   // memory ordering is set for the access, and the implied decorator rules
1084   // are applied to select sensible defaults for decorators that have not been
1085   // explicitly set. For example, default object referent strength is set to strong.
1086   // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types). This step also performs some type verification
1088   // that the passed in types make sense.
1089 
  // Compile-time check that T is a legal value type for a primitive access:
  // either the access is an oop access (already validated elsewhere), or T
  // is a pointer, integral, or floating-point type.
  template <DecoratorSet decorators, typename T>
  static void verify_types(){
    // If this fails to compile, then you have sent in something that is
    // not recognized as a valid primitive type to a primitive Access function.
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
                   (std::is_pointer<T>::value || std::is_integral<T>::value) ||
                    std::is_floating_point<T>::value)); // not allowed primitive type
  }
1098 
  // Step 1 entry for store: verify the value type, decay P/T, default to
  // MO_RELAXED for volatile addresses lacking an explicit ordering, then
  // reduce the address/value type combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline void store(P* addr, T value) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::decay_t<T>;
    DecayedT decayed_value = value;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_RELAXED by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (std::is_volatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_RELAXED | decorators) : decorators>::value;
    store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
  }
1112 
  // Step 1 entry for store_at: verify and decay the value type; oop accesses
  // additionally remember that compressed-oop conversion may be needed.
  template <DecoratorSet decorators, typename T>
  inline void store_at(oop base, ptrdiff_t offset, T value) {
    verify_types<decorators, T>();
    using DecayedT = std::decay_t<T>;
    DecayedT decayed_value = value;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
  }
1123 
  // Step 1 entry for load: verify types, pick the loaded type (oop accesses
  // may yield oop or narrowOop via OopOrNarrowOop), default volatile reads
  // to MO_RELAXED, then reduce the address/value type combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline T load(P* addr) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::conditional_t<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                        typename OopOrNarrowOop<T>::type,
                                        std::decay_t<T>>;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_RELAXED by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (std::is_volatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_RELAXED | decorators) : decorators>::value;
    return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
  }
1138 
  // Step 1 entry for load_at: verify types, pick the loaded type, and for
  // oop accesses remember that compressed-oop conversion may be needed.
  template <DecoratorSet decorators, typename T>
  inline T load_at(oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    using DecayedT = std::conditional_t<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                        typename OopOrNarrowOop<T>::type,
                                        std::decay_t<T>>;
    // Expand the decorators (figure out sensible defaults)
    // Potentially remember if we need compressed oop awareness
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
  }
1152 
  // Step 1 entry for atomic_cmpxchg: verify and decay types, default the
  // memory ordering to MO_SEQ_CST when none was specified, then reduce the
  // address/value type combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(P* addr, T compare_value, T new_value) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                            compare_decayed_value,
                                                            new_decayed_value);
  }
1167 
  // Step 1 entry for atomic_cmpxchg_at: verify and decay types, default the
  // ordering to MO_SEQ_CST, and for oop accesses remember that compressed-oop
  // conversion may be needed.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    verify_types<decorators, T>();
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(base, offset, compare_decayed_value,
                                                                   new_decayed_value);
  }
1185 
  // Step 1 entry for atomic_xchg: verify and decay types; the operation is
  // unconditionally MO_SEQ_CST, then reduce the address/value combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(P* addr, T new_value) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                         new_decayed_value);
  }
1197 
  // Step 1 entry for atomic_xchg_at: verify and decay the value type; the
  // operation is unconditionally MO_SEQ_CST, and oop accesses remember that
  // compressed-oop conversion may be needed.
  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    verify_types<decorators, T>();
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(base, offset, new_decayed_value);
  }
1209 
  // Step 1 entry for arraycopy: validate the element type (oop accesses, or
  // void/integral/floating-point elements — void allows type-erased copies),
  // add IS_ARRAY | IN_HEAP, and reduce the element-pointer type combination.
  template <DecoratorSet decorators, typename T>
  inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (std::is_same<T, void>::value || std::is_integral<T>::value) ||
                    std::is_floating_point<T>::value)); // arraycopy allows type erased void elements
    using DecayedT = std::decay_t<T>;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IS_ARRAY | IN_HEAP>::value;
    arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<DecayedT*>(src_raw),
                                                dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                length);
  }
1223 
  // Step 1 entry for clone: apply the decorator defaults and dispatch.
  template <DecoratorSet decorators>
  inline void clone(oop src, oop dst, size_t size) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
  }
1229 
  // Step 1 entry for value_copy: apply the decorator defaults and dispatch.
  template <DecoratorSet decorators>
  inline void value_copy(void* src, void* dst, InlineKlass* md, LayoutKind lk) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::value_copy<expanded_decorators>(src, dst, md, lk);
  }
1235 
1236   // Infer the type that should be returned from an Access::oop_load.
  // Proxy returned by oop loads from a raw address: holds the address and
  // defers the actual load until the result is converted to oop or narrowOop
  // (or compared), so the loaded type matches what the caller asked for.
  template <typename P, DecoratorSet decorators>
  class OopLoadProxy: public StackObj {
  private:
    P *const _addr;   // the address to load from; the load itself is deferred
  public:
    explicit OopLoadProxy(P* addr) : _addr(addr) {}

    // Load as an uncompressed oop.
    inline operator oop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr);
    }

    // Load as a compressed oop.
    inline operator narrowOop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, narrowOop>(_addr);
    }

    // Comparisons perform the load with the comparand's type.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) != other;
    }

    // nullptr comparisons load as an uncompressed oop.
    inline bool operator ==(std::nullptr_t) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr) == nullptr;
    }

    inline bool operator !=(std::nullptr_t) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr) != nullptr;
    }
  };
1270 
1271   // Infer the type that should be returned from an Access::load_at.
  // Proxy returned by field loads: remembers (base, offset) and defers the
  // load until the result is converted to the requested type T.
  template <DecoratorSet decorators>
  class LoadAtProxy: public StackObj {
  private:
    const oop _base;           // object holding the field
    const ptrdiff_t _offset;   // byte offset of the field within the object
  public:
    LoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    // The conversion target type selects the loaded type.
    template <typename T>
    inline operator T() const {
      return load_at<decorators, T>(_base, _offset);
    }

    template <typename T>
    inline bool operator ==(const T& other) const { return load_at<decorators, T>(_base, _offset) == other; }

    template <typename T>
    inline bool operator !=(const T& other) const { return load_at<decorators, T>(_base, _offset) != other; }
  };
1291 
1292   // Infer the type that should be returned from an Access::oop_load_at.
  // Proxy returned by oop field loads: remembers (base, offset) and defers
  // the load, tagged INTERNAL_VALUE_IS_OOP, until the result is converted to
  // oop or narrowOop (or compared).
  template <DecoratorSet decorators>
  class OopLoadAtProxy: public StackObj {
  private:
    const oop _base;           // object holding the field
    const ptrdiff_t _offset;   // byte offset of the field within the object
  public:
    OopLoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    // Load as an uncompressed oop.
    inline operator oop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, oop>(_base, _offset);
    }

    // Load as a compressed oop.
    inline operator narrowOop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, narrowOop>(_base, _offset);
    }

    // Comparisons perform the load with the comparand's type.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) != other;
    }
  };
1319 }
1320 
1321 #endif // SHARE_OOPS_ACCESSBACKEND_HPP