1 /*
   2  * Copyright (c) 2017, 2022, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_OOPS_ACCESSBACKEND_HPP
  26 #define SHARE_OOPS_ACCESSBACKEND_HPP
  27 
  28 #include "gc/shared/barrierSetConfig.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "metaprogramming/enableIf.hpp"
  31 #include "oops/accessDecorators.hpp"
  32 #include "oops/oopsHierarchy.hpp"
  33 #include "runtime/globals.hpp"
  34 #include "utilities/debug.hpp"
  35 #include "utilities/globalDefinitions.hpp"
  36 
  37 #include <type_traits>
  38 
  39 // This metafunction returns either oop or narrowOop depending on whether
  40 // an access needs to use compressed oops or not.
  41 template <DecoratorSet decorators>
  42 struct HeapOopType: AllStatic {
  43   static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value &&
  44                                          HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  45   using type = std::conditional_t<needs_oop_compress, narrowOop, oop>;
  46 };
  47 
  48 // This meta-function returns either oop or narrowOop depending on whether
  49 // a back-end needs to consider compressed oops types or not.
  50 template <DecoratorSet decorators>
  51 struct ValueOopType: AllStatic {
  52   static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  53   using type = std::conditional_t<needs_oop_compress, narrowOop, oop>;
  54 };
  55 
namespace AccessInternal {
  // Enumerates the barrier entry points that can be runtime-dispatched.
  // The *_AT variants address a field as a (base oop, byte offset) pair;
  // the plain variants operate on a raw address.
  enum BarrierType {
    BARRIER_STORE,
    BARRIER_STORE_AT,
    BARRIER_LOAD,
    BARRIER_LOAD_AT,
    BARRIER_ATOMIC_CMPXCHG,
    BARRIER_ATOMIC_CMPXCHG_AT,
    BARRIER_ATOMIC_XCHG,
    BARRIER_ATOMIC_XCHG_AT,
    BARRIER_ARRAYCOPY,
    BARRIER_CLONE,
    BARRIER_VALUE_COPY
  };

  // True iff the access must convert between oop and narrowOop: the value
  // is an oop, the in-heap slot type (per HeapOopType) is narrowOop, and
  // the passed-in value type T is the uncompressed oop.
  template <DecoratorSet decorators, typename T>
  struct MustConvertCompressedOop: public std::integral_constant<bool,
    HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value &&
    std::is_same<typename HeapOopType<decorators>::type, narrowOop>::value &&
    std::is_same<T, oop>::value> {};

  // This metafunction returns an appropriate oop type if the value is oop-like
  // and otherwise returns the same type T.
  template <DecoratorSet decorators, typename T>
  struct EncodedType: AllStatic {
    using type = std::conditional_t<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                    typename HeapOopType<decorators>::type,
                                    T>;
  };

  // Computes the address of the (possibly compressed) oop field at
  // byte_offset inside base, typed according to the decorators.
  template <DecoratorSet decorators>
  inline typename HeapOopType<decorators>::type*
  oop_field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<typename HeapOopType<decorators>::type*>(
             reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }

  // Function pointer signatures for each barrier kind; used by the
  // runtime-dispatch machinery (RuntimeDispatch) below.
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef T (*load_at_func_t)(oop base, ptrdiff_t offset);
    typedef void (*store_at_func_t)(oop base, ptrdiff_t offset, T value);
    typedef T (*atomic_cmpxchg_at_func_t)(oop base, ptrdiff_t offset, T compare_value, T new_value);
    typedef T (*atomic_xchg_at_func_t)(oop base, ptrdiff_t offset, T new_value);

    typedef T (*load_func_t)(void* addr);
    typedef void (*store_func_t)(void* addr, T value);
    typedef T (*atomic_cmpxchg_func_t)(void* addr, T compare_value, T new_value);
    typedef T (*atomic_xchg_func_t)(void* addr, T new_value);

    typedef void (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length);
    typedef void (*clone_func_t)(oop src, oop dst, size_t size);
    typedef void (*value_copy_func_t)(void* src, void* dst, InlineKlass* md);
  };

  // Specialization for T = void: only the type-erased arraycopy signature
  // is meaningful without a concrete element type.
  template <DecoratorSet decorators>
  struct AccessFunctionTypes<decorators, void> {
    typedef void (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, void* src,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, void* dst,
                                     size_t length);
  };

  // Maps a BarrierType to the matching function pointer type; one
  // specialization per barrier is generated by the macro below.
  template <DecoratorSet decorators, typename T, BarrierType barrier> struct AccessFunction {};

#define ACCESS_GENERATE_ACCESS_FUNCTION(bt, func)                   \
  template <DecoratorSet decorators, typename T>                    \
  struct AccessFunction<decorators, T, bt>: AllStatic{              \
    typedef typename AccessFunctionTypes<decorators, T>::func type; \
  }
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE, store_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE_AT, store_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD, load_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD_AT, load_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG, atomic_cmpxchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG_AT, atomic_cmpxchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG, atomic_xchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG_AT, atomic_xchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ARRAYCOPY, arraycopy_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_CLONE, clone_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_VALUE_COPY, value_copy_func_t);
#undef ACCESS_GENERATE_ACCESS_FUNCTION

  // Resolver functions that pick the concrete accessor for a barrier;
  // declared here, defined out-of-line.
  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_barrier();

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_oop_barrier();

  // Address of the field at byte offset inside base; defined out-of-line.
  void* field_addr(oop base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently included access is.
  void arraycopy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void arraycopy_conjoint_oops(oop* src, oop* dst, size_t length);
  void arraycopy_conjoint_oops(narrowOop* src, narrowOop* dst, size_t length);

  void arraycopy_disjoint_words(void* src, void* dst, size_t length);
  void arraycopy_disjoint_words_atomic(void* src, void* dst, size_t length);

  template<typename T>
  void arraycopy_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_arrayof_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_conjoint_atomic(T* src, T* dst, size_t length);
}
 163 
// This mask specifies what decorators are relevant for raw accesses. When passing
// accesses to the raw layer, irrelevant decorators are removed. (RawAccessBarrier
// is instantiated with decorators & RAW_DECORATOR_MASK below.)
const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
                                        ARRAYCOPY_DECORATOR_MASK | IS_NOT_NULL;
 168 
// The RawAccessBarrier performs raw accesses with additional knowledge of
// memory ordering, so that OrderAccess/Atomic is called when necessary.
// It additionally handles compressed oops, and hence is not completely "raw"
// strictly speaking.
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  // Address of the field at byte_offset inside base.
  static inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  // Only encode if INTERNAL_VALUE_IS_OOP
  // Compressing overload: selected when the oop value must be stored as a
  // narrowOop (see MustConvertCompressedOop); defined out-of-line.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value,
    typename HeapOopType<idecorators>::type>::type
  encode_internal(T value);

  // Identity overload: no compression required, the value passes through.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  encode_internal(T value) {
    return value;
  }

  // Encodes a value into its in-heap representation (per EncodedType).
  template <typename T>
  static inline typename AccessInternal::EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  // Only decode if INTERNAL_VALUE_IS_OOP
  // Decompressing overload: selected when a stored narrowOop must be
  // widened back to oop; defined out-of-line.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(typename HeapOopType<idecorators>::type value);

  // Identity overload: the value passes through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(T value) {
    return value;
  }

  // Decodes an in-heap representation back into T.
  template <typename T>
  static inline T decode(typename AccessInternal::EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

protected:
  // load_internal is overloaded on the memory-ordering decorator; only the
  // MO_UNORDERED flavor is defined inline, the rest are out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_ACQUIRE>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  load_internal(void* addr);

  // MO_UNORDERED: a plain load.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<T*>(addr);
  }

  // store_internal is overloaded likewise on the memory-ordering decorator.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELEASE>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value>::type
  store_internal(void* addr, T value);

  // MO_UNORDERED: a plain store.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value>::type
  store_internal(void* addr, T value) {
    *reinterpret_cast<T*>(addr) = value;
  }

  // Compare-and-exchange; MO_SEQ_CST and MO_RELAXED orderings only.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  // Exchange; MO_SEQ_CST ordering only.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_xchg_internal(void* addr, T new_value);

public:
  // Primitive accesses on raw addresses; the memory-ordering decorator
  // selects the *_internal overload.
  template <typename T>
  static inline void store(void* addr, T value) {
    store_internal<decorators>(addr, value);
  }

  template <typename T>
  static inline T load(void* addr) {
    return load_internal<decorators, T>(addr);
  }

  template <typename T>
  static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
    return atomic_cmpxchg_internal<decorators>(addr, compare_value, new_value);
  }

  template <typename T>
  static inline T atomic_xchg(void* addr, T new_value) {
    return atomic_xchg_internal<decorators>(addr, new_value);
  }

  template <typename T>
  static void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length);

  // Oop flavors of the accesses above; declared here, defined out-of-line.
  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(oop base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(oop base, ptrdiff_t offset);

  template <typename T>
  static T oop_atomic_cmpxchg(void* addr, T compare_value, T new_value);
  template <typename T>
  static T oop_atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value);

  template <typename T>
  static T oop_atomic_xchg(void* addr, T new_value);
  template <typename T>
  static T oop_atomic_xchg_at(oop base, ptrdiff_t offset, T new_value);

  // Field-addressed (base + offset) variants of the primitive accesses.
  template <typename T>
  static void store_at(oop base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load_at(oop base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    return atomic_cmpxchg(field_addr(base, offset), compare_value, new_value);
  }

  template <typename T>
  static T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    return atomic_xchg(field_addr(base, offset), new_value);
  }

  template <typename T>
  static void oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                            arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                            size_t length);

  static void clone(oop src, oop dst, size_t size);
  static void value_copy(void* src, void* dst, InlineKlass* md);

};
 354 
namespace AccessInternal {
  // Debug-only verification of the calling thread's state on each dispatched
  // access; DEBUG_ONLY elides both the declaration and the macro body in
  // non-debug builds.
  DEBUG_ONLY(void check_access_thread_state());
#define assert_access_thread_state() DEBUG_ONLY(check_access_thread_state())
}
 359 
 360 // Below is the implementation of the first 4 steps of the template pipeline:
 361 // * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
 362 //           and sets default decorators to sensible values.
 363 // * Step 2: Reduce types. This step makes sure there is only a single T type and not
 364 //           multiple types. The P type of the address and T type of the value must
 365 //           match.
 366 // * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
 367 //           avoided, and in that case avoids it (calling raw accesses or
 368 //           primitive accesses in a build that does not require primitive GC barriers)
 369 // * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
 370 //           BarrierSet::AccessBarrier accessor that attaches GC-required barriers
 371 //           to the access.
 372 
 373 namespace AccessInternal {
 374   template <typename T>
 375   struct OopOrNarrowOopInternal: AllStatic {
 376     typedef oop type;
 377   };
 378 
 379   template <>
 380   struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
 381     typedef narrowOop type;
 382   };
 383 
 384   // This metafunction returns a canonicalized oop/narrowOop type for a passed
 385   // in oop-like types passed in from oop_* overloads where the user has sworn
 386   // that the passed in values should be oop-like (e.g. oop, oopDesc*, arrayOop,
 387   // narrowOoop, instanceOopDesc*, and random other things).
 388   // In the oop_* overloads, it must hold that if the passed in type T is not
 389   // narrowOop, then it by contract has to be one of many oop-like types implicitly
 390   // convertible to oop, and hence returns oop as the canonical oop type.
 391   // If it turns out it was not, then the implicit conversion to oop will fail
 392   // to compile, as desired.
 393   template <typename T>
 394   struct OopOrNarrowOop: AllStatic {
 395     typedef typename OopOrNarrowOopInternal<std::decay_t<T>>::type type;
 396   };
 397 
 398   inline void* field_addr(oop base, ptrdiff_t byte_offset) {
 399     return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
 400   }
 401   // Step 4: Runtime dispatch
 402   // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
 403   // accessor. This is required when the access either depends on whether compressed oops
 404   // is being used, or it depends on which GC implementation was chosen (e.g. requires GC
 405   // barriers). The way it works is that a function pointer initially pointing to an
 406   // accessor resolution function gets called for each access. Upon first invocation,
 407   // it resolves which accessor to be used in future invocations and patches the
 408   // function pointer to this new accessor.
 409 
  // Primary template; only the per-barrier specializations below are used.
  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};

  // Store to a raw address.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    // Initially points at store_init (see below); patched on first use.
    static func_t _store_func;

    // Resolving accessor: picks the concrete store function, patches
    // _store_func, and performs the store.
    static void store_init(void* addr, T value);

    static inline void store(void* addr, T value) {
      assert_access_thread_state();
      _store_func(addr, value);
    }
  };

  // Store to a field addressed as (base oop, byte offset).
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    static func_t _store_at_func;

    static void store_at_init(oop base, ptrdiff_t offset, T value);

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      assert_access_thread_state();
      _store_at_func(base, offset, value);
    }
  };

  // Load from a raw address.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    static func_t _load_func;

    static T load_init(void* addr);

    static inline T load(void* addr) {
      assert_access_thread_state();
      return _load_func(addr);
    }
  };

  // Load from a field addressed as (base oop, byte offset).
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    static func_t _load_at_func;

    static T load_at_init(oop base, ptrdiff_t offset);

    static inline T load_at(oop base, ptrdiff_t offset) {
      assert_access_thread_state();
      return _load_at_func(base, offset);
    }
  };

  // Compare-and-exchange on a raw address.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    static func_t _atomic_cmpxchg_func;

    static T atomic_cmpxchg_init(void* addr, T compare_value, T new_value);

    static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      assert_access_thread_state();
      return _atomic_cmpxchg_func(addr, compare_value, new_value);
    }
  };

  // Compare-and-exchange on a field addressed as (base oop, byte offset).
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    static func_t _atomic_cmpxchg_at_func;

    static T atomic_cmpxchg_at_init(oop base, ptrdiff_t offset, T compare_value, T new_value);

    static inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      assert_access_thread_state();
      return _atomic_cmpxchg_at_func(base, offset, compare_value, new_value);
    }
  };

  // Exchange on a raw address.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    static func_t _atomic_xchg_func;

    static T atomic_xchg_init(void* addr, T new_value);

    static inline T atomic_xchg(void* addr, T new_value) {
      assert_access_thread_state();
      return _atomic_xchg_func(addr, new_value);
    }
  };

  // Exchange on a field addressed as (base oop, byte offset).
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    static func_t _atomic_xchg_at_func;

    static T atomic_xchg_at_init(oop base, ptrdiff_t offset, T new_value);

    static inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      assert_access_thread_state();
      return _atomic_xchg_at_func(base, offset, new_value);
    }
  };

  // Array copy between two arrays (or raw buffers when the obj is null).
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    static func_t _arraycopy_func;

    static void arraycopy_init(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                               arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                               size_t length);

    static inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                 size_t length) {
      assert_access_thread_state();
      return _arraycopy_func(src_obj, src_offset_in_bytes, src_raw,
                             dst_obj, dst_offset_in_bytes, dst_raw,
                             length);
    }
  };

  // Object clone.
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    static func_t _clone_func;

    static void clone_init(oop src, oop dst, size_t size);

    static inline void clone(oop src, oop dst, size_t size) {
      assert_access_thread_state();
      _clone_func(src, dst, size);
    }
  };
 548 
 549   template <DecoratorSet decorators, typename T>
 550   struct RuntimeDispatch<decorators, T, BARRIER_VALUE_COPY>: AllStatic {
 551     typedef typename AccessFunction<decorators, T, BARRIER_VALUE_COPY>::type func_t;
 552     static func_t _value_copy_func;
 553 
 554     static void value_copy_init(void* src, void* dst, InlineKlass* md);
 555 
 556     static inline void value_copy(void* src, void* dst, InlineKlass* md) {
 557       _value_copy_func(src, dst, md);
 558     }
 559   };
 560 
  // Initialize the function pointers to point to the resolving function.
  // The first access through a given (decorators, T, barrier) instantiation
  // therefore runs the matching *_init resolver, which installs the concrete
  // accessor for subsequent calls (see the Step 4 comment above).
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_VALUE_COPY>::type
  RuntimeDispatch<decorators, T, BARRIER_VALUE_COPY>::_value_copy_func = &value_copy_init;
 605 
  // Step 3: Pre-runtime dispatching.
  // The PreRuntimeDispatch class is responsible for filtering the barrier strength
  // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
  // dispatch point. Otherwise it goes through a runtime check if hardwiring was
  // not possible.
  struct PreRuntimeDispatch: AllStatic {
    // A raw access can be hardwired (no runtime dispatch) unless it is an
    // oop access whose compressed-oops encoding cannot be determined at
    // compile time; in that case the UseCompressedOops flag must be
    // consulted at runtime (see the !CanHardwireRaw overloads below).
    template<DecoratorSet decorators>
    struct CanHardwireRaw: public std::integral_constant<
      bool,
      !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
      !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
      HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
    {};
 619 
    // The decorators that together select the compressed-oop encoding of an
    // access; added or stripped once UseCompressedOops has been checked.
    static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;

    // True when the access is a primitive (non-oop) access, which the
    // pre-runtime dispatch hardwires as a raw access.
    template<DecoratorSet decorators>
    static bool is_hardwired_primitive() {
      return !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
    }
 626 
 627     template <DecoratorSet decorators, typename T>
 628     inline static typename EnableIf<
 629       HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
 630     store(void* addr, T value) {
 631       typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
 632       if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
 633         Raw::oop_store(addr, value);
 634       } else {
 635         Raw::store(addr, value);
 636       }
 637     }
 638 
 639     template <DecoratorSet decorators, typename T>
 640     inline static typename EnableIf<
 641       HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
 642     store(void* addr, T value) {
 643       if (UseCompressedOops) {
 644         const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
 645         PreRuntimeDispatch::store<expanded_decorators>(addr, value);
 646       } else {
 647         const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
 648         PreRuntimeDispatch::store<expanded_decorators>(addr, value);
 649       }
 650     }
 651 
 652     template <DecoratorSet decorators, typename T>
 653     inline static typename EnableIf<
 654       !HasDecorator<decorators, AS_RAW>::value>::type
 655     store(void* addr, T value) {
 656       if (is_hardwired_primitive<decorators>()) {
 657         const DecoratorSet expanded_decorators = decorators | AS_RAW;
 658         PreRuntimeDispatch::store<expanded_decorators>(addr, value);
 659       } else {
 660         RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
 661       }
 662     }
 663 
    // AS_RAW store_at: resolve the field address and delegate to the raw
    // address-based store overloads above.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      store<decorators>(field_addr(base, offset), value);
    }
 670 
 671     template <DecoratorSet decorators, typename T>
 672     inline static typename EnableIf<
 673       !HasDecorator<decorators, AS_RAW>::value>::type
 674     store_at(oop base, ptrdiff_t offset, T value) {
 675       if (is_hardwired_primitive<decorators>()) {
 676         const DecoratorSet expanded_decorators = decorators | AS_RAW;
 677         PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
 678       } else {
 679         RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
 680       }
 681     }
 682 
 683     template <DecoratorSet decorators, typename T>
 684     inline static typename EnableIf<
 685       HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
 686     load(void* addr) {
 687       typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
 688       if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
 689         return Raw::template oop_load<T>(addr);
 690       } else {
 691         return Raw::template load<T>(addr);
 692       }
 693     }
 694 
    // AS_RAW load where compressed-oop usage is only known at runtime:
    // test UseCompressedOops once and hardwire the answer into the
    // decorator set, then re-dispatch (the new set satisfies CanHardwireRaw).
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      }
    }
 707 
    // Non-raw load: hardwired primitives re-dispatch as raw; other
    // accesses go through the runtime-dispatched barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load(void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
      }
    }
 719 
    // AS_RAW load_at: compute the field address and defer to the raw load.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }
 726 
    // Non-raw load_at: hardwired primitives re-dispatch as raw; other
    // accesses go through the runtime-dispatched barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }
 738 
    // AS_RAW compare-and-exchange with statically known compressed-oop
    // usage: dispatch straight to the raw barrier (oop flavor for oops).
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(addr, compare_value, new_value);
      } else {
        return Raw::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }
 750 
    // AS_RAW compare-and-exchange where compressed-oop usage is only known
    // at runtime: hardwire the UseCompressedOops answer and re-dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      }
    }
 763 
    // Non-raw compare-and-exchange: hardwired primitives re-dispatch as
    // raw; other accesses go through the runtime-dispatched barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }
 775 
    // AS_RAW cmpxchg_at: compute the field address and defer to the raw
    // compare-and-exchange.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      return atomic_cmpxchg<decorators>(field_addr(base, offset), compare_value, new_value);
    }
 782 
    // Non-raw cmpxchg_at: hardwired primitives re-dispatch as raw; other
    // accesses go through the runtime-dispatched barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(base, offset, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(base, offset, compare_value, new_value);
      }
    }
 794 
    // AS_RAW exchange with statically known compressed-oop usage:
    // dispatch straight to the raw barrier (oop flavor for oops).
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(addr, new_value);
      } else {
        return Raw::atomic_xchg(addr, new_value);
      }
    }
 806 
    // AS_RAW exchange where compressed-oop usage is only known at runtime:
    // hardwire the UseCompressedOops answer and re-dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      }
    }
 819 
    // Non-raw exchange: hardwired primitives re-dispatch as raw; other
    // accesses go through the runtime-dispatched barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(addr, new_value);
      }
    }
 831 
    // AS_RAW xchg_at: compute the field address and defer to the raw
    // exchange.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      return atomic_xchg<decorators>(field_addr(base, offset), new_value);
    }
 838 
 839     template <DecoratorSet decorators, typename T>
 840     inline static typename EnableIf<
 841       !HasDecorator<decorators, AS_RAW>::value, T>::type
 842     atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
 843       if (is_hardwired_primitive<decorators>()) {
 844         const DecoratorSet expanded_decorators = decorators | AS_RAW;
 845         return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(base, offset, new_value);
 846       } else {
 847         return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(base, offset, new_value);
 848       }
 849     }
 850 
    // AS_RAW arraycopy with statically known compressed-oop usage:
    // dispatch straight to the raw barrier (oop flavor for oop arrays).
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_arraycopy(src_obj, src_offset_in_bytes, src_raw,
                           dst_obj, dst_offset_in_bytes, dst_raw,
                           length);
      } else {
        Raw::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                       dst_obj, dst_offset_in_bytes, dst_raw,
                       length);
      }
    }
 868 
    // AS_RAW arraycopy where compressed-oop usage is only known at runtime:
    // hardwire the UseCompressedOops answer and re-dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      }
    }
 887 
    // Non-raw arraycopy: hardwired primitives re-dispatch as raw; other
    // accesses go through the runtime-dispatched barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                                                     dst_obj, dst_offset_in_bytes, dst_raw,
                                                                     length);
      }
    }
 905 
    // AS_RAW clone: dispatch straight to the raw access barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }
 913 
    // Non-raw clone: always goes through the runtime-dispatched barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
    }
 920 
    // AS_RAW value_copy (flat value-type payload copy described by md):
    // dispatch straight to the raw access barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    value_copy(void* src, void* dst, InlineKlass* md) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::value_copy(src, dst, md);
    }
 928 
 929     template <DecoratorSet decorators>
 930     inline static typename EnableIf<
 931       !HasDecorator<decorators, AS_RAW>::value>::type
 932       value_copy(void* src, void* dst, InlineKlass* md) {
 933       const DecoratorSet expanded_decorators = decorators;
 934       RuntimeDispatch<expanded_decorators, void*, BARRIER_VALUE_COPY>::value_copy(src, dst, md);
 935     }
 936   };
 937 
 938   // Step 2: Reduce types.
 939   // Enforce that for non-oop types, T and P have to be strictly the same.
 940   // P is the type of the address and T is the type of the values.
  // As for oop types, it is allowed to send T in {narrowOop, oop} and
 942   // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
 943   // the subsequent table. (columns are P, rows are T)
 944   // |           | HeapWord  |   oop   | narrowOop |
 945   // |   oop     |  rt-comp  | hw-none |  hw-comp  |
 946   // | narrowOop |     x     |    x    |  hw-none  |
 947   //
 948   // x means not allowed
 949   // rt-comp means it must be checked at runtime whether the oop is compressed.
 950   // hw-none means it is statically known the oop will not be compressed.
 951   // hw-comp means it is statically known the oop will be compressed.
 952 
  // T == P (see the hw-none/identity cells of the table above): no
  // compressed-oop conversion needed; forward directly.
  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }
 957 
  // oop value into a narrowOop* slot (hw-comp): statically known to need
  // compression before the store.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
 964 
  // narrowOop value into a narrowOop* slot: already compressed; mark the
  // access as a compressed-oop access so the backend treats it as an oop.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
 971 
  // oop value at a HeapWord* address (rt-comp): whether the slot holds a
  // compressed oop must be decided at runtime (UseCompressedOops).
  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
 977 
  // T == P: no compressed-oop conversion needed; forward directly.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T* addr, T compare_value, T new_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(addr, compare_value, new_value);
  }
 982 
  // oop values at a narrowOop* slot (hw-comp): statically known compressed.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(narrowOop* addr, oop compare_value, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
 989 
  // narrowOop values at a narrowOop* slot: already compressed; tag the
  // access as a compressed-oop access.
  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop* addr, narrowOop compare_value, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
 996 
  // oop values at a HeapWord* address (rt-comp): compression decided at
  // runtime.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(HeapWord* addr,
                                         oop compare_value,
                                         oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
1004 
1005   template <DecoratorSet decorators, typename T>
1006   inline T atomic_xchg_reduce_types(T* addr, T new_value) {
1007     const DecoratorSet expanded_decorators = decorators;
1008     return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
1009   }
1010 
  // oop value at a narrowOop* slot (hw-comp): statically known compressed.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(narrowOop* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1017 
  // narrowOop value at a narrowOop* slot: already compressed; tag the
  // access as a compressed-oop access.
  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop* addr, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1024 
  // oop value at a HeapWord* address (rt-comp): compression decided at
  // runtime.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(HeapWord* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1030 
  // T == P: no compressed-oop conversion needed; forward directly.
  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }
1035 
  // Load from a narrowOop* slot: statically known compressed; the result
  // type is oop or narrowOop depending on what the caller asked for.
  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }
1042 
  // Load an oop from a HeapWord* address (rt-comp): compression decided at
  // runtime.
  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }
1048 
  // Generic element type: no compressed-oop conversion needed; forward
  // directly.
  template <DecoratorSet decorators, typename T>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length) {
    PreRuntimeDispatch::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw,
                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                              length);
  }
1057 
  // HeapWord* element pointers (rt-comp): compression decided at runtime.
  template <DecoratorSet decorators>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, HeapWord* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, HeapWord* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                       dst_obj, dst_offset_in_bytes, dst_raw,
                                                       length);
  }
1067 
  // narrowOop* element pointers: statically known compressed.
  template <DecoratorSet decorators>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, narrowOop* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, narrowOop* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                       dst_obj, dst_offset_in_bytes, dst_raw,
                                                       length);
  }
1078 
1079   // Step 1: Set default decorators. This step remembers if a type was volatile
1080   // and then sets the MO_RELAXED decorator by default. Otherwise, a default
1081   // memory ordering is set for the access, and the implied decorator rules
1082   // are applied to select sensible defaults for decorators that have not been
1083   // explicitly set. For example, default object referent strength is set to strong.
1084   // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types). This step also performs some type verification
1086   // that the passed in types make sense.
1087 
  // Compile-time check that T is a legal value type for a primitive
  // access: pointers, integrals and floating-point types are accepted;
  // oop-valued accesses are exempt (validated earlier).
  template <DecoratorSet decorators, typename T>
  static void verify_types(){
    // If this fails to compile, then you have sent in something that is
    // not recognized as a valid primitive type to a primitive Access function.
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
                   (std::is_pointer<T>::value || std::is_integral<T>::value) ||
                    std::is_floating_point<T>::value)); // not allowed primitive type
  }
1096 
  // Step-1 entry point for address-based stores: verify T, decay the
  // pointer/value types, fix up default decorators, then reduce P/T
  // combinations down to the dispatchable overloads.
  template <DecoratorSet decorators, typename P, typename T>
  inline void store(P* addr, T value) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::decay_t<T>;
    DecayedT decayed_value = value;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_RELAXED by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (std::is_volatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_RELAXED | decorators) : decorators>::value;
    store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
  }
1110 
  // Step-1 entry point for (base, offset) stores: verify T, decay the
  // value type, and remember compressed-oop awareness for oop values.
  template <DecoratorSet decorators, typename T>
  inline void store_at(oop base, ptrdiff_t offset, T value) {
    verify_types<decorators, T>();
    using DecayedT = std::decay_t<T>;
    DecayedT decayed_value = value;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
  }
1121 
  // Step-1 entry point for address-based loads: verify T, decay the types
  // (oop values may resolve to oop or narrowOop), fix up default
  // decorators, then reduce to the dispatchable overloads.
  template <DecoratorSet decorators, typename P, typename T>
  inline T load(P* addr) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::conditional_t<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                        typename OopOrNarrowOop<T>::type,
                                        std::decay_t<T>>;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_RELAXED by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (std::is_volatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_RELAXED | decorators) : decorators>::value;
    return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
  }
1136 
  // Step-1 entry point for (base, offset) loads: verify T, decay the value
  // type, and remember compressed-oop awareness for oop values.
  template <DecoratorSet decorators, typename T>
  inline T load_at(oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    using DecayedT = std::conditional_t<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                        typename OopOrNarrowOop<T>::type,
                                        std::decay_t<T>>;
    // Expand the decorators (figure out sensible defaults)
    // Potentially remember if we need compressed oop awareness
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
  }
1150 
  // Step-1 entry point for address-based compare-and-exchange: verify T,
  // decay the types, default to MO_SEQ_CST when no memory order was given,
  // then reduce to the dispatchable overloads.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(P* addr, T compare_value, T new_value) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                            compare_decayed_value,
                                                            new_decayed_value);
  }
1165 
  // Step-1 entry point for (base, offset) compare-and-exchange: defaults
  // to MO_SEQ_CST and records compressed-oop awareness for oop values.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    verify_types<decorators, T>();
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(base, offset, compare_decayed_value,
                                                                   new_decayed_value);
  }
1183 
  // Step-1 entry point for address-based atomic exchange.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(P* addr, T new_value) {
    verify_types<decorators, T>();
    using DecayedP = std::decay_t<P>;
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                         new_decayed_value);
  }
1195 
  // Step-1 entry point for (base, offset) atomic exchange: SEQ_CST only,
  // with compressed-oop awareness recorded for oop values.
  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    verify_types<decorators, T>();
    using DecayedT = std::decay_t<T>;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(base, offset, new_decayed_value);
  }
1207 
  // Step-1 entry point for arraycopy: validates the element type (void is
  // allowed for type-erased copies), forces IS_ARRAY | IN_HEAP, and
  // reduces the element-pointer types to the dispatchable overloads.
  template <DecoratorSet decorators, typename T>
  inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (std::is_same<T, void>::value || std::is_integral<T>::value) ||
                    std::is_floating_point<T>::value)); // arraycopy allows type erased void elements
    using DecayedT = std::decay_t<T>;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IS_ARRAY | IN_HEAP>::value;
    arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<DecayedT*>(src_raw),
                                                dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                length);
  }
1221 
  // Step-1 entry point for object clone: fix up default decorators and
  // dispatch.
  template <DecoratorSet decorators>
  inline void clone(oop src, oop dst, size_t size) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
  }
1227 
  // Step-1 entry point for flat value-type copies (layout described by
  // md): fix up default decorators and dispatch.
  template <DecoratorSet decorators>
  inline void value_copy(void* src, void* dst, InlineKlass* md) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::value_copy<expanded_decorators>(src, dst, md);
  }
1233 
  // Infer the type that should be returned from an Access::oop_load.
  // The proxy holds the address and only performs the load when converted
  // to oop or narrowOop (or compared), so the conversion target picks the
  // load's result type.
  template <typename P, DecoratorSet decorators>
  class OopLoadProxy: public StackObj {
  private:
    P *const _addr;  // address the deferred load reads from
  public:
    explicit OopLoadProxy(P* addr) : _addr(addr) {}

    // Load as an uncompressed oop.
    inline operator oop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr);
    }

    // Load as a compressed oop.
    inline operator narrowOop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, narrowOop>(_addr);
    }

    // Comparisons load with the comparand's type.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) != other;
    }

    // nullptr comparisons load as an uncompressed oop.
    inline bool operator ==(std::nullptr_t) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr) == nullptr;
    }

    inline bool operator !=(std::nullptr_t) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr) != nullptr;
    }
  };
1268 
  // Infer the type that should be returned from an Access::load_at.
  // Holds (base, offset) and defers the load until the proxy is converted
  // to (or compared with) a concrete type T.
  template <DecoratorSet decorators>
  class LoadAtProxy: public StackObj {
  private:
    const oop _base;          // object holding the field
    const ptrdiff_t _offset;  // byte offset of the field within _base
  public:
    LoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    // Conversion performs the load with the requested result type T.
    template <typename T>
    inline operator T() const {
      return load_at<decorators, T>(_base, _offset);
    }

    template <typename T>
    inline bool operator ==(const T& other) const { return load_at<decorators, T>(_base, _offset) == other; }

    template <typename T>
    inline bool operator !=(const T& other) const { return load_at<decorators, T>(_base, _offset) != other; }
  };
1289 
  // Infer the type that should be returned from an Access::oop_load_at.
  // Like LoadAtProxy, but the loaded value is an oop; conversion to oop or
  // narrowOop selects the result representation.
  template <DecoratorSet decorators>
  class OopLoadAtProxy: public StackObj {
  private:
    const oop _base;          // object holding the field
    const ptrdiff_t _offset;  // byte offset of the field within _base
  public:
    OopLoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    // Load as an uncompressed oop.
    inline operator oop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, oop>(_base, _offset);
    }

    // Load as a compressed oop.
    inline operator narrowOop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, narrowOop>(_base, _offset);
    }

    // Comparisons load with the comparand's type.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) != other;
    }
  };
1317 }
1318 
1319 #endif // SHARE_OOPS_ACCESSBACKEND_HPP