1 /*
   2  * Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_OOPS_ACCESSBACKEND_HPP
  26 #define SHARE_OOPS_ACCESSBACKEND_HPP
  27 
  28 #include "gc/shared/barrierSetConfig.hpp"
  29 #include "memory/allocation.hpp"
  30 #include "metaprogramming/conditional.hpp"
  31 #include "metaprogramming/decay.hpp"
  32 #include "metaprogramming/enableIf.hpp"
  33 #include "metaprogramming/integralConstant.hpp"
  34 #include "metaprogramming/isFloatingPoint.hpp"
  35 #include "metaprogramming/isIntegral.hpp"
  36 #include "metaprogramming/isPointer.hpp"
  37 #include "metaprogramming/isSame.hpp"
  38 #include "metaprogramming/isVolatile.hpp"
  39 #include "oops/accessDecorators.hpp"
  40 #include "oops/oopsHierarchy.hpp"
  41 #include "runtime/globals.hpp"
  42 #include "utilities/debug.hpp"
  43 #include "utilities/globalDefinitions.hpp"
  44 
  45 
  46 // This metafunction returns either oop or narrowOop depending on whether
  47 // an access needs to use compressed oops or not.
template <DecoratorSet decorators>
struct HeapOopType: AllStatic {
  // Compression applies only when the access both converts between oop and
  // narrowOop (INTERNAL_CONVERT_COMPRESSED_OOP) and the runtime actually uses
  // compressed oops (INTERNAL_RT_USE_COMPRESSED_OOPS).
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value &&
                                         HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  typedef typename Conditional<needs_oop_compress, narrowOop, oop>::type type;
};
  54 
  55 // This meta-function returns either oop or narrowOop depending on whether
  56 // a back-end needs to consider compressed oops types or not.
template <DecoratorSet decorators>
struct ValueOopType: AllStatic {
  // Unlike HeapOopType, only the runtime compressed-oops setting matters here;
  // no explicit conversion decorator is required.
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  typedef typename Conditional<needs_oop_compress, narrowOop, oop>::type type;
};
  62 
namespace AccessInternal {
  // Enumerates the distinct barrier entry points of the access API. Each
  // enumerator selects a function pointer signature (AccessFunctionTypes)
  // and a RuntimeDispatch specialization further down in this file.
  enum BarrierType {
    BARRIER_STORE,
    BARRIER_STORE_AT,
    BARRIER_LOAD,
    BARRIER_LOAD_AT,
    BARRIER_ATOMIC_CMPXCHG,
    BARRIER_ATOMIC_CMPXCHG_AT,
    BARRIER_ATOMIC_XCHG,
    BARRIER_ATOMIC_XCHG_AT,
    BARRIER_ARRAYCOPY,
    BARRIER_CLONE,
    BARRIER_VALUE_COPY
  };

  // True iff a value of static type T (== oop) must be converted to/from the
  // compressed (narrowOop) heap representation for this access: the access is
  // an oop access and HeapOopType selected narrowOop for these decorators.
  template <DecoratorSet decorators, typename T>
  struct MustConvertCompressedOop: public IntegralConstant<bool,
    HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value &&
    IsSame<typename HeapOopType<decorators>::type, narrowOop>::value &&
    IsSame<T, oop>::value> {};

  // This metafunction returns an appropriate oop type if the value is oop-like
  // and otherwise returns the same type T.
  template <DecoratorSet decorators, typename T>
  struct EncodedType: AllStatic {
    typedef typename Conditional<
      HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
      typename HeapOopType<decorators>::type, T>::type type;
  };

  // Computes the address of the oop field at byte_offset inside base, typed
  // as the heap oop representation (oop* or narrowOop*) the decorators select.
  template <DecoratorSet decorators>
  inline typename HeapOopType<decorators>::type*
  oop_field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<typename HeapOopType<decorators>::type*>(
             reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }

  // This metafunction returns whether it is possible for a type T to require
  // locking to support wide atomics or not.
  template <typename T>
#ifdef SUPPORTS_NATIVE_CX8
  struct PossiblyLockedAccess: public IntegralConstant<bool, false> {};
#else
  // Without native 8-byte compare-and-exchange, accesses wider than 4 bytes
  // may need the lock-based fallback (see atomic_*_maybe_locked below).
  struct PossiblyLockedAccess: public IntegralConstant<bool, (sizeof(T) > 4)> {};
#endif

  // Function pointer signatures for every barrier entry point, for a given
  // decorator set and value type T. The *_at variants address a field inside
  // a heap object; the others take a raw address.
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef T (*load_at_func_t)(oop base, ptrdiff_t offset);
    typedef void (*store_at_func_t)(oop base, ptrdiff_t offset, T value);
    typedef T (*atomic_cmpxchg_at_func_t)(oop base, ptrdiff_t offset, T compare_value, T new_value);
    typedef T (*atomic_xchg_at_func_t)(oop base, ptrdiff_t offset, T new_value);

    typedef T (*load_func_t)(void* addr);
    typedef void (*store_func_t)(void* addr, T value);
    typedef T (*atomic_cmpxchg_func_t)(void* addr, T compare_value, T new_value);
    typedef T (*atomic_xchg_func_t)(void* addr, T new_value);

    typedef void (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length);
    typedef void (*clone_func_t)(oop src, oop dst, size_t size);
    typedef void (*value_copy_func_t)(void* src, void* dst, InlineKlass* md);
  };

  // Specialization for untyped (void) element accesses: only arraycopy is
  // meaningful without a concrete element type.
  template <DecoratorSet decorators>
  struct AccessFunctionTypes<decorators, void> {
    typedef void (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, void* src,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, void* dst,
                                     size_t length);
  };

  // Maps a BarrierType enumerator to its function pointer type; specialized
  // for each barrier by the macro invocations below.
  template <DecoratorSet decorators, typename T, BarrierType barrier> struct AccessFunction {};

#define ACCESS_GENERATE_ACCESS_FUNCTION(bt, func)                   \
  template <DecoratorSet decorators, typename T>                    \
  struct AccessFunction<decorators, T, bt>: AllStatic{              \
    typedef typename AccessFunctionTypes<decorators, T>::func type; \
  }
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE, store_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE_AT, store_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD, load_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD_AT, load_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG, atomic_cmpxchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG_AT, atomic_cmpxchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG, atomic_xchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG_AT, atomic_xchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ARRAYCOPY, arraycopy_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_CLONE, clone_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_VALUE_COPY, value_copy_func_t);
#undef ACCESS_GENERATE_ACCESS_FUNCTION

  // Resolve the accessor function for a (decorators, T, barrier) triple;
  // defined out of line. resolve_oop_barrier is the oop-access variant.
  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_barrier();

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_oop_barrier();

  // RAII helper used by the lock-based fallback path for wide atomics.
  class AccessLocker {
  public:
    AccessLocker();
    ~AccessLocker();
  };
  // Returns whether wide atomic accesses must take the lock-based path.
  bool wide_atomic_needs_locking();

  // Untyped address of the field at offset inside base; defined in the cpp file.
  void* field_addr(oop base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently included access is.
  void arraycopy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void arraycopy_conjoint_oops(oop* src, oop* dst, size_t length);
  void arraycopy_conjoint_oops(narrowOop* src, narrowOop* dst, size_t length);

  void arraycopy_disjoint_words(void* src, void* dst, size_t length);
  void arraycopy_disjoint_words_atomic(void* src, void* dst, size_t length);

  template<typename T>
  void arraycopy_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_arrayof_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_conjoint_atomic(T* src, T* dst, size_t length);
}
 186 
// This mask specifies what decorators are relevant for raw accesses. When passing
// accesses to the raw layer, irrelevant decorators are removed. What remains is
// internal bookkeeping, memory ordering, arraycopy hints, and nullness.
const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
                                        ARRAYCOPY_DECORATOR_MASK | IS_NOT_NULL;
 191 
 192 // The RawAccessBarrier performs raw accesses with additional knowledge of
 193 // memory ordering, so that OrderAccess/Atomic is called when necessary.
 194 // It additionally handles compressed oops, and hence is not completely "raw"
 195 // strictly speaking.
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  static inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  // encode/decode convert between the value type T and its in-heap encoded
  // representation (narrowOop when compressed oops apply, otherwise T itself).

  // Only encode if INTERNAL_VALUE_IS_OOP
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value,
    typename HeapOopType<idecorators>::type>::type
  encode_internal(T value);

  // No conversion required: pass the value through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  encode_internal(T value) {
    return value;
  }

  template <typename T>
  static inline typename AccessInternal::EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  // Only decode if INTERNAL_VALUE_IS_OOP
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(typename HeapOopType<idecorators>::type value);

  // No conversion required: pass the value through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(T value) {
    return value;
  }

  template <typename T>
  static inline T decode(typename AccessInternal::EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

protected:
  // load_internal/store_internal are overloaded on the memory ordering
  // decorator; exactly one overload is enabled for a given decorator set.
  // The non-trivial orderings are defined out of line (they call into
  // OrderAccess/Atomic); only the MO_UNORDERED case is a plain access here.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_ACQUIRE>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<T*>(addr);
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELEASE>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value>::type
  store_internal(void* addr, T value) {
    *reinterpret_cast<T*>(addr) = value;
  }

  // Atomic primitives; only MO_SEQ_CST and MO_RELAXED cmpxchg, and MO_SEQ_CST
  // xchg, are supported. Defined out of line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_xchg_internal(void* addr, T new_value);

  // The following *_locked mechanisms serve the purpose of handling atomic operations
  // that are larger than a machine can handle, and then possibly opt for using
  // a slower path using a mutex to perform the operation.

  // Fast path: T never needs locking on this platform, call straight through.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(void* addr, T compare_value, T new_value) {
    return atomic_cmpxchg_internal<ds>(addr, compare_value, new_value);
  }

  // Slow path: may need the AccessLocker mutex; defined out of line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(void* addr, T compare_value, T new_value);

  // Fast path: T never needs locking on this platform, call straight through.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(void* addr, T new_value) {
    return atomic_xchg_internal<ds>(addr, new_value);
  }

  // Slow path: may need the AccessLocker mutex; defined out of line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(void* addr, T new_value);

public:
  // Primitive accesses on raw addresses.
  template <typename T>
  static inline void store(void* addr, T value) {
    store_internal<decorators>(addr, value);
  }

  template <typename T>
  static inline T load(void* addr) {
    return load_internal<decorators, T>(addr);
  }

  template <typename T>
  static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
    return atomic_cmpxchg_maybe_locked<decorators>(addr, compare_value, new_value);
  }

  template <typename T>
  static inline T atomic_xchg(void* addr, T new_value) {
    return atomic_xchg_maybe_locked<decorators>(addr, new_value);
  }

  template <typename T>
  static void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length);

  // oop_* variants additionally encode/decode between oop and the in-heap
  // representation; defined out of line.
  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(oop base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(oop base, ptrdiff_t offset);

  template <typename T>
  static T oop_atomic_cmpxchg(void* addr, T compare_value, T new_value);
  template <typename T>
  static T oop_atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value);

  template <typename T>
  static T oop_atomic_xchg(void* addr, T new_value);
  template <typename T>
  static T oop_atomic_xchg_at(oop base, ptrdiff_t offset, T new_value);

  // *_at variants resolve the field address inside base and then delegate to
  // the raw-address access above.
  template <typename T>
  static void store_at(oop base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load_at(oop base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    return atomic_cmpxchg(field_addr(base, offset), compare_value, new_value);
  }

  template <typename T>
  static T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    return atomic_xchg(field_addr(base, offset), new_value);
  }

  template <typename T>
  static void oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                            arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                            size_t length);

  // Object cloning and inline-type (flattened value) copying; out of line.
  static void clone(oop src, oop dst, size_t size);
  static void value_copy(void* src, void* dst, InlineKlass* md);

};
 405 
 406 // Below is the implementation of the first 4 steps of the template pipeline:
 407 // * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
 408 //           and sets default decorators to sensible values.
 409 // * Step 2: Reduce types. This step makes sure there is only a single T type and not
 410 //           multiple types. The P type of the address and T type of the value must
 411 //           match.
 412 // * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
 413 //           avoided, and in that case avoids it (calling raw accesses or
 414 //           primitive accesses in a build that does not require primitive GC barriers)
 415 // * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
 416 //           BarrierSet::AccessBarrier accessor that attaches GC-required barriers
 417 //           to the access.
 418 
 419 namespace AccessInternal {
  // Helper for OopOrNarrowOop below: canonicalizes any oop-like type to oop,
  // except narrowOop, which stays narrowOop.
  template <typename T>
  struct OopOrNarrowOopInternal: AllStatic {
    typedef oop type;
  };

  template <>
  struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
    typedef narrowOop type;
  };
 429 
 430   // This metafunction returns a canonicalized oop/narrowOop type for a passed
 431   // in oop-like types passed in from oop_* overloads where the user has sworn
 432   // that the passed in values should be oop-like (e.g. oop, oopDesc*, arrayOop,
 433   // narrowOoop, instanceOopDesc*, and random other things).
 434   // In the oop_* overloads, it must hold that if the passed in type T is not
 435   // narrowOop, then it by contract has to be one of many oop-like types implicitly
 436   // convertible to oop, and hence returns oop as the canonical oop type.
 437   // If it turns out it was not, then the implicit conversion to oop will fail
 438   // to compile, as desired.
  template <typename T>
  struct OopOrNarrowOop: AllStatic {
    // Strip CV/reference qualifiers before canonicalizing.
    typedef typename OopOrNarrowOopInternal<typename Decay<T>::type>::type type;
  };
 443 
 444   inline void* field_addr(oop base, ptrdiff_t byte_offset) {
 445     return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
 446   }
 447   // Step 4: Runtime dispatch
 448   // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
 449   // accessor. This is required when the access either depends on whether compressed oops
 450   // is being used, or it depends on which GC implementation was chosen (e.g. requires GC
 451   // barriers). The way it works is that a function pointer initially pointing to an
 452   // accessor resolution function gets called for each access. Upon first invocation,
 453   // it resolves which accessor to be used in future invocations and patches the
 454   // function pointer to this new accessor.
 455 
  // Primary template; only the per-barrier specializations below are used.
  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};
 458 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    // Initially points at store_init; patched to the resolved accessor on
    // first invocation (see the initializer below).
    static func_t _store_func;

    // First-call resolver: picks the right accessor, patches _store_func,
    // and performs the store. Defined out of line.
    static void store_init(void* addr, T value);

    static inline void store(void* addr, T value) {
      _store_func(addr, value);
    }
  };
 470 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    // Initially points at store_at_init; patched on first invocation.
    static func_t _store_at_func;

    // First-call resolver; defined out of line.
    static void store_at_init(oop base, ptrdiff_t offset, T value);

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      _store_at_func(base, offset, value);
    }
  };
 482 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    // Initially points at load_init; patched on first invocation.
    static func_t _load_func;

    // First-call resolver; defined out of line.
    static T load_init(void* addr);

    static inline T load(void* addr) {
      return _load_func(addr);
    }
  };
 494 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    // Initially points at load_at_init; patched on first invocation.
    static func_t _load_at_func;

    // First-call resolver; defined out of line.
    static T load_at_init(oop base, ptrdiff_t offset);

    static inline T load_at(oop base, ptrdiff_t offset) {
      return _load_at_func(base, offset);
    }
  };
 506 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    // Initially points at atomic_cmpxchg_init; patched on first invocation.
    static func_t _atomic_cmpxchg_func;

    // First-call resolver; defined out of line.
    static T atomic_cmpxchg_init(void* addr, T compare_value, T new_value);

    static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      return _atomic_cmpxchg_func(addr, compare_value, new_value);
    }
  };
 518 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    // Initially points at atomic_cmpxchg_at_init; patched on first invocation.
    static func_t _atomic_cmpxchg_at_func;

    // First-call resolver; defined out of line.
    static T atomic_cmpxchg_at_init(oop base, ptrdiff_t offset, T compare_value, T new_value);

    static inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      return _atomic_cmpxchg_at_func(base, offset, compare_value, new_value);
    }
  };
 530 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    // Initially points at atomic_xchg_init; patched on first invocation.
    static func_t _atomic_xchg_func;

    // First-call resolver; defined out of line.
    static T atomic_xchg_init(void* addr, T new_value);

    static inline T atomic_xchg(void* addr, T new_value) {
      return _atomic_xchg_func(addr, new_value);
    }
  };
 542 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    // Initially points at atomic_xchg_at_init; patched on first invocation.
    static func_t _atomic_xchg_at_func;

    // First-call resolver; defined out of line.
    static T atomic_xchg_at_init(oop base, ptrdiff_t offset, T new_value);

    static inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      return _atomic_xchg_at_func(base, offset, new_value);
    }
  };
 554 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    // Initially points at arraycopy_init; patched on first invocation.
    static func_t _arraycopy_func;

    // First-call resolver; defined out of line.
    static void arraycopy_init(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                               arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                               size_t length);

    static inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                 size_t length) {
      return _arraycopy_func(src_obj, src_offset_in_bytes, src_raw,
                             dst_obj, dst_offset_in_bytes, dst_raw,
                             length);
    }
  };
 572 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    // Initially points at clone_init; patched on first invocation.
    static func_t _clone_func;

    // First-call resolver; defined out of line.
    static void clone_init(oop src, oop dst, size_t size);

    static inline void clone(oop src, oop dst, size_t size) {
      _clone_func(src, dst, size);
    }
  };
 584 
  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_VALUE_COPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_VALUE_COPY>::type func_t;
    // Initially points at value_copy_init; patched on first invocation.
    static func_t _value_copy_func;

    // First-call resolver; defined out of line.
    static void value_copy_init(void* src, void* dst, InlineKlass* md);

    static inline void value_copy(void* src, void* dst, InlineKlass* md) {
      _value_copy_func(src, dst, md);
    }
  };
 596 
  // Initialize the function pointers to point to the resolving function.
  // Each *_init resolves the correct accessor for the (decorators, T) pair on
  // its first call and then patches the pointer so that subsequent calls
  // dispatch directly to the resolved accessor.
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_VALUE_COPY>::type
  RuntimeDispatch<decorators, T, BARRIER_VALUE_COPY>::_value_copy_func = &value_copy_init;
 641 
 642   // Step 3: Pre-runtime dispatching.
 643   // The PreRuntimeDispatch class is responsible for filtering the barrier strength
 644   // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
 645   // dispatch point. Otherwise it goes through a runtime check if hardwiring was
 646   // not possible.
 647   struct PreRuntimeDispatch: AllStatic {
 648     template<DecoratorSet decorators>
 649     struct CanHardwireRaw: public IntegralConstant<
 650       bool,
 651       !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
 652       !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
 653       HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
 654     {};
 655 
 656     static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;
 657 
 658     template<DecoratorSet decorators>
 659     static bool is_hardwired_primitive() {
 660       return !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
 661     }
 662 
    // store (AS_RAW, hardwireable): the compressed-oops mode is statically
    // known, so dispatch straight to the raw barrier. Oop values go through
    // oop_store so that compressed-oop encoding is applied when required.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_store(addr, value);
      } else {
        Raw::store(addr, value);
      }
    }

    // store (AS_RAW, not hardwireable): the compressed-oops mode must be
    // resolved at runtime. Check UseCompressedOops once and re-dispatch with
    // the compressed-oop decorators added or stripped, so the recursive call
    // selects the hardwired overload above.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      }
    }

    // store (not AS_RAW): primitive accesses are hardwired to raw accesses
    // (no GC barriers needed); oop accesses go through RuntimeDispatch to
    // pick up the GC-specific barrier set.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store(void* addr, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
      }
    }
 699 
    // store_at (AS_RAW): resolve the field address from (base, offset) and
    // forward to the address-based store overloads above.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      store<decorators>(field_addr(base, offset), value);
    }

    // store_at (not AS_RAW): primitives are hardwired to raw accesses; oop
    // stores go through RuntimeDispatch to pick up GC-specific barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
      }
    }
 718 
    // load (AS_RAW, hardwireable): compressed-oops mode statically known;
    // dispatch straight to the raw barrier. Oop loads go through oop_load so
    // that compressed-oop decoding is applied when required.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::template oop_load<T>(addr);
      } else {
        return Raw::template load<T>(addr);
      }
    }

    // load (AS_RAW, not hardwireable): resolve UseCompressedOops at runtime
    // and re-dispatch with the mode made statically known.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      }
    }

    // load (not AS_RAW): primitives are hardwired to raw accesses; oop loads
    // go through RuntimeDispatch to pick up GC-specific barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load(void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
      }
    }
 755 
    // load_at (AS_RAW): resolve the field address from (base, offset) and
    // forward to the address-based load overloads above.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }

    // load_at (not AS_RAW): primitives are hardwired to raw accesses; oop
    // loads go through RuntimeDispatch to pick up GC-specific barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }
 774 
    // atomic_cmpxchg (AS_RAW, hardwireable): compressed-oops mode statically
    // known; dispatch straight to the raw barrier, with oop values routed
    // through oop_atomic_cmpxchg for compressed-oop handling.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(addr, compare_value, new_value);
      } else {
        return Raw::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }

    // atomic_cmpxchg (AS_RAW, not hardwireable): resolve UseCompressedOops at
    // runtime and re-dispatch with the mode made statically known.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      }
    }

    // atomic_cmpxchg (not AS_RAW): primitives are hardwired to raw accesses;
    // oop accesses go through RuntimeDispatch for GC-specific barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }
 811 
    // atomic_cmpxchg_at (AS_RAW): resolve the field address from
    // (base, offset) and forward to the address-based overloads above.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      return atomic_cmpxchg<decorators>(field_addr(base, offset), compare_value, new_value);
    }

    // atomic_cmpxchg_at (not AS_RAW): primitives are hardwired to raw
    // accesses; oop accesses go through RuntimeDispatch for GC barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(base, offset, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(base, offset, compare_value, new_value);
      }
    }
 830 
    // atomic_xchg (AS_RAW, hardwireable): compressed-oops mode statically
    // known; dispatch straight to the raw barrier, with oop values routed
    // through oop_atomic_xchg for compressed-oop handling.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(addr, new_value);
      } else {
        return Raw::atomic_xchg(addr, new_value);
      }
    }

    // atomic_xchg (AS_RAW, not hardwireable): resolve UseCompressedOops at
    // runtime and re-dispatch with the mode made statically known.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      }
    }

    // atomic_xchg (not AS_RAW): primitives are hardwired to raw accesses;
    // oop accesses go through RuntimeDispatch for GC-specific barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(addr, new_value);
      }
    }
 867 
    // atomic_xchg_at (AS_RAW): resolve the field address from (base, offset)
    // and forward to the address-based atomic_xchg overloads above.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      return atomic_xchg<decorators>(field_addr(base, offset), new_value);
    }
 874 
 875     template <DecoratorSet decorators, typename T>
 876     inline static typename EnableIf<
 877       !HasDecorator<decorators, AS_RAW>::value, T>::type
 878     atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
 879       if (is_hardwired_primitive<decorators>()) {
 880         const DecoratorSet expanded_decorators = decorators | AS_RAW;
 881         return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(base, offset, new_value);
 882       } else {
 883         return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(base, offset, new_value);
 884       }
 885     }
 886 
    // arraycopy (AS_RAW, hardwireable): compressed-oops mode statically
    // known; dispatch straight to the raw barrier, routing oop element
    // copies through oop_arraycopy for compressed-oop handling.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_arraycopy(src_obj, src_offset_in_bytes, src_raw,
                           dst_obj, dst_offset_in_bytes, dst_raw,
                           length);
      } else {
        Raw::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                       dst_obj, dst_offset_in_bytes, dst_raw,
                       length);
      }
    }

    // arraycopy (AS_RAW, not hardwireable): resolve UseCompressedOops at
    // runtime and re-dispatch with the mode made statically known.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      }
    }

    // arraycopy (not AS_RAW): primitive element copies are hardwired to raw
    // accesses; oop element copies go through RuntimeDispatch for GC barriers.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, void>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                           dst_obj, dst_offset_in_bytes, dst_raw,
                                                           length);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                                                     dst_obj, dst_offset_in_bytes, dst_raw,
                                                                     length);
      }
    }
 941 
    // clone (AS_RAW): object cloning goes straight to the raw barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }

    // clone (not AS_RAW): always goes through RuntimeDispatch since cloned
    // objects contain oop fields that may need GC-specific barriers.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
    }
 956 
    // value_copy (AS_RAW): copy a flattened inline-type payload described by
    // the InlineKlass metadata straight through the raw barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    value_copy(void* src, void* dst, InlineKlass* md) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::value_copy(src, dst, md);
    }
 964 
 965     template <DecoratorSet decorators>
 966     inline static typename EnableIf<
 967       !HasDecorator<decorators, AS_RAW>::value>::type
 968       value_copy(void* src, void* dst, InlineKlass* md) {
 969       const DecoratorSet expanded_decorators = decorators;
 970       RuntimeDispatch<expanded_decorators, void*, BARRIER_VALUE_COPY>::value_copy(src, dst, md);
 971     }
 972   };
 973 
 974   // Step 2: Reduce types.
 975   // Enforce that for non-oop types, T and P have to be strictly the same.
 976   // P is the type of the address and T is the type of the values.
  // As for oop types, it is allowed to send T in {narrowOop, oop} and
 978   // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
 979   // the subsequent table. (columns are P, rows are T)
 980   // |           | HeapWord  |   oop   | narrowOop |
 981   // |   oop     |  rt-comp  | hw-none |  hw-comp  |
 982   // | narrowOop |     x     |    x    |  hw-none  |
 983   //
 984   // x means not allowed
 985   // rt-comp means it must be checked at runtime whether the oop is compressed.
 986   // hw-none means it is statically known the oop will not be compressed.
 987   // hw-comp means it is statically known the oop will be compressed.
 988 
  // store type reduction: T == P, no conversion needed; forward directly.
  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }

  // store type reduction: oop value into narrowOop slot ("hw-comp" in the
  // table above) - compression is statically known to be required.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  // store type reduction: narrowOop value into narrowOop slot ("hw-none") -
  // already compressed, but mark the access as compressed-oop aware.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  // store type reduction: oop value into untyped HeapWord* slot ("rt-comp") -
  // whether compression applies must be decided at runtime.
  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1013 
  // cmpxchg type reduction: T == P, no conversion needed; forward directly.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T* addr, T compare_value, T new_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(addr, compare_value, new_value);
  }

  // cmpxchg type reduction: oop values on a narrowOop slot ("hw-comp") -
  // compression is statically known to be required.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(narrowOop* addr, oop compare_value, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }

  // cmpxchg type reduction: narrowOop values on a narrowOop slot ("hw-none") -
  // already compressed, but mark the access as compressed-oop aware.
  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop* addr, narrowOop compare_value, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }

  // cmpxchg type reduction: oop values on an untyped HeapWord* slot
  // ("rt-comp") - compression must be decided at runtime.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(HeapWord* addr,
                                         oop compare_value,
                                         oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }
1040 
1041   template <DecoratorSet decorators, typename T>
1042   inline T atomic_xchg_reduce_types(T* addr, T new_value) {
1043     const DecoratorSet expanded_decorators = decorators;
1044     return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
1045   }
1046 
  // xchg type reduction: oop value on a narrowOop slot ("hw-comp") -
  // compression is statically known to be required.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(narrowOop* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }

  // xchg type reduction: narrowOop value on a narrowOop slot ("hw-none") -
  // already compressed, but mark the access as compressed-oop aware.
  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop* addr, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }

  // xchg type reduction: oop value on an untyped HeapWord* slot ("rt-comp") -
  // compression must be decided at runtime.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(HeapWord* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }
1066 
  // load type reduction: T == P, no conversion needed; forward directly.
  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }

  // load type reduction from a narrowOop slot: covers both "hw-comp"
  // (T == oop, decompress) and "hw-none" (T == narrowOop, pass through);
  // OopOrNarrowOop selects the appropriate return type.
  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }

  // load type reduction from an untyped HeapWord* slot ("rt-comp") -
  // compression must be decided at runtime.
  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }
1084 
  // arraycopy type reduction: matching element types; forward directly.
  template <DecoratorSet decorators, typename T>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length) {
    PreRuntimeDispatch::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw,
                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                              length);
  }

  // arraycopy type reduction over untyped HeapWord* ranges ("rt-comp") -
  // whether the elements are compressed oops is decided at runtime.
  template <DecoratorSet decorators>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, HeapWord* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, HeapWord* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                       dst_obj, dst_offset_in_bytes, dst_raw,
                                                       length);
  }

  // arraycopy type reduction over narrowOop* ranges - statically known to
  // contain compressed oops.
  template <DecoratorSet decorators>
  inline void arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, narrowOop* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, narrowOop* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                       dst_obj, dst_offset_in_bytes, dst_raw,
                                                       length);
  }
1114 
1115   // Step 1: Set default decorators. This step remembers if a type was volatile
1116   // and then sets the MO_RELAXED decorator by default. Otherwise, a default
1117   // memory ordering is set for the access, and the implied decorator rules
1118   // are applied to select sensible defaults for decorators that have not been
1119   // explicitly set. For example, default object referent strength is set to strong.
1120   // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types). This step also performs some type verification
1122   // that the passed in types make sense.
1123 
  // Compile-time guard: primitive Access functions may only be instantiated
  // with pointer, integral or floating-point value types. Oop-valued
  // accesses (INTERNAL_VALUE_IS_OOP) are validated elsewhere and pass.
  template <DecoratorSet decorators, typename T>
  static void verify_types(){
    // If this fails to compile, then you have sent in something that is
    // not recognized as a valid primitive type to a primitive Access function.
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
                   (IsPointer<T>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // not allowed primitive type
  }
1132 
  // Step 1 entry point for address-based stores: verify the value type,
  // decay CV/reference qualifiers, fix up default decorators, then hand off
  // to step 2 type reduction.
  template <DecoratorSet decorators, typename P, typename T>
  inline void store(P* addr, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_RELAXED by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_RELAXED | decorators) : decorators>::value;
    store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
  }
1146 
  // Step 1 entry point for offset-based stores: verify and decay the value
  // type, fix up default decorators, and record that oop values may need
  // compressed-oop conversion before dispatching.
  template <DecoratorSet decorators, typename T>
  inline void store_at(oop base, ptrdiff_t offset, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
  }
1157 
  // Step 1 entry point for address-based loads: verify the value type, pick
  // the internal value type (oop loads may resolve to oop or narrowOop),
  // fix up default decorators, then hand off to step 2 type reduction.
  template <DecoratorSet decorators, typename P, typename T>
  inline T load(P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_RELAXED by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_RELAXED | decorators) : decorators>::value;
    return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
  }
1172 
  // Step 1 entry point for offset-based loads: verify the value type, pick
  // the internal value type, fix up default decorators, and record that oop
  // loads may need compressed-oop conversion before dispatching.
  template <DecoratorSet decorators, typename T>
  inline T load_at(oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // Expand the decorators (figure out sensible defaults)
    // Potentially remember if we need compressed oop awareness
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
  }
1186 
  // Step 1 entry point for address-based compare-and-exchange: verify and
  // decay types, default the memory ordering to MO_SEQ_CST when none was
  // requested, then hand off to step 2 type reduction.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(P* addr, T compare_value, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                            compare_decayed_value,
                                                            new_decayed_value);
  }
1201 
  // Step 1 entry point for offset-based compare-and-exchange: verify and
  // decay types, default the memory ordering to MO_SEQ_CST when none was
  // requested, and record possible compressed-oop conversion for oop values.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(base, offset, compare_decayed_value,
                                                                   new_decayed_value);
  }
1219 
  // Step 1 entry point for address-based atomic exchange: verify and decay
  // types, force MO_SEQ_CST ordering, then hand off to step 2 type reduction.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(P* addr, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                         new_decayed_value);
  }
1231 
  // Step 1 entry point for offset-based atomic exchange: verify and decay
  // the value type, force MO_SEQ_CST ordering, and record possible
  // compressed-oop conversion for oop values before dispatching.
  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(base, offset, new_decayed_value);
  }
1243 
  // Step 1 entry point for arraycopy: validate the element type (void is
  // allowed for type-erased copies), add the IS_ARRAY and IN_HEAP defaults,
  // then hand off to step 2 type reduction.
  template <DecoratorSet decorators, typename T>
  inline void arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (IsSame<T, void>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
    typedef typename Decay<T>::type DecayedT;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IS_ARRAY | IN_HEAP>::value;
    arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<DecayedT*>(src_raw),
                                                dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                length);
  }
1257 
  // Step 1 entry point for object cloning: fix up default decorators and
  // dispatch.
  template <DecoratorSet decorators>
  inline void clone(oop src, oop dst, size_t size) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
  }
1263 
  // Step 1 entry point for flattened inline-type payload copies: fix up
  // default decorators and dispatch.
  template <DecoratorSet decorators>
  inline void value_copy(void* src, void* dst, InlineKlass* md) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::value_copy<expanded_decorators>(src, dst, md);
  }
1269 
1270   // Infer the type that should be returned from an Access::oop_load.
  template <typename P, DecoratorSet decorators>
  class OopLoadProxy: public StackObj {
  private:
    // Address of the oop slot; the load is deferred until the proxy is
    // converted, so the conversion target type selects oop vs narrowOop.
    P *const _addr;
  public:
    OopLoadProxy(P* addr) : _addr(addr) {}

    // Implicit conversion performing the deferred load as an oop.
    inline operator oop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr);
    }

    // Implicit conversion performing the deferred load as a narrowOop.
    inline operator narrowOop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, narrowOop>(_addr);
    }

    // Comparison operators load with the other operand's type, so that
    // comparing against oop or narrowOop needs no explicit conversion.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) != other;
    }
  };
1296 
1297   // Infer the type that should be returned from an Access::load_at.
1298   template <DecoratorSet decorators>
1299   class LoadAtProxy: public StackObj {
1300   private:
1301     const oop _base;
1302     const ptrdiff_t _offset;
1303   public:
1304     LoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}
1305 
1306     template <typename T>
1307     inline operator T() const {
1308       return load_at<decorators, T>(_base, _offset);
1309     }
1310 
1311     template <typename T>
1312     inline bool operator ==(const T& other) const { return load_at<decorators, T>(_base, _offset) == other; }
1313 
1314     template <typename T>
1315     inline bool operator !=(const T& other) const { return load_at<decorators, T>(_base, _offset) != other; }
1316   };
1317 
1318   // Infer the type that should be returned from an Access::oop_load_at.
1319   template <DecoratorSet decorators>
1320   class OopLoadAtProxy: public StackObj {
1321   private:
1322     const oop _base;
1323     const ptrdiff_t _offset;
1324   public:
1325     OopLoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}
1326 
1327     inline operator oop() const {
1328       return load_at<decorators | INTERNAL_VALUE_IS_OOP, oop>(_base, _offset);
1329     }
1330 
1331     inline operator narrowOop() const {
1332       return load_at<decorators | INTERNAL_VALUE_IS_OOP, narrowOop>(_base, _offset);
1333     }
1334 
1335     template <typename T>
1336     inline bool operator ==(const T& other) const {
1337       return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) == other;
1338     }
1339 
1340     template <typename T>
1341     inline bool operator !=(const T& other) const {
1342       return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) != other;
1343     }
1344   };
1345 }
1346 
1347 #endif // SHARE_OOPS_ACCESSBACKEND_HPP