/*
 * Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OOPS_ACCESSBACKEND_HPP
#define SHARE_OOPS_ACCESSBACKEND_HPP

#include "gc/shared/barrierSetConfig.hpp"
#include "memory/allocation.hpp"
#include "metaprogramming/conditional.hpp"
#include "metaprogramming/decay.hpp"
#include "metaprogramming/enableIf.hpp"
#include "metaprogramming/integralConstant.hpp"
#include "metaprogramming/isFloatingPoint.hpp"
#include "metaprogramming/isIntegral.hpp"
#include "metaprogramming/isPointer.hpp"
#include "metaprogramming/isSame.hpp"
#include "metaprogramming/isVolatile.hpp"
#include "oops/accessDecorators.hpp"
#include "oops/oopsHierarchy.hpp"
#include "runtime/globals.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"


// This metafunction returns either oop or narrowOop depending on whether
// an access needs to use compressed oops or not.
template <DecoratorSet decorators>
struct HeapOopType: AllStatic {
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value &&
                                         HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  typedef typename Conditional<needs_oop_compress, narrowOop, oop>::type type;
};
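
// For example (illustrative only), only the combination of both internal
// decorators selects the compressed representation:
//
//   HeapOopType<INTERNAL_CONVERT_COMPRESSED_OOP |
//               INTERNAL_RT_USE_COMPRESSED_OOPS>::type   // narrowOop
//   HeapOopType<INTERNAL_CONVERT_COMPRESSED_OOP>::type    // oop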

namespace AccessInternal {
  enum BarrierType {
    BARRIER_STORE,
    BARRIER_STORE_AT,
    BARRIER_LOAD,
    BARRIER_LOAD_AT,
    BARRIER_ATOMIC_CMPXCHG,
    BARRIER_ATOMIC_CMPXCHG_AT,
    BARRIER_ATOMIC_XCHG,
    BARRIER_ATOMIC_XCHG_AT,
    BARRIER_ARRAYCOPY,
    BARRIER_CLONE
  };

  template <DecoratorSet decorators, typename T>
  struct MustConvertCompressedOop: public IntegralConstant<bool,
    HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value &&
    IsSame<typename HeapOopType<decorators>::type, narrowOop>::value &&
    IsSame<T, oop>::value> {};

  // This metafunction returns an appropriate oop type if the value is oop-like
  // and otherwise returns the same type T.
  template <DecoratorSet decorators, typename T>
  struct EncodedType: AllStatic {
    typedef typename Conditional<
      HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
      typename HeapOopType<decorators>::type, T>::type type;
  };

  template <DecoratorSet decorators>
  inline typename HeapOopType<decorators>::type*
  oop_field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<typename HeapOopType<decorators>::type*>(
             reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }

  // This metafunction returns whether a type T may require locking to
  // support wide atomics.
  template <typename T>
#ifdef SUPPORTS_NATIVE_CX8
  struct PossiblyLockedAccess: public IntegralConstant<bool, false> {};
#else
  struct PossiblyLockedAccess: public IntegralConstant<bool, (sizeof(T) > 4)> {};
#endif
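
  // For example (illustrative only), on a platform without SUPPORTS_NATIVE_CX8
  // an 8-byte value may need the locked fallback, while a 4-byte value never does:
  //
  //   PossiblyLockedAccess<jlong>::value   // true  (sizeof(jlong) == 8 > 4)
  //   PossiblyLockedAccess<jint>::value    // false (sizeof(jint) == 4)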

  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef T (*load_at_func_t)(oop base, ptrdiff_t offset);
    typedef void (*store_at_func_t)(oop base, ptrdiff_t offset, T value);
    typedef T (*atomic_cmpxchg_at_func_t)(oop base, ptrdiff_t offset, T compare_value, T new_value);
    typedef T (*atomic_xchg_at_func_t)(oop base, ptrdiff_t offset, T new_value);

    typedef T (*load_func_t)(void* addr);
    typedef void (*store_func_t)(void* addr, T value);
    typedef T (*atomic_cmpxchg_func_t)(void* addr, T compare_value, T new_value);
    typedef T (*atomic_xchg_func_t)(void* addr, T new_value);

    typedef bool (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length);
    typedef void (*clone_func_t)(oop src, oop dst, size_t size);
  };

  template <DecoratorSet decorators>
  struct AccessFunctionTypes<decorators, void> {
    typedef bool (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, void* src,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, void* dst,
                                     size_t length);
  };

  template <DecoratorSet decorators, typename T, BarrierType barrier> struct AccessFunction {};

#define ACCESS_GENERATE_ACCESS_FUNCTION(bt, func)                   \
  template <DecoratorSet decorators, typename T>                    \
  struct AccessFunction<decorators, T, bt>: AllStatic{              \
    typedef typename AccessFunctionTypes<decorators, T>::func type; \
  }
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE, store_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE_AT, store_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD, load_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD_AT, load_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG, atomic_cmpxchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG_AT, atomic_cmpxchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG, atomic_xchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG_AT, atomic_xchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ARRAYCOPY, arraycopy_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_CLONE, clone_func_t);
#undef ACCESS_GENERATE_ACCESS_FUNCTION
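
  // For example (illustrative only), AccessFunction<decorators, T, BARRIER_LOAD>::type
  // expands to AccessFunctionTypes<decorators, T>::load_func_t, i.e. a raw
  // function pointer of type T (*)(void* addr).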

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_barrier();

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_oop_barrier();

  class AccessLocker {
  public:
    AccessLocker();
    ~AccessLocker();
  };
  bool wide_atomic_needs_locking();

  void* field_addr(oop base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently access is included.
  void arraycopy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void arraycopy_conjoint_oops(oop* src, oop* dst, size_t length);
  void arraycopy_conjoint_oops(narrowOop* src, narrowOop* dst, size_t length);

  void arraycopy_disjoint_words(void* src, void* dst, size_t length);
  void arraycopy_disjoint_words_atomic(void* src, void* dst, size_t length);

  template<typename T>
  void arraycopy_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_arrayof_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_conjoint_atomic(T* src, T* dst, size_t length);
}

// This mask specifies what decorators are relevant for raw accesses. When passing
// accesses to the raw layer, irrelevant decorators are removed.
const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
                                        ARRAYCOPY_DECORATOR_MASK | IS_NOT_NULL;

// The RawAccessBarrier performs raw accesses with additional knowledge of
// memory ordering, so that OrderAccess/Atomic is called when necessary.
// It additionally handles compressed oops, and hence is not completely "raw"
// strictly speaking.
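//
// For example (illustrative only), RawAccessBarrier<MO_RELAXED>::load<jint>(addr)
// performs a relaxed atomic load (via Atomic/OrderAccess as described above),
// while RawAccessBarrier<MO_UNORDERED>::load<jint>(addr) is a plain memory read
// (see the load_internal overloads below).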
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  static inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  // Only encode if INTERNAL_VALUE_IS_OOP
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value,
    typename HeapOopType<idecorators>::type>::type
  encode_internal(T value);

  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  encode_internal(T value) {
    return value;
  }

  template <typename T>
  static inline typename AccessInternal::EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  // Only decode if INTERNAL_VALUE_IS_OOP
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(typename HeapOopType<idecorators>::type value);

  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(T value) {
    return value;
  }

  template <typename T>
  static inline T decode(typename AccessInternal::EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

protected:
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_ACQUIRE>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<T*>(addr);
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELEASE>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value>::type
  store_internal(void* addr, T value) {
    *reinterpret_cast<T*>(addr) = value;
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  atomic_cmpxchg_internal(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_xchg_internal(void* addr, T new_value);

  // The following *_locked mechanisms handle atomic operations on values wider
  // than the machine supports natively, possibly falling back to a slower
  // mutex-based path to perform the operation.
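  //
  // For example (illustrative only), without SUPPORTS_NATIVE_CX8 a call such as
  //   atomic_cmpxchg_maybe_locked<ds>(addr, (jlong)compare_value, (jlong)new_value)
  // selects the declared-only overload below, which serializes the operation
  // with AccessLocker when wide_atomic_needs_locking() is true.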

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(void* addr, T compare_value, T new_value) {
    return atomic_cmpxchg_internal<ds>(addr, compare_value, new_value);
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(void* addr, T compare_value, T new_value);

  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(void* addr, T new_value) {
    return atomic_xchg_internal<ds>(addr, new_value);
  }

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(void* addr, T new_value);

public:
  template <typename T>
  static inline void store(void* addr, T value) {
    store_internal<decorators>(addr, value);
  }

  template <typename T>
  static inline T load(void* addr) {
    return load_internal<decorators, T>(addr);
  }

  template <typename T>
  static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
    return atomic_cmpxchg_maybe_locked<decorators>(addr, compare_value, new_value);
  }

  template <typename T>
  static inline T atomic_xchg(void* addr, T new_value) {
    return atomic_xchg_maybe_locked<decorators>(addr, new_value);
  }

  template <typename T>
  static bool arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length);

  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(oop base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(oop base, ptrdiff_t offset);

  template <typename T>
  static T oop_atomic_cmpxchg(void* addr, T compare_value, T new_value);
  template <typename T>
  static T oop_atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value);

  template <typename T>
  static T oop_atomic_xchg(void* addr, T new_value);
  template <typename T>
  static T oop_atomic_xchg_at(oop base, ptrdiff_t offset, T new_value);

  template <typename T>
  static void store_at(oop base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load_at(oop base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    return atomic_cmpxchg(field_addr(base, offset), compare_value, new_value);
  }

  template <typename T>
  static T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    return atomic_xchg(field_addr(base, offset), new_value);
  }

  template <typename T>
  static bool oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                            arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                            size_t length);

  static void clone(oop src, oop dst, size_t size);
};

// Below is the implementation of the first 4 steps of the template pipeline:
// * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
//           and sets default decorators to sensible values.
// * Step 2: Reduce types. This step makes sure there is only a single T type and not
//           multiple types. The P type of the address and T type of the value must
//           match.
// * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
//           avoided, and in that case avoids it (calling raw accesses or
//           primitive accesses in a build that does not require primitive GC barriers)
// * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
//           BarrierSet::AccessBarrier accessor that attaches GC-required barriers
//           to the access.
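//
// For example (illustrative only), a heap access made through the wrappers in
// access.hpp, such as
//
//   HeapAccess<MO_RELAXED>::load_at(obj, offset);   // obj/offset: illustrative
//
// first decays its types and fixes up decorators (step 1), canonicalizes the
// value and address types (step 2), avoids the runtime dispatch when it can
// (step 3), and otherwise goes through RuntimeDispatch, which resolves and
// caches a function pointer to the BarrierSet::AccessBarrier accessor of the
// active GC (step 4).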

namespace AccessInternal {
  template <typename T>
  struct OopOrNarrowOopInternal: AllStatic {
    typedef oop type;
  };

  template <>
  struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
    typedef narrowOop type;
  };

  // This metafunction returns a canonicalized oop/narrowOop type for a value
  // passed in from the oop_* overloads, where the user has sworn that the
  // passed in values are oop-like (e.g. oop, oopDesc*, arrayOop, narrowOop,
  // instanceOopDesc*, and random other things).
  // In the oop_* overloads, it must hold that if the passed in type T is not
  // narrowOop, then it by contract has to be one of many oop-like types
  // implicitly convertible to oop, and hence oop is returned as the canonical
  // oop type. If it turns out it was not, then the implicit conversion to oop
  // will fail to compile, as desired.
  template <typename T>
  struct OopOrNarrowOop: AllStatic {
    typedef typename OopOrNarrowOopInternal<typename Decay<T>::type>::type type;
  };
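
  // For example (illustrative only), OopOrNarrowOop<instanceOop>::type and
  // OopOrNarrowOop<arrayOop>::type are both oop, while
  // OopOrNarrowOop<narrowOop>::type stays narrowOop.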

  inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }
  // Step 4: Runtime dispatch
  // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
  // accessor. This is required when the access either depends on whether compressed oops
  // are being used, or on which GC implementation was chosen (e.g. requires GC
  // barriers). The way it works is that a function pointer initially pointing to an
  // accessor resolution function gets called for each access. Upon first invocation,
  // it resolves which accessor to use in future invocations and patches the
  // function pointer to this new accessor.
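  //
  // For example (illustrative only), the first call to
  // RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr) goes through
  // _load_func, which initially points to load_init; load_init resolves the
  // proper accessor, patches _load_func to point to it, and performs this
  // first load itself. Subsequent calls dispatch directly through the patched
  // pointer.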

  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    static func_t _store_func;

    static void store_init(void* addr, T value);

    static inline void store(void* addr, T value) {
      _store_func(addr, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    static func_t _store_at_func;

    static void store_at_init(oop base, ptrdiff_t offset, T value);

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      _store_at_func(base, offset, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    static func_t _load_func;

    static T load_init(void* addr);

    static inline T load(void* addr) {
      return _load_func(addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    static func_t _load_at_func;

    static T load_at_init(oop base, ptrdiff_t offset);

    static inline T load_at(oop base, ptrdiff_t offset) {
      return _load_at_func(base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    static func_t _atomic_cmpxchg_func;

    static T atomic_cmpxchg_init(void* addr, T compare_value, T new_value);

    static inline T atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      return _atomic_cmpxchg_func(addr, compare_value, new_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    static func_t _atomic_cmpxchg_at_func;

    static T atomic_cmpxchg_at_init(oop base, ptrdiff_t offset, T compare_value, T new_value);

    static inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      return _atomic_cmpxchg_at_func(base, offset, compare_value, new_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    static func_t _atomic_xchg_func;

    static T atomic_xchg_init(void* addr, T new_value);

    static inline T atomic_xchg(void* addr, T new_value) {
      return _atomic_xchg_func(addr, new_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    static func_t _atomic_xchg_at_func;

    static T atomic_xchg_at_init(oop base, ptrdiff_t offset, T new_value);

    static inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      return _atomic_xchg_at_func(base, offset, new_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    static func_t _arraycopy_func;

    static bool arraycopy_init(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                               arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                               size_t length);

    static inline bool arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                 size_t length) {
      return _arraycopy_func(src_obj, src_offset_in_bytes, src_raw,
                             dst_obj, dst_offset_in_bytes, dst_raw,
                             length);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    static func_t _clone_func;

    static void clone_init(oop src, oop dst, size_t size);

    static inline void clone(oop src, oop dst, size_t size) {
      _clone_func(src, dst, size);
    }
  };

  // Initialize the function pointers to point to the resolving function.
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  // Step 3: Pre-runtime dispatching.
  // The PreRuntimeDispatch class is responsible for filtering the barrier strength
  // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
  // dispatch point. Otherwise it goes through a runtime check if hardwiring was
  // not possible.
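  //
  // For example (illustrative only), a primitive load decorated with AS_RAW
  // compiles straight into RawAccessBarrier<decorators & RAW_DECORATOR_MASK>::load
  // with no function pointer indirection, while an oop load without AS_RAW
  // (and without a hardwired primitive shortcut) is routed to
  // RuntimeDispatch<decorators, T, BARRIER_LOAD>::load.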
  struct PreRuntimeDispatch: AllStatic {
    template<DecoratorSet decorators>
    struct CanHardwireRaw: public IntegralConstant<
      bool,
      !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
      !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
      HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
    {};

    static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;

    template<DecoratorSet decorators>
    static bool is_hardwired_primitive() {
      return !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_store(addr, value);
      } else {
        Raw::store(addr, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
    store(void* addr, T value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store(void* addr, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store<expanded_decorators>(addr, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      store<decorators>(field_addr(base, offset), value);
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    store_at(oop base, ptrdiff_t offset, T value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
      } else {
        RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::template oop_load<T>(addr);
      } else {
        return Raw::template load<T>(addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    load(void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load(void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(addr, compare_value, new_value);
      } else {
        return Raw::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(void* addr, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(addr, compare_value, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      return atomic_cmpxchg<decorators>(field_addr(base, offset), compare_value, new_value);
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(base, offset, compare_value, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(base, offset, compare_value, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(addr, new_value);
      } else {
        return Raw::atomic_xchg(addr, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(void* addr, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(addr, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      return atomic_xchg<decorators>(field_addr(base, offset), new_value);
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(base, offset, new_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(base, offset, new_value);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                  length);
      } else {
        return Raw::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                              dst_obj, dst_offset_in_bytes, dst_raw,
                              length);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                                                  length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                                                  length);
      }
    }

    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, bool>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                                                  length);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                                                            dst_obj, dst_offset_in_bytes, dst_raw,
                                                                            length);
      }
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }

    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
    }
  };
  // Step 2: Reduce types.
  // Enforce that for non-oop types, T and P have to be strictly the same.
  // P is the type of the address and T is the type of the value.
  // As for oop types, it is allowed to send T in {narrowOop, oop} and
  // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
  // the subsequent table. (columns are P, rows are T)
  // |           | HeapWord* |   oop   | narrowOop |
  // |   oop     |  rt-comp  | hw-none |  hw-comp  |
  // | narrowOop |     x     |    x    |  hw-none  |
  //
  // x means not allowed
  // rt-comp means it must be checked at runtime whether the oop is compressed.
  // hw-none means it is statically known the oop will not be compressed.
  // hw-comp means it is statically known the oop will be compressed.
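  //
  // For example (illustrative only), storing an oop through a narrowOop* address
  // (the hw-comp case) adds INTERNAL_CONVERT_COMPRESSED_OOP |
  // INTERNAL_RT_USE_COMPRESSED_OOPS up front, while storing an oop through a
  // HeapWord* address (the rt-comp case) only adds INTERNAL_CONVERT_COMPRESSED_OOP
  // and leaves the compressed-oops question to a runtime check in
  // PreRuntimeDispatch (see store_reduce_types below).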

  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }

  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }

  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T* addr, T compare_value, T new_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(addr, compare_value, new_value);
  }

  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(narrowOop* addr, oop compare_value, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }

  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop* addr, narrowOop compare_value, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }

  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(HeapWord* addr,
                                         oop compare_value,
                                         oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(addr, compare_value, new_value);
  }

  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_reduce_types(T* addr, T new_value) {
    const DecoratorSet expanded_decorators = decorators;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }

  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(narrowOop* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }

  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop* addr, narrowOop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }

  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(HeapWord* addr, oop new_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(addr, new_value);
  }

  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }

  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }

  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }

  template <DecoratorSet decorators, typename T>
  inline bool arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length) {
    return PreRuntimeDispatch::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                     dst_obj, dst_offset_in_bytes, dst_raw,
                                                     length);
  }

  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, HeapWord* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, HeapWord* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                                              length);
  }

  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, narrowOop* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, narrowOop* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                                              length);
  }

  // Step 1: Set default decorators. This step remembers if a type was volatile
  // and then sets the MO_RELAXED decorator by default. Otherwise, a default
  // memory ordering is set for the access, and the implied decorator rules
  // are applied to select sensible defaults for decorators that have not been
  // explicitly set. For example, default object referent strength is set to strong.
  // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types), and performs some type verification that
  // the passed in types make sense.
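  //
  // For example (illustrative only), a call through the wrappers in access.hpp
  // such as
  //
  //   RawAccess<>::load(&_some_volatile_jint_field);   // field name illustrative
  //
  // picks up MO_RELAXED here because the address is volatile and no memory
  // ordering decorator was supplied.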
1078 
1079   template <DecoratorSet decorators, typename T>
1080   static void verify_types(){
1081     // If this fails to compile, then you have sent in something that is
1082     // not recognized as a valid primitive type to a primitive Access function.
1083     STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
1084                    (IsPointer<T>::value || IsIntegral<T>::value) ||
1085                     IsFloatingPoint<T>::value)); // not allowed primitive type
1086   }
1087 
1088   template <DecoratorSet decorators, typename P, typename T>
1089   inline void store(P* addr, T value) {
1090     verify_types<decorators, T>();
1091     typedef typename Decay<P>::type DecayedP;
1092     typedef typename Decay<T>::type DecayedT;
1093     DecayedT decayed_value = value;
1094     // If a volatile address is passed in but no memory ordering decorator,
1095     // set the memory ordering to MO_RELAXED by default.
1096     const DecoratorSet expanded_decorators = DecoratorFixup<
1097       (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
1098       (MO_RELAXED | decorators) : decorators>::value;
1099     store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
1100   }
1101 
1102   template <DecoratorSet decorators, typename T>
1103   inline void store_at(oop base, ptrdiff_t offset, T value) {
1104     verify_types<decorators, T>();
1105     typedef typename Decay<T>::type DecayedT;
1106     DecayedT decayed_value = value;
1107     const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
1108                                              (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
1109                                               INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
1110     PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
1111   }
1112 
1113   template <DecoratorSet decorators, typename P, typename T>
1114   inline T load(P* addr) {
1115     verify_types<decorators, T>();
1116     typedef typename Decay<P>::type DecayedP;
1117     typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
1118                                  typename OopOrNarrowOop<T>::type,
1119                                  typename Decay<T>::type>::type DecayedT;
1120     // If a volatile address is passed in but no memory ordering decorator,
1121     // set the memory ordering to MO_RELAXED by default.
1122     const DecoratorSet expanded_decorators = DecoratorFixup<
1123       (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
1124       (MO_RELAXED | decorators) : decorators>::value;
1125     return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
1126   }
1127 
1128   template <DecoratorSet decorators, typename T>
1129   inline T load_at(oop base, ptrdiff_t offset) {
1130     verify_types<decorators, T>();
1131     typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
1132                                  typename OopOrNarrowOop<T>::type,
1133                                  typename Decay<T>::type>::type DecayedT;
1134     // Expand the decorators (figure out sensible defaults)
1135     // Potentially remember if we need compressed oop awareness
1136     const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
1137                                              (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
1138                                               INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
1139     return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
1140   }
1141 
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(P* addr, T compare_value, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                            compare_decayed_value,
                                                            new_decayed_value);
  }

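  // Atomically compare-and-exchange a value in a field of a heap object at
  // base + offset, with the same MO_SEQ_CST default as atomic_cmpxchg above.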
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(oop base, ptrdiff_t offset, T compare_value, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(base, offset, compare_decayed_value,
                                                                   new_decayed_value);
  }

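  // Atomically exchange a value at a raw address, returning the previous value.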
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(P* addr, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr),
                                                         new_decayed_value);
  }

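  // Atomically exchange a value in a field of a heap object at base + offset,
  // returning the previous value.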
  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(oop base, ptrdiff_t offset, T new_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(base, offset, new_decayed_value);
  }

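  // Copy 'length' elements between arrays. Source and destination are each described
  // by an array oop plus a byte offset, or by a raw pointer; IS_ARRAY and IN_HEAP are
  // implied for this entry point.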
  template <DecoratorSet decorators, typename T>
  inline bool arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (IsSame<T, void>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
    typedef typename Decay<T>::type DecayedT;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IS_ARRAY | IN_HEAP>::value;
    return arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<DecayedT*>(src_raw),
                                                       dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                       length);
  }

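  // Copy the contents of object src into dst (object cloning), dispatching through
  // the access backend so a GC barrier set can intercept the copy if it needs to.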
  template <DecoratorSet decorators>
  inline void clone(oop src, oop dst, size_t size) {
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
    PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
  }

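  // The proxy classes below defer the actual load until the result is converted to a
  // concrete type, so the correct load is selected based on the type the caller
  // assigns the result to. Illustrative use through the Access API (see access.hpp):
  //   oop o = HeapAccess<>::oop_load_at(obj, offset);
  // constructs an OopLoadAtProxy whose conversion operator performs the load as oop.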
  // Infer the type that should be returned from an Access::oop_load.
  template <typename P, DecoratorSet decorators>
  class OopLoadProxy: public StackObj {
  private:
    P *const _addr;
  public:
    OopLoadProxy(P* addr) : _addr(addr) {}

    inline operator oop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr);
    }

    inline operator narrowOop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, narrowOop>(_addr);
    }

    template <typename T>
    inline bool operator ==(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) != other;
    }
  };

  // Infer the type that should be returned from an Access::load_at.
  template <DecoratorSet decorators>
  class LoadAtProxy: public StackObj {
  private:
    const oop _base;
    const ptrdiff_t _offset;
  public:
    LoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    template <typename T>
    inline operator T() const {
      return load_at<decorators, T>(_base, _offset);
    }

    template <typename T>
    inline bool operator ==(const T& other) const { return load_at<decorators, T>(_base, _offset) == other; }

    template <typename T>
    inline bool operator !=(const T& other) const { return load_at<decorators, T>(_base, _offset) != other; }
  };

  // Infer the type that should be returned from an Access::oop_load_at.
  template <DecoratorSet decorators>
  class OopLoadAtProxy: public StackObj {
  private:
    const oop _base;
    const ptrdiff_t _offset;
  public:
    OopLoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    inline operator oop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, oop>(_base, _offset);
    }

    inline operator narrowOop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, narrowOop>(_base, _offset);
    }

    template <typename T>
    inline bool operator ==(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) != other;
    }
  };
}

#endif // SHARE_OOPS_ACCESSBACKEND_HPP