src/hotspot/share/oops/accessBackend.inline.hpp
#include "oops/arrayOop.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/oopsHierarchy.hpp"
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
+ #include "oops/inlineKlass.hpp"
#include <type_traits>
template <DecoratorSet decorators>
template <DecoratorSet idecorators, typename T>
return oop_atomic_xchg(field_addr(base, offset), new_value);
}
template <DecoratorSet decorators>
template <typename T>
- inline bool RawAccessBarrier<decorators>::oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
+ inline void RawAccessBarrier<decorators>::oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                                         arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                                         size_t length) {
-   return arraycopy(src_obj, src_offset_in_bytes, src_raw,
-                    dst_obj, dst_offset_in_bytes, dst_raw,
-                    length);
+   arraycopy(src_obj, src_offset_in_bytes, src_raw,
+             dst_obj, dst_offset_in_bytes, dst_raw,
+             length);
}
template <DecoratorSet decorators>
template <DecoratorSet ds, typename T>
inline typename EnableIf<
template<> struct RawAccessBarrierArrayCopy::IsHeapWordSized<void>: public std::false_type { };
template <DecoratorSet decorators>
template <typename T>
- inline bool RawAccessBarrier<decorators>::arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
+ inline void RawAccessBarrier<decorators>::arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                                     size_t length) {
  RawAccessBarrierArrayCopy::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                   dst_obj, dst_offset_in_bytes, dst_raw,
                                                   length);
-   return true;
}
template <DecoratorSet decorators>
inline void RawAccessBarrier<decorators>::clone(oop src, oop dst, size_t size) {
  // 4839641 (4840070): We must do an oop-atomic copy, because if another thread
  // is modifying a reference field in the clonee, a non-oop-atomic copy might
  // be suspended in the middle of copying the pointer and end up with parts
  // of two different pointers in the field.  Subsequent dereferences will crash.
  AccessInternal::arraycopy_conjoint_atomic(reinterpret_cast<jlong*>((oopDesc*)src),
                                            reinterpret_cast<jlong*>((oopDesc*)dst),
                                            align_object_size(size) / HeapWordsPerLong);
  // Clear the header
  dst->init_mark();
}
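+ // Copy the payload of one flat value object from src to dst, using the size
+ // and alignment that InlineKlass 'md' records for layout kind 'lk'.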
+ template <DecoratorSet decorators>
+ inline void RawAccessBarrier<decorators>::value_copy(void* src, void* dst, InlineKlass* md, LayoutKind lk) {
+   assert(is_aligned(src, md->layout_alignment(lk)) && is_aligned(dst, md->layout_alignment(lk)), "Unaligned value_copy");
+   AccessInternal::value_copy_internal(src, dst, static_cast<size_t>(md->layout_size_in_bytes(lk)));
+ }
#endif // SHARE_OOPS_ACCESSBACKEND_INLINE_HPP
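A minimal usage sketch for the value_copy barrier added above, not part of this change: it only illustrates the contract visible in the code, namely that both payload addresses must satisfy md->layout_alignment(lk) and that md->layout_size_in_bytes(lk) bytes are copied. The helper name, its arguments, and the IN_HEAP decorator choice are hypothetical stand-ins; in the VM such calls would normally go through the Access<> dispatch layer rather than naming RawAccessBarrier directly.

// Hypothetical helper: flat-copy one value payload from src_payload to
// dst_payload, where 'vk' is the value's InlineKlass and 'lk' the layout kind
// chosen for the flat field or array element being copied.
static void copy_flat_payload_sketch(void* src_payload, void* dst_payload,
                                     InlineKlass* vk, LayoutKind lk) {
  // The barrier itself asserts this alignment; checking it in the caller too
  // makes the precondition explicit in this sketch.
  assert(is_aligned(src_payload, vk->layout_alignment(lk)) &&
         is_aligned(dst_payload, vk->layout_alignment(lk)), "payloads must be aligned");
  RawAccessBarrier<IN_HEAP>::value_copy(src_payload, dst_payload, vk, lk);
}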