/*
 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2014, Red Hat Inc. All rights reserved.
 * Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_RISCV_ASSEMBLER_RISCV_INLINE_HPP
#define CPU_RISCV_ASSEMBLER_RISCV_INLINE_HPP

#include "asm/assembler.inline.hpp"
#include "asm/codeBuffer.hpp"
#include "code/codeCache.hpp"

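// Return true if 'value' is a multiple of 2^align_bits and fits in a signed
// (bits + align_bits)-bit field, i.e. it can be encoded as a 'bits'-bit signed
// immediate with 'align_bits' implicit low-order zero bits. For example, a
// plain 12-bit signed immediate (such as a RISC-V I-type immediate) corresponds
// to bits == 12 and align_bits == 0.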
inline bool is_imm_in_range(long value, unsigned bits, unsigned align_bits) {
  intx sign_bits = (value >> (bits + align_bits - 1));
  return ((value & right_n_bits(align_bits)) == 0) && ((sign_bits == 0) || (sign_bits == -1));
}

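// Return true if 'value' is non-negative, is a multiple of 2^align_bits, and
// fits in an unsigned (bits + align_bits)-bit field.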
inline bool is_unsigned_imm_in_range(intx value, unsigned bits, unsigned align_bits) {
  return (value >= 0) && ((value & right_n_bits(align_bits)) == 0) && ((value >> (align_bits + bits)) == 0);
}

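// Return true if 'offset' fits in a signed field of 'bits' bits, with no
// alignment requirement.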
inline bool is_offset_in_range(intx offset, unsigned bits) {
  return is_imm_in_range(offset, bits, 0);
}

#endif // CPU_RISCV_ASSEMBLER_RISCV_INLINE_HPP