clang-p2996/llvm/test/CodeGen/RISCV/bswap-bitreverse.ll
Philip Reames 8624075105 [RISCV] Strip W suffix from ADDIW (#68425)
The motivation for this change is simply to reduce test duplication. As
can be seen in the (massive) test delta, we have many tests whose output
differs only in the use of addi on rv32 vs addiw on rv64 when the
high bits are don't-care.
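
To see why the substitution is sound, here is a minimal sketch (with a
hypothetical input value, not taken from the checks below) of a case
where addi and addiw produce different high bits:

# rv64, assuming a0 = 0x000000007fffffff
addiw a1, a0, 1 # a1 = 0xffffffff80000000: low 32 bits of the sum, sign-extended
addi  a2, a0, 1 # a2 = 0x0000000080000000: full 64-bit sum
# Bits 31:0 agree (0x80000000) in both results; only bits 63:32 differ,
# so when the high bits are don't-care either instruction is correct.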

As an aside, we don't need to worry about the non-zero immediate
restriction on the compressed variants because we're not directly
forming the compressed variants. If we happen to get a zero immediate
for the ADDI, then either a later optimization will strip the useless
instruction, or the encoder is responsible for not compressing it.
2023-10-06 10:28:01 -07:00
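
To make the aside concrete, a sketch based on the standard RVC encoding
rules (not on this patch's code): C.ADDI is only defined for a nonzero
immediate (the zero-immediate code points encode C.NOP and hints), so a
zero-immediate ADDI can never be mis-encoded as C.ADDI.

addi a0, a0, 1 # compressible to the 16-bit c.addi a0, 1
addi a0, a0, 0 # no-op; c.addi needs a nonzero immediate, so per the
               # commit message either a later pass strips this or the
               # encoder declines the compressed encoding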

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv32 -mattr=+zbb -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=RV32ZB,RV32ZBB
; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=RV64ZB,RV64ZBB
; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=RV32ZB,RV32ZBKB
; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=RV64ZB,RV64ZBKB

declare i16 @llvm.bswap.i16(i16)
declare i32 @llvm.bswap.i32(i32)
declare i64 @llvm.bswap.i64(i64)
declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i32 @llvm.bitreverse.i32(i32)
declare i64 @llvm.bitreverse.i64(i64)

define i16 @test_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_i16:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a0, 8
; RV32I-NEXT: slli a0, a0, 16
; RV32I-NEXT: srli a0, a0, 24
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_i16:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a1, a0, 8
; RV64I-NEXT: slli a0, a0, 48
; RV64I-NEXT: srli a0, a0, 56
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZB-LABEL: test_bswap_i16:
; RV32ZB: # %bb.0:
; RV32ZB-NEXT: rev8 a0, a0
; RV32ZB-NEXT: srli a0, a0, 16
; RV32ZB-NEXT: ret
;
; RV64ZB-LABEL: test_bswap_i16:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: rev8 a0, a0
; RV64ZB-NEXT: srli a0, a0, 48
; RV64ZB-NEXT: ret
%tmp = call i16 @llvm.bswap.i16(i16 %a)
ret i16 %tmp
}

define i32 @test_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_i32:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a0, 8
; RV32I-NEXT: lui a2, 16
; RV32I-NEXT: addi a2, a2, -256
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: srli a3, a0, 24
; RV32I-NEXT: or a1, a1, a3
; RV32I-NEXT: and a2, a0, a2
; RV32I-NEXT: slli a2, a2, 8
; RV32I-NEXT: slli a0, a0, 24
; RV32I-NEXT: or a0, a0, a2
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_i32:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 8
; RV64I-NEXT: lui a2, 16
; RV64I-NEXT: addiw a2, a2, -256
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: srliw a3, a0, 24
; RV64I-NEXT: or a1, a1, a3
; RV64I-NEXT: and a2, a0, a2
; RV64I-NEXT: slli a2, a2, 8
; RV64I-NEXT: slliw a0, a0, 24
; RV64I-NEXT: or a0, a0, a2
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: ret
;
; RV32ZB-LABEL: test_bswap_i32:
; RV32ZB: # %bb.0:
; RV32ZB-NEXT: rev8 a0, a0
; RV32ZB-NEXT: ret
;
; RV64ZB-LABEL: test_bswap_i32:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: rev8 a0, a0
; RV64ZB-NEXT: srli a0, a0, 32
; RV64ZB-NEXT: ret
%tmp = call i32 @llvm.bswap.i32(i32 %a)
ret i32 %tmp
}

define i64 @test_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_i64:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a2, a1, 8
; RV32I-NEXT: lui a3, 16
; RV32I-NEXT: addi a3, a3, -256
; RV32I-NEXT: and a2, a2, a3
; RV32I-NEXT: srli a4, a1, 24
; RV32I-NEXT: or a2, a2, a4
; RV32I-NEXT: and a4, a1, a3
; RV32I-NEXT: slli a4, a4, 8
; RV32I-NEXT: slli a1, a1, 24
; RV32I-NEXT: or a1, a1, a4
; RV32I-NEXT: or a2, a1, a2
; RV32I-NEXT: srli a1, a0, 8
; RV32I-NEXT: and a1, a1, a3
; RV32I-NEXT: srli a4, a0, 24
; RV32I-NEXT: or a1, a1, a4
; RV32I-NEXT: and a3, a0, a3
; RV32I-NEXT: slli a3, a3, 8
; RV32I-NEXT: slli a0, a0, 24
; RV32I-NEXT: or a0, a0, a3
; RV32I-NEXT: or a1, a0, a1
; RV32I-NEXT: mv a0, a2
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_i64:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 40
; RV64I-NEXT: lui a2, 16
; RV64I-NEXT: addiw a2, a2, -256
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: srli a3, a0, 56
; RV64I-NEXT: or a1, a1, a3
; RV64I-NEXT: srli a3, a0, 24
; RV64I-NEXT: lui a4, 4080
; RV64I-NEXT: and a3, a3, a4
; RV64I-NEXT: srli a5, a0, 8
; RV64I-NEXT: srliw a5, a5, 24
; RV64I-NEXT: slli a5, a5, 24
; RV64I-NEXT: or a3, a5, a3
; RV64I-NEXT: or a1, a3, a1
; RV64I-NEXT: and a4, a0, a4
; RV64I-NEXT: slli a4, a4, 24
; RV64I-NEXT: srliw a3, a0, 24
; RV64I-NEXT: slli a3, a3, 32
; RV64I-NEXT: or a3, a4, a3
; RV64I-NEXT: and a2, a0, a2
; RV64I-NEXT: slli a2, a2, 40
; RV64I-NEXT: slli a0, a0, 56
; RV64I-NEXT: or a0, a0, a2
; RV64I-NEXT: or a0, a0, a3
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: ret
;
; RV32ZB-LABEL: test_bswap_i64:
; RV32ZB: # %bb.0:
; RV32ZB-NEXT: rev8 a2, a1
; RV32ZB-NEXT: rev8 a1, a0
; RV32ZB-NEXT: mv a0, a2
; RV32ZB-NEXT: ret
;
; RV64ZB-LABEL: test_bswap_i64:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: rev8 a0, a0
; RV64ZB-NEXT: ret
%tmp = call i64 @llvm.bswap.i64(i64 %a)
ret i64 %tmp
}

define i8 @test_bitreverse_i8(i8 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i8:
; RV32I: # %bb.0:
; RV32I-NEXT: andi a1, a0, 15
; RV32I-NEXT: slli a1, a1, 4
; RV32I-NEXT: slli a0, a0, 24
; RV32I-NEXT: srli a0, a0, 28
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: andi a1, a0, 51
; RV32I-NEXT: slli a1, a1, 2
; RV32I-NEXT: srli a0, a0, 2
; RV32I-NEXT: andi a0, a0, 51
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: andi a1, a0, 85
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: srli a0, a0, 1
; RV32I-NEXT: andi a0, a0, 85
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_i8:
; RV64I: # %bb.0:
; RV64I-NEXT: andi a1, a0, 15
; RV64I-NEXT: slli a1, a1, 4
; RV64I-NEXT: slli a0, a0, 56
; RV64I-NEXT: srli a0, a0, 60
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: andi a1, a0, 51
; RV64I-NEXT: slli a1, a1, 2
; RV64I-NEXT: srli a0, a0, 2
; RV64I-NEXT: andi a0, a0, 51
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: andi a1, a0, 85
; RV64I-NEXT: slli a1, a1, 1
; RV64I-NEXT: srli a0, a0, 1
; RV64I-NEXT: andi a0, a0, 85
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_i8:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: andi a1, a0, 15
; RV32ZBB-NEXT: slli a1, a1, 4
; RV32ZBB-NEXT: slli a0, a0, 24
; RV32ZBB-NEXT: srli a0, a0, 28
; RV32ZBB-NEXT: or a0, a0, a1
; RV32ZBB-NEXT: andi a1, a0, 51
; RV32ZBB-NEXT: slli a1, a1, 2
; RV32ZBB-NEXT: srli a0, a0, 2
; RV32ZBB-NEXT: andi a0, a0, 51
; RV32ZBB-NEXT: or a0, a0, a1
; RV32ZBB-NEXT: andi a1, a0, 85
; RV32ZBB-NEXT: slli a1, a1, 1
; RV32ZBB-NEXT: srli a0, a0, 1
; RV32ZBB-NEXT: andi a0, a0, 85
; RV32ZBB-NEXT: or a0, a0, a1
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_i8:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: andi a1, a0, 15
; RV64ZBB-NEXT: slli a1, a1, 4
; RV64ZBB-NEXT: slli a0, a0, 56
; RV64ZBB-NEXT: srli a0, a0, 60
; RV64ZBB-NEXT: or a0, a0, a1
; RV64ZBB-NEXT: andi a1, a0, 51
; RV64ZBB-NEXT: slli a1, a1, 2
; RV64ZBB-NEXT: srli a0, a0, 2
; RV64ZBB-NEXT: andi a0, a0, 51
; RV64ZBB-NEXT: or a0, a0, a1
; RV64ZBB-NEXT: andi a1, a0, 85
; RV64ZBB-NEXT: slli a1, a1, 1
; RV64ZBB-NEXT: srli a0, a0, 1
; RV64ZBB-NEXT: andi a0, a0, 85
; RV64ZBB-NEXT: or a0, a0, a1
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_i8:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: rev8 a0, a0
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: srli a0, a0, 24
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_i8:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: rev8 a0, a0
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: srli a0, a0, 56
; RV64ZBKB-NEXT: ret
%tmp = call i8 @llvm.bitreverse.i8(i8 %a)
ret i8 %tmp
}

define i16 @test_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i16:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a0, 8
; RV32I-NEXT: slli a0, a0, 16
; RV32I-NEXT: srli a0, a0, 24
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 1
; RV32I-NEXT: addi a2, a2, -241
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: lui a2, 3
; RV32I-NEXT: addi a2, a2, 819
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: lui a2, 5
; RV32I-NEXT: addi a2, a2, 1365
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_i16:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a1, a0, 8
; RV64I-NEXT: slli a0, a0, 48
; RV64I-NEXT: srli a0, a0, 56
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 1
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 3
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 5
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_i16:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: rev8 a0, a0
; RV32ZBB-NEXT: srli a1, a0, 12
; RV32ZBB-NEXT: lui a2, 15
; RV32ZBB-NEXT: addi a2, a2, 240
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: srli a0, a0, 20
; RV32ZBB-NEXT: andi a0, a0, -241
; RV32ZBB-NEXT: or a0, a0, a1
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 3
; RV32ZBB-NEXT: addi a2, a2, 819
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: lui a2, 5
; RV32ZBB-NEXT: addi a2, a2, 1365
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_i16:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: rev8 a0, a0
; RV64ZBB-NEXT: srli a1, a0, 44
; RV64ZBB-NEXT: lui a2, 15
; RV64ZBB-NEXT: addiw a2, a2, 240
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: srli a0, a0, 52
; RV64ZBB-NEXT: andi a0, a0, -241
; RV64ZBB-NEXT: or a0, a0, a1
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 3
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 5
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_i16:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: rev8 a0, a0
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: srli a0, a0, 16
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_i16:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: rev8 a0, a0
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: srli a0, a0, 48
; RV64ZBKB-NEXT: ret
%tmp = call i16 @llvm.bitreverse.i16(i16 %a)
ret i16 %tmp
}

define i32 @test_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i32:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a0, 8
; RV32I-NEXT: lui a2, 16
; RV32I-NEXT: addi a2, a2, -256
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: srli a3, a0, 24
; RV32I-NEXT: or a1, a1, a3
; RV32I-NEXT: and a2, a0, a2
; RV32I-NEXT: slli a2, a2, 8
; RV32I-NEXT: slli a0, a0, 24
; RV32I-NEXT: or a0, a0, a2
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 61681
; RV32I-NEXT: addi a2, a2, -241
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: lui a2, 209715
; RV32I-NEXT: addi a2, a2, 819
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: lui a2, 349525
; RV32I-NEXT: addi a2, a2, 1365
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_i32:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 8
; RV64I-NEXT: lui a2, 16
; RV64I-NEXT: addiw a2, a2, -256
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: srliw a3, a0, 24
; RV64I-NEXT: or a1, a1, a3
; RV64I-NEXT: and a2, a0, a2
; RV64I-NEXT: slli a2, a2, 8
; RV64I-NEXT: slliw a0, a0, 24
; RV64I-NEXT: or a0, a0, a2
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 209715
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 349525
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_i32:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: rev8 a0, a0
; RV32ZBB-NEXT: srli a1, a0, 4
; RV32ZBB-NEXT: lui a2, 61681
; RV32ZBB-NEXT: addi a2, a2, -241
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 209715
; RV32ZBB-NEXT: addi a2, a2, 819
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: lui a2, 349525
; RV32ZBB-NEXT: addi a2, a2, 1365
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_i32:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: rev8 a0, a0
; RV64ZBB-NEXT: srli a1, a0, 36
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: srli a0, a0, 28
; RV64ZBB-NEXT: lui a2, 986895
; RV64ZBB-NEXT: addi a2, a2, 240
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: sext.w a0, a0
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 349525
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_i32:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: rev8 a0, a0
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_i32:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: rev8 a0, a0
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: srli a0, a0, 32
; RV64ZBKB-NEXT: ret
%tmp = call i32 @llvm.bitreverse.i32(i32 %a)
ret i32 %tmp
}

define i64 @test_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i64:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a2, a1, 8
; RV32I-NEXT: lui a3, 16
; RV32I-NEXT: addi a3, a3, -256
; RV32I-NEXT: and a2, a2, a3
; RV32I-NEXT: srli a4, a1, 24
; RV32I-NEXT: or a2, a2, a4
; RV32I-NEXT: and a4, a1, a3
; RV32I-NEXT: slli a4, a4, 8
; RV32I-NEXT: slli a1, a1, 24
; RV32I-NEXT: or a1, a1, a4
; RV32I-NEXT: or a1, a1, a2
; RV32I-NEXT: srli a2, a1, 4
; RV32I-NEXT: lui a4, 61681
; RV32I-NEXT: addi a4, a4, -241
; RV32I-NEXT: and a2, a2, a4
; RV32I-NEXT: and a1, a1, a4
; RV32I-NEXT: slli a1, a1, 4
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 2
; RV32I-NEXT: lui a5, 209715
; RV32I-NEXT: addi a5, a5, 819
; RV32I-NEXT: and a2, a2, a5
; RV32I-NEXT: and a1, a1, a5
; RV32I-NEXT: slli a1, a1, 2
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 1
; RV32I-NEXT: lui a6, 349525
; RV32I-NEXT: addi a6, a6, 1365
; RV32I-NEXT: and a2, a2, a6
; RV32I-NEXT: and a1, a1, a6
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: or a2, a2, a1
; RV32I-NEXT: srli a1, a0, 8
; RV32I-NEXT: and a1, a1, a3
; RV32I-NEXT: srli a7, a0, 24
; RV32I-NEXT: or a1, a1, a7
; RV32I-NEXT: and a3, a0, a3
; RV32I-NEXT: slli a3, a3, 8
; RV32I-NEXT: slli a0, a0, 24
; RV32I-NEXT: or a0, a0, a3
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: and a1, a1, a4
; RV32I-NEXT: and a0, a0, a4
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: and a1, a1, a5
; RV32I-NEXT: and a0, a0, a5
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: and a1, a1, a6
; RV32I-NEXT: and a0, a0, a6
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a1, a1, a0
; RV32I-NEXT: mv a0, a2
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_i64:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 40
; RV64I-NEXT: lui a2, 16
; RV64I-NEXT: addiw a2, a2, -256
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: srli a3, a0, 56
; RV64I-NEXT: or a1, a1, a3
; RV64I-NEXT: srli a3, a0, 24
; RV64I-NEXT: lui a4, 4080
; RV64I-NEXT: and a3, a3, a4
; RV64I-NEXT: srli a5, a0, 8
; RV64I-NEXT: srliw a5, a5, 24
; RV64I-NEXT: slli a5, a5, 24
; RV64I-NEXT: or a3, a5, a3
; RV64I-NEXT: or a1, a3, a1
; RV64I-NEXT: and a4, a0, a4
; RV64I-NEXT: slli a4, a4, 24
; RV64I-NEXT: srliw a3, a0, 24
; RV64I-NEXT: slli a3, a3, 32
; RV64I-NEXT: or a3, a4, a3
; RV64I-NEXT: and a2, a0, a2
; RV64I-NEXT: slli a2, a2, 40
; RV64I-NEXT: slli a0, a0, 56
; RV64I-NEXT: or a0, a0, a2
; RV64I-NEXT: or a0, a0, a3
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 209715
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 349525
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_i64:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: rev8 a1, a1
; RV32ZBB-NEXT: srli a2, a1, 4
; RV32ZBB-NEXT: lui a3, 61681
; RV32ZBB-NEXT: addi a3, a3, -241
; RV32ZBB-NEXT: and a2, a2, a3
; RV32ZBB-NEXT: and a1, a1, a3
; RV32ZBB-NEXT: slli a1, a1, 4
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: srli a2, a1, 2
; RV32ZBB-NEXT: lui a4, 209715
; RV32ZBB-NEXT: addi a4, a4, 819
; RV32ZBB-NEXT: and a2, a2, a4
; RV32ZBB-NEXT: and a1, a1, a4
; RV32ZBB-NEXT: slli a1, a1, 2
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: srli a2, a1, 1
; RV32ZBB-NEXT: lui a5, 349525
; RV32ZBB-NEXT: addi a5, a5, 1365
; RV32ZBB-NEXT: and a2, a2, a5
; RV32ZBB-NEXT: and a1, a1, a5
; RV32ZBB-NEXT: slli a1, a1, 1
; RV32ZBB-NEXT: or a2, a2, a1
; RV32ZBB-NEXT: rev8 a0, a0
; RV32ZBB-NEXT: srli a1, a0, 4
; RV32ZBB-NEXT: and a1, a1, a3
; RV32ZBB-NEXT: and a0, a0, a3
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: and a1, a1, a4
; RV32ZBB-NEXT: and a0, a0, a4
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: and a1, a1, a5
; RV32ZBB-NEXT: and a0, a0, a5
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a1, a1, a0
; RV32ZBB-NEXT: mv a0, a2
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_i64:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: rev8 a0, a0
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 349525
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_i64:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: rev8 a1, a1
; RV32ZBKB-NEXT: brev8 a2, a1
; RV32ZBKB-NEXT: rev8 a0, a0
; RV32ZBKB-NEXT: brev8 a1, a0
; RV32ZBKB-NEXT: mv a0, a2
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_i64:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: rev8 a0, a0
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i64 @llvm.bitreverse.i64(i64 %a)
ret i64 %tmp
}

define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i16:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 1
; RV32I-NEXT: addi a2, a2, -241
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: lui a2, 3
; RV32I-NEXT: addi a2, a2, 819
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: lui a2, 5
; RV32I-NEXT: addi a2, a2, 1365
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_bitreverse_i16:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 1
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 3
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 5
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a1, a0, 4
; RV32ZBB-NEXT: lui a2, 1
; RV32ZBB-NEXT: addi a2, a2, -241
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 3
; RV32ZBB-NEXT: addi a2, a2, 819
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: lui a2, 5
; RV32ZBB-NEXT: addi a2, a2, 1365
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 1
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 3
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 5
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i16 @llvm.bswap.i16(i16 %a)
%tmp2 = call i16 @llvm.bitreverse.i16(i16 %tmp)
ret i16 %tmp2
}

define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i32:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 61681
; RV32I-NEXT: addi a2, a2, -241
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: lui a2, 209715
; RV32I-NEXT: addi a2, a2, 819
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: lui a2, 349525
; RV32I-NEXT: addi a2, a2, 1365
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_bitreverse_i32:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 209715
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 349525
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a1, a0, 4
; RV32ZBB-NEXT: lui a2, 61681
; RV32ZBB-NEXT: addi a2, a2, -241
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 209715
; RV32ZBB-NEXT: addi a2, a2, 819
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: lui a2, 349525
; RV32ZBB-NEXT: addi a2, a2, 1365
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 349525
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i32 @llvm.bswap.i32(i32 %a)
%tmp2 = call i32 @llvm.bitreverse.i32(i32 %tmp)
ret i32 %tmp2
}

define i64 @test_bswap_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i64:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a2, a0, 4
; RV32I-NEXT: lui a3, 61681
; RV32I-NEXT: addi a3, a3, -241
; RV32I-NEXT: and a2, a2, a3
; RV32I-NEXT: and a0, a0, a3
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a0, 2
; RV32I-NEXT: lui a4, 209715
; RV32I-NEXT: addi a4, a4, 819
; RV32I-NEXT: and a2, a2, a4
; RV32I-NEXT: and a0, a0, a4
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a0, 1
; RV32I-NEXT: lui a5, 349525
; RV32I-NEXT: addi a5, a5, 1365
; RV32I-NEXT: and a2, a2, a5
; RV32I-NEXT: and a0, a0, a5
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a1, 4
; RV32I-NEXT: and a2, a2, a3
; RV32I-NEXT: and a1, a1, a3
; RV32I-NEXT: slli a1, a1, 4
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 2
; RV32I-NEXT: and a2, a2, a4
; RV32I-NEXT: and a1, a1, a4
; RV32I-NEXT: slli a1, a1, 2
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 1
; RV32I-NEXT: and a2, a2, a5
; RV32I-NEXT: and a1, a1, a5
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bswap_bitreverse_i64:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 209715
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 349525
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i64:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a2, a0, 4
; RV32ZBB-NEXT: lui a3, 61681
; RV32ZBB-NEXT: addi a3, a3, -241
; RV32ZBB-NEXT: and a2, a2, a3
; RV32ZBB-NEXT: and a0, a0, a3
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a2, a0
; RV32ZBB-NEXT: srli a2, a0, 2
; RV32ZBB-NEXT: lui a4, 209715
; RV32ZBB-NEXT: addi a4, a4, 819
; RV32ZBB-NEXT: and a2, a2, a4
; RV32ZBB-NEXT: and a0, a0, a4
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a2, a0
; RV32ZBB-NEXT: srli a2, a0, 1
; RV32ZBB-NEXT: lui a5, 349525
; RV32ZBB-NEXT: addi a5, a5, 1365
; RV32ZBB-NEXT: and a2, a2, a5
; RV32ZBB-NEXT: and a0, a0, a5
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a2, a0
; RV32ZBB-NEXT: srli a2, a1, 4
; RV32ZBB-NEXT: and a2, a2, a3
; RV32ZBB-NEXT: and a1, a1, a3
; RV32ZBB-NEXT: slli a1, a1, 4
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: srli a2, a1, 2
; RV32ZBB-NEXT: and a2, a2, a4
; RV32ZBB-NEXT: and a1, a1, a4
; RV32ZBB-NEXT: slli a1, a1, 2
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: srli a2, a1, 1
; RV32ZBB-NEXT: and a2, a2, a5
; RV32ZBB-NEXT: and a1, a1, a5
; RV32ZBB-NEXT: slli a1, a1, 1
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i64:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 349525
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i64:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: brev8 a1, a1
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i64:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i64 @llvm.bswap.i64(i64 %a)
%tmp2 = call i64 @llvm.bitreverse.i64(i64 %tmp)
ret i64 %tmp2
}

define i16 @test_bitreverse_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i16:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 1
; RV32I-NEXT: addi a2, a2, -241
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: lui a2, 3
; RV32I-NEXT: addi a2, a2, 819
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: lui a2, 5
; RV32I-NEXT: addi a2, a2, 1365
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_bswap_i16:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 1
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 3
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 5
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a1, a0, 4
; RV32ZBB-NEXT: lui a2, 1
; RV32ZBB-NEXT: addi a2, a2, -241
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 3
; RV32ZBB-NEXT: addi a2, a2, 819
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: lui a2, 5
; RV32ZBB-NEXT: addi a2, a2, 1365
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 1
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 3
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 5
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i16 @llvm.bitreverse.i16(i16 %a)
%tmp2 = call i16 @llvm.bswap.i16(i16 %tmp)
ret i16 %tmp2
}

define i32 @test_bitreverse_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i32:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a0, 4
; RV32I-NEXT: lui a2, 61681
; RV32I-NEXT: addi a2, a2, -241
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 2
; RV32I-NEXT: lui a2, 209715
; RV32I-NEXT: addi a2, a2, 819
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: srli a1, a0, 1
; RV32I-NEXT: lui a2, 349525
; RV32I-NEXT: addi a2, a2, 1365
; RV32I-NEXT: and a1, a1, a2
; RV32I-NEXT: and a0, a0, a2
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a1, a0
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_bswap_i32:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 209715
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 349525
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slliw a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a1, a0, 4
; RV32ZBB-NEXT: lui a2, 61681
; RV32ZBB-NEXT: addi a2, a2, -241
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 2
; RV32ZBB-NEXT: lui a2, 209715
; RV32ZBB-NEXT: addi a2, a2, 819
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: srli a1, a0, 1
; RV32ZBB-NEXT: lui a2, 349525
; RV32ZBB-NEXT: addi a2, a2, 1365
; RV32ZBB-NEXT: and a1, a1, a2
; RV32ZBB-NEXT: and a0, a0, a2
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 349525
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slliw a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i32 @llvm.bitreverse.i32(i32 %a)
%tmp2 = call i32 @llvm.bswap.i32(i32 %tmp)
ret i32 %tmp2
}

define i64 @test_bitreverse_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i64:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a2, a0, 4
; RV32I-NEXT: lui a3, 61681
; RV32I-NEXT: addi a3, a3, -241
; RV32I-NEXT: and a2, a2, a3
; RV32I-NEXT: and a0, a0, a3
; RV32I-NEXT: slli a0, a0, 4
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a0, 2
; RV32I-NEXT: lui a4, 209715
; RV32I-NEXT: addi a4, a4, 819
; RV32I-NEXT: and a2, a2, a4
; RV32I-NEXT: and a0, a0, a4
; RV32I-NEXT: slli a0, a0, 2
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a0, 1
; RV32I-NEXT: lui a5, 349525
; RV32I-NEXT: addi a5, a5, 1365
; RV32I-NEXT: and a2, a2, a5
; RV32I-NEXT: and a0, a0, a5
; RV32I-NEXT: slli a0, a0, 1
; RV32I-NEXT: or a0, a2, a0
; RV32I-NEXT: srli a2, a1, 4
; RV32I-NEXT: and a2, a2, a3
; RV32I-NEXT: and a1, a1, a3
; RV32I-NEXT: slli a1, a1, 4
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 2
; RV32I-NEXT: and a2, a2, a4
; RV32I-NEXT: and a1, a1, a4
; RV32I-NEXT: slli a1, a1, 2
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: srli a2, a1, 1
; RV32I-NEXT: and a2, a2, a5
; RV32I-NEXT: and a1, a1, a5
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: or a1, a2, a1
; RV32I-NEXT: ret
;
; RV64I-LABEL: test_bitreverse_bswap_i64:
; RV64I: # %bb.0:
; RV64I-NEXT: srli a1, a0, 4
; RV64I-NEXT: lui a2, 61681
; RV64I-NEXT: addiw a2, a2, -241
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 4
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 2
; RV64I-NEXT: lui a2, 209715
; RV64I-NEXT: addiw a2, a2, 819
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 2
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: srli a1, a0, 1
; RV64I-NEXT: lui a2, 349525
; RV64I-NEXT: addiw a2, a2, 1365
; RV64I-NEXT: slli a3, a2, 32
; RV64I-NEXT: add a2, a2, a3
; RV64I-NEXT: and a1, a1, a2
; RV64I-NEXT: and a0, a0, a2
; RV64I-NEXT: slli a0, a0, 1
; RV64I-NEXT: or a0, a1, a0
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a2, a0, 4
; RV32ZBB-NEXT: lui a3, 61681
; RV32ZBB-NEXT: addi a3, a3, -241
; RV32ZBB-NEXT: and a2, a2, a3
; RV32ZBB-NEXT: and a0, a0, a3
; RV32ZBB-NEXT: slli a0, a0, 4
; RV32ZBB-NEXT: or a0, a2, a0
; RV32ZBB-NEXT: srli a2, a0, 2
; RV32ZBB-NEXT: lui a4, 209715
; RV32ZBB-NEXT: addi a4, a4, 819
; RV32ZBB-NEXT: and a2, a2, a4
; RV32ZBB-NEXT: and a0, a0, a4
; RV32ZBB-NEXT: slli a0, a0, 2
; RV32ZBB-NEXT: or a0, a2, a0
; RV32ZBB-NEXT: srli a2, a0, 1
; RV32ZBB-NEXT: lui a5, 349525
; RV32ZBB-NEXT: addi a5, a5, 1365
; RV32ZBB-NEXT: and a2, a2, a5
; RV32ZBB-NEXT: and a0, a0, a5
; RV32ZBB-NEXT: slli a0, a0, 1
; RV32ZBB-NEXT: or a0, a2, a0
; RV32ZBB-NEXT: srli a2, a1, 4
; RV32ZBB-NEXT: and a2, a2, a3
; RV32ZBB-NEXT: and a1, a1, a3
; RV32ZBB-NEXT: slli a1, a1, 4
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: srli a2, a1, 2
; RV32ZBB-NEXT: and a2, a2, a4
; RV32ZBB-NEXT: and a1, a1, a4
; RV32ZBB-NEXT: slli a1, a1, 2
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: srli a2, a1, 1
; RV32ZBB-NEXT: and a2, a2, a5
; RV32ZBB-NEXT: and a1, a1, a5
; RV32ZBB-NEXT: slli a1, a1, 1
; RV32ZBB-NEXT: or a1, a2, a1
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 4
; RV64ZBB-NEXT: lui a2, 61681
; RV64ZBB-NEXT: addiw a2, a2, -241
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 4
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 2
; RV64ZBB-NEXT: lui a2, 209715
; RV64ZBB-NEXT: addiw a2, a2, 819
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 2
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: srli a1, a0, 1
; RV64ZBB-NEXT: lui a2, 349525
; RV64ZBB-NEXT: addiw a2, a2, 1365
; RV64ZBB-NEXT: slli a3, a2, 32
; RV64ZBB-NEXT: add a2, a2, a3
; RV64ZBB-NEXT: and a1, a1, a2
; RV64ZBB-NEXT: and a0, a0, a2
; RV64ZBB-NEXT: slli a0, a0, 1
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: brev8 a0, a0
; RV32ZBKB-NEXT: brev8 a1, a1
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: brev8 a0, a0
; RV64ZBKB-NEXT: ret
%tmp = call i64 @llvm.bitreverse.i64(i64 %a)
%tmp2 = call i64 @llvm.bswap.i64(i64 %tmp)
ret i64 %tmp2
}

define i32 @pr55484(i32 %0) {
; RV32I-LABEL: pr55484:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a0, 8
; RV32I-NEXT: slli a0, a0, 24
; RV32I-NEXT: or a0, a0, a1
; RV32I-NEXT: srai a0, a0, 16
; RV32I-NEXT: ret
;
; RV64I-LABEL: pr55484:
; RV64I: # %bb.0:
; RV64I-NEXT: slli a1, a0, 40
; RV64I-NEXT: slli a0, a0, 56
; RV64I-NEXT: or a0, a0, a1
; RV64I-NEXT: srai a0, a0, 48
; RV64I-NEXT: ret
;
; RV32ZBB-LABEL: pr55484:
; RV32ZBB: # %bb.0:
; RV32ZBB-NEXT: srli a1, a0, 8
; RV32ZBB-NEXT: slli a0, a0, 8
; RV32ZBB-NEXT: or a0, a1, a0
; RV32ZBB-NEXT: sext.h a0, a0
; RV32ZBB-NEXT: ret
;
; RV64ZBB-LABEL: pr55484:
; RV64ZBB: # %bb.0:
; RV64ZBB-NEXT: srli a1, a0, 8
; RV64ZBB-NEXT: slli a0, a0, 8
; RV64ZBB-NEXT: or a0, a1, a0
; RV64ZBB-NEXT: sext.h a0, a0
; RV64ZBB-NEXT: ret
;
; RV32ZBKB-LABEL: pr55484:
; RV32ZBKB: # %bb.0:
; RV32ZBKB-NEXT: slli a1, a0, 8
; RV32ZBKB-NEXT: slli a0, a0, 24
; RV32ZBKB-NEXT: or a0, a0, a1
; RV32ZBKB-NEXT: srai a0, a0, 16
; RV32ZBKB-NEXT: ret
;
; RV64ZBKB-LABEL: pr55484:
; RV64ZBKB: # %bb.0:
; RV64ZBKB-NEXT: slli a1, a0, 40
; RV64ZBKB-NEXT: slli a0, a0, 56
; RV64ZBKB-NEXT: or a0, a0, a1
; RV64ZBKB-NEXT: srai a0, a0, 48
; RV64ZBKB-NEXT: ret
%2 = lshr i32 %0, 8
%3 = shl i32 %0, 8
%4 = or i32 %2, %3
%5 = trunc i32 %4 to i16
%6 = sext i16 %5 to i32
ret i32 %6
}