Files
clang-p2996/llvm/test/CodeGen/RISCV/memcmp.ll
Pengcheng Wang 7a5b040e20 [RISCV] Add initial support of memcmp expansion
There are two passes that have dependency on the implementation
of `TargetTransformInfo::enableMemCmpExpansion` : `MergeICmps` and
`ExpandMemCmp`.

This PR adds the initial implementation of `enableMemCmpExpansion`
so that we can have some basic benefits from these two passes.

We don't currently enable expansion when there is no unaligned access
support because there are some issues with unaligned loads and
stores in the `ExpandMemCmp` pass. We should fix these issues and enable
the expansion later.

The vector case hasn't been tested, as we don't currently generate
inlined vector instructions for memcmp.

Reviewers: preames, arcbbb, topperc, asb, dtcxzyw

Reviewed By: topperc, preames

Pull Request: https://github.com/llvm/llvm-project/pull/107548
2024-11-06 15:44:12 +08:00

6379 lines
283 KiB
LLVM

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBKB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBKB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-V
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-V
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBKB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBKB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-V
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-V
declare i32 @bcmp(ptr, ptr, iXLen) nounwind readonly
declare i32 @memcmp(ptr, ptr, iXLen) nounwind readonly
; Size 0: expansion is not applied and the call is lowered to the libc bcmp
; libcall on every configuration (li a2, 0 materializes the zero length).
; NOTE(review): a constant size-0 bcmp could in principle fold to 0 with no
; call at all — confirm whether ExpandMemCmp intentionally skips size 0.
define i32 @bcmp_size_0(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_0:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 0
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_0:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 0
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 0)
ret i32 %bcmp
}
; Size 1: on the CHECK-ALIGNED configurations (no unaligned-access features)
; expansion is disabled and the libcall is kept. With +unaligned-scalar-mem
; (CHECK-UNALIGNED) the compare expands inline to a byte load from each
; pointer, xor, and snez (result is 0 iff the bytes match).
define i32 @bcmp_size_1(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_1:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 1)
ret i32 %bcmp
}
; Size 2: ALIGNED configurations keep the libcall; UNALIGNED configurations
; expand inline to a single (possibly unaligned) halfword load from each
; pointer, xor, and snez.
define i32 @bcmp_size_2(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_2:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lhu a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lhu a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 2)
ret i32 %bcmp
}
; Size 3: ALIGNED configurations keep the libcall; UNALIGNED configurations
; expand to a halfword load (bytes 0-1) plus a byte load (byte 2) from each
; pointer, xor the pairs, OR the differences together, then snez.
define i32 @bcmp_size_3(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_3:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-NEXT: lhu a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 3)
ret i32 %bcmp
}
; Size 4: ALIGNED configurations keep the libcall; UNALIGNED configurations
; expand to a single word load from each pointer, xor, and snez. The same
; CHECK-UNALIGNED sequence covers both RV32 and RV64 here.
define i32 @bcmp_size_4(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_4:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
ret i32 %bcmp
}
; Size 5: ALIGNED configurations keep the libcall; UNALIGNED configurations
; expand to a word load (bytes 0-3) plus a byte load (byte 4) from each
; pointer, xor the pairs, OR the differences, then snez.
define i32 @bcmp_size_5(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_5:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 5)
ret i32 %bcmp
}
; Size 6: ALIGNED configurations keep the libcall; UNALIGNED configurations
; expand to a word load (bytes 0-3) plus a halfword load (bytes 4-5) from
; each pointer, xor the pairs, OR the differences, then snez.
define i32 @bcmp_size_6(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_6:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 6)
ret i32 %bcmp
}
; Size 7: ALIGNED configurations keep the libcall. UNALIGNED configurations
; expand using two OVERLAPPING word loads per pointer — offsets 0 and 3 —
; so 7 bytes are covered with two 4-byte loads (bytes 3 is read twice),
; then xor/or/snez as in the other expanded cases. Overlapping loads are
; only legal because unaligned scalar access is available.
define i32 @bcmp_size_7(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_7:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 7)
ret i32 %bcmp
}
; bcmp of exactly 8 bytes.  Targets without fast unaligned access keep the
; libcall; targets with it expand inline to wide loads + xor/or/snez
; (two 32-bit words on RV32, a single 64-bit load on RV64).
; NOTE(review): the assertion comments in this function are autogenerated
; by utils/update_llc_test_checks.py -- regenerate them rather than
; editing by hand.
define i32 @bcmp_size_8(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT:    li a2, 8
; CHECK-ALIGNED-RV32-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 8
; CHECK-ALIGNED-RV64-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 8
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 8
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 8
; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 8
; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; iXLen is rewritten to i32/i64 by the sed invocations in the RUN lines,
  ; so the size argument matches the target's native pointer width.
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 8)
  ret i32 %bcmp
}
; bcmp of 15 bytes (non-power-of-two).  Aligned targets keep the libcall.
; Unaligned-capable targets cover the odd length with overlapping loads:
; RV32 reads words at offsets 0/4/8 plus a final word at offset 11
; (bytes 11-14 overlap bytes 8-11), and RV64 reads doublewords at
; offsets 0 and 7.
; NOTE(review): the assertion comments in this function are autogenerated
; by utils/update_llc_test_checks.py -- regenerate them rather than
; editing by hand.
define i32 @bcmp_size_15(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT:    li a2, 15
; CHECK-ALIGNED-RV32-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 15
; CHECK-ALIGNED-RV64-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 15
; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 15
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 15
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 15
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 15
; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 15
; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; iXLen is rewritten to i32/i64 by the sed invocations in the RUN lines,
  ; so the size argument matches the target's native pointer width.
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 15)
  ret i32 %bcmp
}
; bcmp of 16 bytes.  Aligned targets keep the libcall; unaligned-capable
; targets expand to non-overlapping wide loads (four words at offsets
; 0/4/8/12 on RV32, two doublewords at offsets 0/8 on RV64) combined with
; xor/or and reduced with snez.
; NOTE(review): the assertion comments in this function are autogenerated
; by utils/update_llc_test_checks.py -- regenerate them rather than
; editing by hand.
define i32 @bcmp_size_16(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT:    li a2, 16
; CHECK-ALIGNED-RV32-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 16
; CHECK-ALIGNED-RV64-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 16
; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 16
; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; iXLen is rewritten to i32/i64 by the sed invocations in the RUN lines,
  ; so the size argument matches the target's native pointer width.
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 16)
  ret i32 %bcmp
}
define i32 @bcmp_size_31(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV32-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV32-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-V-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-V-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV32-V-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV32-V-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-V-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-V-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-V-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 31)
ret i32 %bcmp
}
; bcmp with a constant 32-byte size (a power of two, XLEN-multiple on both
; RV32 and RV64).
; - CHECK-ALIGNED-* configs (no fast unaligned access): expansion is disabled,
;   so the call is lowered to a plain libcall to bcmp.
; - CHECK-UNALIGNED-* configs: ExpandMemCmp inlines the compare as XLEN-sized
;   loads from both operands (eight lw pairs on RV32, four ld pairs on RV64),
;   xor'd pairwise, or-reduced, with snez producing the zero/nonzero result.
define i32 @bcmp_size_32(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT:    li a2, 32
; CHECK-ALIGNED-RV32-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 32
; CHECK-ALIGNED-RV64-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 32
; CHECK-ALIGNED-RV32-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 32
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 32
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 32
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 32
; CHECK-ALIGNED-RV32-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 32
; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV32-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV32-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV32-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV32-NEXT:    lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-NEXT:    lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-NEXT:    lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV32-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV32-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV32-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV32-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV32-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV32-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV32-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV64-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV64-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a7, 20(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw t0, 24(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw t1, 16(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw t2, 20(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw t3, 24(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV32-V-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV32-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV32-V-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a5
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a3, a3, a6
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a4, a4, a7
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a4, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; iXLen is substituted to i32/i64 by the sed commands in the RUN lines.
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 32)
  ret i32 %bcmp
}
; bcmp with a constant 63-byte size (odd, not a multiple of XLEN).
; - All RV32 configs (CHECK-RV32) emit a libcall: 63 bytes is not expanded
;   on RV32 even with unaligned access.
; - RV64 ALIGNED configs also emit a libcall (expansion disabled without
;   fast unaligned access).
; - RV64 UNALIGNED configs expand inline with eight 8-byte loads per operand;
;   the last pair uses overlapping loads at offset 55 (55 + 8 = 63) to cover
;   the tail that is not a multiple of 8.
define i32 @bcmp_size_63(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_63:
; CHECK-RV32:       # %bb.0: # %entry
; CHECK-RV32-NEXT:    addi sp, sp, -16
; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT:    li a2, 63
; CHECK-RV32-NEXT:    call bcmp
; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT:    addi sp, sp, 16
; CHECK-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-V-NEXT:    call bcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV64-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV64-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV64-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV64-NEXT:    ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-NEXT:    ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-NEXT:    ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV64-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV64-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV64-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV64-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV64-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV64-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV64-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV64-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a2, a2, a6
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a3, a3, a7
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a4, a4, t0
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a5, a5, t1
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a6, a6, t1
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a7, a7, t2
; CHECK-UNALIGNED-RV64-V-NEXT:    xor t0, t0, t3
; CHECK-UNALIGNED-RV64-V-NEXT:    xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT:    or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT:    or a4, a4, a5
; CHECK-UNALIGNED-RV64-V-NEXT:    or a1, a6, a7
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, t0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    or a2, a2, a4
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a1, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; iXLen is substituted to i32/i64 by the sed commands in the RUN lines.
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 63)
  ret i32 %bcmp
}
define i32 @bcmp_size_64(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_64:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 64
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV64-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV64-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-V-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-V-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 40(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t0, 48(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t1, 32(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t2, 40(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t3, 48(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a6, a6, t1
; CHECK-UNALIGNED-RV64-V-NEXT: xor a7, a7, t2
; CHECK-UNALIGNED-RV64-V-NEXT: xor t0, t0, t3
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-V-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 64)
ret i32 %bcmp
}
; bcmp with constant length 127: above the expansion threshold, so it is NOT
; inlined and remains a libcall on both RV32 and RV64 (note: assertions below
; are autogenerated; do not hand-edit them).
define i32 @bcmp_size_127(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_127:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 127
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_127:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 127
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 127)
  ret i32 %bcmp
}
; bcmp with constant length 128: also above the expansion threshold, so the
; call is kept as a libcall on both RV32 and RV64.
define i32 @bcmp_size_128(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_128:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 128
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_128:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 128
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 128)
  ret i32 %bcmp
}
; bcmp with a runtime (non-constant) length: expansion requires a known
; constant size, so this must always stay a libcall. The length is already in
; a2 from the argument, so no li is emitted before the call.
define i32 @bcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind {
; CHECK-RV32-LABEL: bcmp_size_runtime:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_runtime:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen %len)
  ret i32 %bcmp
}
; bcmp(s1, s2, 4) == 0: with unaligned access support the 4-byte compare is
; expanded inline to a word load + xor + seqz; aligned-only configurations
; keep the libcall and test its result with seqz.
define i1 @bcmp_eq_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_eq_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: seqz a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp eq i32 %bcmp, 0
  ret i1 %ret
}
; bcmp(s1, s2, 4) < 0: bcmp's result only distinguishes zero vs non-zero, so
; with expansion enabled the comparison folds to a constant false (li a0, 0);
; aligned-only configurations keep the libcall and extract the sign bit.
define i1 @bcmp_lt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_lt_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: li a0, 0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp slt i32 %bcmp, 0
  ret i1 %ret
}
; bcmp(s1, s2, 4) > 0: since an expanded bcmp result is never negative,
; "greater than zero" is equivalent to "not equal", so the unaligned expansion
; is load + xor + snez; aligned-only configurations keep the libcall + sgtz.
define i1 @bcmp_gt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_gt_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
  %bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp sgt i32 %bcmp, 0
  ret i1 %ret
}
; memcmp with constant length 0: folds to the constant 0 in every
; configuration, so no call or loads are emitted.
define i32 @memcmp_size_0(ptr %s1, ptr %s2) nounwind {
; CHECK-LABEL: memcmp_size_0:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: li a0, 0
; CHECK-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 0)
  ret i32 %memcmp
}
; memcmp with constant length 1: the unaligned Zbb/Zbkb configurations expand
; it inline to two byte loads and a subtract; the other configurations (base,
; V, and all aligned-only ones) keep the libcall.
define i32 @memcmp_size_1(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 1
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 1
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 1)
  ret i32 %memcmp
}
; memcmp with constant length 2: the unaligned Zbb/Zbkb configurations expand
; it inline using halfword loads byte-swapped via rev8 (then shifted down to
; the low bits) so the subtract gives memcmp's big-endian ordering; all other
; configurations keep the libcall.
define i32 @memcmp_size_2(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 2
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 2
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 2)
  ret i32 %memcmp
}
; memcmp(s1, s2, 3): with unaligned access plus Zbb/Zbkb this is expanded
; inline as a big-endian 16-bit word compare (lh + rev8 + srli) followed by a
; trailing byte compare; all other configurations fall back to a memcmp libcall.
; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not
; edit them by hand.
define i32 @memcmp_size_3(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 3
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 3
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB24_2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB24_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB24_2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB24_2: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB24_2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB24_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB24_2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB24_2: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
; iXLen is rewritten to i32/i64 by the sed in the RUN lines above.
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 3)
  ret i32 %memcmp
}
; memcmp(s1, s2, 4): with unaligned access plus Zbb/Zbkb this is expanded
; branchlessly as a single 32-bit load per operand, byte-swapped with rev8 and
; compared via two sltu's (result is the difference of the unsigned compares);
; all other configurations fall back to a memcmp libcall.
; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not
; edit them by hand.
define i32 @memcmp_size_4(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a2, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a2, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
; iXLen is rewritten to i32/i64 by the sed in the RUN lines above.
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
  ret i32 %memcmp
}
; memcmp(s1, s2, 5): with unaligned access plus Zbb/Zbkb this is expanded
; inline as a big-endian 32-bit word compare (lw + rev8, plus srli on RV64)
; followed by a trailing byte compare at offset 4; all other configurations
; fall back to a memcmp libcall.
; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not
; edit them by hand.
define i32 @memcmp_size_5(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 5
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 5
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB26_2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB26_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB26_2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB26_2: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB26_2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB26_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB26_2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB26_2: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
; iXLen is rewritten to i32/i64 by the sed in the RUN lines above.
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 5)
  ret i32 %memcmp
}
define i32 @memcmp_size_6(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 6
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 6
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB27_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB27_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB27_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB27_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB27_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 6)
ret i32 %memcmp
}
; memcmp with a constant size of 7. Inline expansion only appears in the
; unaligned-access + Zbb/Zbkb configurations, which compare two overlapping
; 32-bit load pairs (offsets 0 and 3) after byte reversal with rev8 (plus a
; 32-bit srli on RV64 to keep the value in the low bits). Every other
; configuration (aligned-only, plain unaligned without rev8, and the vector
; configs) falls back to a libcall to memcmp.
define i32 @memcmp_size_7(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 7
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 7
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB28_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB28_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  ; iXLen is rewritten to i32/i64 by the sed in the RUN lines, so the size
  ; argument matches the target XLEN.
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 7)
  ret i32 %memcmp
}
; memcmp with a constant size of 8 (exactly one XLEN word on RV64). With
; unaligned access and Zbb/Zbkb, RV64 needs no branches at all: one 64-bit
; load pair, rev8 byte reversal, then the sltu/sltu/sub idiom produces
; -1/0/1 directly. RV32 still needs two 32-bit load pairs with a res_block.
; All other configurations fall back to a libcall to memcmp.
define i32 @memcmp_size_8(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 8
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 8
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB29_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB29_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  ; iXLen is rewritten to i32/i64 by the sed in the RUN lines, so the size
  ; argument matches the target XLEN.
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 8)
  ret i32 %memcmp
}
; memcmp with a constant size of 15. With unaligned access and Zbb/Zbkb,
; RV64 expands to two overlapping 64-bit load pairs (offsets 0 and 7) and
; RV32 to four 32-bit load pairs (offsets 0, 4, 8 and 11), each pair
; byte-reversed with rev8 before the unsigned compare; mismatches branch to
; a shared res_block. All other configurations fall back to a libcall.
define i32 @memcmp_size_15(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 15
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 15
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB30_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB30_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
  ; iXLen is rewritten to i32/i64 by the sed in the RUN lines, so the size
  ; argument matches the target XLEN.
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 15)
  ret i32 %memcmp
}
define i32 @memcmp_size_16(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 16
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 16
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB31_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB31_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB31_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB31_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
; Size 16: per the CHECK lines above, only the unaligned-access + Zbb/Zbkb
; configurations inline the comparison (XLEN-wide loads at offsets 0/4/8/12 on
; RV32, 0/8 on RV64, each byte-reversed with rev8 so an unsigned compare gives
; memcmp ordering; the shared %res_block materializes -1/+1 via sltu/neg/ori).
; Every other configuration, including the +v ones, emits a libcall to memcmp.
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 16)
  ret i32 %memcmp
}
define i32 @memcmp_size_31(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 31
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB32_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB32_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB32_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB32_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
; Size 31 (not a multiple of XLEN/8): per the CHECK lines above, the
; unaligned-access + Zbb/Zbkb configurations still inline the expansion and
; cover the odd tail with a final overlapping load (offset 27 on RV32,
; offset 23 on RV64), relying on unaligned-access support. All other
; configurations, including +v, emit a libcall to memcmp.
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 31)
  ret i32 %memcmp
}
define i32 @memcmp_size_32(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 32
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB33_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB33_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB33_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB33_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 32)
ret i32 %memcmp
}
; memcmp with a constant length of 63 bytes. Of the configurations visible
; here, only the RV64 unaligned-access targets with Zbb/Zbkb (which provide
; rev8 for the big-endian fix-up) expand the call inline: eight 8-byte load
; pairs, with the final pair using overlapping loads at offset 55 to cover
; the odd tail. Every other configuration falls back to the memcmp libcall.
define i32 @memcmp_size_63(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_63:
; CHECK-RV32:       # %bb.0: # %entry
; CHECK-RV32-NEXT:    addi sp, sp, -16
; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT:    li a2, 63
; CHECK-RV32-NEXT:    call memcmp
; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT:    addi sp, sp, 16
; CHECK-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 63
; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT:    li a2, 63
; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.8:
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB34_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.8:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB34_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 63
; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 63)
  ret i32 %memcmp
}
; memcmp with a constant length of 64 bytes. Like the 63-byte case, only the
; RV64 unaligned-access Zbb/Zbkb configurations expand inline here: eight
; evenly spaced 8-byte load pairs (final pair at offset 56, no overlap needed
; since 64 is a multiple of 8). All other configurations call the libcall.
define i32 @memcmp_size_64(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_64:
; CHECK-RV32:       # %bb.0: # %entry
; CHECK-RV32-NEXT:    addi sp, sp, -16
; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT:    li a2, 64
; CHECK-RV32-NEXT:    call memcmp
; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT:    addi sp, sp, 16
; CHECK-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 64
; CHECK-ALIGNED-RV64-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 64
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 64
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 64
; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT:    li a2, 64
; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  # %bb.8:
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT:  .LBB35_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  # %bb.8:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:  .LBB35_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 64
; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 64)
  ret i32 %memcmp
}
; memcmp with a constant length of 127 bytes. No configuration expands this
; inline (the checks for all variants collapse into the shared CHECK-RV32 /
; CHECK-RV64 prefixes): every target emits the memcmp libcall.
define i32 @memcmp_size_127(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_127:
; CHECK-RV32:       # %bb.0: # %entry
; CHECK-RV32-NEXT:    addi sp, sp, -16
; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT:    li a2, 127
; CHECK-RV32-NEXT:    call memcmp
; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT:    addi sp, sp, 16
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: memcmp_size_127:
; CHECK-RV64:       # %bb.0: # %entry
; CHECK-RV64-NEXT:    addi sp, sp, -16
; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT:    li a2, 127
; CHECK-RV64-NEXT:    call memcmp
; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT:    addi sp, sp, 16
; CHECK-RV64-NEXT:    ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 127)
  ret i32 %memcmp
}
; memcmp with a constant length of 128 bytes. Same as the 127-byte case:
; no configuration expands inline, so all targets share the CHECK-RV32 /
; CHECK-RV64 libcall sequence.
define i32 @memcmp_size_128(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_128:
; CHECK-RV32:       # %bb.0: # %entry
; CHECK-RV32-NEXT:    addi sp, sp, -16
; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT:    li a2, 128
; CHECK-RV32-NEXT:    call memcmp
; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT:    addi sp, sp, 16
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: memcmp_size_128:
; CHECK-RV64:       # %bb.0: # %entry
; CHECK-RV64-NEXT:    addi sp, sp, -16
; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT:    li a2, 128
; CHECK-RV64-NEXT:    call memcmp
; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT:    addi sp, sp, 16
; CHECK-RV64-NEXT:    ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 128)
  ret i32 %memcmp
}
; memcmp with a runtime (non-constant) length. Expansion requires a known
; constant size, so every configuration lowers this to the libcall with the
; length forwarded unchanged in a2.
define i32 @memcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind {
; CHECK-RV32-LABEL: memcmp_size_runtime:
; CHECK-RV32:       # %bb.0: # %entry
; CHECK-RV32-NEXT:    addi sp, sp, -16
; CHECK-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT:    call memcmp
; CHECK-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT:    addi sp, sp, 16
; CHECK-RV32-NEXT:    ret
;
; CHECK-RV64-LABEL: memcmp_size_runtime:
; CHECK-RV64:       # %bb.0: # %entry
; CHECK-RV64-NEXT:    addi sp, sp, -16
; CHECK-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT:    call memcmp
; CHECK-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT:    addi sp, sp, 16
; CHECK-RV64-NEXT:    ret
entry:
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen %len)
  ret i32 %memcmp
}
define i1 @memcmp_eq_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: lbu a2, 1(a1)
; CHECK-ALIGNED-RV32-NEXT: lbu a3, 0(a1)
; CHECK-ALIGNED-RV32-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-NEXT: slli a2, a2, 8
; CHECK-ALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-ALIGNED-RV32-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV32-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV32-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV32-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV32-NEXT: lbu a2, 2(a0)
; CHECK-ALIGNED-RV32-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV32-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV32-NEXT: slli a2, a2, 16
; CHECK-ALIGNED-RV32-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV32-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV32-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: lbu a2, 1(a1)
; CHECK-ALIGNED-RV64-NEXT: lbu a3, 0(a1)
; CHECK-ALIGNED-RV64-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-NEXT: slli a2, a2, 8
; CHECK-ALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-ALIGNED-RV64-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-NEXT: lbu a2, 2(a0)
; CHECK-ALIGNED-RV64-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV64-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV64-NEXT: slli a2, a2, 16
; CHECK-ALIGNED-RV64-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV64-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a2, 1(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a3, 0(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a2, a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a2, 2(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a2, a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a2, 1(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a3, 0(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a2, a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a2, 2(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a2, a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a5, 0(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a6, 1(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a7, 2(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a1, a4, a1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a2, a2, a3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: pack a1, a2, a1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a0, a7, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a2, a5, a6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: pack a0, a2, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: packh a2, a2, a3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: packh a2, a3, a4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: lbu a2, 1(a1)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a3, 0(a1)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-V-NEXT: slli a2, a2, 8
; CHECK-ALIGNED-RV32-V-NEXT: or a2, a2, a3
; CHECK-ALIGNED-RV32-V-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV32-V-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV32-V-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV32-V-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-V-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV32-V-NEXT: lbu a2, 2(a0)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-V-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV32-V-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV32-V-NEXT: slli a2, a2, 16
; CHECK-ALIGNED-RV32-V-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV32-V-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV32-V-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: lbu a2, 1(a1)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a3, 0(a1)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-V-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-V-NEXT: slli a2, a2, 8
; CHECK-ALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-ALIGNED-RV64-V-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-V-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-V-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-V-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-V-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-V-NEXT: lbu a2, 2(a0)
; CHECK-ALIGNED-RV64-V-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-V-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV64-V-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV64-V-NEXT: slli a2, a2, 16
; CHECK-ALIGNED-RV64-V-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-V-NEXT: or a0, a0, a2
; CHECK-ALIGNED-RV64-V-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: memcmp_eq_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: seqz a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp eq i32 %memcmp, 0
ret i1 %ret
}
; memcmp(s1, s2, 4) compared signed-less-than zero.
; With unaligned access plus Zbb/Zbkb (rev8 available), the expansion is an
; inline big-endian word compare: lw both operands, rev8 to restore memory
; (byte) order, then an unsigned sltu gives the "first differing byte is
; smaller" result directly.  All other configurations (aligned, and the
; unaligned V-only variants) keep the libcall and test the sign bit of the
; returned i32 (srli 31 on RV32, slti ..., 0 on RV64).
define i1 @memcmp_lt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-NEXT:    srli a0, a0, 31
; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-NEXT:    slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT:    srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-V-NEXT:    srli a0, a0, 31
; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-V-NEXT:    slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
; CHECK-UNALIGNED-RV32-NEXT:    srli a0, a0, 31
; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-NEXT:    slti a0, a0, 0
; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT:    srli a0, a0, 31
; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT:    slti a0, a0, 0
; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; Size is iXLen so the same test body works for both RV32 and RV64 (see the
  ; sed substitution in the RUN lines at the top of the file).
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp slt i32 %memcmp, 0
  ret i1 %ret
}
; memcmp(s1, s2, 4) compared signed-greater-than zero.
; Mirror of memcmp_lt_zero: with unaligned access plus Zbb/Zbkb the inline
; expansion does lw + rev8 on both words and an unsigned sltu with the
; operands swapped (a1 < a0).  All other configurations keep the libcall
; followed by sgtz on the returned i32.
define i1 @memcmp_gt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT:    ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT:    li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT:    call memcmp
; CHECK-ALIGNED-RV32-V-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT:    ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT:    li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-ALIGNED-RV64-V-NEXT:    sgtz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV32-NEXT:    call memcmp
; CHECK-UNALIGNED-RV32-NEXT:    sgtz a0, a0
; CHECK-UNALIGNED-RV32-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV64-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-NEXT:    sgtz a0, a0
; CHECK-UNALIGNED-RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64-ZBB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64-ZBKB:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT:    ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT:    call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT:    sgtz a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT:    ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64-V:       # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT:    li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT:    call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT:    sgtz a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT:    addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT:    ret
entry:
  ; Size is iXLen so the same test body works for both RV32 and RV64 (see the
  ; sed substitution in the RUN lines at the top of the file).
  %memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
  %ret = icmp sgt i32 %memcmp, 0
  ret i1 %ret
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; CHECK-ALIGNED: {{.*}}