clang-p2996/llvm/test/CodeGen/RISCV/memcmp.ll
Philip Reames 859c871184 [RISCV] Default to MicroOpBufferSize = 1 for scheduling purposes (#126608)
This change introduces a default schedule model for the RISCV target
which leaves everything unchanged except the MicroOpBufferSize. The
default value of this flag in the NoSchedModel is 0. Both configurations
represent in-order cores (i.e. no reorder window); the difference
between them comes down to whether heuristics other than latency are
allowed to apply. (Implementation details below.)

I left the processor models which explicitly set MicroOpBufferSize=0
unchanged in this patch, but strongly suspect we should change those
too. Honestly, I think the LLVM-wide default for this flag should be
changed, but I don't have the energy to manage the updates for all
targets.

Implementation-wise, the effect of this change is that schedule units
which are ready to run *except that* one of their predecessors may not
have completed yet are added to the Available list, not the Pending one.
The result is that it becomes possible to choose to schedule a node
before its ready cycle if the heuristics prefer. This is essentially
choosing to insert a resource stall instead of, e.g., increasing
register pressure.
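
As a hedged sketch of that mechanism - a paraphrase of the ready-list
decision in LLVM's generic machine scheduler (SchedBoundary::releaseNode),
not the verbatim source, with the surrounding names assumed from that
context:

    // With MicroOpBufferSize == 0, a node whose predecessors have not
    // completed yet is parked in Pending until its ready cycle arrives.
    // With any non-zero buffer size it goes straight to Available, so
    // non-latency heuristics may pick it early at the cost of a stall.
    bool IsBuffered = SchedModel->getMicroOpBufferSize() != 0;
    if (!IsBuffered && ReadyCycle > CurrCycle)
      Pending.push(SU);   // latency is a hard constraint: wait it out
    else
      Available.push(SU); // latency is advisory: may issue before ready

Seen this way, MicroOpBufferSize = 1 models no extra hardware buffering;
it only widens the set of nodes the scheduling heuristics are allowed to
compare on a given cycle.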

Note that I was initially concerned there might be a correctness aspect
(as in some kind of exposed-pipeline design), but the generic scheduler
doesn't seem to know how to insert noop instructions. Without that, a
program wouldn't be guaranteed to schedule correctly on an exposed
pipeline anyway, depending on the program and schedule model in
question.

The effect of this is that codegen sometimes prefers reducing register
pressure over strictly following latency. This is mostly churn (or small
wins) on scalar code because we have many more registers, but it is of
major importance on vector - particularly at high LMUL - because we
effectively have many fewer registers (at LMUL=8, the 32 vector
registers form only four usable register groups) and the relative cost
of spilling is much higher. This is a significant improvement in
high-LMUL code quality for default rva23u configurations - or any
vector configuration without -mcpu, for that matter.

Fixes #107532
2025-02-12 12:31:39 -08:00

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-ZBKB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-ZBKB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-ALIGNED,CHECK-ALIGNED-RV32-V
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-ALIGNED,CHECK-ALIGNED-RV64-V
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-ZBKB
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+zbkb,+unaligned-scalar-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-ZBKB
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=riscv32 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV32,CHECK-UNALIGNED,CHECK-UNALIGNED-RV32-V
; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v,+unaligned-scalar-mem,+unaligned-vector-mem -O2 \
; RUN: | FileCheck %s --check-prefixes=CHECK,CHECK-RV64,CHECK-UNALIGNED,CHECK-UNALIGNED-RV64-V
declare i32 @bcmp(ptr, ptr, iXLen) nounwind readonly
declare i32 @memcmp(ptr, ptr, iXLen) nounwind readonly
define i32 @bcmp_size_0(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_0:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 0
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_0:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 0
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 0)
ret i32 %bcmp
}
define i32 @bcmp_size_1(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_1:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_1:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 1)
ret i32 %bcmp
}
define i32 @bcmp_size_2(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_2:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_2:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lhu a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lhu a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 2)
ret i32 %bcmp
}
define i32 @bcmp_size_3(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_3:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_3:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-NEXT: lhu a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 3)
ret i32 %bcmp
}
define i32 @bcmp_size_4(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_4:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_4:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
ret i32 %bcmp
}
define i32 @bcmp_size_5(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_5:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_5:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 5)
ret i32 %bcmp
}
define i32 @bcmp_size_6(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_6:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_6:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 6)
ret i32 %bcmp
}
define i32 @bcmp_size_7(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_7:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_size_7:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 7)
ret i32 %bcmp
}
define i32 @bcmp_size_8(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_8:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_8:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 8)
ret i32 %bcmp
}
define i32 @bcmp_size_15(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_15:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-V-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-V-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_15:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 15)
ret i32 %bcmp
}
define i32 @bcmp_size_16(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_16:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a5, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV32-V-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV32-V-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_16:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 16)
ret i32 %bcmp
}
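; bcmp with a constant size of 31: configurations with fast unaligned scalar
; access expand this inline using overlapping loads (the final load starts at
; offset 27 on RV32 and 23 on RV64); aligned-only configurations emit a
; libcall instead.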
define i32 @bcmp_size_31(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_31:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-V-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-V-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-V-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_31:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-V-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-V-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 31)
ret i32 %bcmp
}
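; bcmp with a constant size of 32: still expanded inline with fast unaligned
; access, as eight word pairs on RV32 and four doubleword pairs on RV64.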
define i32 @bcmp_size_32(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_32:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a3, 4(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a4, 8(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a5, 12(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 0(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 4(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t0, 8(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t1, 12(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV32-V-NEXT: lw a6, 16(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t2, 20(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t3, 24(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV32-V-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV32-V-NEXT: lw a7, 16(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t0, 20(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw t1, 24(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-V-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV32-V-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV32-V-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV32-V-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV32-V-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV32-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_32:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a5, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a5
; CHECK-UNALIGNED-RV64-V-NEXT: xor a3, a3, a6
; CHECK-UNALIGNED-RV64-V-NEXT: xor a4, a4, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a4, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 32)
ret i32 %bcmp
}
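; bcmp with a constant size of 63: this exceeds what RV32 will expand inline,
; so every RV32 configuration calls the library function; RV64 with fast
; unaligned access still expands inline, using an overlapping final doubleword
; load at offset 55.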
define i32 @bcmp_size_63(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_63:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 63
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_63:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_63:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-V-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-V-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-V-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 63)
ret i32 %bcmp
}
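; bcmp with a constant size of 64: as with size 63, RV32 always emits a
; libcall, while RV64 with fast unaligned access expands to eight doubleword
; pairs.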
define i32 @bcmp_size_64(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_64:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 64
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_size_64:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: bcmp_size_64:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a3, 8(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a4, 16(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a5, 24(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 0(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 8(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t0, 16(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t1, 24(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a2, a2, a6
; CHECK-UNALIGNED-RV64-V-NEXT: ld a6, 32(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t2, 40(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t3, 48(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a3, a3, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a4, a4, t0
; CHECK-UNALIGNED-RV64-V-NEXT: xor a5, a5, t1
; CHECK-UNALIGNED-RV64-V-NEXT: ld a7, 32(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t0, 40(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld t1, 48(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-V-NEXT: xor a6, a6, a7
; CHECK-UNALIGNED-RV64-V-NEXT: xor a7, t2, t0
; CHECK-UNALIGNED-RV64-V-NEXT: xor t0, t3, t1
; CHECK-UNALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a3
; CHECK-UNALIGNED-RV64-V-NEXT: or a4, a4, a5
; CHECK-UNALIGNED-RV64-V-NEXT: or a1, a6, a7
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, t0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a2, a2, a4
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a1, a0
; CHECK-UNALIGNED-RV64-V-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-V-NEXT: snez a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 64)
ret i32 %bcmp
}
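; bcmp with a constant size of 127: beyond the inline-expansion limit for
; every configuration tested here, so both RV32 and RV64 always emit a
; libcall.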
define i32 @bcmp_size_127(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_127:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 127
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_127:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 127
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 127)
ret i32 %bcmp
}
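; bcmp with a constant size of 128: likewise always a libcall.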
define i32 @bcmp_size_128(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: bcmp_size_128:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 128
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_128:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 128
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 128)
ret i32 %bcmp
}
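; bcmp with a size known only at runtime can never be expanded inline, so
; every configuration forwards its arguments straight to the libcall.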
define i32 @bcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind {
; CHECK-RV32-LABEL: bcmp_size_runtime:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: call bcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: bcmp_size_runtime:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: call bcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen %len)
ret i32 %bcmp
}
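; Equality test on the bcmp result: with fast unaligned access the 4-byte
; compare folds to a single load pair plus xor/seqz; aligned-only
; configurations call bcmp and test its result.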
define i1 @bcmp_eq_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_eq_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_eq_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: seqz a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp eq i32 %bcmp, 0
ret i1 %ret
}
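; Signed less-than-zero test on the bcmp result: the inline expansion produces
; a non-negative value (snez yields 0 or 1), so the unaligned configurations
; fold the whole comparison to the constant 0; aligned configurations must
; still call bcmp and check the sign bit.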
define i1 @bcmp_lt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_lt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_lt_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: li a0, 0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp slt i32 %bcmp, 0
ret i1 %ret
}
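; Signed greater-than-zero test: since the expansion is known to produce 0 or
; 1, `icmp sgt` against zero is equivalent to a nonzero test, so the unaligned
; lowering is the same xor/snez sequence as the equality check.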
define i1 @bcmp_gt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_gt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_gt_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp sgt i32 %bcmp, 0
ret i1 %ret
}
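; "result <= 0" on the 0-or-1 expanded value is an equality test; note that
; the unaligned configurations currently emit snez + slti rather than folding
; the pair to a single seqz.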
define i1 @bcmp_le_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_le_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_le_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: snez a0, a0
; CHECK-UNALIGNED-NEXT: slti a0, a0, 1
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp slt i32 %bcmp, 1
ret i1 %ret
}
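; The expanded bcmp value is never negative, so "result >= 0" folds to the
; constant 1 in the unaligned configurations.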
define i1 @bcmp_ge_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call bcmp
; CHECK-ALIGNED-RV32-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call bcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-ZBB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call bcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call bcmp
; CHECK-ALIGNED-RV32-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-V-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: bcmp_ge_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call bcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: bcmp_ge_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: li a0, 1
; CHECK-UNALIGNED-NEXT: ret
entry:
%bcmp = call signext i32 @bcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp sgt i32 %bcmp, -1
ret i1 %ret
}
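; memcmp with a zero length is known to return 0 in every configuration.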
define i32 @memcmp_size_0(ptr %s1, ptr %s2) nounwind {
; CHECK-LABEL: memcmp_size_0:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: li a0, 0
; CHECK-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 0)
ret i32 %memcmp
}
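; Unlike bcmp, memcmp must return an ordering, which the expansion derives by
; comparing byte-swapped (big-endian) values, presumably why it is only
; enabled for configurations with rev8 (Zbb/Zbkb); the base and vector
; unaligned configurations keep the libcall. A single byte needs no swap:
; the expansion is just two lbu loads and a sub.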
define i32 @memcmp_size_1(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_1:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 1
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 1
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 1
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_1:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 1
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 1)
ret i32 %memcmp
}
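; Two bytes: halfword loads are byte-swapped with rev8 and shifted back down
; (by 16 on RV32, 48 on RV64) so both operands are zero-extended big-endian
; values; their difference is the memcmp result. For example, the bytes
; [0x01, 0x02] load as 0x0201; rev8 on RV32 gives 0x01020000 and srli 16
; leaves 0x0102, the value memcmp compares.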
define i32 @memcmp_size_2(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_2:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 2
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 2
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 48
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lh a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 48
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 2
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_2:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 2
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 2)
ret i32 %memcmp
}
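; Three bytes: a halfword and a byte are merged into one register (slli + or,
; or a single pack on RV32 Zbkb), byte-swapped, and compared with a pair of
; sltu instructions to produce -1/0/1.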
define i32 @memcmp_size_3(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_3:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 3
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 3
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: slli a0, a0, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lhu a2, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: slli a1, a1, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: or a1, a2, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a0, a0, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a2, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a1, a1, 16
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a2, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: pack a0, a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lhu a2, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: pack a1, a2, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 2(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: slli a0, a0, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a2, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 2(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: slli a1, a1, 16
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: or a1, a2, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 3
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_3:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 3
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 3)
ret i32 %memcmp
}
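; Four bytes: one word load per operand, byte-swapped, with the result
; computed as (a > b) - (a < b) via two sltu and a sub.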
define i32 @memcmp_size_4(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_4:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_4:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
ret i32 %memcmp
}
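; Five bytes: RV64 merges a word and a byte into one 64-bit value (lwu + lbu,
; or pack on Zbkb) and compares once; RV32 needs two blocks, comparing the
; first word and falling through to a trailing byte compare, with res_block
; materializing -1 or 1 from the word mismatch.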
define i32 @memcmp_size_5(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_5:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 5
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 5
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB28_2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB28_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a2, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a2, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB28_2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sub a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB28_2: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lwu a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lwu a2, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lbu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a1, a2, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 5
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_5:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 5
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 5)
ret i32 %memcmp
}
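; Six bytes: RV64 again folds everything into a single 64-bit compare
; (lwu + lhu); RV32 compares a word and then a halfword across two blocks.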
define i32 @memcmp_size_6(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_6:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 6
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 6
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB29_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lwu a2, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: slli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: or a1, a2, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lh a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a2, a2, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: srli a3, a3, 16
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB29_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB29_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lwu a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a0, 4(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a0, a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lwu a2, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lhu a1, 4(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: pack a1, a2, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 6
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_6:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 6
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 6)
ret i32 %memcmp
}
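
; With fast unaligned access plus Zbb/Zbkb, a 7-byte memcmp expands inline to
; two overlapping word compares (offsets 0 and 3) using rev8 for big-endian
; order; all other configurations below fall back to the memcmp libcall.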
define i32 @memcmp_size_7(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_7:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 7
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 7
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 3(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 3(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a2, a2, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a3, a3, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB30_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB30_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 7
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_7:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 7
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 7)
ret i32 %memcmp
}
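
; An 8-byte memcmp collapses to a single ld/rev8 pair plus two sltu compares
; on unaligned RV64 with Zbb/Zbkb; unaligned RV32 needs two word compares, and
; everything else keeps the libcall.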
define i32 @memcmp_size_8(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_8:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 8
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 8
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB31_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB31_3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB31_3: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a2, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sub a0, a0, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 8
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_8:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 8
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 8)
ret i32 %memcmp
}
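
; For 15 bytes the inline expansion covers the buffer with XLEN-sized chunks
; and an overlapping tail load (offset 7 on RV64, offset 11 on RV32) rather
; than a smaller remainder compare.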
define i32 @memcmp_size_15(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_15:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 15
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 15
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB32_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB32_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB32_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 11(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 11(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB32_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB32_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 7(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 7(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB32_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB32_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 15
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_15:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 15
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 15)
ret i32 %memcmp
}
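
; 16 bytes tiles exactly: two 8-byte compares on unaligned RV64 Zbb/Zbkb and
; four 4-byte compares on unaligned RV32, with no overlapping loads needed.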
define i32 @memcmp_size_16(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_16:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 16
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 16
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB33_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB33_3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB33_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB33_5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB33_5: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB33_3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB33_3: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 16
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_16:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 16
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 16)
ret i32 %memcmp
}
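
; A 31-byte memcmp still expands inline under Zbb/Zbkb: four doubleword
; compares on RV64 (final overlapping load at offset 23) and eight word
; compares on RV32 (final load at offset 27).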
define i32 @memcmp_size_31(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_31:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 31
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB34_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB34_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 27(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 27(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB34_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB34_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 23(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 23(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB34_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB34_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 31
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_31:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 31
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 31)
ret i32 %memcmp
}
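
; At 32 bytes the loads tile the buffer exactly, so the final load needs no
; overlap; the loadbb/res_block structure is otherwise identical to the
; 31-byte case.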
define i32 @memcmp_size_32(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_32:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 32
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBB-NEXT: .LBB35_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB35_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 4(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 4(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 8(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 8(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 12(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 12(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 16(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 16(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 20(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 20(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a2, 24(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a3, 24(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 28(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 28(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: bne a2, a3, .LBB35_9
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: .LBB35_9: # %res_block
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB35_5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB35_5: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 32
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_32:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 32
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 32)
ret i32 %memcmp
}
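
; 63 bytes: RV32 always calls the libcall. RV64 with unaligned access and rev8
; expands to eight doubleword compares, with the final pair loaded at offset
; 55 so the last load overlaps the previous one instead of requiring a
; narrower tail compare.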
define i32 @memcmp_size_63(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_63:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 63
; CHECK-RV32-NEXT: call memcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_63:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 63
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 63
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB36_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 55(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 55(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB36_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB36_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_63:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 63
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 63)
ret i32 %memcmp
}
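
; 64 bytes: same expansion as above on RV64 (eight non-overlapping doubleword
; compares for unaligned Zbb/Zbkb); everything else calls the libcall.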
define i32 @memcmp_size_64(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_64:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 64
; CHECK-RV32-NEXT: call memcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_size_64:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 64
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 64
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV64-ZBB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBB-NEXT: .LBB37_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.1: # %loadbb1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 8(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 8(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.2: # %loadbb2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 16(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 16(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.3: # %loadbb3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 24(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 24(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.4: # %loadbb4
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 32(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 32(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.5: # %loadbb5
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 40(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 40(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.6: # %loadbb6
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a2, 48(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a3, 48(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a2
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.7: # %loadbb7
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a0, 56(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ld a1, 56(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a2, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a3, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: bne a2, a3, .LBB37_9
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: # %bb.8:
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: li a0, 0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: .LBB37_9: # %res_block
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a2, a3
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: neg a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_size_64:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 64
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 64)
ret i32 %memcmp
}
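
; 127 bytes is past what the inline expansion covers (the 64-byte case above
; already needed eight loads per side), so every configuration calls the
; libcall.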
define i32 @memcmp_size_127(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_127:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 127
; CHECK-RV32-NEXT: call memcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: memcmp_size_127:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 127
; CHECK-RV64-NEXT: call memcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 127)
ret i32 %memcmp
}
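
; 128 bytes: libcall everywhere, as with 127.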
define i32 @memcmp_size_128(ptr %s1, ptr %s2) nounwind {
; CHECK-RV32-LABEL: memcmp_size_128:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: li a2, 128
; CHECK-RV32-NEXT: call memcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: memcmp_size_128:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: li a2, 128
; CHECK-RV64-NEXT: call memcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 128)
ret i32 %memcmp
}
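
; A runtime length can never be expanded inline; the length already arrives in
; a2 per the calling convention, so the call needs no setup beyond the frame.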
define i32 @memcmp_size_runtime(ptr %s1, ptr %s2, iXLen %len) nounwind {
; CHECK-RV32-LABEL: memcmp_size_runtime:
; CHECK-RV32: # %bb.0: # %entry
; CHECK-RV32-NEXT: addi sp, sp, -16
; CHECK-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-RV32-NEXT: call memcmp
; CHECK-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-RV32-NEXT: addi sp, sp, 16
; CHECK-RV32-NEXT: ret
;
; CHECK-RV64-LABEL: memcmp_size_runtime:
; CHECK-RV64: # %bb.0: # %entry
; CHECK-RV64-NEXT: addi sp, sp, -16
; CHECK-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-RV64-NEXT: call memcmp
; CHECK-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-RV64-NEXT: addi sp, sp, 16
; CHECK-RV64-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen %len)
ret i32 %memcmp
}
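
; Equality-only uses of memcmp need no byte reversal: with unaligned access
; the 4-byte compare becomes load + xor + seqz, and the aligned configurations
; assemble each word from byte loads (packh on Zbkb) before the same
; xor + seqz.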
define i1 @memcmp_eq_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV32-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV32-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-NEXT: slli a3, a3, 8
; CHECK-ALIGNED-RV32-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV32-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV32-NEXT: or a2, a3, a2
; CHECK-ALIGNED-RV32-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV32-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV32-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV32-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV32-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV32-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV32-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV32-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV32-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV32-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV64-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV64-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-NEXT: slli a3, a3, 8
; CHECK-ALIGNED-RV64-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-NEXT: or a2, a3, a2
; CHECK-ALIGNED-RV64-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV64-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV64-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV64-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV64-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV64-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a3, a3, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a2, a3, a2
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV32-ZBB-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV32-ZBB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a3, a3, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a2, a3, a2
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-ZBB-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-ZBB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-ZBB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a1, a4, a1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a2, a2, a3
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a0, a5, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: packh a3, a3, a4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: pack a1, a2, a1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: pack a0, a3, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: packh a2, a2, a3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a5, 1(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lbu a6, 2(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-ZBKB-NEXT: packh a3, a3, a5
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a6, a6, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a0, a0, a6
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-ZBKB-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-ZBKB-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a1, 3(a1)
; CHECK-ALIGNED-RV32-V-NEXT: slli a3, a3, 8
; CHECK-ALIGNED-RV32-V-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV32-V-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV32-V-NEXT: or a2, a3, a2
; CHECK-ALIGNED-RV32-V-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV32-V-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV32-V-NEXT: lbu a0, 3(a0)
; CHECK-ALIGNED-RV32-V-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV32-V-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV32-V-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV32-V-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV32-V-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV32-V-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV32-V-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV32-V-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV32-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_eq_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: lbu a2, 0(a1)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a3, 1(a1)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a4, 2(a1)
; CHECK-ALIGNED-RV64-V-NEXT: lb a1, 3(a1)
; CHECK-ALIGNED-RV64-V-NEXT: slli a3, a3, 8
; CHECK-ALIGNED-RV64-V-NEXT: slli a4, a4, 16
; CHECK-ALIGNED-RV64-V-NEXT: slli a1, a1, 24
; CHECK-ALIGNED-RV64-V-NEXT: or a2, a3, a2
; CHECK-ALIGNED-RV64-V-NEXT: or a1, a1, a4
; CHECK-ALIGNED-RV64-V-NEXT: lbu a3, 0(a0)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a4, 1(a0)
; CHECK-ALIGNED-RV64-V-NEXT: lbu a5, 2(a0)
; CHECK-ALIGNED-RV64-V-NEXT: lb a0, 3(a0)
; CHECK-ALIGNED-RV64-V-NEXT: slli a4, a4, 8
; CHECK-ALIGNED-RV64-V-NEXT: or a3, a4, a3
; CHECK-ALIGNED-RV64-V-NEXT: slli a5, a5, 16
; CHECK-ALIGNED-RV64-V-NEXT: slli a0, a0, 24
; CHECK-ALIGNED-RV64-V-NEXT: or a0, a0, a5
; CHECK-ALIGNED-RV64-V-NEXT: or a1, a1, a2
; CHECK-ALIGNED-RV64-V-NEXT: or a0, a0, a3
; CHECK-ALIGNED-RV64-V-NEXT: xor a0, a0, a1
; CHECK-ALIGNED-RV64-V-NEXT: seqz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-LABEL: memcmp_eq_zero:
; CHECK-UNALIGNED: # %bb.0: # %entry
; CHECK-UNALIGNED-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-NEXT: xor a0, a0, a1
; CHECK-UNALIGNED-NEXT: seqz a0, a0
; CHECK-UNALIGNED-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp eq i32 %memcmp, 0
ret i1 %ret
}
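
; memcmp(...) < 0 needs only an unsigned compare of the byte-reversed words;
; RV64 shifts both rev8 results right by 32 to compare the 32-bit values.
; Without rev8 (or with V), the libcall result's sign bit is tested instead
; (srli 31 on RV32, slti 0 on RV64).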
define i1 @memcmp_lt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: srli a0, a0, 31
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_lt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: srli a0, a0, 31
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: srli a0, a0, 31
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_lt_zero:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp slt i32 %memcmp, 0
ret i1 %ret
}
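
; memcmp(...) > 0 is the same pattern with the sltu operands swapped; the
; libcall configurations test the result with sgtz.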
define i1 @memcmp_gt_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_gt_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: sgtz a0, a0
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: sgtz a0, a0
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: sgtz a0, a0
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: sgtz a0, a0
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_gt_zero:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: sgtz a0, a0
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp sgt i32 %memcmp, 0
ret i1 %ret
}
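
; memcmp(...) <= 0 inverts the greater-than compare (sltu then xori 1); the
; libcall configurations use slti a0, a0, 1.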
define i1 @memcmp_le_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_le_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 1
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: slti a0, a0, 1
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: slti a0, a0, 1
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a1, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: slti a0, a0, 1
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_le_zero:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: slti a0, a0, 1
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp slt i32 %memcmp, 1
ret i1 %ret
}
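
;; C-level equivalent of @memcmp_le_zero above (illustrative sketch only, not
;; part of the test input):
;;
;;   #include <string.h>
;;   _Bool memcmp_le_zero(const void *s1, const void *s2) {
;;     return memcmp(s1, s2, 4) <= 0; /* libcall result consumed by
;;        slti a0, a0, 1; the unaligned Zbb/Zbkb runs instead expand the
;;        4-byte compare inline with word loads, rev8, and sltu/xori */
;;   }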
define i1 @memcmp_ge_zero(ptr %s1, ptr %s2) nounwind {
; CHECK-ALIGNED-RV32-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV32: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-NEXT: call memcmp
; CHECK-ALIGNED-RV32-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-NEXT: ret
;
; CHECK-ALIGNED-RV64-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV64: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-NEXT: call memcmp
; CHECK-ALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBB-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-ZBB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-ZBB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBB-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-ZBB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-ALIGNED-RV32-ZBKB-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV32-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-ZBKB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-ZBKB-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV64-ZBKB-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-ZBKB-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-ZBKB-NEXT: call memcmp
; CHECK-ALIGNED-RV64-ZBKB-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-ZBKB-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-ZBKB-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-ALIGNED-RV32-V-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-ALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV32-V-NEXT: call memcmp
; CHECK-ALIGNED-RV32-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV32-V-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-ALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV32-V-NEXT: ret
;
; CHECK-ALIGNED-RV64-V-LABEL: memcmp_ge_zero:
; CHECK-ALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-ALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-ALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-ALIGNED-RV64-V-NEXT: call memcmp
; CHECK-ALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-ALIGNED-RV64-V-NEXT: xori a0, a0, 1
; CHECK-ALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-ALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-ALIGNED-RV64-V-NEXT: ret
;
; CHECK-UNALIGNED-RV32-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV32: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-NEXT: slti a0, a0, 0
; CHECK-UNALIGNED-RV32-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV32-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-NEXT: ret
;
; CHECK-UNALIGNED-RV64-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV64: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-NEXT: slti a0, a0, 0
; CHECK-UNALIGNED-RV64-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV64-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBB-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV32-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBB-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV64-ZBB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-ZBKB-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV32-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV32-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV64-ZBKB-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV64-ZBKB: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a0, 0(a0)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: lw a1, 0(a1)
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a0, a0
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: rev8 a1, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a0, a0, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: srli a1, a1, 32
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: sltu a0, a0, a1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV64-ZBKB-NEXT: ret
;
; CHECK-UNALIGNED-RV32-V-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV32-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV32-V-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
; CHECK-UNALIGNED-RV32-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV32-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV32-V-NEXT: slti a0, a0, 0
; CHECK-UNALIGNED-RV32-V-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV32-V-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
; CHECK-UNALIGNED-RV32-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV32-V-NEXT: ret
;
; CHECK-UNALIGNED-RV64-V-LABEL: memcmp_ge_zero:
; CHECK-UNALIGNED-RV64-V: # %bb.0: # %entry
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, -16
; CHECK-UNALIGNED-RV64-V-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-UNALIGNED-RV64-V-NEXT: li a2, 4
; CHECK-UNALIGNED-RV64-V-NEXT: call memcmp
; CHECK-UNALIGNED-RV64-V-NEXT: slti a0, a0, 0
; CHECK-UNALIGNED-RV64-V-NEXT: xori a0, a0, 1
; CHECK-UNALIGNED-RV64-V-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-UNALIGNED-RV64-V-NEXT: addi sp, sp, 16
; CHECK-UNALIGNED-RV64-V-NEXT: ret
entry:
%memcmp = call signext i32 @memcmp(ptr %s1, ptr %s2, iXLen 4)
%ret = icmp sgt i32 %memcmp, -1
ret i1 %ret
}
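
;; C-level equivalent of @memcmp_ge_zero above (illustrative sketch only, not
;; part of the test input):
;;
;;   #include <string.h>
;;   _Bool memcmp_ge_zero(const void *s1, const void *s2) {
;;     return memcmp(s1, s2, 4) >= 0; /* libcall result consumed by
;;        slti a0, a0, 0 followed by xori a0, a0, 1 */
;;   }
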
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; CHECK-ALIGNED: {{.*}}