clang-p2996/llvm/test/CodeGen/X86/memcmp.ll
Simon Pilgrim aab8b2eb08 [X86] MatchVectorAllZeroTest - add support for icmp(bitcast(icmp_ne(X,Y)),0) vector reduction patterns
Many allof/anyof/noneof reduction patterns are canonicalized by bitcasting a vXi1 vector comparison result to iN and comparing it against 0/-1.

This patch adds support for recognizing an icmp_ne vector comparison against 0, which matches a 'whole vectors are equal' comparison pattern.
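
As a rough illustration (this sketch is not part of the test below; the function and value names are made up), the canonicalized form being matched is a lane-wise compare whose i1 mask is bitcast to an integer and then tested against zero:

define i1 @allof_eq_v4i32(<4 x i32> %x, <4 x i32> %y) {
  ; lane-wise "not equal" mask: <4 x i1>
  %ne = icmp ne <4 x i32> %x, %y
  ; canonicalized reduction: pack the mask into an i4 ...
  %bits = bitcast <4 x i1> %ne to i4
  ; ... and compare against 0: no lane differs, so the vectors are entirely equal
  %alleq = icmp eq i4 %bits, 0
  ret i1 %alleq
}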

There are a few more steps to follow in future patches - we need to add support to MatchVectorAllZeroTest for comparing against -1 (in some cases), and this initial refactoring of LowerVectorAllZero to LowerVectorAllEqual needs to be extended so we can fully merge with the similar combineVectorSizedSetCCEquality code (which deals with scalar integer memcmp patterns).

Another step towards Issue #53419

Differential Revision: https://reviews.llvm.org/D147243
2023-03-31 15:44:49 +01:00

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=X64,X64-SSE,X64-SSE2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=sse4.1 | FileCheck %s --check-prefixes=X64,X64-SSE,X64-SSE41
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX1
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx2 | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512bw,+prefer-256-bit | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512bw,-prefer-256-bit | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX512,X64-AVX512BW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512f,+prefer-256-bit,-prefer-mask-registers | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512f,-prefer-256-bit,-prefer-mask-registers | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX512,X64-AVX512F
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512f,+prefer-256-bit,+prefer-mask-registers | FileCheck %s --check-prefixes=X64,X64-MIC-AVX,X64-MIC-AVX2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512f,-prefer-256-bit,+prefer-mask-registers | FileCheck %s --check-prefixes=X64,X64-MIC-AVX,X64-MIC-AVX512F
; This tests codegen-time inlining/optimization of memcmp
; rdar://6480398
@.str = private constant [513 x i8] c"01234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901\00", align 1
declare dso_local i32 @memcmp(ptr, ptr, i64)
define i32 @length0(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length0:
; X64: # %bb.0:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 0) nounwind
ret i32 %m
}
define i1 @length0_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length0_eq:
; X64: # %bb.0:
; X64-NEXT: movb $1, %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 0) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length0_lt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length0_lt:
; X64: # %bb.0:
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 0) nounwind
%c = icmp slt i32 %m, 0
ret i1 %c
}
define i32 @length2(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: movzwl (%rsi), %ecx
; X64-NEXT: rolw $8, %ax
; X64-NEXT: rolw $8, %cx
; X64-NEXT: movzwl %ax, %eax
; X64-NEXT: movzwl %cx, %ecx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 2) nounwind
ret i32 %m
}
define i32 @length2_const(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2_const:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: rolw $8, %ax
; X64-NEXT: movzwl %ax, %eax
; X64-NEXT: addl $-12594, %eax # imm = 0xCECE
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr getelementptr inbounds ([513 x i8], ptr @.str, i32 0, i32 1), i64 2) nounwind
ret i32 %m
}
define i1 @length2_gt_const(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2_gt_const:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: rolw $8, %ax
; X64-NEXT: movzwl %ax, %eax
; X64-NEXT: addl $-12594, %eax # imm = 0xCECE
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr getelementptr inbounds ([513 x i8], ptr @.str, i32 0, i32 1), i64 2) nounwind
%c = icmp sgt i32 %m, 0
ret i1 %c
}
define i1 @length2_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2_eq:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: cmpw (%rsi), %ax
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 2) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length2_lt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2_lt:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: movzwl (%rsi), %ecx
; X64-NEXT: rolw $8, %ax
; X64-NEXT: rolw $8, %cx
; X64-NEXT: movzwl %ax, %eax
; X64-NEXT: movzwl %cx, %ecx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 2) nounwind
%c = icmp slt i32 %m, 0
ret i1 %c
}
define i1 @length2_gt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2_gt:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: movzwl (%rsi), %ecx
; X64-NEXT: rolw $8, %ax
; X64-NEXT: rolw $8, %cx
; X64-NEXT: movzwl %ax, %eax
; X64-NEXT: movzwl %cx, %ecx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 2) nounwind
%c = icmp sgt i32 %m, 0
ret i1 %c
}
define i1 @length2_eq_const(ptr %X) nounwind {
; X64-LABEL: length2_eq_const:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: cmpl $12849, %eax # imm = 0x3231
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr getelementptr inbounds ([513 x i8], ptr @.str, i32 0, i32 1), i64 2) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i1 @length2_eq_nobuiltin_attr(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length2_eq_nobuiltin_attr:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $2, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 2) nounwind nobuiltin
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length3(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length3:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %ecx
; X64-NEXT: movzwl (%rsi), %edx
; X64-NEXT: rolw $8, %cx
; X64-NEXT: rolw $8, %dx
; X64-NEXT: cmpw %dx, %cx
; X64-NEXT: jne .LBB11_3
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movzbl 2(%rdi), %eax
; X64-NEXT: movzbl 2(%rsi), %ecx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: retq
; X64-NEXT: .LBB11_3: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpw %dx, %cx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 3) nounwind
ret i32 %m
}
define i1 @length3_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length3_eq:
; X64: # %bb.0:
; X64-NEXT: movzwl (%rdi), %eax
; X64-NEXT: xorw (%rsi), %ax
; X64-NEXT: movzbl 2(%rdi), %ecx
; X64-NEXT: xorb 2(%rsi), %cl
; X64-NEXT: movzbl %cl, %ecx
; X64-NEXT: orw %ax, %cx
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 3) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length4(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length4:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %ecx
; X64-NEXT: movl (%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: seta %al
; X64-NEXT: sbbl $0, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 4) nounwind
ret i32 %m
}
define i1 @length4_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length4_eq:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %eax
; X64-NEXT: cmpl (%rsi), %eax
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 4) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i1 @length4_lt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length4_lt:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %ecx
; X64-NEXT: movl (%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: seta %al
; X64-NEXT: sbbl $0, %eax
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 4) nounwind
%c = icmp slt i32 %m, 0
ret i1 %c
}
define i1 @length4_gt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length4_gt:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %eax
; X64-NEXT: movl (%rsi), %ecx
; X64-NEXT: bswapl %eax
; X64-NEXT: bswapl %ecx
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: cmpl %ecx, %eax
; X64-NEXT: seta %dl
; X64-NEXT: sbbl $0, %edx
; X64-NEXT: testl %edx, %edx
; X64-NEXT: setg %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 4) nounwind
%c = icmp sgt i32 %m, 0
ret i1 %c
}
define i1 @length4_eq_const(ptr %X) nounwind {
; X64-LABEL: length4_eq_const:
; X64: # %bb.0:
; X64-NEXT: cmpl $875770417, (%rdi) # imm = 0x34333231
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr getelementptr inbounds ([513 x i8], ptr @.str, i32 0, i32 1), i64 4) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length5(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length5:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %ecx
; X64-NEXT: movl (%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: jne .LBB18_3
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movzbl 4(%rdi), %eax
; X64-NEXT: movzbl 4(%rsi), %ecx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: retq
; X64-NEXT: .LBB18_3: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 5) nounwind
ret i32 %m
}
define i1 @length5_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length5_eq:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %eax
; X64-NEXT: xorl (%rsi), %eax
; X64-NEXT: movzbl 4(%rdi), %ecx
; X64-NEXT: xorb 4(%rsi), %cl
; X64-NEXT: movzbl %cl, %ecx
; X64-NEXT: orl %eax, %ecx
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 5) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i1 @length5_lt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length5_lt:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %ecx
; X64-NEXT: movl (%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: jne .LBB20_3
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movzbl 4(%rdi), %eax
; X64-NEXT: movzbl 4(%rsi), %ecx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
; X64-NEXT: .LBB20_3: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 5) nounwind
%c = icmp slt i32 %m, 0
ret i1 %c
}
define i32 @length7(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length7:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %ecx
; X64-NEXT: movl (%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: jne .LBB21_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movl 3(%rdi), %ecx
; X64-NEXT: movl 3(%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: je .LBB21_3
; X64-NEXT: .LBB21_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB21_3: # %endblock
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 7) nounwind
ret i32 %m
}
define i1 @length7_lt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length7_lt:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %ecx
; X64-NEXT: movl (%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: jne .LBB22_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movl 3(%rdi), %ecx
; X64-NEXT: movl 3(%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: je .LBB22_3
; X64-NEXT: .LBB22_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpl %edx, %ecx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB22_3: # %endblock
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 7) nounwind
%c = icmp slt i32 %m, 0
ret i1 %c
}
define i1 @length7_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length7_eq:
; X64: # %bb.0:
; X64-NEXT: movl (%rdi), %eax
; X64-NEXT: movl 3(%rdi), %ecx
; X64-NEXT: xorl (%rsi), %eax
; X64-NEXT: xorl 3(%rsi), %ecx
; X64-NEXT: orl %eax, %ecx
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 7) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length8(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length8:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: movq (%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: seta %al
; X64-NEXT: sbbl $0, %eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 8) nounwind
ret i32 %m
}
define i1 @length8_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length8_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: cmpq (%rsi), %rax
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 8) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length8_eq_const(ptr %X) nounwind {
; X64-LABEL: length8_eq_const:
; X64: # %bb.0:
; X64-NEXT: movabsq $3978425819141910832, %rax # imm = 0x3736353433323130
; X64-NEXT: cmpq %rax, (%rdi)
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 8) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i1 @length9_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length9_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: movzbl 8(%rdi), %ecx
; X64-NEXT: xorb 8(%rsi), %cl
; X64-NEXT: movzbl %cl, %ecx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 9) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length10_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length10_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: movzwl 8(%rdi), %ecx
; X64-NEXT: xorw 8(%rsi), %cx
; X64-NEXT: movzwl %cx, %ecx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 10) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length11_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length11_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: movq 3(%rdi), %rcx
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: xorq 3(%rsi), %rcx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 11) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length12_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length12_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: movl 8(%rdi), %ecx
; X64-NEXT: xorl 8(%rsi), %ecx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: setne %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 12) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length12(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length12:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: movq (%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: jne .LBB31_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movl 8(%rdi), %ecx
; X64-NEXT: movl 8(%rsi), %edx
; X64-NEXT: bswapl %ecx
; X64-NEXT: bswapl %edx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: je .LBB31_3
; X64-NEXT: .LBB31_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB31_3: # %endblock
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 12) nounwind
ret i32 %m
}
define i1 @length13_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length13_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: movq 5(%rdi), %rcx
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: xorq 5(%rsi), %rcx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 13) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length14_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length14_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: movq 6(%rdi), %rcx
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: xorq 6(%rsi), %rcx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 14) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length15(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length15:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: movq (%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: jne .LBB34_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movq 7(%rdi), %rcx
; X64-NEXT: movq 7(%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: je .LBB34_3
; X64-NEXT: .LBB34_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB34_3: # %endblock
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 15) nounwind
ret i32 %m
}
define i1 @length15_lt(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length15_lt:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: movq (%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: jne .LBB35_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movq 7(%rdi), %rcx
; X64-NEXT: movq 7(%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: je .LBB35_3
; X64-NEXT: .LBB35_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB35_3: # %endblock
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 15) nounwind
%c = icmp slt i32 %m, 0
ret i1 %c
}
define i32 @length15_const(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length15_const:
; X64: # %bb.0:
; X64-NEXT: movabsq $3544952156018063160, %rcx # imm = 0x3132333435363738
; X64-NEXT: movq (%rdi), %rdx
; X64-NEXT: bswapq %rdx
; X64-NEXT: cmpq %rcx, %rdx
; X64-NEXT: jne .LBB36_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movabsq $4051322327650219061, %rcx # imm = 0x3839303132333435
; X64-NEXT: movq 7(%rdi), %rdx
; X64-NEXT: bswapq %rdx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rcx, %rdx
; X64-NEXT: je .LBB36_3
; X64-NEXT: .LBB36_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rcx, %rdx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB36_3: # %endblock
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr getelementptr inbounds ([513 x i8], ptr @.str, i32 0, i32 1), i64 15) nounwind
ret i32 %m
}
define i1 @length15_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length15_eq:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: movq 7(%rdi), %rcx
; X64-NEXT: xorq (%rsi), %rax
; X64-NEXT: xorq 7(%rsi), %rcx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: sete %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 15) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i1 @length15_gt_const(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length15_gt_const:
; X64: # %bb.0:
; X64-NEXT: movabsq $3544952156018063160, %rax # imm = 0x3132333435363738
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: bswapq %rcx
; X64-NEXT: cmpq %rax, %rcx
; X64-NEXT: jne .LBB38_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movabsq $4051322327650219061, %rax # imm = 0x3839303132333435
; X64-NEXT: movq 7(%rdi), %rcx
; X64-NEXT: bswapq %rcx
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: cmpq %rax, %rcx
; X64-NEXT: je .LBB38_3
; X64-NEXT: .LBB38_2: # %res_block
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: cmpq %rax, %rcx
; X64-NEXT: sbbl %edx, %edx
; X64-NEXT: orl $1, %edx
; X64-NEXT: .LBB38_3: # %endblock
; X64-NEXT: testl %edx, %edx
; X64-NEXT: setg %al
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr getelementptr inbounds ([513 x i8], ptr @.str, i32 0, i32 1), i64 15) nounwind
%c = icmp sgt i32 %m, 0
ret i1 %c
}
; PR33329 - https://bugs.llvm.org/show_bug.cgi?id=33329
define i32 @length16(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length16:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: movq (%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: jne .LBB39_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movq 8(%rdi), %rcx
; X64-NEXT: movq 8(%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: je .LBB39_3
; X64-NEXT: .LBB39_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB39_3: # %endblock
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 16) nounwind
ret i32 %m
}
define i1 @length16_eq(ptr %x, ptr %y) nounwind {
; X64-SSE2-LABEL: length16_eq:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu (%rsi), %xmm1
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm1
; X64-SSE2-NEXT: pmovmskb %xmm1, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: setne %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length16_eq:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu (%rsi), %xmm1
; X64-SSE41-NEXT: pxor %xmm0, %xmm1
; X64-SSE41-NEXT: ptest %xmm1, %xmm1
; X64-SSE41-NEXT: setne %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length16_eq:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vpxor (%rsi), %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: setne %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length16_eq:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %xmm1
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k0
; X64-MIC-AVX-NEXT: kortestw %k0, %k0
; X64-MIC-AVX-NEXT: setne %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 16) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length16_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length16_lt:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rcx
; X64-NEXT: movq (%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: jne .LBB41_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movq 8(%rdi), %rcx
; X64-NEXT: movq 8(%rsi), %rdx
; X64-NEXT: bswapq %rcx
; X64-NEXT: bswapq %rdx
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: je .LBB41_3
; X64-NEXT: .LBB41_2: # %res_block
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: cmpq %rdx, %rcx
; X64-NEXT: sbbl %eax, %eax
; X64-NEXT: orl $1, %eax
; X64-NEXT: .LBB41_3: # %endblock
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 16) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length16_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length16_gt:
; X64: # %bb.0:
; X64-NEXT: movq (%rdi), %rax
; X64-NEXT: movq (%rsi), %rcx
; X64-NEXT: bswapq %rax
; X64-NEXT: bswapq %rcx
; X64-NEXT: cmpq %rcx, %rax
; X64-NEXT: jne .LBB42_2
; X64-NEXT: # %bb.1: # %loadbb1
; X64-NEXT: movq 8(%rdi), %rax
; X64-NEXT: movq 8(%rsi), %rcx
; X64-NEXT: bswapq %rax
; X64-NEXT: bswapq %rcx
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: cmpq %rcx, %rax
; X64-NEXT: je .LBB42_3
; X64-NEXT: .LBB42_2: # %res_block
; X64-NEXT: xorl %edx, %edx
; X64-NEXT: cmpq %rcx, %rax
; X64-NEXT: sbbl %edx, %edx
; X64-NEXT: orl $1, %edx
; X64-NEXT: .LBB42_3: # %endblock
; X64-NEXT: testl %edx, %edx
; X64-NEXT: setg %al
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 16) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length16_eq_const(ptr %X) nounwind {
; X64-SSE2-LABEL: length16_eq_const:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: sete %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length16_eq_const:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: sete %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length16_eq_const:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: sete %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length16_eq_const:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [858927408,926299444,825243960,892613426]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k0
; X64-MIC-AVX-NEXT: kortestw %k0, %k0
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 16) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
; PR33914 - https://bugs.llvm.org/show_bug.cgi?id=33914
define i32 @length24(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length24:
; X64: # %bb.0:
; X64-NEXT: movl $24, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 24) nounwind
ret i32 %m
}
define i1 @length24_eq(ptr %x, ptr %y) nounwind {
; X64-SSE2-LABEL: length24_eq:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu (%rsi), %xmm1
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm1
; X64-SSE2-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
; X64-SSE2-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm2
; X64-SSE2-NEXT: pand %xmm1, %xmm2
; X64-SSE2-NEXT: pmovmskb %xmm2, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: sete %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length24_eq:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu (%rsi), %xmm1
; X64-SSE41-NEXT: pxor %xmm0, %xmm1
; X64-SSE41-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
; X64-SSE41-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; X64-SSE41-NEXT: pxor %xmm0, %xmm2
; X64-SSE41-NEXT: por %xmm1, %xmm2
; X64-SSE41-NEXT: ptest %xmm2, %xmm2
; X64-SSE41-NEXT: sete %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length24_eq:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; X64-AVX-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; X64-AVX-NEXT: vpxor %xmm2, %xmm1, %xmm1
; X64-AVX-NEXT: vpxor (%rsi), %xmm0, %xmm0
; X64-AVX-NEXT: vpor %xmm0, %xmm1, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: sete %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length24_eq:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %xmm1
; X64-MIC-AVX-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; X64-MIC-AVX-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm3, %zmm2, %k0
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 24) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length24_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length24_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $24, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 24) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length24_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length24_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $24, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 24) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length24_eq_const(ptr %X) nounwind {
; X64-SSE2-LABEL: length24_eq_const:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: pand %xmm1, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: setne %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length24_eq_const:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE41-NEXT: por %xmm1, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: setne %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length24_eq_const:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; X64-AVX-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1, %xmm1
; X64-AVX-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: setne %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length24_eq_const:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [959985462,858927408,0,0]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm1, %k0
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [858927408,926299444,825243960,892613426]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: setne %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 24) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length31(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length31:
; X64: # %bb.0:
; X64-NEXT: movl $31, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 31) nounwind
ret i32 %m
}
define i1 @length31_eq(ptr %x, ptr %y) nounwind {
; X64-SSE2-LABEL: length31_eq:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu 15(%rdi), %xmm1
; X64-SSE2-NEXT: movdqu (%rsi), %xmm2
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm2
; X64-SSE2-NEXT: movdqu 15(%rsi), %xmm0
; X64-SSE2-NEXT: pcmpeqb %xmm1, %xmm0
; X64-SSE2-NEXT: pand %xmm2, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: sete %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length31_eq:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu 15(%rdi), %xmm1
; X64-SSE41-NEXT: movdqu (%rsi), %xmm2
; X64-SSE41-NEXT: pxor %xmm0, %xmm2
; X64-SSE41-NEXT: movdqu 15(%rsi), %xmm0
; X64-SSE41-NEXT: pxor %xmm1, %xmm0
; X64-SSE41-NEXT: por %xmm2, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: sete %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length31_eq:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vmovdqu 15(%rdi), %xmm1
; X64-AVX-NEXT: vpxor 15(%rsi), %xmm1, %xmm1
; X64-AVX-NEXT: vpxor (%rsi), %xmm0, %xmm0
; X64-AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: sete %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length31_eq:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqu 15(%rdi), %xmm1
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %xmm2
; X64-MIC-AVX-NEXT: vmovdqu 15(%rsi), %xmm3
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm3, %zmm1, %k0
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 31) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length31_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length31_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $31, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 31) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length31_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length31_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $31, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 31) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length31_eq_prefer128(ptr %x, ptr %y) nounwind "prefer-vector-width"="128" {
; X64-SSE2-LABEL: length31_eq_prefer128:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu 15(%rdi), %xmm1
; X64-SSE2-NEXT: movdqu (%rsi), %xmm2
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm2
; X64-SSE2-NEXT: movdqu 15(%rsi), %xmm0
; X64-SSE2-NEXT: pcmpeqb %xmm1, %xmm0
; X64-SSE2-NEXT: pand %xmm2, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: sete %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length31_eq_prefer128:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu 15(%rdi), %xmm1
; X64-SSE41-NEXT: movdqu (%rsi), %xmm2
; X64-SSE41-NEXT: pxor %xmm0, %xmm2
; X64-SSE41-NEXT: movdqu 15(%rsi), %xmm0
; X64-SSE41-NEXT: pxor %xmm1, %xmm0
; X64-SSE41-NEXT: por %xmm2, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: sete %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length31_eq_prefer128:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vmovdqu 15(%rdi), %xmm1
; X64-AVX-NEXT: vpxor 15(%rsi), %xmm1, %xmm1
; X64-AVX-NEXT: vpxor (%rsi), %xmm0, %xmm0
; X64-AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: sete %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length31_eq_prefer128:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqu 15(%rdi), %xmm1
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %xmm2
; X64-MIC-AVX-NEXT: vmovdqu 15(%rsi), %xmm3
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm3, %zmm1, %k0
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 31) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length31_eq_const(ptr %X) nounwind {
; X64-SSE2-LABEL: length31_eq_const:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu 15(%rdi), %xmm1
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: pand %xmm1, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: setne %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length31_eq_const:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu 15(%rdi), %xmm1
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE41-NEXT: por %xmm1, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: setne %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length31_eq_const:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vmovdqu 15(%rdi), %xmm1
; X64-AVX-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1, %xmm1
; X64-AVX-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: setne %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length31_eq_const:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqu 15(%rdi), %xmm1
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [943142453,842084409,909456435,809056311]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm1, %k0
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [858927408,926299444,825243960,892613426]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: setne %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 31) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length32(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length32:
; X64: # %bb.0:
; X64-NEXT: movl $32, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 32) nounwind
ret i32 %m
}
; PR33325 - https://bugs.llvm.org/show_bug.cgi?id=33325
define i1 @length32_eq(ptr %x, ptr %y) nounwind {
; X64-SSE2-LABEL: length32_eq:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu 16(%rdi), %xmm1
; X64-SSE2-NEXT: movdqu (%rsi), %xmm2
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm2
; X64-SSE2-NEXT: movdqu 16(%rsi), %xmm0
; X64-SSE2-NEXT: pcmpeqb %xmm1, %xmm0
; X64-SSE2-NEXT: pand %xmm2, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: sete %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length32_eq:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu 16(%rdi), %xmm1
; X64-SSE41-NEXT: movdqu (%rsi), %xmm2
; X64-SSE41-NEXT: pxor %xmm0, %xmm2
; X64-SSE41-NEXT: movdqu 16(%rsi), %xmm0
; X64-SSE41-NEXT: pxor %xmm1, %xmm0
; X64-SSE41-NEXT: por %xmm2, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: sete %al
; X64-SSE41-NEXT: retq
;
; X64-AVX1-LABEL: length32_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vxorps (%rsi), %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length32_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length32_eq:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX512-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX512-NEXT: vptest %ymm0, %ymm0
; X64-AVX512-NEXT: sete %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX-LABEL: length32_eq:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %ymm1
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k0
; X64-MIC-AVX-NEXT: kortestw %k0, %k0
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 32) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length32_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length32_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $32, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 32) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length32_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length32_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $32, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 32) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length32_eq_prefer128(ptr %x, ptr %y) nounwind "prefer-vector-width"="128" {
; X64-SSE2-LABEL: length32_eq_prefer128:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu 16(%rdi), %xmm1
; X64-SSE2-NEXT: movdqu (%rsi), %xmm2
; X64-SSE2-NEXT: pcmpeqb %xmm0, %xmm2
; X64-SSE2-NEXT: movdqu 16(%rsi), %xmm0
; X64-SSE2-NEXT: pcmpeqb %xmm1, %xmm0
; X64-SSE2-NEXT: pand %xmm2, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: sete %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length32_eq_prefer128:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu 16(%rdi), %xmm1
; X64-SSE41-NEXT: movdqu (%rsi), %xmm2
; X64-SSE41-NEXT: pxor %xmm0, %xmm2
; X64-SSE41-NEXT: movdqu 16(%rsi), %xmm0
; X64-SSE41-NEXT: pxor %xmm1, %xmm0
; X64-SSE41-NEXT: por %xmm2, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: sete %al
; X64-SSE41-NEXT: retq
;
; X64-AVX-LABEL: length32_eq_prefer128:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-AVX-NEXT: vmovdqu 16(%rdi), %xmm1
; X64-AVX-NEXT: vpxor 16(%rsi), %xmm1, %xmm1
; X64-AVX-NEXT: vpxor (%rsi), %xmm0, %xmm0
; X64-AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT: vptest %xmm0, %xmm0
; X64-AVX-NEXT: sete %al
; X64-AVX-NEXT: retq
;
; X64-MIC-AVX-LABEL: length32_eq_prefer128:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %xmm0
; X64-MIC-AVX-NEXT: vmovdqu 16(%rdi), %xmm1
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %xmm2
; X64-MIC-AVX-NEXT: vmovdqu 16(%rsi), %xmm3
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm3, %zmm1, %k0
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 32) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length32_eq_const(ptr %X) nounwind {
; X64-SSE2-LABEL: length32_eq_const:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: movdqu (%rdi), %xmm0
; X64-SSE2-NEXT: movdqu 16(%rdi), %xmm1
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
; X64-SSE2-NEXT: pcmpeqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: pand %xmm1, %xmm0
; X64-SSE2-NEXT: pmovmskb %xmm0, %eax
; X64-SSE2-NEXT: cmpl $65535, %eax # imm = 0xFFFF
; X64-SSE2-NEXT: setne %al
; X64-SSE2-NEXT: retq
;
; X64-SSE41-LABEL: length32_eq_const:
; X64-SSE41: # %bb.0:
; X64-SSE41-NEXT: movdqu (%rdi), %xmm0
; X64-SSE41-NEXT: movdqu 16(%rdi), %xmm1
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
; X64-SSE41-NEXT: pxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE41-NEXT: por %xmm1, %xmm0
; X64-SSE41-NEXT: ptest %xmm0, %xmm0
; X64-SSE41-NEXT: setne %al
; X64-SSE41-NEXT: retq
;
; X64-AVX1-LABEL: length32_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length32_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length32_eq_const:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX512-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX512-NEXT: vptest %ymm0, %ymm0
; X64-AVX512-NEXT: setne %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX-LABEL: length32_eq_const:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [858927408,926299444,825243960,892613426,959985462,858927408,926299444,825243960]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k0
; X64-MIC-AVX-NEXT: kortestw %k0, %k0
; X64-MIC-AVX-NEXT: setne %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 32) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length48(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length48:
; X64: # %bb.0:
; X64-NEXT: movl $48, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 48) nounwind
ret i32 %m
}
define i1 @length48_eq(ptr %x, ptr %y) nounwind {
; X64-SSE-LABEL: length48_eq:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $48, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: sete %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length48_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vmovups 32(%rdi), %xmm1
; X64-AVX1-NEXT: vmovups 32(%rsi), %xmm2
; X64-AVX1-NEXT: vxorps (%rsi), %ymm0, %ymm0
; X64-AVX1-NEXT: vxorps %ymm2, %ymm1, %ymm1
; X64-AVX1-NEXT: vorps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length48_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vmovdqu 32(%rdi), %xmm1
; X64-AVX2-NEXT: vmovdqu 32(%rsi), %xmm2
; X64-AVX2-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX2-NEXT: vpxor %ymm2, %ymm1, %ymm1
; X64-AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length48_eq:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX512-NEXT: vmovdqu 32(%rdi), %xmm1
; X64-AVX512-NEXT: vmovdqu 32(%rsi), %xmm2
; X64-AVX512-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX512-NEXT: vpxor %ymm2, %ymm1, %ymm1
; X64-AVX512-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX512-NEXT: vptest %ymm0, %ymm0
; X64-AVX512-NEXT: sete %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX-LABEL: length48_eq:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %ymm1
; X64-MIC-AVX-NEXT: vmovdqu 32(%rdi), %xmm2
; X64-MIC-AVX-NEXT: vmovdqu 32(%rsi), %xmm3
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm3, %zmm2, %k0
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 48) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length48_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length48_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $48, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 48) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length48_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length48_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $48, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 48) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length48_eq_prefer128(ptr %x, ptr %y) nounwind "prefer-vector-width"="128" {
; X64-LABEL: length48_eq_prefer128:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $48, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 48) nounwind
%cmp = icmp eq i32 %call, 0
ret i1 %cmp
}
define i1 @length48_eq_const(ptr %X) nounwind {
; X64-SSE-LABEL: length48_eq_const:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $.L.str, %esi
; X64-SSE-NEXT: movl $48, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: setne %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length48_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vmovups 32(%rdi), %xmm1
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX1-NEXT: vorps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length48_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vmovdqu 32(%rdi), %xmm1
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length48_eq_const:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX512-NEXT: vmovdqu 32(%rdi), %xmm1
; X64-AVX512-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX512-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX512-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX512-NEXT: vptest %ymm0, %ymm0
; X64-AVX512-NEXT: setne %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX-LABEL: length48_eq_const:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX-NEXT: vmovdqu 32(%rdi), %xmm1
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} ymm2 = [892613426,959985462,858927408,926299444,0,0,0,0]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm1, %k0
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [858927408,926299444,825243960,892613426,959985462,858927408,926299444,825243960]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: setne %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 48) nounwind
%c = icmp ne i32 %m, 0
ret i1 %c
}
define i32 @length63(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length63:
; X64: # %bb.0:
; X64-NEXT: movl $63, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 63) nounwind
ret i32 %m
}
define i1 @length63_eq(ptr %x, ptr %y) nounwind {
; X64-SSE-LABEL: length63_eq:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $63, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: setne %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length63_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vmovups 31(%rdi), %ymm1
; X64-AVX1-NEXT: vxorps 31(%rsi), %ymm1, %ymm1
; X64-AVX1-NEXT: vxorps (%rsi), %ymm0, %ymm0
; X64-AVX1-NEXT: vorps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length63_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vmovdqu 31(%rdi), %ymm1
; X64-AVX2-NEXT: vpxor 31(%rsi), %ymm1, %ymm1
; X64-AVX2-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length63_eq:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX512-NEXT: vmovdqu 31(%rdi), %ymm1
; X64-AVX512-NEXT: vpxor 31(%rsi), %ymm1, %ymm1
; X64-AVX512-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX512-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX512-NEXT: vptest %ymm0, %ymm0
; X64-AVX512-NEXT: setne %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX-LABEL: length63_eq:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX-NEXT: vmovdqu 31(%rdi), %ymm1
; X64-MIC-AVX-NEXT: vmovdqu (%rsi), %ymm2
; X64-MIC-AVX-NEXT: vmovdqu 31(%rsi), %ymm3
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm3, %zmm1, %k0
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: setne %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 63) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length63_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length63_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $63, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 63) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length63_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length63_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $63, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 63) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length63_eq_const(ptr %X) nounwind {
; X64-SSE-LABEL: length63_eq_const:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $.L.str, %esi
; X64-SSE-NEXT: movl $63, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: sete %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length63_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vmovups 31(%rdi), %ymm1
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX1-NEXT: vorps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length63_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vmovdqu 31(%rdi), %ymm1
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length63_eq_const:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX512-NEXT: vmovdqu 31(%rdi), %ymm1
; X64-AVX512-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX512-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX512-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX512-NEXT: vptest %ymm0, %ymm0
; X64-AVX512-NEXT: sete %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX-LABEL: length63_eq_const:
; X64-MIC-AVX: # %bb.0:
; X64-MIC-AVX-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX-NEXT: vmovdqu 31(%rdi), %ymm1
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} ymm2 = [875770417,943142453,842084409,909456435,809056311,875770417,943142453,842084409]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm2, %zmm1, %k0
; X64-MIC-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [858927408,926299444,825243960,892613426,959985462,858927408,926299444,825243960]
; X64-MIC-AVX-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX-NEXT: kortestw %k0, %k1
; X64-MIC-AVX-NEXT: sete %al
; X64-MIC-AVX-NEXT: vzeroupper
; X64-MIC-AVX-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 63) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length64(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length64:
; X64: # %bb.0:
; X64-NEXT: movl $64, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 64) nounwind
ret i32 %m
}
define i1 @length64_eq(ptr %x, ptr %y) nounwind {
; X64-SSE-LABEL: length64_eq:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $64, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: setne %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length64_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vmovups 32(%rdi), %ymm1
; X64-AVX1-NEXT: vxorps 32(%rsi), %ymm1, %ymm1
; X64-AVX1-NEXT: vxorps (%rsi), %ymm0, %ymm0
; X64-AVX1-NEXT: vorps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length64_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vmovdqu 32(%rdi), %ymm1
; X64-AVX2-NEXT: vpxor 32(%rsi), %ymm1, %ymm1
; X64-AVX2-NEXT: vpxor (%rsi), %ymm0, %ymm0
; X64-AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length64_eq:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512-NEXT: vpcmpneqd (%rsi), %zmm0, %k0
; X64-AVX512-NEXT: kortestw %k0, %k0
; X64-AVX512-NEXT: setne %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length64_eq:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX2-NEXT: vmovdqu 32(%rdi), %ymm1
; X64-MIC-AVX2-NEXT: vmovdqu (%rsi), %ymm2
; X64-MIC-AVX2-NEXT: vmovdqu 32(%rsi), %ymm3
; X64-MIC-AVX2-NEXT: vpcmpneqd %zmm3, %zmm1, %k0
; X64-MIC-AVX2-NEXT: vpcmpneqd %zmm2, %zmm0, %k1
; X64-MIC-AVX2-NEXT: kortestw %k0, %k1
; X64-MIC-AVX2-NEXT: setne %al
; X64-MIC-AVX2-NEXT: vzeroupper
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length64_eq:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k0
; X64-MIC-AVX512F-NEXT: kortestw %k0, %k0
; X64-MIC-AVX512F-NEXT: setne %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 64) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length64_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length64_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $64, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 64) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length64_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length64_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $64, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 64) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length64_eq_const(ptr %X) nounwind {
; X64-SSE-LABEL: length64_eq_const:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $.L.str, %esi
; X64-SSE-NEXT: movl $64, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: sete %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length64_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vmovups (%rdi), %ymm0
; X64-AVX1-NEXT: vmovups 32(%rdi), %ymm1
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX1-NEXT: vxorps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX1-NEXT: vorps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT: vptest %ymm0, %ymm0
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: vzeroupper
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length64_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-AVX2-NEXT: vmovdqu 32(%rdi), %ymm1
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
; X64-AVX2-NEXT: vpxor {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT: vptest %ymm0, %ymm0
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: vzeroupper
; X64-AVX2-NEXT: retq
;
; X64-AVX512-LABEL: length64_eq_const:
; X64-AVX512: # %bb.0:
; X64-AVX512-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k0
; X64-AVX512-NEXT: kortestw %k0, %k0
; X64-AVX512-NEXT: sete %al
; X64-AVX512-NEXT: vzeroupper
; X64-AVX512-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length64_eq_const:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: vmovdqu (%rdi), %ymm0
; X64-MIC-AVX2-NEXT: vmovdqu 32(%rdi), %ymm1
; X64-MIC-AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [892613426,959985462,858927408,926299444,825243960,892613426,959985462,858927408]
; X64-MIC-AVX2-NEXT: vpcmpneqd %zmm2, %zmm1, %k0
; X64-MIC-AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [858927408,926299444,825243960,892613426,959985462,858927408,926299444,825243960]
; X64-MIC-AVX2-NEXT: vpcmpneqd %zmm1, %zmm0, %k1
; X64-MIC-AVX2-NEXT: kortestw %k0, %k1
; X64-MIC-AVX2-NEXT: sete %al
; X64-MIC-AVX2-NEXT: vzeroupper
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length64_eq_const:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k0
; X64-MIC-AVX512F-NEXT: kortestw %k0, %k0
; X64-MIC-AVX512F-NEXT: sete %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 64) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
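
; 96-byte tests: only the AVX512 configurations expand the equality check,
; using a 64-byte compare plus a 32-byte compare combined with kortest; every
; other configuration calls memcmp.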
define i32 @length96(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length96:
; X64: # %bb.0:
; X64-NEXT: movl $96, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 96) nounwind
ret i32 %m
}
define i1 @length96_eq(ptr %x, ptr %y) nounwind {
; X64-SSE-LABEL: length96_eq:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $96, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: setne %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length96_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: pushq %rax
; X64-AVX1-NEXT: movl $96, %edx
; X64-AVX1-NEXT: callq memcmp
; X64-AVX1-NEXT: testl %eax, %eax
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: popq %rcx
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length96_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: pushq %rax
; X64-AVX2-NEXT: movl $96, %edx
; X64-AVX2-NEXT: callq memcmp
; X64-AVX2-NEXT: testl %eax, %eax
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: popq %rcx
; X64-AVX2-NEXT: retq
;
; X64-AVX512BW-LABEL: length96_eq:
; X64-AVX512BW: # %bb.0:
; X64-AVX512BW-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512BW-NEXT: vmovdqu 64(%rdi), %ymm1
; X64-AVX512BW-NEXT: vmovdqu 64(%rsi), %ymm2
; X64-AVX512BW-NEXT: vpcmpneqb (%rsi), %zmm0, %k0
; X64-AVX512BW-NEXT: vpcmpneqb %zmm2, %zmm1, %k1
; X64-AVX512BW-NEXT: kortestq %k1, %k0
; X64-AVX512BW-NEXT: setne %al
; X64-AVX512BW-NEXT: vzeroupper
; X64-AVX512BW-NEXT: retq
;
; X64-AVX512F-LABEL: length96_eq:
; X64-AVX512F: # %bb.0:
; X64-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512F-NEXT: vmovdqu 64(%rdi), %ymm1
; X64-AVX512F-NEXT: vmovdqu 64(%rsi), %ymm2
; X64-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k0
; X64-AVX512F-NEXT: vpcmpneqd %zmm2, %zmm1, %k1
; X64-AVX512F-NEXT: kortestw %k1, %k0
; X64-AVX512F-NEXT: setne %al
; X64-AVX512F-NEXT: vzeroupper
; X64-AVX512F-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length96_eq:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: pushq %rax
; X64-MIC-AVX2-NEXT: movl $96, %edx
; X64-MIC-AVX2-NEXT: callq memcmp
; X64-MIC-AVX2-NEXT: testl %eax, %eax
; X64-MIC-AVX2-NEXT: setne %al
; X64-MIC-AVX2-NEXT: popq %rcx
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length96_eq:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vmovdqu 64(%rdi), %ymm1
; X64-MIC-AVX512F-NEXT: vmovdqu 64(%rsi), %ymm2
; X64-MIC-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k0
; X64-MIC-AVX512F-NEXT: vpcmpneqd %zmm2, %zmm1, %k1
; X64-MIC-AVX512F-NEXT: kortestw %k1, %k0
; X64-MIC-AVX512F-NEXT: setne %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 96) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length96_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length96_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $96, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 96) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length96_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length96_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $96, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 96) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length96_eq_const(ptr %X) nounwind {
; X64-SSE-LABEL: length96_eq_const:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $.L.str, %esi
; X64-SSE-NEXT: movl $96, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: sete %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length96_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: pushq %rax
; X64-AVX1-NEXT: movl $.L.str, %esi
; X64-AVX1-NEXT: movl $96, %edx
; X64-AVX1-NEXT: callq memcmp
; X64-AVX1-NEXT: testl %eax, %eax
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: popq %rcx
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length96_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: pushq %rax
; X64-AVX2-NEXT: movl $.L.str, %esi
; X64-AVX2-NEXT: movl $96, %edx
; X64-AVX2-NEXT: callq memcmp
; X64-AVX2-NEXT: testl %eax, %eax
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: popq %rcx
; X64-AVX2-NEXT: retq
;
; X64-AVX512BW-LABEL: length96_eq_const:
; X64-AVX512BW: # %bb.0:
; X64-AVX512BW-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512BW-NEXT: vmovdqu 64(%rdi), %ymm1
; X64-AVX512BW-NEXT: vpcmpneqb .L.str(%rip), %zmm0, %k0
; X64-AVX512BW-NEXT: vpcmpneqb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %k1
; X64-AVX512BW-NEXT: kortestq %k1, %k0
; X64-AVX512BW-NEXT: sete %al
; X64-AVX512BW-NEXT: vzeroupper
; X64-AVX512BW-NEXT: retq
;
; X64-AVX512F-LABEL: length96_eq_const:
; X64-AVX512F: # %bb.0:
; X64-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512F-NEXT: vmovdqu 64(%rdi), %ymm1
; X64-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k0
; X64-AVX512F-NEXT: vpcmpneqd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %k1
; X64-AVX512F-NEXT: kortestw %k1, %k0
; X64-AVX512F-NEXT: sete %al
; X64-AVX512F-NEXT: vzeroupper
; X64-AVX512F-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length96_eq_const:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: pushq %rax
; X64-MIC-AVX2-NEXT: movl $.L.str, %esi
; X64-MIC-AVX2-NEXT: movl $96, %edx
; X64-MIC-AVX2-NEXT: callq memcmp
; X64-MIC-AVX2-NEXT: testl %eax, %eax
; X64-MIC-AVX2-NEXT: sete %al
; X64-MIC-AVX2-NEXT: popq %rcx
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length96_eq_const:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vmovdqu 64(%rdi), %ymm1
; X64-MIC-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k0
; X64-MIC-AVX512F-NEXT: vpcmpneqd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %k1
; X64-MIC-AVX512F-NEXT: kortestw %k1, %k0
; X64-MIC-AVX512F-NEXT: sete %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 96) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
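
; 127-byte tests: the AVX512 configurations expand the equality check with two
; overlapping 64-byte loads (offsets 0 and 63); everything else calls memcmp.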
define i32 @length127(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length127:
; X64: # %bb.0:
; X64-NEXT: movl $127, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 127) nounwind
ret i32 %m
}
define i1 @length127_eq(ptr %x, ptr %y) nounwind {
; X64-SSE-LABEL: length127_eq:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $127, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: setne %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length127_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: pushq %rax
; X64-AVX1-NEXT: movl $127, %edx
; X64-AVX1-NEXT: callq memcmp
; X64-AVX1-NEXT: testl %eax, %eax
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: popq %rcx
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length127_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: pushq %rax
; X64-AVX2-NEXT: movl $127, %edx
; X64-AVX2-NEXT: callq memcmp
; X64-AVX2-NEXT: testl %eax, %eax
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: popq %rcx
; X64-AVX2-NEXT: retq
;
; X64-AVX512BW-LABEL: length127_eq:
; X64-AVX512BW: # %bb.0:
; X64-AVX512BW-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512BW-NEXT: vmovdqu64 63(%rdi), %zmm1
; X64-AVX512BW-NEXT: vpcmpneqb 63(%rsi), %zmm1, %k0
; X64-AVX512BW-NEXT: vpcmpneqb (%rsi), %zmm0, %k1
; X64-AVX512BW-NEXT: kortestq %k0, %k1
; X64-AVX512BW-NEXT: setne %al
; X64-AVX512BW-NEXT: vzeroupper
; X64-AVX512BW-NEXT: retq
;
; X64-AVX512F-LABEL: length127_eq:
; X64-AVX512F: # %bb.0:
; X64-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512F-NEXT: vmovdqu64 63(%rdi), %zmm1
; X64-AVX512F-NEXT: vpcmpneqd 63(%rsi), %zmm1, %k0
; X64-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k1
; X64-AVX512F-NEXT: kortestw %k0, %k1
; X64-AVX512F-NEXT: setne %al
; X64-AVX512F-NEXT: vzeroupper
; X64-AVX512F-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length127_eq:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: pushq %rax
; X64-MIC-AVX2-NEXT: movl $127, %edx
; X64-MIC-AVX2-NEXT: callq memcmp
; X64-MIC-AVX2-NEXT: testl %eax, %eax
; X64-MIC-AVX2-NEXT: setne %al
; X64-MIC-AVX2-NEXT: popq %rcx
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length127_eq:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vmovdqu64 63(%rdi), %zmm1
; X64-MIC-AVX512F-NEXT: vpcmpneqd 63(%rsi), %zmm1, %k0
; X64-MIC-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k1
; X64-MIC-AVX512F-NEXT: kortestw %k0, %k1
; X64-MIC-AVX512F-NEXT: setne %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 127) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length127_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length127_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $127, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 127) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length127_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length127_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $127, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 127) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length127_eq_const(ptr %X) nounwind {
; X64-SSE-LABEL: length127_eq_const:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $.L.str, %esi
; X64-SSE-NEXT: movl $127, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: sete %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length127_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: pushq %rax
; X64-AVX1-NEXT: movl $.L.str, %esi
; X64-AVX1-NEXT: movl $127, %edx
; X64-AVX1-NEXT: callq memcmp
; X64-AVX1-NEXT: testl %eax, %eax
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: popq %rcx
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length127_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: pushq %rax
; X64-AVX2-NEXT: movl $.L.str, %esi
; X64-AVX2-NEXT: movl $127, %edx
; X64-AVX2-NEXT: callq memcmp
; X64-AVX2-NEXT: testl %eax, %eax
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: popq %rcx
; X64-AVX2-NEXT: retq
;
; X64-AVX512BW-LABEL: length127_eq_const:
; X64-AVX512BW: # %bb.0:
; X64-AVX512BW-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512BW-NEXT: vmovdqu64 63(%rdi), %zmm1
; X64-AVX512BW-NEXT: vpcmpneqb .L.str+63(%rip), %zmm1, %k0
; X64-AVX512BW-NEXT: vpcmpneqb .L.str(%rip), %zmm0, %k1
; X64-AVX512BW-NEXT: kortestq %k0, %k1
; X64-AVX512BW-NEXT: sete %al
; X64-AVX512BW-NEXT: vzeroupper
; X64-AVX512BW-NEXT: retq
;
; X64-AVX512F-LABEL: length127_eq_const:
; X64-AVX512F: # %bb.0:
; X64-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512F-NEXT: vmovdqu64 63(%rdi), %zmm1
; X64-AVX512F-NEXT: vpcmpneqd .L.str+63(%rip), %zmm1, %k0
; X64-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k1
; X64-AVX512F-NEXT: kortestw %k0, %k1
; X64-AVX512F-NEXT: sete %al
; X64-AVX512F-NEXT: vzeroupper
; X64-AVX512F-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length127_eq_const:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: pushq %rax
; X64-MIC-AVX2-NEXT: movl $.L.str, %esi
; X64-MIC-AVX2-NEXT: movl $127, %edx
; X64-MIC-AVX2-NEXT: callq memcmp
; X64-MIC-AVX2-NEXT: testl %eax, %eax
; X64-MIC-AVX2-NEXT: sete %al
; X64-MIC-AVX2-NEXT: popq %rcx
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length127_eq_const:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vmovdqu64 63(%rdi), %zmm1
; X64-MIC-AVX512F-NEXT: vpcmpneqd .L.str+63(%rip), %zmm1, %k0
; X64-MIC-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k1
; X64-MIC-AVX512F-NEXT: kortestw %k0, %k1
; X64-MIC-AVX512F-NEXT: sete %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 127) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
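
; 128-byte tests: same pattern as 127 bytes, but with two adjacent 64-byte
; compares (offsets 0 and 64) on the AVX512 configurations.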
define i32 @length128(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length128:
; X64: # %bb.0:
; X64-NEXT: movl $128, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 128) nounwind
ret i32 %m
}
define i1 @length128_eq(ptr %x, ptr %y) nounwind {
; X64-SSE-LABEL: length128_eq:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $128, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: setne %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length128_eq:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: pushq %rax
; X64-AVX1-NEXT: movl $128, %edx
; X64-AVX1-NEXT: callq memcmp
; X64-AVX1-NEXT: testl %eax, %eax
; X64-AVX1-NEXT: setne %al
; X64-AVX1-NEXT: popq %rcx
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length128_eq:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: pushq %rax
; X64-AVX2-NEXT: movl $128, %edx
; X64-AVX2-NEXT: callq memcmp
; X64-AVX2-NEXT: testl %eax, %eax
; X64-AVX2-NEXT: setne %al
; X64-AVX2-NEXT: popq %rcx
; X64-AVX2-NEXT: retq
;
; X64-AVX512BW-LABEL: length128_eq:
; X64-AVX512BW: # %bb.0:
; X64-AVX512BW-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512BW-NEXT: vmovdqu64 64(%rdi), %zmm1
; X64-AVX512BW-NEXT: vpcmpneqb 64(%rsi), %zmm1, %k0
; X64-AVX512BW-NEXT: vpcmpneqb (%rsi), %zmm0, %k1
; X64-AVX512BW-NEXT: kortestq %k0, %k1
; X64-AVX512BW-NEXT: setne %al
; X64-AVX512BW-NEXT: vzeroupper
; X64-AVX512BW-NEXT: retq
;
; X64-AVX512F-LABEL: length128_eq:
; X64-AVX512F: # %bb.0:
; X64-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512F-NEXT: vmovdqu64 64(%rdi), %zmm1
; X64-AVX512F-NEXT: vpcmpneqd 64(%rsi), %zmm1, %k0
; X64-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k1
; X64-AVX512F-NEXT: kortestw %k0, %k1
; X64-AVX512F-NEXT: setne %al
; X64-AVX512F-NEXT: vzeroupper
; X64-AVX512F-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length128_eq:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: pushq %rax
; X64-MIC-AVX2-NEXT: movl $128, %edx
; X64-MIC-AVX2-NEXT: callq memcmp
; X64-MIC-AVX2-NEXT: testl %eax, %eax
; X64-MIC-AVX2-NEXT: setne %al
; X64-MIC-AVX2-NEXT: popq %rcx
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length128_eq:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vmovdqu64 64(%rdi), %zmm1
; X64-MIC-AVX512F-NEXT: vpcmpneqd 64(%rsi), %zmm1, %k0
; X64-MIC-AVX512F-NEXT: vpcmpneqd (%rsi), %zmm0, %k1
; X64-MIC-AVX512F-NEXT: kortestw %k0, %k1
; X64-MIC-AVX512F-NEXT: setne %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 128) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length128_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length128_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $128, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 128) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length128_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length128_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $128, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 128) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length128_eq_const(ptr %X) nounwind {
; X64-SSE-LABEL: length128_eq_const:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movl $.L.str, %esi
; X64-SSE-NEXT: movl $128, %edx
; X64-SSE-NEXT: callq memcmp
; X64-SSE-NEXT: testl %eax, %eax
; X64-SSE-NEXT: sete %al
; X64-SSE-NEXT: popq %rcx
; X64-SSE-NEXT: retq
;
; X64-AVX1-LABEL: length128_eq_const:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: pushq %rax
; X64-AVX1-NEXT: movl $.L.str, %esi
; X64-AVX1-NEXT: movl $128, %edx
; X64-AVX1-NEXT: callq memcmp
; X64-AVX1-NEXT: testl %eax, %eax
; X64-AVX1-NEXT: sete %al
; X64-AVX1-NEXT: popq %rcx
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: length128_eq_const:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: pushq %rax
; X64-AVX2-NEXT: movl $.L.str, %esi
; X64-AVX2-NEXT: movl $128, %edx
; X64-AVX2-NEXT: callq memcmp
; X64-AVX2-NEXT: testl %eax, %eax
; X64-AVX2-NEXT: sete %al
; X64-AVX2-NEXT: popq %rcx
; X64-AVX2-NEXT: retq
;
; X64-AVX512BW-LABEL: length128_eq_const:
; X64-AVX512BW: # %bb.0:
; X64-AVX512BW-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512BW-NEXT: vmovdqu64 64(%rdi), %zmm1
; X64-AVX512BW-NEXT: vpcmpneqb .L.str+64(%rip), %zmm1, %k0
; X64-AVX512BW-NEXT: vpcmpneqb .L.str(%rip), %zmm0, %k1
; X64-AVX512BW-NEXT: kortestq %k0, %k1
; X64-AVX512BW-NEXT: sete %al
; X64-AVX512BW-NEXT: vzeroupper
; X64-AVX512BW-NEXT: retq
;
; X64-AVX512F-LABEL: length128_eq_const:
; X64-AVX512F: # %bb.0:
; X64-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-AVX512F-NEXT: vmovdqu64 64(%rdi), %zmm1
; X64-AVX512F-NEXT: vpcmpneqd .L.str+64(%rip), %zmm1, %k0
; X64-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k1
; X64-AVX512F-NEXT: kortestw %k0, %k1
; X64-AVX512F-NEXT: sete %al
; X64-AVX512F-NEXT: vzeroupper
; X64-AVX512F-NEXT: retq
;
; X64-MIC-AVX2-LABEL: length128_eq_const:
; X64-MIC-AVX2: # %bb.0:
; X64-MIC-AVX2-NEXT: pushq %rax
; X64-MIC-AVX2-NEXT: movl $.L.str, %esi
; X64-MIC-AVX2-NEXT: movl $128, %edx
; X64-MIC-AVX2-NEXT: callq memcmp
; X64-MIC-AVX2-NEXT: testl %eax, %eax
; X64-MIC-AVX2-NEXT: sete %al
; X64-MIC-AVX2-NEXT: popq %rcx
; X64-MIC-AVX2-NEXT: retq
;
; X64-MIC-AVX512F-LABEL: length128_eq_const:
; X64-MIC-AVX512F: # %bb.0:
; X64-MIC-AVX512F-NEXT: vmovdqu64 (%rdi), %zmm0
; X64-MIC-AVX512F-NEXT: vmovdqu64 64(%rdi), %zmm1
; X64-MIC-AVX512F-NEXT: vpcmpneqd .L.str+64(%rip), %zmm1, %k0
; X64-MIC-AVX512F-NEXT: vpcmpneqd .L.str(%rip), %zmm0, %k1
; X64-MIC-AVX512F-NEXT: kortestw %k0, %k1
; X64-MIC-AVX512F-NEXT: sete %al
; X64-MIC-AVX512F-NEXT: vzeroupper
; X64-MIC-AVX512F-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 128) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
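
; From 192 bytes upward nothing is expanded inline; every configuration simply
; calls memcmp.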
define i32 @length192(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length192:
; X64: # %bb.0:
; X64-NEXT: movl $192, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 192) nounwind
ret i32 %m
}
define i1 @length192_eq(ptr %x, ptr %y) nounwind {
; X64-LABEL: length192_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $192, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setne %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 192) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length192_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length192_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $192, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 192) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length192_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length192_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $192, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 192) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length192_eq_const(ptr %X) nounwind {
; X64-LABEL: length192_eq_const:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $.L.str, %esi
; X64-NEXT: movl $192, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 192) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length255(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length255:
; X64: # %bb.0:
; X64-NEXT: movl $255, %edx
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 255) nounwind
ret i32 %m
}
define i1 @length255_eq(ptr %x, ptr %y) nounwind {
; X64-LABEL: length255_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $255, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setne %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 255) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length255_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length255_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $255, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 255) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length255_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length255_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $255, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 255) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length255_eq_const(ptr %X) nounwind {
; X64-LABEL: length255_eq_const:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $.L.str, %esi
; X64-NEXT: movl $255, %edx
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 255) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length256(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length256:
; X64: # %bb.0:
; X64-NEXT: movl $256, %edx # imm = 0x100
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 256) nounwind
ret i32 %m
}
define i1 @length256_eq(ptr %x, ptr %y) nounwind {
; X64-LABEL: length256_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $256, %edx # imm = 0x100
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setne %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 256) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length256_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length256_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $256, %edx # imm = 0x100
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 256) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length256_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length256_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $256, %edx # imm = 0x100
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 256) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length256_eq_const(ptr %X) nounwind {
; X64-LABEL: length256_eq_const:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $.L.str, %esi
; X64-NEXT: movl $256, %edx # imm = 0x100
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 256) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length384(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length384:
; X64: # %bb.0:
; X64-NEXT: movl $384, %edx # imm = 0x180
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 384) nounwind
ret i32 %m
}
define i1 @length384_eq(ptr %x, ptr %y) nounwind {
; X64-LABEL: length384_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $384, %edx # imm = 0x180
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setne %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 384) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length384_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length384_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $384, %edx # imm = 0x180
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 384) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length384_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length384_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $384, %edx # imm = 0x180
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 384) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length384_eq_const(ptr %X) nounwind {
; X64-LABEL: length384_eq_const:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $.L.str, %esi
; X64-NEXT: movl $384, %edx # imm = 0x180
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 384) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length511(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length511:
; X64: # %bb.0:
; X64-NEXT: movl $511, %edx # imm = 0x1FF
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 511) nounwind
ret i32 %m
}
define i1 @length511_eq(ptr %x, ptr %y) nounwind {
; X64-LABEL: length511_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $511, %edx # imm = 0x1FF
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setne %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 511) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length511_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length511_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $511, %edx # imm = 0x1FF
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 511) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length511_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length511_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $511, %edx # imm = 0x1FF
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 511) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length511_eq_const(ptr %X) nounwind {
; X64-LABEL: length511_eq_const:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $.L.str, %esi
; X64-NEXT: movl $511, %edx # imm = 0x1FF
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 511) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
define i32 @length512(ptr %X, ptr %Y) nounwind {
; X64-LABEL: length512:
; X64: # %bb.0:
; X64-NEXT: movl $512, %edx # imm = 0x200
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 512) nounwind
ret i32 %m
}
define i1 @length512_eq(ptr %x, ptr %y) nounwind {
; X64-LABEL: length512_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $512, %edx # imm = 0x200
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setne %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 512) nounwind
%cmp = icmp ne i32 %call, 0
ret i1 %cmp
}
define i1 @length512_lt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length512_lt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $512, %edx # imm = 0x200
; X64-NEXT: callq memcmp
; X64-NEXT: shrl $31, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 512) nounwind
%cmp = icmp slt i32 %call, 0
ret i1 %cmp
}
define i1 @length512_gt(ptr %x, ptr %y) nounwind {
; X64-LABEL: length512_gt:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $512, %edx # imm = 0x200
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: setg %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%call = tail call i32 @memcmp(ptr %x, ptr %y, i64 512) nounwind
%cmp = icmp sgt i32 %call, 0
ret i1 %cmp
}
define i1 @length512_eq_const(ptr %X) nounwind {
; X64-LABEL: length512_eq_const:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movl $.L.str, %esi
; X64-NEXT: movl $512, %edx # imm = 0x200
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr @.str, i64 512) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
; This checks that we do not try to expand huge sizes inline; the call is
; simply forwarded to memcmp.
define i32 @huge_length(ptr %X, ptr %Y) nounwind {
; X64-LABEL: huge_length:
; X64: # %bb.0:
; X64-NEXT: movabsq $9223372036854775807, %rdx # imm = 0x7FFFFFFFFFFFFFFF
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 9223372036854775807) nounwind
ret i32 %m
}
define i1 @huge_length_eq(ptr %X, ptr %Y) nounwind {
; X64-LABEL: huge_length_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: movabsq $9223372036854775807, %rdx # imm = 0x7FFFFFFFFFFFFFFF
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 9223372036854775807) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}
; This checks non-constant sizes.
define i32 @nonconst_length(ptr %X, ptr %Y, i64 %size) nounwind {
; X64-LABEL: nonconst_length:
; X64: # %bb.0:
; X64-NEXT: jmp memcmp # TAILCALL
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 %size) nounwind
ret i32 %m
}
define i1 @nonconst_length_eq(ptr %X, ptr %Y, i64 %size) nounwind {
; X64-LABEL: nonconst_length_eq:
; X64: # %bb.0:
; X64-NEXT: pushq %rax
; X64-NEXT: callq memcmp
; X64-NEXT: testl %eax, %eax
; X64-NEXT: sete %al
; X64-NEXT: popq %rcx
; X64-NEXT: retq
%m = tail call i32 @memcmp(ptr %X, ptr %Y, i64 %size) nounwind
%c = icmp eq i32 %m, 0
ret i1 %c
}