[GlobalISel] Allow expansion of urem by constant in prelegalizer (#145914)

This patch allows urem by a constant to be expanded more efficiently, avoiding the need for expensive udiv instructions. This is part of the resolution to issue #118090.
Author: jyli0116
Committed by: GitHub
Date: 2025-07-02 13:46:36 +01:00
Parent: 0aafeb8ba1
Commit: 9c0743fbc5
7 changed files with 3779 additions and 607 deletions
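The expansion in question is the classic multiply-by-magic-constant trick from "Hacker's Delight": compute LHS / RHS with a widening multiply and a shift, then recover the remainder as LHS - (LHS / RHS) * RHS. A minimal standalone C++ sketch of the idea for urem by 10 follows; the constant, the shift, and the helper name urem10 are illustrative and not taken from the patch.

#include <cassert>
#include <cstdint>

// Unsigned remainder by 10 without a divide instruction: quotient via
// multiply-high + shift, remainder via mul + sub.
uint32_t urem10(uint32_t N) {
  // 0xCCCCCCCD == ceil(2^35 / 10); (N * 0xCCCCCCCD) >> 35 == N / 10 for
  // every 32-bit N.
  uint32_t Q = (uint32_t)(((uint64_t)N * 0xCCCCCCCDu) >> 35);
  return N - Q * 10; // remainder = LHS - (LHS / RHS) * RHS
}

int main() {
  for (uint32_t N = 0; N < 1000000; ++N)
    assert(urem10(N) == N % 10);
  assert(urem10(0xFFFFFFFFu) == 0xFFFFFFFFu % 10u);
}

The combiner below performs the same expansion on generic MIR; only the final mul + sub step is specific to G_UREM, which is why most of the change amounts to renaming the existing G_UDIV helpers.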


@@ -693,18 +693,19 @@ public:
   /// feeding a G_AND instruction \p MI.
   bool matchNarrowBinopFeedingAnd(MachineInstr &MI, BuildFnTy &MatchInfo) const;
 
-  /// Given an G_UDIV \p MI expressing a divide by constant, return an
-  /// expression that implements it by multiplying by a magic number.
+  /// Given an G_UDIV \p MI or G_UREM \p MI expressing a divide by constant,
+  /// return an expression that implements it by multiplying by a magic number.
   /// Ref: "Hacker's Delight" or "The PowerPC Compiler Writer's Guide".
-  MachineInstr *buildUDivUsingMul(MachineInstr &MI) const;
-  /// Combine G_UDIV by constant into a multiply by magic constant.
-  bool matchUDivByConst(MachineInstr &MI) const;
-  void applyUDivByConst(MachineInstr &MI) const;
+  MachineInstr *buildUDivorURemUsingMul(MachineInstr &MI) const;
+  /// Combine G_UDIV or G_UREM by constant into a multiply by magic constant.
+  bool matchUDivorURemByConst(MachineInstr &MI) const;
+  void applyUDivorURemByConst(MachineInstr &MI) const;
 
   /// Given an G_SDIV \p MI expressing a signed divide by constant, return an
   /// expression that implements it by multiplying by a magic number.
   /// Ref: "Hacker's Delight" or "The PowerPC Compiler Writer's Guide".
   MachineInstr *buildSDivUsingMul(MachineInstr &MI) const;
 
   /// Combine G_SDIV by constant into a multiply by magic constant.
   bool matchSDivByConst(MachineInstr &MI) const;
   void applySDivByConst(MachineInstr &MI) const;


@@ -1132,8 +1132,8 @@ def form_bitfield_extract : GICombineGroup<[bitfield_extract_from_sext_inreg,
 def udiv_by_const : GICombineRule<
   (defs root:$root),
   (match (wip_match_opcode G_UDIV):$root,
-   [{ return Helper.matchUDivByConst(*${root}); }]),
-  (apply [{ Helper.applyUDivByConst(*${root}); }])>;
+   [{ return Helper.matchUDivorURemByConst(*${root}); }]),
+  (apply [{ Helper.applyUDivorURemByConst(*${root}); }])>;
 
 def sdiv_by_const : GICombineRule<
   (defs root:$root),
@@ -1156,6 +1156,14 @@ def udiv_by_pow2 : GICombineRule<
 def intdiv_combines : GICombineGroup<[udiv_by_const, sdiv_by_const,
                                       sdiv_by_pow2, udiv_by_pow2]>;
 
+def urem_by_const : GICombineRule<
+  (defs root:$root),
+  (match (G_UREM $dst, $x, $y):$root,
+   [{ return Helper.matchUDivorURemByConst(*${root}); }]),
+  (apply [{ Helper.applyUDivorURemByConst(*${root}); }])>;
+
+def intrem_combines : GICombineGroup<[urem_by_const]>;
+
 def reassoc_ptradd : GICombineRule<
   (defs root:$root, build_fn_matchinfo:$matchinfo),
   (match (wip_match_opcode G_PTR_ADD):$root,
@@ -2048,7 +2056,7 @@ def all_combines : GICombineGroup<[integer_reassoc_combines, trivial_combines,
     constant_fold_cast_op, fabs_fneg_fold,
     intdiv_combines, mulh_combines, redundant_neg_operands,
     and_or_disjoint_mask, fma_combines, fold_binop_into_select,
-    sub_add_reg, select_to_minmax,
+    sub_add_reg, select_to_minmax, intrem_combines,
     fsub_to_fneg, commute_constant_to_rhs, match_ands, match_ors,
     simplify_neg_minmax, combine_concat_vector,
     sext_trunc, zext_trunc, prefer_sign_combines, shuffle_combines,


@@ -5295,12 +5295,13 @@ bool CombinerHelper::matchSubAddSameReg(MachineInstr &MI,
   return false;
 }
 
-MachineInstr *CombinerHelper::buildUDivUsingMul(MachineInstr &MI) const {
-  assert(MI.getOpcode() == TargetOpcode::G_UDIV);
-  auto &UDiv = cast<GenericMachineInstr>(MI);
-  Register Dst = UDiv.getReg(0);
-  Register LHS = UDiv.getReg(1);
-  Register RHS = UDiv.getReg(2);
+MachineInstr *CombinerHelper::buildUDivorURemUsingMul(MachineInstr &MI) const {
+  unsigned Opcode = MI.getOpcode();
+  assert(Opcode == TargetOpcode::G_UDIV || Opcode == TargetOpcode::G_UREM);
+  auto &UDivorRem = cast<GenericMachineInstr>(MI);
+  Register Dst = UDivorRem.getReg(0);
+  Register LHS = UDivorRem.getReg(1);
+  Register RHS = UDivorRem.getReg(2);
   LLT Ty = MRI.getType(Dst);
   LLT ScalarTy = Ty.getScalarType();
   const unsigned EltBits = ScalarTy.getScalarSizeInBits();
@@ -5453,11 +5454,18 @@ MachineInstr *CombinerHelper::buildUDivUsingMul(MachineInstr &MI) const {
   auto IsOne = MIB.buildICmp(
       CmpInst::Predicate::ICMP_EQ,
       Ty.isScalar() ? LLT::scalar(1) : Ty.changeElementSize(1), RHS, One);
-  return MIB.buildSelect(Ty, IsOne, LHS, Q);
+  auto ret = MIB.buildSelect(Ty, IsOne, LHS, Q);
+
+  if (Opcode == TargetOpcode::G_UREM) {
+    auto Prod = MIB.buildMul(Ty, ret, RHS);
+    return MIB.buildSub(Ty, LHS, Prod);
+  }
+  return ret;
 }
 
-bool CombinerHelper::matchUDivByConst(MachineInstr &MI) const {
-  assert(MI.getOpcode() == TargetOpcode::G_UDIV);
+bool CombinerHelper::matchUDivorURemByConst(MachineInstr &MI) const {
+  unsigned Opcode = MI.getOpcode();
+  assert(Opcode == TargetOpcode::G_UDIV || Opcode == TargetOpcode::G_UREM);
   Register Dst = MI.getOperand(0).getReg();
   Register RHS = MI.getOperand(2).getReg();
   LLT DstTy = MRI.getType(Dst);
@@ -5474,7 +5482,8 @@ bool CombinerHelper::matchUDivByConst(MachineInstr &MI) const {
   if (MF.getFunction().hasMinSize())
     return false;
 
-  if (MI.getFlag(MachineInstr::MIFlag::IsExact)) {
+  if (Opcode == TargetOpcode::G_UDIV &&
+      MI.getFlag(MachineInstr::MIFlag::IsExact)) {
     return matchUnaryPredicate(
         MRI, RHS, [](const Constant *C) { return C && !C->isNullValue(); });
   }
@@ -5494,14 +5503,17 @@ bool CombinerHelper::matchUDivByConst(MachineInstr &MI) const {
             {DstTy.isVector() ? DstTy.changeElementSize(1) : LLT::scalar(1),
              DstTy}}))
       return false;
+    if (Opcode == TargetOpcode::G_UREM &&
+        !isLegalOrBeforeLegalizer({TargetOpcode::G_SUB, {DstTy, DstTy}}))
+      return false;
   }
 
   return matchUnaryPredicate(
       MRI, RHS, [](const Constant *C) { return C && !C->isNullValue(); });
 }
 
-void CombinerHelper::applyUDivByConst(MachineInstr &MI) const {
-  auto *NewMI = buildUDivUsingMul(MI);
+void CombinerHelper::applyUDivorURemByConst(MachineInstr &MI) const {
+  auto *NewMI = buildUDivorURemUsingMul(MI);
   replaceSingleDefInstWithReg(MI, NewMI->getOperand(0).getReg());
 }


@@ -4,10 +4,12 @@
 define i32 @f(i64 %0) {
 ; CHECK-LABEL: f:
 ; CHECK: // %bb.0:
-; CHECK-NEXT: mov w8, #10 // =0xa
+; CHECK-NEXT: mov x8, #-7378697629483820647 // =0x9999999999999999
 ; CHECK-NEXT: mov w9, w0
-; CHECK-NEXT: udiv x10, x9, x8
-; CHECK-NEXT: msub x0, x10, x8, x9
+; CHECK-NEXT: mov w10, #10 // =0xa
+; CHECK-NEXT: eor x8, x8, #0x8000000000000003
+; CHECK-NEXT: umulh x8, x9, x8
+; CHECK-NEXT: msub x0, x8, x10, x9
 ; CHECK-NEXT: // kill: def $w0 killed $w0 killed $x0
 ; CHECK-NEXT: ret
   %2 = trunc i64 %0 to i32
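In the new AArch64 sequence above, the mov/eor pair materializes 0x9999999999999999 ^ 0x8000000000000003 = 0x199999999999999A, i.e. 2^64/10 rounded up, so the umulh against the zero-extended input yields the quotient and the final msub yields the remainder. A small C++ check of that reading (the function name is hypothetical; __uint128_t is a GCC/Clang extension):

#include <cassert>
#include <cstdint>

uint32_t urem10_aarch64_style(uint32_t W0) {
  uint64_t X9 = W0;                                            // mov w9, w0
  uint64_t X8 = 0x9999999999999999ull ^ 0x8000000000000003ull; // mov + eor
  uint64_t X10 = 10;                                           // mov w10, #10
  uint64_t Q = (uint64_t)(((__uint128_t)X9 * X8) >> 64);       // umulh x8, x9, x8
  return (uint32_t)(X9 - Q * X10);                             // msub x0, x8, x10, x9
}

int main() {
  for (uint32_t N = 0; N < 1000000; ++N)
    assert(urem10_aarch64_style(N) == N % 10);
  assert(urem10_aarch64_style(0xFFFFFFFFu) == 0xFFFFFFFFu % 10u);
}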

(File diff suppressed because it is too large.)


@@ -211,91 +211,41 @@ define i32 @v_urem_i32_oddk_denom(i32 %num) {
 ; CHECK-LABEL: v_urem_i32_oddk_denom:
 ; CHECK: ; %bb.0:
 ; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
-; CHECK-NEXT: v_rcp_iflag_f32_e32 v1, 0x4996c7d8
-; CHECK-NEXT: v_mov_b32_e32 v2, 0xffed2705
-; CHECK-NEXT: v_mov_b32_e32 v3, 0x12d8fb
-; CHECK-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
-; CHECK-NEXT: v_cvt_u32_f32_e32 v1, v1
-; CHECK-NEXT: v_mul_lo_u32 v2, v1, v2
-; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
-; CHECK-NEXT: v_add_i32_e32 v1, vcc, v1, v2
+; CHECK-NEXT: v_mov_b32_e32 v1, 0xb2a50881
 ; CHECK-NEXT: v_mul_hi_u32 v1, v0, v1
-; CHECK-NEXT: v_mul_lo_u32 v1, v1, v3
+; CHECK-NEXT: v_sub_i32_e32 v2, vcc, v0, v1
+; CHECK-NEXT: v_lshrrev_b32_e32 v2, 1, v2
+; CHECK-NEXT: v_add_i32_e32 v1, vcc, v2, v1
+; CHECK-NEXT: v_lshrrev_b32_e32 v1, 20, v1
+; CHECK-NEXT: v_mov_b32_e32 v2, 0x12d8fb
+; CHECK-NEXT: v_mul_lo_u32 v1, v1, v2
 ; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v1
-; CHECK-NEXT: v_add_i32_e32 v1, vcc, 0xffed2705, v0
-; CHECK-NEXT: v_cmp_ge_u32_e32 vcc, v0, v3
-; CHECK-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
-; CHECK-NEXT: v_add_i32_e32 v1, vcc, 0xffed2705, v0
-; CHECK-NEXT: v_cmp_ge_u32_e32 vcc, v0, v3
-; CHECK-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
 ; CHECK-NEXT: s_setpc_b64 s[30:31]
 %result = urem i32 %num, 1235195
 ret i32 %result
 }
 define <2 x i32> @v_urem_v2i32_oddk_denom(<2 x i32> %num) {
-; GISEL-LABEL: v_urem_v2i32_oddk_denom:
-; GISEL: ; %bb.0:
-; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
-; GISEL-NEXT: v_mov_b32_e32 v2, 0x12d8fb
-; GISEL-NEXT: v_cvt_f32_u32_e32 v3, 0x12d8fb
-; GISEL-NEXT: v_mov_b32_e32 v4, 0xffed2705
-; GISEL-NEXT: v_rcp_iflag_f32_e32 v3, v3
-; GISEL-NEXT: v_mul_f32_e32 v3, 0x4f7ffffe, v3
-; GISEL-NEXT: v_cvt_u32_f32_e32 v3, v3
-; GISEL-NEXT: v_mul_lo_u32 v5, v3, v4
-; GISEL-NEXT: v_mul_hi_u32 v5, v3, v5
-; GISEL-NEXT: v_add_i32_e32 v3, vcc, v3, v5
-; GISEL-NEXT: v_mul_hi_u32 v5, v0, v3
-; GISEL-NEXT: v_mul_hi_u32 v3, v1, v3
-; GISEL-NEXT: v_mul_lo_u32 v5, v5, v2
-; GISEL-NEXT: v_mul_lo_u32 v3, v3, v2
-; GISEL-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
-; GISEL-NEXT: v_sub_i32_e32 v1, vcc, v1, v3
-; GISEL-NEXT: v_add_i32_e32 v3, vcc, v0, v4
-; GISEL-NEXT: v_add_i32_e32 v5, vcc, 0xffed2705, v1
-; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
-; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
-; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v1, v2
-; GISEL-NEXT: v_cndmask_b32_e32 v1, v1, v5, vcc
-; GISEL-NEXT: v_add_i32_e32 v3, vcc, v0, v4
-; GISEL-NEXT: v_add_i32_e32 v4, vcc, 0xffed2705, v1
-; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
-; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
-; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v1, v2
-; GISEL-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
-; GISEL-NEXT: s_setpc_b64 s[30:31]
-;
-; CGP-LABEL: v_urem_v2i32_oddk_denom:
-; CGP: ; %bb.0:
-; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
-; CGP-NEXT: v_rcp_iflag_f32_e32 v2, 0x4996c7d8
-; CGP-NEXT: v_mov_b32_e32 v3, 0xffed2705
-; CGP-NEXT: v_mov_b32_e32 v4, 0x12d8fb
-; CGP-NEXT: v_mul_f32_e32 v2, 0x4f7ffffe, v2
-; CGP-NEXT: v_cvt_u32_f32_e32 v2, v2
-; CGP-NEXT: v_mul_lo_u32 v5, v2, v3
-; CGP-NEXT: v_mul_hi_u32 v5, v2, v5
-; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v5
-; CGP-NEXT: v_mul_hi_u32 v5, v0, v2
-; CGP-NEXT: v_mul_hi_u32 v2, v1, v2
-; CGP-NEXT: v_mul_lo_u32 v5, v5, v4
-; CGP-NEXT: v_mul_lo_u32 v2, v2, v4
-; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
-; CGP-NEXT: v_sub_i32_e32 v1, vcc, v1, v2
-; CGP-NEXT: v_add_i32_e32 v2, vcc, v0, v3
-; CGP-NEXT: v_add_i32_e32 v5, vcc, 0xffed2705, v1
-; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v0, v4
-; CGP-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
-; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v1, v4
-; CGP-NEXT: v_cndmask_b32_e32 v1, v1, v5, vcc
-; CGP-NEXT: v_add_i32_e32 v2, vcc, v0, v3
-; CGP-NEXT: v_add_i32_e32 v3, vcc, 0xffed2705, v1
-; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v0, v4
-; CGP-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
-; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v1, v4
-; CGP-NEXT: v_cndmask_b32_e32 v1, v1, v3, vcc
-; CGP-NEXT: s_setpc_b64 s[30:31]
+; CHECK-LABEL: v_urem_v2i32_oddk_denom:
+; CHECK: ; %bb.0:
+; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; CHECK-NEXT: v_mov_b32_e32 v2, 0xb2a50881
+; CHECK-NEXT: v_mov_b32_e32 v3, 0x12d8fb
+; CHECK-NEXT: v_mul_hi_u32 v4, v0, v2
+; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
+; CHECK-NEXT: v_sub_i32_e32 v5, vcc, v0, v4
+; CHECK-NEXT: v_sub_i32_e32 v6, vcc, v1, v2
+; CHECK-NEXT: v_lshrrev_b32_e32 v5, 1, v5
+; CHECK-NEXT: v_lshrrev_b32_e32 v6, 1, v6
+; CHECK-NEXT: v_add_i32_e32 v4, vcc, v5, v4
+; CHECK-NEXT: v_add_i32_e32 v2, vcc, v6, v2
+; CHECK-NEXT: v_lshrrev_b32_e32 v4, 20, v4
+; CHECK-NEXT: v_lshrrev_b32_e32 v2, 20, v2
+; CHECK-NEXT: v_mul_lo_u32 v4, v4, v3
+; CHECK-NEXT: v_mul_lo_u32 v2, v2, v3
+; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v4
+; CHECK-NEXT: v_sub_i32_e32 v1, vcc, v1, v2
+; CHECK-NEXT: s_setpc_b64 s[30:31]
 %result = urem <2 x i32> %num, <i32 1235195, i32 1235195>
 ret <2 x i32> %result
 }


@@ -968,523 +968,106 @@ define i64 @v_urem_i64_oddk_denom(i64 %num) {
; CHECK-LABEL: v_urem_i64_oddk_denom:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: v_mov_b32_e32 v2, 0x12d8fb ; CHECK-NEXT: v_mov_b32_e32 v2, 0x1fb03c31
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, 0x12d8fb ; CHECK-NEXT: v_mov_b32_e32 v3, 0xd9528440
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v4, 0 ; CHECK-NEXT: v_mov_b32_e32 v4, 0x12d8fb
; CHECK-NEXT: v_mov_b32_e32 v5, 0xffed2705 ; CHECK-NEXT: v_mul_lo_u32 v5, v1, v2
; CHECK-NEXT: v_mac_f32_e32 v3, 0x4f800000, v4 ; CHECK-NEXT: v_mul_lo_u32 v6, v0, v3
; CHECK-NEXT: v_rcp_iflag_f32_e32 v3, v3 ; CHECK-NEXT: v_mul_hi_u32 v7, v0, v2
; CHECK-NEXT: v_mul_f32_e32 v3, 0x5f7ffffc, v3 ; CHECK-NEXT: v_mul_lo_u32 v8, v1, v3
; CHECK-NEXT: v_mul_f32_e32 v4, 0x2f800000, v3 ; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
; CHECK-NEXT: v_trunc_f32_e32 v4, v4 ; CHECK-NEXT: v_mul_hi_u32 v9, v0, v3
; CHECK-NEXT: v_mac_f32_e32 v3, 0xcf800000, v4
; CHECK-NEXT: v_cvt_u32_f32_e32 v4, v4
; CHECK-NEXT: v_cvt_u32_f32_e32 v3, v3
; CHECK-NEXT: v_mul_lo_u32 v6, v4, v5
; CHECK-NEXT: v_mul_lo_u32 v7, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v8, v3, v5
; CHECK-NEXT: v_sub_i32_e32 v6, vcc, v6, v3
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; CHECK-NEXT: v_mul_lo_u32 v8, v4, v7
; CHECK-NEXT: v_mul_hi_u32 v9, v3, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v4, v7
; CHECK-NEXT: v_mul_lo_u32 v10, v3, v6
; CHECK-NEXT: v_mul_lo_u32 v11, v4, v6
; CHECK-NEXT: v_mul_hi_u32 v12, v3, v6
; CHECK-NEXT: v_mul_hi_u32 v6, v4, v6
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v8, v10
; CHECK-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v11, v7
; CHECK-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v12
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v10, v8
; CHECK-NEXT: v_add_i32_e32 v9, vcc, v11, v9
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v8
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v9, v8
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v7
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v6, vcc
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v7, v3, v5
; CHECK-NEXT: v_mul_lo_u32 v5, v4, v5
; CHECK-NEXT: v_mul_lo_u32 v8, v4, v6
; CHECK-NEXT: v_mul_hi_u32 v9, v3, v6
; CHECK-NEXT: v_mul_hi_u32 v6, v4, v6
; CHECK-NEXT: v_sub_i32_e32 v5, vcc, v5, v3
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_mul_lo_u32 v7, v3, v5
; CHECK-NEXT: v_mul_lo_u32 v10, v4, v5
; CHECK-NEXT: v_mul_hi_u32 v11, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v10, v6
; CHECK-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v9
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v11
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v10, v9
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v7
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v6
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v5, vcc
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v3
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v3
; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3 ; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v7, v0, v4
; CHECK-NEXT: v_mul_lo_u32 v8, v1, v4
; CHECK-NEXT: v_mul_hi_u32 v9, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v4, v1, v4
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v8, v3
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v9
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc ; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v7, v5 ; CHECK-NEXT: v_add_i32_e32 v2, vcc, v8, v2
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v6 ; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v9
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc ; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v2 ; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v3, v3, v2 ; CHECK-NEXT: v_lshr_b64 v[2:3], v[2:3], 20
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v4, v5 ; CHECK-NEXT: v_mul_lo_u32 v5, v2, v4
; CHECK-NEXT: v_mul_lo_u32 v4, v4, v2 ; CHECK-NEXT: v_mul_lo_u32 v3, v3, v4
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v4, v3 ; CHECK-NEXT: v_mul_hi_u32 v2, v2, v4
; CHECK-NEXT: v_sub_i32_e64 v0, s[4:5], v0, v6 ; CHECK-NEXT: v_add_i32_e32 v2, vcc, v3, v2
; CHECK-NEXT: v_subb_u32_e64 v4, vcc, v1, v3, s[4:5] ; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
; CHECK-NEXT: v_sub_i32_e32 v1, vcc, v1, v3 ; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v2, vcc
; CHECK-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; CHECK-NEXT: v_cndmask_b32_e64 v3, 0, -1, vcc
; CHECK-NEXT: v_sub_i32_e32 v5, vcc, v0, v2
; CHECK-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v4
; CHECK-NEXT: v_cndmask_b32_e64 v3, -1, v3, s[6:7]
; CHECK-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
; CHECK-NEXT: v_cmp_ge_u32_e64 s[4:5], v5, v2
; CHECK-NEXT: v_cndmask_b32_e64 v2, 0, -1, s[4:5]
; CHECK-NEXT: s_mov_b64 s[4:5], vcc
; CHECK-NEXT: v_subrev_i32_e32 v6, vcc, 0x12d8fb, v5
; CHECK-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v1
; CHECK-NEXT: v_cndmask_b32_e64 v2, -1, v2, s[4:5]
; CHECK-NEXT: v_subbrev_u32_e32 v7, vcc, 0, v1, vcc
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2
; CHECK-NEXT: v_cndmask_b32_e32 v2, v5, v6, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v1, v7, vcc
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v3
; CHECK-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v4, v1, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31] ; CHECK-NEXT: s_setpc_b64 s[30:31]
%result = urem i64 %num, 1235195
ret i64 %result
}
define <2 x i64> @v_urem_v2i64_oddk_denom(<2 x i64> %num) {
; GISEL-LABEL: v_urem_v2i64_oddk_denom: ; CHECK-LABEL: v_urem_v2i64_oddk_denom:
; GISEL: ; %bb.0: ; CHECK: ; %bb.0:
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) ; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v4, 0x12d8fb ; CHECK-NEXT: v_mov_b32_e32 v4, 0x1fb03c31
; GISEL-NEXT: v_cvt_f32_u32_e32 v6, 0x12d8fb ; CHECK-NEXT: v_mov_b32_e32 v5, 0xd9528440
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v7, 0 ; CHECK-NEXT: v_mov_b32_e32 v8, 0x12d8fb
; GISEL-NEXT: s_mov_b32 s4, 1 ; CHECK-NEXT: v_mul_lo_u32 v6, v1, v4
; GISEL-NEXT: v_mov_b32_e32 v5, 0xffed2705 ; CHECK-NEXT: v_mul_lo_u32 v7, v0, v5
; GISEL-NEXT: s_mov_b32 s5, 1 ; CHECK-NEXT: v_mul_hi_u32 v9, v0, v4
; GISEL-NEXT: v_mac_f32_e32 v6, 0x4f800000, v7 ; CHECK-NEXT: v_mul_lo_u32 v10, v1, v5
; GISEL-NEXT: v_rcp_iflag_f32_e32 v6, v6 ; CHECK-NEXT: v_mul_hi_u32 v11, v1, v4
; GISEL-NEXT: s_cmp_lg_u32 s4, 0 ; CHECK-NEXT: v_mul_hi_u32 v12, v0, v5
; GISEL-NEXT: s_subb_u32 s6, 0, 0 ; CHECK-NEXT: v_mul_hi_u32 v13, v1, v5
; GISEL-NEXT: v_mul_f32_e32 v6, 0x5f7ffffc, v6 ; CHECK-NEXT: v_mul_lo_u32 v14, v3, v4
; GISEL-NEXT: v_mul_f32_e32 v7, 0x2f800000, v6 ; CHECK-NEXT: v_mul_lo_u32 v15, v2, v5
; GISEL-NEXT: s_cmp_lg_u32 s5, 0 ; CHECK-NEXT: v_mul_hi_u32 v16, v2, v4
; GISEL-NEXT: s_subb_u32 s7, 0, 0 ; CHECK-NEXT: v_mul_lo_u32 v17, v3, v5
; GISEL-NEXT: v_trunc_f32_e32 v7, v7 ; CHECK-NEXT: v_mul_hi_u32 v4, v3, v4
; GISEL-NEXT: v_mac_f32_e32 v6, 0xcf800000, v7 ; CHECK-NEXT: v_mul_hi_u32 v18, v2, v5
; GISEL-NEXT: v_cvt_u32_f32_e32 v7, v7 ; CHECK-NEXT: v_mul_hi_u32 v19, v3, v5
; GISEL-NEXT: v_cvt_u32_f32_e32 v6, v6 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v7
; GISEL-NEXT: v_mul_lo_u32 v8, v7, v5 ; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; GISEL-NEXT: v_mul_lo_u32 v9, v6, v5 ; CHECK-NEXT: v_add_i32_e32 v7, vcc, v10, v11
; GISEL-NEXT: v_mul_lo_u32 v10, s6, v6 ; CHECK-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; GISEL-NEXT: v_mul_hi_u32 v11, v6, v5 ; CHECK-NEXT: v_add_i32_e32 v11, vcc, v14, v15
; GISEL-NEXT: v_mul_lo_u32 v12, s7, v6 ; CHECK-NEXT: v_cndmask_b32_e64 v14, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v8 ; CHECK-NEXT: v_add_i32_e32 v4, vcc, v17, v4
; GISEL-NEXT: v_mul_lo_u32 v13, v7, v9 ; CHECK-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; GISEL-NEXT: v_mul_hi_u32 v14, v6, v9 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v9
; GISEL-NEXT: v_mul_hi_u32 v9, v7, v9 ; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v12, v8 ; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v12
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v11 ; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v11 ; CHECK-NEXT: v_add_i32_e32 v11, vcc, v11, v16
; GISEL-NEXT: v_mul_lo_u32 v11, v6, v10 ; CHECK-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; GISEL-NEXT: v_mul_lo_u32 v12, v7, v10 ; CHECK-NEXT: v_add_i32_e32 v12, vcc, v4, v18
; GISEL-NEXT: v_mul_hi_u32 v15, v6, v10 ; CHECK-NEXT: v_cndmask_b32_e64 v4, 0, 1, vcc
; GISEL-NEXT: v_mul_hi_u32 v10, v7, v10 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v16, v6, v8 ; CHECK-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; GISEL-NEXT: v_mul_lo_u32 v17, v7, v8 ; CHECK-NEXT: v_add_i32_e32 v6, vcc, v14, v11
; GISEL-NEXT: v_mul_hi_u32 v18, v6, v8 ; CHECK-NEXT: v_add_i32_e32 v10, vcc, v15, v4
; GISEL-NEXT: v_mul_hi_u32 v8, v7, v8 ; CHECK-NEXT: v_add_i32_e32 v4, vcc, v7, v5
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v13, v11 ; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v19, 0, 1, vcc ; CHECK-NEXT: v_add_i32_e32 v6, vcc, v12, v6
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v16 ; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v14 ; CHECK-NEXT: v_add_i32_e32 v7, vcc, v10, v7
; GISEL-NEXT: v_add_i32_e64 v12, s[4:5], v12, v9 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v13, v5
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, 1, s[4:5] ; CHECK-NEXT: v_add_i32_e32 v7, vcc, v19, v7
; GISEL-NEXT: v_add_i32_e64 v9, s[4:5], v17, v9 ; CHECK-NEXT: v_lshr_b64 v[4:5], v[4:5], 20
; GISEL-NEXT: v_cndmask_b32_e64 v17, 0, 1, s[4:5] ; CHECK-NEXT: v_lshr_b64 v[6:7], v[6:7], 20
; GISEL-NEXT: v_add_i32_e64 v11, s[4:5], v11, v14 ; CHECK-NEXT: v_mul_lo_u32 v9, v4, v8
; GISEL-NEXT: v_cndmask_b32_e64 v11, 0, 1, s[4:5] ; CHECK-NEXT: v_mul_lo_u32 v5, v5, v8
; GISEL-NEXT: v_add_i32_e64 v12, s[4:5], v12, v15 ; CHECK-NEXT: v_mul_hi_u32 v4, v4, v8
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[4:5] ; CHECK-NEXT: v_mul_lo_u32 v10, v6, v8
; GISEL-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc ; CHECK-NEXT: v_mul_lo_u32 v7, v7, v8
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v18 ; CHECK-NEXT: v_mul_hi_u32 v6, v6, v8
; GISEL-NEXT: v_cndmask_b32_e64 v18, 0, 1, vcc ; CHECK-NEXT: v_add_i32_e32 v4, vcc, v5, v4
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v19, v11 ; CHECK-NEXT: v_add_i32_e32 v5, vcc, v7, v6
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v14 ; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v9
; GISEL-NEXT: v_add_i32_e32 v14, vcc, v16, v15 ; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v4, vcc
; GISEL-NEXT: v_add_i32_e32 v15, vcc, v17, v18 ; CHECK-NEXT: v_sub_i32_e32 v2, vcc, v2, v10
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v12, v11 ; CHECK-NEXT: v_subb_u32_e32 v3, vcc, v3, v5, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc ; CHECK-NEXT: s_setpc_b64 s[30:31]
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v14
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v13, v12
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v15, v14
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v12
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v13
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v6, v11
; GISEL-NEXT: v_addc_u32_e32 v10, vcc, v7, v10, vcc
; GISEL-NEXT: v_mul_lo_u32 v12, v11, v5
; GISEL-NEXT: v_mul_lo_u32 v13, s6, v11
; GISEL-NEXT: v_mul_hi_u32 v14, v11, v5
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v9
; GISEL-NEXT: v_addc_u32_e32 v7, vcc, v7, v8, vcc
; GISEL-NEXT: v_mul_lo_u32 v8, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v9, s7, v6
; GISEL-NEXT: v_mul_hi_u32 v15, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v16, v10, v5
; GISEL-NEXT: v_mul_lo_u32 v17, v10, v12
; GISEL-NEXT: v_mul_hi_u32 v18, v11, v12
; GISEL-NEXT: v_mul_hi_u32 v12, v10, v12
; GISEL-NEXT: v_mul_lo_u32 v5, v7, v5
; GISEL-NEXT: v_mul_lo_u32 v19, v7, v8
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v16
; GISEL-NEXT: v_mul_hi_u32 v16, v6, v8
; GISEL-NEXT: v_mul_hi_u32 v8, v7, v8
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v13, v14
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v15
; GISEL-NEXT: v_mul_lo_u32 v13, v11, v9
; GISEL-NEXT: v_mul_lo_u32 v14, v10, v9
; GISEL-NEXT: v_mul_hi_u32 v15, v11, v9
; GISEL-NEXT: v_mul_hi_u32 v9, v10, v9
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v17, v13
; GISEL-NEXT: v_cndmask_b32_e64 v17, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v18
; GISEL-NEXT: v_mul_lo_u32 v13, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v18, v7, v5
; GISEL-NEXT: v_add_i32_e64 v13, s[4:5], v19, v13
; GISEL-NEXT: v_cndmask_b32_e64 v19, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e64 v13, s[4:5], v13, v16
; GISEL-NEXT: v_mul_hi_u32 v13, v6, v5
; GISEL-NEXT: v_mul_hi_u32 v5, v7, v5
; GISEL-NEXT: v_add_i32_e64 v12, s[6:7], v14, v12
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[6:7]
; GISEL-NEXT: v_add_i32_e64 v8, s[6:7], v18, v8
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v18, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v12, v15
; GISEL-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v17, vcc, v17, v18
; GISEL-NEXT: v_cndmask_b32_e64 v18, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v13
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v14, vcc, v14, v15
; GISEL-NEXT: v_add_i32_e32 v15, vcc, v19, v18
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v16, v13
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v12, v17
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v15
; GISEL-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v14, vcc, v14, v16
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v15
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v14
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v13
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v11, v12
; GISEL-NEXT: v_addc_u32_e32 v9, vcc, v10, v9, vcc
; GISEL-NEXT: v_mul_lo_u32 v10, v1, v11
; GISEL-NEXT: v_mul_hi_u32 v12, v0, v11
; GISEL-NEXT: v_mul_hi_u32 v11, v1, v11
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; GISEL-NEXT: v_addc_u32_e32 v5, vcc, v7, v5, vcc
; GISEL-NEXT: v_mul_lo_u32 v7, v3, v6
; GISEL-NEXT: v_mul_hi_u32 v8, v2, v6
; GISEL-NEXT: v_mul_hi_u32 v6, v3, v6
; GISEL-NEXT: v_mul_lo_u32 v13, v0, v9
; GISEL-NEXT: v_mul_lo_u32 v14, v1, v9
; GISEL-NEXT: v_mul_hi_u32 v15, v0, v9
; GISEL-NEXT: v_mul_hi_u32 v9, v1, v9
; GISEL-NEXT: v_mul_lo_u32 v16, v2, v5
; GISEL-NEXT: v_mul_lo_u32 v17, v3, v5
; GISEL-NEXT: v_mul_hi_u32 v18, v2, v5
; GISEL-NEXT: v_mul_hi_u32 v5, v3, v5
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v13
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v14, v11
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v7, v16
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v17, v6
; GISEL-NEXT: v_cndmask_b32_e64 v17, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v12
; GISEL-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v11, v15
; GISEL-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v7, v8
; GISEL-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v18
; GISEL-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v13, v10
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v14, v12
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v16, v7
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v17, v8
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v11, v10
; GISEL-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v7
; GISEL-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v12, v11
; GISEL-NEXT: v_mul_lo_u32 v12, v10, v4
; GISEL-NEXT: v_mul_hi_u32 v10, v10, v4
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; GISEL-NEXT: v_mul_lo_u32 v8, v6, v4
; GISEL-NEXT: v_mul_hi_u32 v6, v6, v4
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; GISEL-NEXT: v_mul_lo_u32 v7, v9, v4
; GISEL-NEXT: v_mul_lo_u32 v5, v5, v4
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v7, v10
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; GISEL-NEXT: v_sub_i32_e64 v0, s[4:5], v0, v12
; GISEL-NEXT: v_subb_u32_e64 v6, vcc, v1, v7, s[4:5]
; GISEL-NEXT: v_sub_i32_e32 v1, vcc, v1, v7
; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v0, v4
; GISEL-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
; GISEL-NEXT: v_sub_i32_e64 v2, s[6:7], v2, v8
; GISEL-NEXT: v_subb_u32_e64 v8, vcc, v3, v5, s[6:7]
; GISEL-NEXT: v_sub_i32_e32 v3, vcc, v3, v5
; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v2, v4
; GISEL-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; GISEL-NEXT: v_sub_i32_e32 v9, vcc, v2, v4
; GISEL-NEXT: v_cmp_eq_u32_e64 s[8:9], 0, v6
; GISEL-NEXT: v_cndmask_b32_e64 v7, -1, v7, s[8:9]
; GISEL-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v8
; GISEL-NEXT: v_cndmask_b32_e64 v5, -1, v5, s[4:5]
; GISEL-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[6:7]
; GISEL-NEXT: v_cmp_ge_u32_e64 s[4:5], v9, v4
; GISEL-NEXT: v_cndmask_b32_e64 v10, 0, -1, s[4:5]
; GISEL-NEXT: s_mov_b64 s[4:5], vcc
; GISEL-NEXT: v_subrev_i32_e32 v11, vcc, 0x12d8fb, v9
; GISEL-NEXT: v_sub_i32_e64 v12, s[6:7], v0, v4
; GISEL-NEXT: v_subbrev_u32_e64 v1, s[6:7], 0, v1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u32_e64 s[6:7], v12, v4
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, -1, s[6:7]
; GISEL-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v1
; GISEL-NEXT: v_cndmask_b32_e64 v13, -1, v13, s[4:5]
; GISEL-NEXT: v_sub_i32_e64 v4, s[4:5], v12, v4
; GISEL-NEXT: v_subbrev_u32_e64 v14, s[4:5], 0, v1, s[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v3
; GISEL-NEXT: v_cndmask_b32_e64 v10, -1, v10, s[4:5]
; GISEL-NEXT: v_subbrev_u32_e32 v15, vcc, 0, v3, vcc
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v13
; GISEL-NEXT: v_cndmask_b32_e32 v4, v12, v4, vcc
; GISEL-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v10
; GISEL-NEXT: v_cndmask_b32_e64 v9, v9, v11, s[4:5]
; GISEL-NEXT: v_cndmask_b32_e32 v1, v1, v14, vcc
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, v15, s[4:5]
; GISEL-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v5
; GISEL-NEXT: v_cndmask_b32_e64 v2, v2, v9, s[4:5]
; GISEL-NEXT: v_cndmask_b32_e32 v1, v6, v1, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v3, v8, v3, s[4:5]
; GISEL-NEXT: s_setpc_b64 s[30:31]
;
; CGP-LABEL: v_urem_v2i64_oddk_denom:
; CGP: ; %bb.0:
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: v_mov_b32_e32 v4, 0x12d8fb
; CGP-NEXT: v_cvt_f32_u32_e32 v5, 0x12d8fb
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v6, 0
; CGP-NEXT: v_mov_b32_e32 v7, 0xffed2705
; CGP-NEXT: v_mac_f32_e32 v5, 0x4f800000, v6
; CGP-NEXT: v_rcp_iflag_f32_e32 v5, v5
; CGP-NEXT: v_mul_f32_e32 v5, 0x5f7ffffc, v5
; CGP-NEXT: v_mul_f32_e32 v6, 0x2f800000, v5
; CGP-NEXT: v_trunc_f32_e32 v6, v6
; CGP-NEXT: v_mac_f32_e32 v5, 0xcf800000, v6
; CGP-NEXT: v_cvt_u32_f32_e32 v6, v6
; CGP-NEXT: v_cvt_u32_f32_e32 v5, v5
; CGP-NEXT: v_mul_lo_u32 v8, v6, v7
; CGP-NEXT: v_mul_lo_u32 v9, v5, v7
; CGP-NEXT: v_mul_hi_u32 v10, v5, v7
; CGP-NEXT: v_sub_i32_e32 v8, vcc, v8, v5
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v10
; CGP-NEXT: v_mul_lo_u32 v10, v6, v9
; CGP-NEXT: v_mul_hi_u32 v11, v5, v9
; CGP-NEXT: v_mul_hi_u32 v9, v6, v9
; CGP-NEXT: v_mul_lo_u32 v12, v5, v8
; CGP-NEXT: v_mul_lo_u32 v13, v6, v8
; CGP-NEXT: v_mul_hi_u32 v14, v5, v8
; CGP-NEXT: v_mul_hi_u32 v8, v6, v8
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v12
; CGP-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v13, v9
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v11
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v14
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v12, v10
; CGP-NEXT: v_add_i32_e32 v11, vcc, v13, v11
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v10
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v11, v10
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v10
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v9
; CGP-NEXT: v_addc_u32_e32 v6, vcc, v6, v8, vcc
; CGP-NEXT: v_mul_lo_u32 v8, v5, v7
; CGP-NEXT: v_mul_hi_u32 v9, v5, v7
; CGP-NEXT: v_mul_lo_u32 v7, v6, v7
; CGP-NEXT: v_mul_lo_u32 v10, v6, v8
; CGP-NEXT: v_mul_hi_u32 v11, v5, v8
; CGP-NEXT: v_mul_hi_u32 v8, v6, v8
; CGP-NEXT: v_sub_i32_e32 v7, vcc, v7, v5
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v9
; CGP-NEXT: v_mul_lo_u32 v9, v5, v7
; CGP-NEXT: v_mul_lo_u32 v12, v6, v7
; CGP-NEXT: v_mul_hi_u32 v13, v5, v7
; CGP-NEXT: v_mul_hi_u32 v7, v6, v7
; CGP-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v12, v8
; CGP-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v13
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CGP-NEXT: v_add_i32_e32 v10, vcc, v12, v11
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v9
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v8
; CGP-NEXT: v_addc_u32_e32 v6, vcc, v6, v7, vcc
; CGP-NEXT: v_mul_lo_u32 v7, v1, v5
; CGP-NEXT: v_mul_hi_u32 v8, v0, v5
; CGP-NEXT: v_mul_hi_u32 v9, v1, v5
; CGP-NEXT: v_mul_lo_u32 v10, v3, v5
; CGP-NEXT: v_mul_hi_u32 v11, v2, v5
; CGP-NEXT: v_mul_hi_u32 v5, v3, v5
; CGP-NEXT: v_mul_lo_u32 v12, v0, v6
; CGP-NEXT: v_mul_lo_u32 v13, v1, v6
; CGP-NEXT: v_mul_hi_u32 v14, v0, v6
; CGP-NEXT: v_mul_hi_u32 v15, v1, v6
; CGP-NEXT: v_mul_lo_u32 v16, v2, v6
; CGP-NEXT: v_mul_lo_u32 v17, v3, v6
; CGP-NEXT: v_mul_hi_u32 v18, v2, v6
; CGP-NEXT: v_mul_hi_u32 v6, v3, v6
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v12
; CGP-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v13, v9
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v16
; CGP-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v17, v5
; CGP-NEXT: v_cndmask_b32_e64 v17, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v8
; CGP-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v9, v14
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v11
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v18
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v7, vcc, v12, v7
; CGP-NEXT: v_add_i32_e32 v9, vcc, v13, v9
; CGP-NEXT: v_add_i32_e32 v10, vcc, v16, v10
; CGP-NEXT: v_add_i32_e32 v11, vcc, v17, v11
; CGP-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CGP-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v10
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v9, v8
; CGP-NEXT: v_mul_lo_u32 v9, v7, v4
; CGP-NEXT: v_mul_hi_u32 v7, v7, v4
; CGP-NEXT: v_add_i32_e32 v10, vcc, v11, v10
; CGP-NEXT: v_mul_lo_u32 v11, v5, v4
; CGP-NEXT: v_mul_hi_u32 v5, v5, v4
; CGP-NEXT: v_add_i32_e32 v8, vcc, v15, v8
; CGP-NEXT: v_add_i32_e32 v6, vcc, v6, v10
; CGP-NEXT: v_mul_lo_u32 v8, v8, v4
; CGP-NEXT: v_mul_lo_u32 v6, v6, v4
; CGP-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CGP-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CGP-NEXT: v_sub_i32_e64 v0, s[4:5], v0, v9
; CGP-NEXT: v_subb_u32_e64 v6, vcc, v1, v7, s[4:5]
; CGP-NEXT: v_sub_i32_e32 v1, vcc, v1, v7
; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v0, v4
; CGP-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
; CGP-NEXT: v_sub_i32_e64 v2, s[6:7], v2, v11
; CGP-NEXT: v_subb_u32_e64 v8, vcc, v3, v5, s[6:7]
; CGP-NEXT: v_sub_i32_e32 v3, vcc, v3, v5
; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v2, v4
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; CGP-NEXT: v_sub_i32_e32 v9, vcc, v2, v4
; CGP-NEXT: v_cmp_eq_u32_e64 s[8:9], 0, v6
; CGP-NEXT: v_cndmask_b32_e64 v7, -1, v7, s[8:9]
; CGP-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v8
; CGP-NEXT: v_cndmask_b32_e64 v5, -1, v5, s[4:5]
; CGP-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[6:7]
; CGP-NEXT: v_cmp_ge_u32_e64 s[4:5], v9, v4
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, -1, s[4:5]
; CGP-NEXT: s_mov_b64 s[4:5], vcc
; CGP-NEXT: v_subrev_i32_e32 v11, vcc, 0x12d8fb, v9
; CGP-NEXT: v_sub_i32_e64 v12, s[6:7], v0, v4
; CGP-NEXT: v_subbrev_u32_e64 v1, s[6:7], 0, v1, s[6:7]
; CGP-NEXT: v_cmp_ge_u32_e64 s[6:7], v12, v4
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, -1, s[6:7]
; CGP-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v1
; CGP-NEXT: v_cndmask_b32_e64 v13, -1, v13, s[4:5]
; CGP-NEXT: v_sub_i32_e64 v4, s[4:5], v12, v4
; CGP-NEXT: v_subbrev_u32_e64 v14, s[4:5], 0, v1, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v3
; CGP-NEXT: v_cndmask_b32_e64 v10, -1, v10, s[4:5]
; CGP-NEXT: v_subbrev_u32_e32 v15, vcc, 0, v3, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v13
; CGP-NEXT: v_cndmask_b32_e32 v4, v12, v4, vcc
; CGP-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v10
; CGP-NEXT: v_cndmask_b32_e64 v9, v9, v11, s[4:5]
; CGP-NEXT: v_cndmask_b32_e32 v1, v1, v14, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
; CGP-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
; CGP-NEXT: v_cndmask_b32_e64 v3, v3, v15, s[4:5]
; CGP-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v5
; CGP-NEXT: v_cndmask_b32_e64 v2, v2, v9, s[4:5]
; CGP-NEXT: v_cndmask_b32_e32 v1, v6, v1, vcc
; CGP-NEXT: v_cndmask_b32_e64 v3, v8, v3, s[4:5]
; CGP-NEXT: s_setpc_b64 s[30:31]
%result = urem <2 x i64> %num, <i64 1235195, i64 1235195>
ret <2 x i64> %result
}