; Regression test: an i64 dividend that actually uses 32 bits must not be
; routed down the expansion path that assumes only 24 bits are used. Verifies
; that the AtLeast field of computeNumSignBits is applied correctly and that
; the 32-bit fast path inserts the necessary extend/trunc.
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefix=GFX9 %s

; 64-bit divides and rems should be split into a fast and slow path
; where the fast path uses a 32-bit operation.

define i64 @sdiv64(i64 %a, i64 %b) {
; Fast path (%bb.3) uses a 32-bit division when (hi(a) | hi(b)) == 0;
; slow path (%bb.1) is the full 64-bit magic-reciprocal expansion.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: sdiv64:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_or_b32_e32 v5, v1, v3
; GFX9-NEXT: v_mov_b32_e32 v4, 0
; GFX9-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
; GFX9-NEXT: ; implicit-def: $vgpr4_vgpr5
; GFX9-NEXT: s_and_saveexec_b64 s[4:5], vcc
; GFX9-NEXT: s_xor_b64 s[6:7], exec, s[4:5]
; GFX9-NEXT: s_cbranch_execz .LBB0_2
; GFX9-NEXT: ; %bb.1:
; GFX9-NEXT: v_ashrrev_i32_e32 v9, 31, v3
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v9
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v9, vcc
; GFX9-NEXT: v_xor_b32_e32 v10, v3, v9
; GFX9-NEXT: v_xor_b32_e32 v11, v2, v9
; GFX9-NEXT: v_cvt_f32_u32_e32 v2, v11
; GFX9-NEXT: v_cvt_f32_u32_e32 v3, v10
; GFX9-NEXT: v_sub_co_u32_e32 v7, vcc, 0, v11
; GFX9-NEXT: v_subb_co_u32_e32 v8, vcc, 0, v10, vcc
; GFX9-NEXT: v_madmk_f32 v2, v3, 0x4f800000, v2
; GFX9-NEXT: v_rcp_f32_e32 v2, v2
; GFX9-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; GFX9-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; GFX9-NEXT: v_trunc_f32_e32 v3, v3
; GFX9-NEXT: v_madmk_f32 v2, v3, 0xcf800000, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v6, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v12, v3
; GFX9-NEXT: v_mul_lo_u32 v4, v8, v6
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v7, v6, 0
; GFX9-NEXT: v_mul_lo_u32 v5, v7, v12
; GFX9-NEXT: v_mul_hi_u32 v13, v6, v2
; GFX9-NEXT: v_add3_u32 v5, v3, v5, v4
; GFX9-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v6, v5, 0
; GFX9-NEXT: v_add_co_u32_e32 v13, vcc, v13, v3
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v12, v2, 0
; GFX9-NEXT: v_addc_co_u32_e32 v14, vcc, 0, v4, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v12, v5, 0
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v13, v2
; GFX9-NEXT: v_addc_co_u32_e32 v2, vcc, v14, v3, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v5, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v4
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v3, vcc
; GFX9-NEXT: v_add_co_u32_e32 v13, vcc, v6, v2
; GFX9-NEXT: v_addc_co_u32_e32 v12, vcc, v12, v3, vcc
; GFX9-NEXT: v_mul_lo_u32 v4, v7, v12
; GFX9-NEXT: v_mul_lo_u32 v5, v8, v13
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v7, v13, 0
; GFX9-NEXT: v_add3_u32 v5, v3, v4, v5
; GFX9-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v12, v5, 0
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v13, v5, 0
; GFX9-NEXT: v_mul_hi_u32 v14, v13, v2
; GFX9-NEXT: v_mad_u64_u32 v[7:8], s[4:5], v12, v2, 0
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v14, v5
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v7
; GFX9-NEXT: v_addc_co_u32_e32 v2, vcc, v5, v8, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, 0, v4, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v3
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v4, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v13, v2
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, v12, v3, vcc
; GFX9-NEXT: v_ashrrev_i32_e32 v4, 31, v1
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v0, v4
; GFX9-NEXT: v_xor_b32_e32 v6, v0, v4
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, v1, v4, vcc
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v6, v3, 0
; GFX9-NEXT: v_mul_hi_u32 v7, v6, v2
; GFX9-NEXT: v_xor_b32_e32 v5, v5, v4
; GFX9-NEXT: v_add_co_u32_e32 v7, vcc, v7, v0
; GFX9-NEXT: v_addc_co_u32_e32 v8, vcc, 0, v1, vcc
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v5, v2, 0
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v5, v3, 0
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v7, v0
; GFX9-NEXT: v_addc_co_u32_e32 v0, vcc, v8, v1, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v3, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v0, v2
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v1, vcc
; GFX9-NEXT: v_mul_lo_u32 v7, v10, v2
; GFX9-NEXT: v_mul_lo_u32 v8, v11, v3
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v11, v2, 0
; GFX9-NEXT: v_add3_u32 v1, v1, v8, v7
; GFX9-NEXT: v_sub_u32_e32 v7, v5, v1
; GFX9-NEXT: v_sub_co_u32_e32 v0, vcc, v6, v0
; GFX9-NEXT: v_subb_co_u32_e64 v6, s[4:5], v7, v10, vcc
; GFX9-NEXT: v_sub_co_u32_e64 v7, s[4:5], v0, v11
; GFX9-NEXT: v_subbrev_co_u32_e64 v6, s[4:5], 0, v6, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[4:5], v6, v10
; GFX9-NEXT: v_cndmask_b32_e64 v8, 0, -1, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[4:5], v7, v11
; GFX9-NEXT: v_cndmask_b32_e64 v7, 0, -1, s[4:5]
; GFX9-NEXT: v_cmp_eq_u32_e64 s[4:5], v6, v10
; GFX9-NEXT: v_cndmask_b32_e64 v6, v8, v7, s[4:5]
; GFX9-NEXT: v_add_co_u32_e64 v7, s[4:5], 2, v2
; GFX9-NEXT: v_subb_co_u32_e32 v1, vcc, v5, v1, vcc
; GFX9-NEXT: v_addc_co_u32_e64 v8, s[4:5], 0, v3, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v10
; GFX9-NEXT: v_add_co_u32_e64 v12, s[4:5], 1, v2
; GFX9-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v11
; GFX9-NEXT: v_addc_co_u32_e64 v13, s[4:5], 0, v3, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v0, 0, -1, vcc
; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, v1, v10
; GFX9-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v6
; GFX9-NEXT: v_cndmask_b32_e32 v0, v5, v0, vcc
; GFX9-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GFX9-NEXT: v_cndmask_b32_e64 v1, v12, v7, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v6, v13, v8, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e32 v1, v2, v1, vcc
; GFX9-NEXT: v_xor_b32_e32 v2, v4, v9
; GFX9-NEXT: v_cndmask_b32_e32 v0, v3, v6, vcc
; GFX9-NEXT: v_xor_b32_e32 v1, v1, v2
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v2
; GFX9-NEXT: v_sub_co_u32_e32 v4, vcc, v1, v2
; GFX9-NEXT: v_subb_co_u32_e32 v5, vcc, v0, v2, vcc
; GFX9-NEXT: ; implicit-def: $vgpr2_vgpr3
; GFX9-NEXT: ; implicit-def: $vgpr0_vgpr1
; GFX9-NEXT: .LBB0_2: ; %Flow
; GFX9-NEXT: s_andn2_saveexec_b64 s[4:5], s[6:7]
; GFX9-NEXT: s_cbranch_execz .LBB0_4
; GFX9-NEXT: ; %bb.3:
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_mov_b32_e32 v5, 0
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v1, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v3
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
; GFX9-NEXT: v_add_u32_e32 v3, 1, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v4, v1, v3, vcc
; GFX9-NEXT: .LBB0_4:
; GFX9-NEXT: s_or_b64 exec, exec, s[4:5]
; GFX9-NEXT: v_mov_b32_e32 v0, v4
; GFX9-NEXT: v_mov_b32_e32 v1, v5
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = sdiv i64 %a, %b
  ret i64 %d
}
define i64 @udiv64(i64 %a, i64 %b) {
; Fast path (%bb.3) uses a 32-bit division when (hi(a) | hi(b)) == 0;
; slow path (%bb.1) is the full 64-bit reciprocal expansion.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: udiv64:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_or_b32_e32 v5, v1, v3
; GFX9-NEXT: v_mov_b32_e32 v4, 0
; GFX9-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
; GFX9-NEXT: ; implicit-def: $vgpr4_vgpr5
; GFX9-NEXT: s_and_saveexec_b64 s[4:5], vcc
; GFX9-NEXT: s_xor_b64 s[6:7], exec, s[4:5]
; GFX9-NEXT: s_cbranch_execz .LBB1_2
; GFX9-NEXT: ; %bb.1:
; GFX9-NEXT: v_cvt_f32_u32_e32 v4, v2
; GFX9-NEXT: v_cvt_f32_u32_e32 v5, v3
; GFX9-NEXT: v_sub_co_u32_e32 v10, vcc, 0, v2
; GFX9-NEXT: v_subb_co_u32_e32 v11, vcc, 0, v3, vcc
; GFX9-NEXT: v_madmk_f32 v4, v5, 0x4f800000, v4
; GFX9-NEXT: v_rcp_f32_e32 v4, v4
; GFX9-NEXT: v_mul_f32_e32 v4, 0x5f7ffffc, v4
; GFX9-NEXT: v_mul_f32_e32 v5, 0x2f800000, v4
; GFX9-NEXT: v_trunc_f32_e32 v5, v5
; GFX9-NEXT: v_madmk_f32 v4, v5, 0xcf800000, v4
; GFX9-NEXT: v_cvt_u32_f32_e32 v8, v5
; GFX9-NEXT: v_cvt_u32_f32_e32 v9, v4
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v8
; GFX9-NEXT: v_mul_lo_u32 v7, v11, v9
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v9, 0
; GFX9-NEXT: v_add3_u32 v7, v5, v6, v7
; GFX9-NEXT: v_mul_hi_u32 v12, v9, v4
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v9, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v12, v5
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v8, v4, 0
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v6, vcc
; GFX9-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v8, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v12, v4
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v13, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v7, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v6
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v5, vcc
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v9, v4
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, v8, v5, vcc
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v13
; GFX9-NEXT: v_mul_lo_u32 v7, v11, v12
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v12, 0
; GFX9-NEXT: v_add3_u32 v7, v5, v6, v7
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v13, v7, 0
; GFX9-NEXT: v_mad_u64_u32 v[7:8], s[4:5], v12, v7, 0
; GFX9-NEXT: v_mul_hi_u32 v11, v12, v4
; GFX9-NEXT: v_mad_u64_u32 v[9:10], s[4:5], v13, v4, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v11, v7
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, 0, v8, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v9
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v7, v10, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v6, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v5
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v12, v4
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, v13, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v0, v7, 0
; GFX9-NEXT: v_mul_hi_u32 v8, v0, v6
; GFX9-NEXT: v_add_co_u32_e32 v8, vcc, v8, v4
; GFX9-NEXT: v_addc_co_u32_e32 v9, vcc, 0, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v1, v6, 0
; GFX9-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v1, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v8, v4
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v9, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v7, vcc
; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v4, v6
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, 0, v5, vcc
; GFX9-NEXT: v_mul_lo_u32 v8, v3, v6
; GFX9-NEXT: v_mul_lo_u32 v9, v2, v7
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v2, v6, 0
; GFX9-NEXT: v_add3_u32 v5, v5, v9, v8
; GFX9-NEXT: v_sub_u32_e32 v8, v1, v5
; GFX9-NEXT: v_sub_co_u32_e32 v0, vcc, v0, v4
; GFX9-NEXT: v_subb_co_u32_e64 v4, s[4:5], v8, v3, vcc
; GFX9-NEXT: v_sub_co_u32_e64 v8, s[4:5], v0, v2
; GFX9-NEXT: v_subbrev_co_u32_e64 v4, s[4:5], 0, v4, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[4:5], v4, v3
; GFX9-NEXT: v_cndmask_b32_e64 v9, 0, -1, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[4:5], v8, v2
; GFX9-NEXT: v_cndmask_b32_e64 v8, 0, -1, s[4:5]
; GFX9-NEXT: v_cmp_eq_u32_e64 s[4:5], v4, v3
; GFX9-NEXT: v_cndmask_b32_e64 v4, v9, v8, s[4:5]
; GFX9-NEXT: v_add_co_u32_e64 v8, s[4:5], 2, v6
; GFX9-NEXT: v_subb_co_u32_e32 v1, vcc, v1, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e64 v9, s[4:5], 0, v7, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_add_co_u32_e64 v10, s[4:5], 1, v6
; GFX9-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_addc_co_u32_e64 v11, s[4:5], 0, v7, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v0, 0, -1, vcc
; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v4
; GFX9-NEXT: v_cndmask_b32_e32 v0, v5, v0, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v4, v11, v9, s[4:5]
; GFX9-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GFX9-NEXT: v_cndmask_b32_e64 v0, v10, v8, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e32 v5, v7, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v4, v6, v0, vcc
; GFX9-NEXT: ; implicit-def: $vgpr2_vgpr3
; GFX9-NEXT: ; implicit-def: $vgpr0_vgpr1
; GFX9-NEXT: .LBB1_2: ; %Flow
; GFX9-NEXT: s_andn2_saveexec_b64 s[4:5], s[6:7]
; GFX9-NEXT: s_cbranch_execz .LBB1_4
; GFX9-NEXT: ; %bb.3:
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_mov_b32_e32 v5, 0
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v1, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v3
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
; GFX9-NEXT: v_add_u32_e32 v3, 1, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v4, v1, v3, vcc
; GFX9-NEXT: .LBB1_4:
; GFX9-NEXT: s_or_b64 exec, exec, s[4:5]
; GFX9-NEXT: v_mov_b32_e32 v0, v4
; GFX9-NEXT: v_mov_b32_e32 v1, v5
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = udiv i64 %a, %b
  ret i64 %d
}
define i64 @srem64(i64 %a, i64 %b) {
; Fast path (%bb.3) uses a 32-bit remainder when (hi(a) | hi(b)) == 0;
; slow path (%bb.1) is the full 64-bit expansion.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: srem64:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_or_b32_e32 v5, v1, v3
; GFX9-NEXT: v_mov_b32_e32 v4, 0
; GFX9-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
; GFX9-NEXT: ; implicit-def: $vgpr4_vgpr5
; GFX9-NEXT: s_and_saveexec_b64 s[4:5], vcc
; GFX9-NEXT: s_xor_b64 s[8:9], exec, s[4:5]
; GFX9-NEXT: s_cbranch_execz .LBB2_2
; GFX9-NEXT: ; %bb.1:
; GFX9-NEXT: v_ashrrev_i32_e32 v4, 31, v3
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v4
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v4, vcc
; GFX9-NEXT: v_xor_b32_e32 v9, v3, v4
; GFX9-NEXT: v_xor_b32_e32 v10, v2, v4
; GFX9-NEXT: v_cvt_f32_u32_e32 v2, v10
; GFX9-NEXT: v_cvt_f32_u32_e32 v3, v9
; GFX9-NEXT: v_sub_co_u32_e32 v7, vcc, 0, v10
; GFX9-NEXT: v_subb_co_u32_e32 v8, vcc, 0, v9, vcc
; GFX9-NEXT: v_madmk_f32 v2, v3, 0x4f800000, v2
; GFX9-NEXT: v_rcp_f32_e32 v2, v2
; GFX9-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; GFX9-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; GFX9-NEXT: v_trunc_f32_e32 v3, v3
; GFX9-NEXT: v_madmk_f32 v2, v3, 0xcf800000, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v6, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v11, v3
; GFX9-NEXT: v_mul_lo_u32 v4, v8, v6
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v7, v6, 0
; GFX9-NEXT: v_mul_lo_u32 v5, v7, v11
; GFX9-NEXT: v_mul_hi_u32 v12, v6, v2
; GFX9-NEXT: v_add3_u32 v5, v3, v5, v4
; GFX9-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v6, v5, 0
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v12, v3
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v11, v2, 0
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v4, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v11, v5, 0
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v12, v2
; GFX9-NEXT: v_addc_co_u32_e32 v2, vcc, v13, v3, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v5, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v4
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v3, vcc
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v6, v2
; GFX9-NEXT: v_addc_co_u32_e32 v11, vcc, v11, v3, vcc
; GFX9-NEXT: v_mul_lo_u32 v4, v7, v11
; GFX9-NEXT: v_mul_lo_u32 v5, v8, v12
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v7, v12, 0
; GFX9-NEXT: v_add3_u32 v5, v3, v4, v5
; GFX9-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v11, v5, 0
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v12, v5, 0
; GFX9-NEXT: v_mul_hi_u32 v13, v12, v2
; GFX9-NEXT: v_mad_u64_u32 v[7:8], s[4:5], v11, v2, 0
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v13, v5
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v7
; GFX9-NEXT: v_addc_co_u32_e32 v2, vcc, v5, v8, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, 0, v4, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v3
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v4, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v12, v2
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, v11, v3, vcc
; GFX9-NEXT: v_ashrrev_i32_e32 v5, 31, v1
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v0, v5
; GFX9-NEXT: v_xor_b32_e32 v6, v0, v5
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v1, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v6, v3, 0
; GFX9-NEXT: v_mul_hi_u32 v7, v6, v2
; GFX9-NEXT: v_xor_b32_e32 v4, v4, v5
; GFX9-NEXT: v_add_co_u32_e32 v7, vcc, v7, v0
; GFX9-NEXT: v_addc_co_u32_e32 v8, vcc, 0, v1, vcc
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v4, v2, 0
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v4, v3, 0
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v7, v0
; GFX9-NEXT: v_addc_co_u32_e32 v0, vcc, v8, v1, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v3, vcc
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v0, v2
; GFX9-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc
; GFX9-NEXT: v_mul_lo_u32 v2, v9, v0
; GFX9-NEXT: v_mul_lo_u32 v3, v10, v1
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v10, v0, 0
; GFX9-NEXT: v_add3_u32 v1, v1, v3, v2
; GFX9-NEXT: v_sub_u32_e32 v2, v4, v1
; GFX9-NEXT: v_sub_co_u32_e32 v0, vcc, v6, v0
; GFX9-NEXT: v_subb_co_u32_e64 v2, s[4:5], v2, v9, vcc
; GFX9-NEXT: v_sub_co_u32_e64 v3, s[4:5], v0, v10
; GFX9-NEXT: v_subbrev_co_u32_e64 v6, s[6:7], 0, v2, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v6, v9
; GFX9-NEXT: v_cndmask_b32_e64 v7, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v3, v10
; GFX9-NEXT: v_cndmask_b32_e64 v8, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_eq_u32_e64 s[6:7], v6, v9
; GFX9-NEXT: v_subb_co_u32_e64 v2, s[4:5], v2, v9, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v7, v7, v8, s[6:7]
; GFX9-NEXT: v_sub_co_u32_e64 v8, s[4:5], v3, v10
; GFX9-NEXT: v_subb_co_u32_e32 v1, vcc, v4, v1, vcc
; GFX9-NEXT: v_subbrev_co_u32_e64 v2, s[4:5], 0, v2, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v9
; GFX9-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v7
; GFX9-NEXT: v_cndmask_b32_e64 v4, 0, -1, vcc
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v10
; GFX9-NEXT: v_cndmask_b32_e64 v2, v6, v2, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v6, 0, -1, vcc
; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, v1, v9
; GFX9-NEXT: v_cndmask_b32_e32 v4, v4, v6, vcc
; GFX9-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v2, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v2, v3, v8, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v5
; GFX9-NEXT: v_xor_b32_e32 v1, v1, v5
; GFX9-NEXT: v_sub_co_u32_e32 v4, vcc, v0, v5
; GFX9-NEXT: v_subb_co_u32_e32 v5, vcc, v1, v5, vcc
; GFX9-NEXT: ; implicit-def: $vgpr2_vgpr3
; GFX9-NEXT: ; implicit-def: $vgpr0_vgpr1
; GFX9-NEXT: .LBB2_2: ; %Flow
; GFX9-NEXT: s_andn2_saveexec_b64 s[4:5], s[8:9]
; GFX9-NEXT: s_cbranch_execz .LBB2_4
; GFX9-NEXT: ; %bb.3:
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_mov_b32_e32 v5, 0
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v1, v1, v2
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v1
; GFX9-NEXT: v_sub_u32_e32 v1, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
; GFX9-NEXT: v_sub_u32_e32 v1, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v4, v0, v1, vcc
; GFX9-NEXT: .LBB2_4:
; GFX9-NEXT: s_or_b64 exec, exec, s[4:5]
; GFX9-NEXT: v_mov_b32_e32 v0, v4
; GFX9-NEXT: v_mov_b32_e32 v1, v5
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = srem i64 %a, %b
  ret i64 %d
}
define i64 @urem64(i64 %a, i64 %b) {
; Fast path (%bb.3) uses a 32-bit remainder when (hi(a) | hi(b)) == 0;
; slow path (%bb.1) is the full 64-bit expansion.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: urem64:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_or_b32_e32 v5, v1, v3
; GFX9-NEXT: v_mov_b32_e32 v4, 0
; GFX9-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
; GFX9-NEXT: ; implicit-def: $vgpr4_vgpr5
; GFX9-NEXT: s_and_saveexec_b64 s[4:5], vcc
; GFX9-NEXT: s_xor_b64 s[8:9], exec, s[4:5]
; GFX9-NEXT: s_cbranch_execz .LBB3_2
; GFX9-NEXT: ; %bb.1:
; GFX9-NEXT: v_cvt_f32_u32_e32 v4, v2
; GFX9-NEXT: v_cvt_f32_u32_e32 v5, v3
; GFX9-NEXT: v_sub_co_u32_e32 v10, vcc, 0, v2
; GFX9-NEXT: v_subb_co_u32_e32 v11, vcc, 0, v3, vcc
; GFX9-NEXT: v_madmk_f32 v4, v5, 0x4f800000, v4
; GFX9-NEXT: v_rcp_f32_e32 v4, v4
; GFX9-NEXT: v_mul_f32_e32 v4, 0x5f7ffffc, v4
; GFX9-NEXT: v_mul_f32_e32 v5, 0x2f800000, v4
; GFX9-NEXT: v_trunc_f32_e32 v5, v5
; GFX9-NEXT: v_madmk_f32 v4, v5, 0xcf800000, v4
; GFX9-NEXT: v_cvt_u32_f32_e32 v8, v5
; GFX9-NEXT: v_cvt_u32_f32_e32 v9, v4
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v8
; GFX9-NEXT: v_mul_lo_u32 v7, v11, v9
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v9, 0
; GFX9-NEXT: v_add3_u32 v7, v5, v6, v7
; GFX9-NEXT: v_mul_hi_u32 v12, v9, v4
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v9, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v12, v5
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v8, v4, 0
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v6, vcc
; GFX9-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v8, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v12, v4
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v13, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v7, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v6
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v5, vcc
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v9, v4
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, v8, v5, vcc
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v13
; GFX9-NEXT: v_mul_lo_u32 v7, v11, v12
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v12, 0
; GFX9-NEXT: v_add3_u32 v7, v5, v6, v7
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v13, v7, 0
; GFX9-NEXT: v_mad_u64_u32 v[7:8], s[4:5], v12, v7, 0
; GFX9-NEXT: v_mul_hi_u32 v11, v12, v4
; GFX9-NEXT: v_mad_u64_u32 v[9:10], s[4:5], v13, v4, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v11, v7
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, 0, v8, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v9
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v7, v10, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v6, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v5
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v12, v4
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, v13, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v0, v7, 0
; GFX9-NEXT: v_mul_hi_u32 v8, v0, v6
; GFX9-NEXT: v_add_co_u32_e32 v8, vcc, v8, v4
; GFX9-NEXT: v_addc_co_u32_e32 v9, vcc, 0, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v1, v6, 0
; GFX9-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v1, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v8, v4
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v9, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v7, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v6
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v5, vcc
; GFX9-NEXT: v_mul_lo_u32 v6, v3, v4
; GFX9-NEXT: v_mul_lo_u32 v7, v2, v5
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v2, v4, 0
; GFX9-NEXT: v_add3_u32 v5, v5, v7, v6
; GFX9-NEXT: v_sub_u32_e32 v6, v1, v5
; GFX9-NEXT: v_sub_co_u32_e32 v0, vcc, v0, v4
; GFX9-NEXT: v_subb_co_u32_e64 v4, s[4:5], v6, v3, vcc
; GFX9-NEXT: v_sub_co_u32_e64 v6, s[4:5], v0, v2
; GFX9-NEXT: v_subbrev_co_u32_e64 v7, s[6:7], 0, v4, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v7, v3
; GFX9-NEXT: v_cndmask_b32_e64 v8, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v6, v2
; GFX9-NEXT: v_subb_co_u32_e32 v1, vcc, v1, v5, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v9, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_eq_u32_e64 s[6:7], v7, v3
; GFX9-NEXT: v_subb_co_u32_e64 v4, s[4:5], v4, v3, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_cndmask_b32_e64 v8, v8, v9, s[6:7]
; GFX9-NEXT: v_sub_co_u32_e64 v9, s[4:5], v6, v2
; GFX9-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_subbrev_co_u32_e64 v4, s[4:5], 0, v4, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v8
; GFX9-NEXT: v_cndmask_b32_e32 v2, v5, v2, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v4, v7, v4, s[4:5]
; GFX9-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v5, v1, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v1, v6, v9, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e32 v4, v0, v1, vcc
; GFX9-NEXT: ; implicit-def: $vgpr2_vgpr3
; GFX9-NEXT: ; implicit-def: $vgpr0_vgpr1
; GFX9-NEXT: .LBB3_2: ; %Flow
; GFX9-NEXT: s_andn2_saveexec_b64 s[4:5], s[8:9]
; GFX9-NEXT: s_cbranch_execz .LBB3_4
; GFX9-NEXT: ; %bb.3:
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_mov_b32_e32 v5, 0
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v1, v1, v2
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v1
; GFX9-NEXT: v_sub_u32_e32 v1, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v1, vcc
; GFX9-NEXT: v_sub_u32_e32 v1, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v4, v0, v1, vcc
; GFX9-NEXT: .LBB3_4:
; GFX9-NEXT: s_or_b64 exec, exec, s[4:5]
; GFX9-NEXT: v_mov_b32_e32 v0, v4
; GFX9-NEXT: v_mov_b32_e32 v1, v5
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = urem i64 %a, %b
  ret i64 %d
}
define i32 @sdiv32(i32 %a, i32 %b) {
; Plain 32-bit signed-division expansion; no fast/slow split.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: sdiv32:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; GFX9-NEXT: v_add_u32_e32 v1, v1, v2
; GFX9-NEXT: v_xor_b32_e32 v1, v1, v2
; GFX9-NEXT: v_cvt_f32_u32_e32 v3, v1
; GFX9-NEXT: v_sub_u32_e32 v4, 0, v1
; GFX9-NEXT: v_ashrrev_i32_e32 v5, 31, v0
; GFX9-NEXT: v_add_u32_e32 v0, v0, v5
; GFX9-NEXT: v_rcp_iflag_f32_e32 v3, v3
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v5
; GFX9-NEXT: v_xor_b32_e32 v2, v5, v2
; GFX9-NEXT: v_mul_f32_e32 v3, 0x4f7ffffe, v3
; GFX9-NEXT: v_cvt_u32_f32_e32 v3, v3
; GFX9-NEXT: v_mul_lo_u32 v4, v4, v3
; GFX9-NEXT: v_mul_hi_u32 v4, v3, v4
; GFX9-NEXT: v_add_u32_e32 v3, v3, v4
; GFX9-NEXT: v_mul_hi_u32 v3, v0, v3
; GFX9-NEXT: v_mul_lo_u32 v4, v3, v1
; GFX9-NEXT: v_add_u32_e32 v5, 1, v3
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v4
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_sub_u32_e32 v4, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v3, v3, v5, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
; GFX9-NEXT: v_add_u32_e32 v4, 1, v3
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v0, v3, v4, vcc
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v2
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v2
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = sdiv i32 %a, %b
  ret i32 %d
}
define i32 @udiv32(i32 %a, i32 %b) {
; Plain 32-bit unsigned-division expansion; no fast/slow split.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: udiv32:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_cvt_f32_u32_e32 v2, v1
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v1
; GFX9-NEXT: v_rcp_iflag_f32_e32 v2, v2
; GFX9-NEXT: v_mul_f32_e32 v2, 0x4f7ffffe, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v2, v2
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v2
; GFX9-NEXT: v_mul_hi_u32 v3, v2, v3
; GFX9-NEXT: v_add_u32_e32 v2, v2, v3
; GFX9-NEXT: v_mul_hi_u32 v2, v0, v2
; GFX9-NEXT: v_mul_lo_u32 v3, v2, v1
; GFX9-NEXT: v_add_u32_e32 v4, 1, v2
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v3
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v2, v2, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
; GFX9-NEXT: v_add_u32_e32 v3, 1, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v0, v2, v3, vcc
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = udiv i32 %a, %b
  ret i32 %d
}
define i32 @srem32(i32 %a, i32 %b) {
; Plain 32-bit signed-remainder expansion; no fast/slow split.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: srem32:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; GFX9-NEXT: v_add_u32_e32 v1, v1, v2
; GFX9-NEXT: v_xor_b32_e32 v1, v1, v2
; GFX9-NEXT: v_cvt_f32_u32_e32 v2, v1
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v1
; GFX9-NEXT: v_ashrrev_i32_e32 v4, 31, v0
; GFX9-NEXT: v_add_u32_e32 v0, v0, v4
; GFX9-NEXT: v_rcp_iflag_f32_e32 v2, v2
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v4
; GFX9-NEXT: v_mul_f32_e32 v2, 0x4f7ffffe, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v2, v2
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v2
; GFX9-NEXT: v_mul_hi_u32 v3, v2, v3
; GFX9-NEXT: v_add_u32_e32 v2, v2, v3
; GFX9-NEXT: v_mul_hi_u32 v2, v0, v2
; GFX9-NEXT: v_mul_lo_u32 v2, v2, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v2
; GFX9-NEXT: v_sub_u32_e32 v2, v0, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: v_sub_u32_e32 v2, v0, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v4
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v4
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = srem i32 %a, %b
  ret i32 %d
}
define i32 @urem32(i32 %a, i32 %b) {
; Plain 32-bit unsigned-remainder expansion; no fast/slow split.
; GFX9 checks below are autogenerated by update_llc_test_checks.py.
; GFX9-LABEL: urem32:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_cvt_f32_u32_e32 v2, v1
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v1
; GFX9-NEXT: v_rcp_iflag_f32_e32 v2, v2
; GFX9-NEXT: v_mul_f32_e32 v2, 0x4f7ffffe, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v2, v2
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v2
; GFX9-NEXT: v_mul_hi_u32 v3, v2, v3
; GFX9-NEXT: v_add_u32_e32 v2, v2, v3
; GFX9-NEXT: v_mul_hi_u32 v2, v0, v2
; GFX9-NEXT: v_mul_lo_u32 v2, v2, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v2
; GFX9-NEXT: v_sub_u32_e32 v2, v0, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: v_sub_u32_e32 v2, v0, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v1
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: s_setpc_b64 s[30:31]
  %d = urem i32 %a, %b
  ret i32 %d
}
|
|
|
|
; sdiv + srem of the same i64 operands, returned as a <2 x i64>.
; The expansion is split into two paths guarded by (hi(a) | hi(b)) != 0:
;   %bb.1 (.LBB8_1 region): full 64-bit signed expansion — sign-fix both
;   operands, Newton–Raphson style reciprocal, long-multiply back and two
;   quotient/remainder correction rounds, then re-apply the signs.
;   %bb.3: 32-bit unsigned fast path taken when both high words are zero,
;   producing quotient (v4) and remainder (v6) with zero high words.
define <2 x i64> @sdivrem64(i64 %a, i64 %b) {
; GFX9-LABEL: sdivrem64:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_or_b32_e32 v5, v1, v3
; GFX9-NEXT: v_mov_b32_e32 v4, 0
; GFX9-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
; GFX9-NEXT: ; implicit-def: $vgpr6_vgpr7
; GFX9-NEXT: ; implicit-def: $vgpr4_vgpr5
; GFX9-NEXT: s_and_saveexec_b64 s[4:5], vcc
; GFX9-NEXT: s_xor_b64 s[10:11], exec, s[4:5]
; GFX9-NEXT: s_cbranch_execz .LBB8_2
; GFX9-NEXT: ; %bb.1:
; GFX9-NEXT: v_ashrrev_i32_e32 v9, 31, v3
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v9
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v9, vcc
; GFX9-NEXT: v_xor_b32_e32 v10, v3, v9
; GFX9-NEXT: v_xor_b32_e32 v11, v2, v9
; GFX9-NEXT: v_cvt_f32_u32_e32 v2, v11
; GFX9-NEXT: v_cvt_f32_u32_e32 v3, v10
; GFX9-NEXT: v_sub_co_u32_e32 v7, vcc, 0, v11
; GFX9-NEXT: v_subb_co_u32_e32 v8, vcc, 0, v10, vcc
; GFX9-NEXT: v_madmk_f32 v2, v3, 0x4f800000, v2
; GFX9-NEXT: v_rcp_f32_e32 v2, v2
; GFX9-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; GFX9-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; GFX9-NEXT: v_trunc_f32_e32 v3, v3
; GFX9-NEXT: v_madmk_f32 v2, v3, 0xcf800000, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v6, v2
; GFX9-NEXT: v_cvt_u32_f32_e32 v12, v3
; GFX9-NEXT: v_mul_lo_u32 v4, v8, v6
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v7, v6, 0
; GFX9-NEXT: v_mul_lo_u32 v5, v7, v12
; GFX9-NEXT: v_mul_hi_u32 v13, v6, v2
; GFX9-NEXT: v_add3_u32 v5, v3, v5, v4
; GFX9-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v6, v5, 0
; GFX9-NEXT: v_add_co_u32_e32 v13, vcc, v13, v3
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v12, v2, 0
; GFX9-NEXT: v_addc_co_u32_e32 v14, vcc, 0, v4, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v12, v5, 0
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v13, v2
; GFX9-NEXT: v_addc_co_u32_e32 v2, vcc, v14, v3, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v5, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v4
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v3, vcc
; GFX9-NEXT: v_add_co_u32_e32 v13, vcc, v6, v2
; GFX9-NEXT: v_addc_co_u32_e32 v12, vcc, v12, v3, vcc
; GFX9-NEXT: v_mul_lo_u32 v4, v7, v12
; GFX9-NEXT: v_mul_lo_u32 v5, v8, v13
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v7, v13, 0
; GFX9-NEXT: v_add3_u32 v5, v3, v4, v5
; GFX9-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v12, v5, 0
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v13, v5, 0
; GFX9-NEXT: v_mul_hi_u32 v14, v13, v2
; GFX9-NEXT: v_mad_u64_u32 v[7:8], s[4:5], v12, v2, 0
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v14, v5
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v7
; GFX9-NEXT: v_addc_co_u32_e32 v2, vcc, v5, v8, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, 0, v4, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v2, v3
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v4, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v13, v2
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, v12, v3, vcc
; GFX9-NEXT: v_ashrrev_i32_e32 v7, 31, v1
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v0, v7
; GFX9-NEXT: v_xor_b32_e32 v5, v0, v7
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v1, v7, vcc
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v5, v3, 0
; GFX9-NEXT: v_mul_hi_u32 v6, v5, v2
; GFX9-NEXT: v_xor_b32_e32 v4, v4, v7
; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v6, v0
; GFX9-NEXT: v_addc_co_u32_e32 v8, vcc, 0, v1, vcc
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v4, v2, 0
; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v4, v3, 0
; GFX9-NEXT: v_add_co_u32_e32 v0, vcc, v6, v0
; GFX9-NEXT: v_addc_co_u32_e32 v0, vcc, v8, v1, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v3, vcc
; GFX9-NEXT: v_add_co_u32_e32 v2, vcc, v0, v2
; GFX9-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v1, vcc
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v2
; GFX9-NEXT: v_mul_lo_u32 v8, v11, v3
; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v11, v2, 0
; GFX9-NEXT: v_add3_u32 v1, v1, v8, v6
; GFX9-NEXT: v_sub_u32_e32 v6, v4, v1
; GFX9-NEXT: v_sub_co_u32_e32 v0, vcc, v5, v0
; GFX9-NEXT: v_subb_co_u32_e64 v6, s[4:5], v6, v10, vcc
; GFX9-NEXT: v_sub_co_u32_e64 v8, s[4:5], v0, v11
; GFX9-NEXT: v_subbrev_co_u32_e64 v12, s[6:7], 0, v6, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v12, v10
; GFX9-NEXT: v_cndmask_b32_e64 v5, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v8, v11
; GFX9-NEXT: v_cndmask_b32_e64 v13, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_eq_u32_e64 s[6:7], v12, v10
; GFX9-NEXT: v_cndmask_b32_e64 v5, v5, v13, s[6:7]
; GFX9-NEXT: v_add_co_u32_e64 v13, s[6:7], 2, v2
; GFX9-NEXT: v_addc_co_u32_e64 v14, s[6:7], 0, v3, s[6:7]
; GFX9-NEXT: v_add_co_u32_e64 v15, s[6:7], 1, v2
; GFX9-NEXT: v_subb_co_u32_e32 v1, vcc, v4, v1, vcc
; GFX9-NEXT: v_addc_co_u32_e64 v16, s[6:7], 0, v3, s[6:7]
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v10
; GFX9-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v5
; GFX9-NEXT: v_cndmask_b32_e64 v4, 0, -1, vcc
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v11
; GFX9-NEXT: v_cndmask_b32_e64 v5, v16, v14, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e64 v14, 0, -1, vcc
; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, v1, v10
; GFX9-NEXT: v_cndmask_b32_e32 v4, v4, v14, vcc
; GFX9-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4
; GFX9-NEXT: v_cndmask_b32_e64 v4, v15, v13, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e32 v3, v3, v5, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v2, v2, v4, vcc
; GFX9-NEXT: v_xor_b32_e32 v5, v7, v9
; GFX9-NEXT: v_xor_b32_e32 v2, v2, v5
; GFX9-NEXT: v_xor_b32_e32 v3, v3, v5
; GFX9-NEXT: v_sub_co_u32_e64 v4, s[8:9], v2, v5
; GFX9-NEXT: v_subb_co_u32_e64 v2, s[4:5], v6, v10, s[4:5]
; GFX9-NEXT: v_subb_co_u32_e64 v5, s[8:9], v3, v5, s[8:9]
; GFX9-NEXT: v_sub_co_u32_e64 v3, s[4:5], v8, v11
; GFX9-NEXT: v_subbrev_co_u32_e64 v2, s[4:5], 0, v2, s[4:5]
; GFX9-NEXT: v_cndmask_b32_e64 v2, v12, v2, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v2, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v2, v8, v3, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: v_xor_b32_e32 v0, v0, v7
; GFX9-NEXT: v_xor_b32_e32 v1, v1, v7
; GFX9-NEXT: v_sub_co_u32_e32 v6, vcc, v0, v7
; GFX9-NEXT: v_subb_co_u32_e32 v7, vcc, v1, v7, vcc
; GFX9-NEXT: ; implicit-def: $vgpr2_vgpr3
; GFX9-NEXT: ; implicit-def: $vgpr0_vgpr1
; GFX9-NEXT: .LBB8_2: ; %Flow
; GFX9-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; GFX9-NEXT: s_cbranch_execz .LBB8_4
; GFX9-NEXT: ; %bb.3:
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_mov_b32_e32 v5, 0
; GFX9-NEXT: v_mov_b32_e32 v7, v5
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v1, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v3
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v6, v0, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v4, v1, v4, vcc
; GFX9-NEXT: .LBB8_4:
; GFX9-NEXT: s_or_b64 exec, exec, s[4:5]
; GFX9-NEXT: v_mov_b32_e32 v0, v4
; GFX9-NEXT: v_mov_b32_e32 v1, v5
; GFX9-NEXT: v_mov_b32_e32 v2, v6
; GFX9-NEXT: v_mov_b32_e32 v3, v7
; GFX9-NEXT: s_setpc_b64 s[30:31]
%d = sdiv i64 %a, %b ; quotient -> element 0
%r = srem i64 %a, %b ; remainder of the same operands -> element 1
%ins.0 = insertelement <2 x i64> undef, i64 %d, i32 0
%ins.1 = insertelement <2 x i64> %ins.0, i64 %r, i32 1
ret <2 x i64> %ins.1
}
|
|
|
|
; udiv + urem of the same i64 operands, returned as a <2 x i64>.
; Same fast/slow split as sdivrem64 but without sign fix-ups: %bb.1 is the
; full 64-bit unsigned expansion (reciprocal refinement + two correction
; rounds); %bb.3 is the 32-bit unsigned fast path taken when both high
; words are zero, leaving zero high result words (v5/v7 = 0).
define <2 x i64> @udivrem64(i64 %a, i64 %b) {
; GFX9-LABEL: udivrem64:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_or_b32_e32 v5, v1, v3
; GFX9-NEXT: v_mov_b32_e32 v4, 0
; GFX9-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5]
; GFX9-NEXT: ; implicit-def: $vgpr6_vgpr7
; GFX9-NEXT: ; implicit-def: $vgpr4_vgpr5
; GFX9-NEXT: s_and_saveexec_b64 s[4:5], vcc
; GFX9-NEXT: s_xor_b64 s[8:9], exec, s[4:5]
; GFX9-NEXT: s_cbranch_execz .LBB9_2
; GFX9-NEXT: ; %bb.1:
; GFX9-NEXT: v_cvt_f32_u32_e32 v4, v2
; GFX9-NEXT: v_cvt_f32_u32_e32 v5, v3
; GFX9-NEXT: v_sub_co_u32_e32 v10, vcc, 0, v2
; GFX9-NEXT: v_subb_co_u32_e32 v11, vcc, 0, v3, vcc
; GFX9-NEXT: v_madmk_f32 v4, v5, 0x4f800000, v4
; GFX9-NEXT: v_rcp_f32_e32 v4, v4
; GFX9-NEXT: v_mul_f32_e32 v4, 0x5f7ffffc, v4
; GFX9-NEXT: v_mul_f32_e32 v5, 0x2f800000, v4
; GFX9-NEXT: v_trunc_f32_e32 v5, v5
; GFX9-NEXT: v_madmk_f32 v4, v5, 0xcf800000, v4
; GFX9-NEXT: v_cvt_u32_f32_e32 v8, v5
; GFX9-NEXT: v_cvt_u32_f32_e32 v9, v4
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v8
; GFX9-NEXT: v_mul_lo_u32 v7, v11, v9
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v9, 0
; GFX9-NEXT: v_add3_u32 v7, v5, v6, v7
; GFX9-NEXT: v_mul_hi_u32 v12, v9, v4
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v9, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v12, v5
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v8, v4, 0
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, 0, v6, vcc
; GFX9-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v8, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v12, v4
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v13, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v7, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v6
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v5, vcc
; GFX9-NEXT: v_add_co_u32_e32 v12, vcc, v9, v4
; GFX9-NEXT: v_addc_co_u32_e32 v13, vcc, v8, v5, vcc
; GFX9-NEXT: v_mul_lo_u32 v6, v10, v13
; GFX9-NEXT: v_mul_lo_u32 v7, v11, v12
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v12, 0
; GFX9-NEXT: v_add3_u32 v7, v5, v6, v7
; GFX9-NEXT: v_mad_u64_u32 v[5:6], s[4:5], v13, v7, 0
; GFX9-NEXT: v_mad_u64_u32 v[7:8], s[4:5], v12, v7, 0
; GFX9-NEXT: v_mul_hi_u32 v11, v12, v4
; GFX9-NEXT: v_mad_u64_u32 v[9:10], s[4:5], v13, v4, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v11, v7
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, 0, v8, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v9
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v7, v10, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v6, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v4, v5
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v6, vcc
; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v12, v4
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, v13, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v0, v7, 0
; GFX9-NEXT: v_mul_hi_u32 v8, v0, v6
; GFX9-NEXT: v_add_co_u32_e32 v8, vcc, v8, v4
; GFX9-NEXT: v_addc_co_u32_e32 v9, vcc, 0, v5, vcc
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v1, v6, 0
; GFX9-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v1, v7, 0
; GFX9-NEXT: v_add_co_u32_e32 v4, vcc, v8, v4
; GFX9-NEXT: v_addc_co_u32_e32 v4, vcc, v9, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v7, vcc
; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v4, v6
; GFX9-NEXT: v_addc_co_u32_e32 v7, vcc, 0, v5, vcc
; GFX9-NEXT: v_mul_lo_u32 v8, v3, v6
; GFX9-NEXT: v_mul_lo_u32 v9, v2, v7
; GFX9-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v2, v6, 0
; GFX9-NEXT: v_add3_u32 v5, v5, v9, v8
; GFX9-NEXT: v_sub_u32_e32 v8, v1, v5
; GFX9-NEXT: v_sub_co_u32_e32 v0, vcc, v0, v4
; GFX9-NEXT: v_subb_co_u32_e64 v8, s[4:5], v8, v3, vcc
; GFX9-NEXT: v_sub_co_u32_e64 v9, s[4:5], v0, v2
; GFX9-NEXT: v_subbrev_co_u32_e64 v10, s[6:7], 0, v8, s[4:5]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v10, v3
; GFX9-NEXT: v_cndmask_b32_e64 v4, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_ge_u32_e64 s[6:7], v9, v2
; GFX9-NEXT: v_cndmask_b32_e64 v11, 0, -1, s[6:7]
; GFX9-NEXT: v_cmp_eq_u32_e64 s[6:7], v10, v3
; GFX9-NEXT: v_cndmask_b32_e64 v4, v4, v11, s[6:7]
; GFX9-NEXT: v_add_co_u32_e64 v11, s[6:7], 2, v6
; GFX9-NEXT: v_addc_co_u32_e64 v12, s[6:7], 0, v7, s[6:7]
; GFX9-NEXT: v_add_co_u32_e64 v13, s[6:7], 1, v6
; GFX9-NEXT: v_subb_co_u32_e32 v1, vcc, v1, v5, vcc
; GFX9-NEXT: v_addc_co_u32_e64 v14, s[6:7], 0, v7, s[6:7]
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v4
; GFX9-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e64 v4, v14, v12, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e64 v12, 0, -1, vcc
; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_subb_co_u32_e64 v3, s[4:5], v8, v3, s[4:5]
; GFX9-NEXT: v_sub_co_u32_e64 v2, s[4:5], v9, v2
; GFX9-NEXT: v_cndmask_b32_e32 v5, v5, v12, vcc
; GFX9-NEXT: v_subbrev_co_u32_e64 v3, s[4:5], 0, v3, s[4:5]
; GFX9-NEXT: v_cmp_ne_u32_e32 vcc, 0, v5
; GFX9-NEXT: v_cndmask_b32_e64 v3, v10, v3, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e32 v5, v7, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v4, v13, v11, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e32 v7, v1, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e64 v1, v9, v2, s[6:7]
; GFX9-NEXT: v_cndmask_b32_e32 v4, v6, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v6, v0, v1, vcc
; GFX9-NEXT: ; implicit-def: $vgpr2_vgpr3
; GFX9-NEXT: ; implicit-def: $vgpr0_vgpr1
; GFX9-NEXT: .LBB9_2: ; %Flow
; GFX9-NEXT: s_andn2_saveexec_b64 s[4:5], s[8:9]
; GFX9-NEXT: s_cbranch_execz .LBB9_4
; GFX9-NEXT: ; %bb.3:
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_mov_b32_e32 v5, 0
; GFX9-NEXT: v_mov_b32_e32 v7, v5
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v1, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v3
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v6, v0, v3, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v4, v1, v4, vcc
; GFX9-NEXT: .LBB9_4:
; GFX9-NEXT: s_or_b64 exec, exec, s[4:5]
; GFX9-NEXT: v_mov_b32_e32 v0, v4
; GFX9-NEXT: v_mov_b32_e32 v1, v5
; GFX9-NEXT: v_mov_b32_e32 v2, v6
; GFX9-NEXT: v_mov_b32_e32 v3, v7
; GFX9-NEXT: s_setpc_b64 s[30:31]
%d = udiv i64 %a, %b ; quotient -> element 0
%r = urem i64 %a, %b ; remainder of the same operands -> element 1
%ins.0 = insertelement <2 x i64> undef, i64 %d, i32 0
%ins.1 = insertelement <2 x i64> %ins.0, i64 %r, i32 1
ret <2 x i64> %ins.1
}
|
|
|
|
; Divide where only 32 bits of each i64 operand are significant: the
; operands are arithmetic-shifted right by 32, so the meaningful value of
; each sits in the low word (= the original high word, v1 / v3).
;
; NOTE(review): the function name says "sdiv" but the IR below performs a
; *udiv*, and the generated checks run the unsigned 32-bit expansion on
; v1/v3 with a zeroed high result word (v_mov_b32 v1, 0). Reducing a udiv
; of sign-extended (ashr) operands to a 32-bit divide is only valid when
; the operands are zero-extended, so either the opcode should be sdiv
; (the 32-significant-sign-bits case this fix targets via
; computeNumSignBits/AtLeast) or the shifts should be lshr. Confirm which
; was intended and regolden with update_llc_test_checks.py.
define i64 @sdiv64_known32(i64 %a, i64 %b) {
; GFX9-LABEL: sdiv64_known32:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_cvt_f32_u32_e32 v0, v3
; GFX9-NEXT: v_sub_u32_e32 v2, 0, v3
; GFX9-NEXT: v_rcp_iflag_f32_e32 v0, v0
; GFX9-NEXT: v_mul_f32_e32 v0, 0x4f7ffffe, v0
; GFX9-NEXT: v_cvt_u32_f32_e32 v0, v0
; GFX9-NEXT: v_mul_lo_u32 v2, v2, v0
; GFX9-NEXT: v_mul_hi_u32 v2, v0, v2
; GFX9-NEXT: v_add_u32_e32 v0, v0, v2
; GFX9-NEXT: v_mul_hi_u32 v0, v1, v0
; GFX9-NEXT: v_mul_lo_u32 v2, v0, v3
; GFX9-NEXT: v_add_u32_e32 v4, 1, v0
; GFX9-NEXT: v_sub_u32_e32 v1, v1, v2
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_sub_u32_e32 v2, v1, v3
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v2, vcc
; GFX9-NEXT: v_add_u32_e32 v2, 1, v0
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v1, v3
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
; GFX9-NEXT: v_mov_b32_e32 v1, 0
; GFX9-NEXT: s_setpc_b64 s[30:31]
%a.ext = ashr i64 %a, 32 ; sign-extend: significant 32 bits = high word of %a
%b.ext = ashr i64 %b, 32 ; sign-extend: significant 32 bits = high word of %b
%d = udiv i64 %a.ext, %b.ext ; NOTE(review): udiv in an "sdiv" test — see note above
ret i64 %d
}
|
|
|
|
; Unsigned divide where both i64 operands are masked to their low 32 bits,
; so the udiv provably fits in 32 bits: codegen is the single 32-bit
; unsigned division sequence on v0/v2 with the high result word set to 0
; (no 64-bit expansion, no fast/slow branch).
define i64 @udiv64_known32(i64 %a, i64 %b) {
; GFX9-LABEL: udiv64_known32:
; GFX9: ; %bb.0:
; GFX9-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GFX9-NEXT: v_cvt_f32_u32_e32 v1, v2
; GFX9-NEXT: v_sub_u32_e32 v3, 0, v2
; GFX9-NEXT: v_rcp_iflag_f32_e32 v1, v1
; GFX9-NEXT: v_mul_f32_e32 v1, 0x4f7ffffe, v1
; GFX9-NEXT: v_cvt_u32_f32_e32 v1, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v3, v1
; GFX9-NEXT: v_mul_hi_u32 v3, v1, v3
; GFX9-NEXT: v_add_u32_e32 v1, v1, v3
; GFX9-NEXT: v_mul_hi_u32 v1, v0, v1
; GFX9-NEXT: v_mul_lo_u32 v3, v1, v2
; GFX9-NEXT: v_add_u32_e32 v4, 1, v1
; GFX9-NEXT: v_sub_u32_e32 v0, v0, v3
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_sub_u32_e32 v3, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v1, v1, v4, vcc
; GFX9-NEXT: v_cndmask_b32_e32 v0, v0, v3, vcc
; GFX9-NEXT: v_add_u32_e32 v3, 1, v1
; GFX9-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
; GFX9-NEXT: v_cndmask_b32_e32 v0, v1, v3, vcc
; GFX9-NEXT: v_mov_b32_e32 v1, 0
; GFX9-NEXT: s_setpc_b64 s[30:31]
%a.mask = and i64 %a, 4294967295 ; keep low 32 bits of %a
%b.mask = and i64 %b, 4294967295 ; keep low 32 bits of %b
%d = udiv i64 %a.mask, %b.mask
ret i64 %d
}
|