|
|
|
|
@@ -968,523 +968,106 @@ define i64 @v_urem_i64_oddk_denom(i64 %num) {
|
|
|
|
|
; CHECK-LABEL: v_urem_i64_oddk_denom:
|
|
|
|
|
; CHECK: ; %bb.0:
|
|
|
|
|
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
|
|
|
|
|
; CHECK-NEXT: v_mov_b32_e32 v2, 0x12d8fb
|
|
|
|
|
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, 0x12d8fb
|
|
|
|
|
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
|
|
|
|
|
; CHECK-NEXT: v_mov_b32_e32 v5, 0xffed2705
|
|
|
|
|
; CHECK-NEXT: v_mac_f32_e32 v3, 0x4f800000, v4
|
|
|
|
|
; CHECK-NEXT: v_rcp_iflag_f32_e32 v3, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_f32_e32 v3, 0x5f7ffffc, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_f32_e32 v4, 0x2f800000, v3
|
|
|
|
|
; CHECK-NEXT: v_trunc_f32_e32 v4, v4
|
|
|
|
|
; CHECK-NEXT: v_mac_f32_e32 v3, 0xcf800000, v4
|
|
|
|
|
; CHECK-NEXT: v_cvt_u32_f32_e32 v4, v4
|
|
|
|
|
; CHECK-NEXT: v_cvt_u32_f32_e32 v3, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v6, v4, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v7, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v8, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_sub_i32_e32 v6, vcc, v6, v3
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v8, v4, v7
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v9, v3, v7
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v7, v4, v7
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v10, v3, v6
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v11, v4, v6
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v12, v3, v6
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v6, v4, v6
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v8, v10
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v11, v7
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v8, v9
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v12
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v10, v8
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v9, vcc, v11, v9
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v8
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v9, v8
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v7
|
|
|
|
|
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v6, vcc
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v7, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v5, v4, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v8, v4, v6
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v9, v3, v6
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v6, v4, v6
|
|
|
|
|
; CHECK-NEXT: v_sub_i32_e32 v5, vcc, v5, v3
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v7, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v10, v4, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v11, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v10, v6
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v9
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v11
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v8, vcc, v10, v9
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v7
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v6
|
|
|
|
|
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v5, vcc
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v3
|
|
|
|
|
; CHECK-NEXT: v_mov_b32_e32 v2, 0x1fb03c31
|
|
|
|
|
; CHECK-NEXT: v_mov_b32_e32 v3, 0xd9528440
|
|
|
|
|
; CHECK-NEXT: v_mov_b32_e32 v4, 0x12d8fb
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v2
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v7, v0, v2
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v8, v1, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v9, v0, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v7, v0, v4
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v8, v1, v4
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v9, v0, v4
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v4, v1, v4
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v8, v3
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v9
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v7, v5
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v6
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v8, v2
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v9
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v7
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v2
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v3, v3, v2
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v4, v5
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v4, v4, v2
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v4, v3
|
|
|
|
|
; CHECK-NEXT: v_sub_i32_e64 v0, s[4:5], v0, v6
|
|
|
|
|
; CHECK-NEXT: v_subb_u32_e64 v4, vcc, v1, v3, s[4:5]
|
|
|
|
|
; CHECK-NEXT: v_sub_i32_e32 v1, vcc, v1, v3
|
|
|
|
|
; CHECK-NEXT: v_cmp_ge_u32_e32 vcc, v0, v2
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v3, 0, -1, vcc
|
|
|
|
|
; CHECK-NEXT: v_sub_i32_e32 v5, vcc, v0, v2
|
|
|
|
|
; CHECK-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v4
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v3, -1, v3, s[6:7]
|
|
|
|
|
; CHECK-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
|
|
|
|
|
; CHECK-NEXT: v_cmp_ge_u32_e64 s[4:5], v5, v2
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v2, 0, -1, s[4:5]
|
|
|
|
|
; CHECK-NEXT: s_mov_b64 s[4:5], vcc
|
|
|
|
|
; CHECK-NEXT: v_subrev_i32_e32 v6, vcc, 0x12d8fb, v5
|
|
|
|
|
; CHECK-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
|
|
|
|
|
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v1
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e64 v2, -1, v2, s[4:5]
|
|
|
|
|
; CHECK-NEXT: v_subbrev_u32_e32 v7, vcc, 0, v1, vcc
|
|
|
|
|
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e32 v2, v5, v6, vcc
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e32 v1, v1, v7, vcc
|
|
|
|
|
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v3
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e32 v0, v0, v2, vcc
|
|
|
|
|
; CHECK-NEXT: v_cndmask_b32_e32 v1, v4, v1, vcc
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
|
|
|
|
|
; CHECK-NEXT: v_lshr_b64 v[2:3], v[2:3], 20
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v5, v2, v4
|
|
|
|
|
; CHECK-NEXT: v_mul_lo_u32 v3, v3, v4
|
|
|
|
|
; CHECK-NEXT: v_mul_hi_u32 v2, v2, v4
|
|
|
|
|
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v3, v2
|
|
|
|
|
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
|
|
|
|
|
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v2, vcc
|
|
|
|
|
; CHECK-NEXT: s_setpc_b64 s[30:31]
|
|
|
|
|
%result = urem i64 %num, 1235195
|
|
|
|
|
ret i64 %result
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
; Tests codegen for <2 x i64> urem by the odd constant 1235195 (0x12d8fb).
; GISEL/CGP prefixes check the magic-number division expansion; CHECK checks
; the multiply-shift (mulhi + lshr 20) form. These lines are autogenerated
; FileCheck assertions -- do not hand-edit the instruction text.
define <2 x i64> @v_urem_v2i64_oddk_denom(<2 x i64> %num) {
; GISEL-LABEL: v_urem_v2i64_oddk_denom:
; GISEL: ; %bb.0:
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v4, 0x12d8fb
; GISEL-NEXT: v_cvt_f32_u32_e32 v6, 0x12d8fb
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v7, 0
; GISEL-NEXT: s_mov_b32 s4, 1
; GISEL-NEXT: v_mov_b32_e32 v5, 0xffed2705
; GISEL-NEXT: s_mov_b32 s5, 1
; GISEL-NEXT: v_mac_f32_e32 v6, 0x4f800000, v7
; GISEL-NEXT: v_rcp_iflag_f32_e32 v6, v6
; GISEL-NEXT: s_cmp_lg_u32 s4, 0
; GISEL-NEXT: s_subb_u32 s6, 0, 0
; GISEL-NEXT: v_mul_f32_e32 v6, 0x5f7ffffc, v6
; GISEL-NEXT: v_mul_f32_e32 v7, 0x2f800000, v6
; GISEL-NEXT: s_cmp_lg_u32 s5, 0
; GISEL-NEXT: s_subb_u32 s7, 0, 0
; GISEL-NEXT: v_trunc_f32_e32 v7, v7
; GISEL-NEXT: v_mac_f32_e32 v6, 0xcf800000, v7
; GISEL-NEXT: v_cvt_u32_f32_e32 v7, v7
; GISEL-NEXT: v_cvt_u32_f32_e32 v6, v6
; GISEL-NEXT: v_mul_lo_u32 v8, v7, v5
; GISEL-NEXT: v_mul_lo_u32 v9, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v10, s6, v6
; GISEL-NEXT: v_mul_hi_u32 v11, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v12, s7, v6
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v8
; GISEL-NEXT: v_mul_lo_u32 v13, v7, v9
; GISEL-NEXT: v_mul_hi_u32 v14, v6, v9
; GISEL-NEXT: v_mul_hi_u32 v9, v7, v9
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v12, v8
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v11
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v11
; GISEL-NEXT: v_mul_lo_u32 v11, v6, v10
; GISEL-NEXT: v_mul_lo_u32 v12, v7, v10
; GISEL-NEXT: v_mul_hi_u32 v15, v6, v10
; GISEL-NEXT: v_mul_hi_u32 v10, v7, v10
; GISEL-NEXT: v_mul_lo_u32 v16, v6, v8
; GISEL-NEXT: v_mul_lo_u32 v17, v7, v8
; GISEL-NEXT: v_mul_hi_u32 v18, v6, v8
; GISEL-NEXT: v_mul_hi_u32 v8, v7, v8
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v13, v11
; GISEL-NEXT: v_cndmask_b32_e64 v19, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v16
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v14
; GISEL-NEXT: v_add_i32_e64 v12, s[4:5], v12, v9
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e64 v9, s[4:5], v17, v9
; GISEL-NEXT: v_cndmask_b32_e64 v17, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e64 v11, s[4:5], v11, v14
; GISEL-NEXT: v_cndmask_b32_e64 v11, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e64 v12, s[4:5], v12, v15
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v18
; GISEL-NEXT: v_cndmask_b32_e64 v18, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v19, v11
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v14
; GISEL-NEXT: v_add_i32_e32 v14, vcc, v16, v15
; GISEL-NEXT: v_add_i32_e32 v15, vcc, v17, v18
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v12, v11
; GISEL-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v14
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v13, v12
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v15, v14
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v12
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v13
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v6, v11
; GISEL-NEXT: v_addc_u32_e32 v10, vcc, v7, v10, vcc
; GISEL-NEXT: v_mul_lo_u32 v12, v11, v5
; GISEL-NEXT: v_mul_lo_u32 v13, s6, v11
; GISEL-NEXT: v_mul_hi_u32 v14, v11, v5
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v9
; GISEL-NEXT: v_addc_u32_e32 v7, vcc, v7, v8, vcc
; GISEL-NEXT: v_mul_lo_u32 v8, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v9, s7, v6
; GISEL-NEXT: v_mul_hi_u32 v15, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v16, v10, v5
; GISEL-NEXT: v_mul_lo_u32 v17, v10, v12
; GISEL-NEXT: v_mul_hi_u32 v18, v11, v12
; GISEL-NEXT: v_mul_hi_u32 v12, v10, v12
; GISEL-NEXT: v_mul_lo_u32 v5, v7, v5
; GISEL-NEXT: v_mul_lo_u32 v19, v7, v8
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v16
; GISEL-NEXT: v_mul_hi_u32 v16, v6, v8
; GISEL-NEXT: v_mul_hi_u32 v8, v7, v8
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v13, v14
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v15
; GISEL-NEXT: v_mul_lo_u32 v13, v11, v9
; GISEL-NEXT: v_mul_lo_u32 v14, v10, v9
; GISEL-NEXT: v_mul_hi_u32 v15, v11, v9
; GISEL-NEXT: v_mul_hi_u32 v9, v10, v9
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v17, v13
; GISEL-NEXT: v_cndmask_b32_e64 v17, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v18
; GISEL-NEXT: v_mul_lo_u32 v13, v6, v5
; GISEL-NEXT: v_mul_lo_u32 v18, v7, v5
; GISEL-NEXT: v_add_i32_e64 v13, s[4:5], v19, v13
; GISEL-NEXT: v_cndmask_b32_e64 v19, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e64 v13, s[4:5], v13, v16
; GISEL-NEXT: v_mul_hi_u32 v13, v6, v5
; GISEL-NEXT: v_mul_hi_u32 v5, v7, v5
; GISEL-NEXT: v_add_i32_e64 v12, s[6:7], v14, v12
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[6:7]
; GISEL-NEXT: v_add_i32_e64 v8, s[6:7], v18, v8
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v18, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v12, v15
; GISEL-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v17, vcc, v17, v18
; GISEL-NEXT: v_cndmask_b32_e64 v18, 0, 1, s[4:5]
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v13
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v14, vcc, v14, v15
; GISEL-NEXT: v_add_i32_e32 v15, vcc, v19, v18
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v16, v13
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v12, v17
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v8, v15
; GISEL-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v14, vcc, v14, v16
; GISEL-NEXT: v_add_i32_e32 v13, vcc, v13, v15
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v14
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v13
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v11, v12
; GISEL-NEXT: v_addc_u32_e32 v9, vcc, v10, v9, vcc
; GISEL-NEXT: v_mul_lo_u32 v10, v1, v11
; GISEL-NEXT: v_mul_hi_u32 v12, v0, v11
; GISEL-NEXT: v_mul_hi_u32 v11, v1, v11
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; GISEL-NEXT: v_addc_u32_e32 v5, vcc, v7, v5, vcc
; GISEL-NEXT: v_mul_lo_u32 v7, v3, v6
; GISEL-NEXT: v_mul_hi_u32 v8, v2, v6
; GISEL-NEXT: v_mul_hi_u32 v6, v3, v6
; GISEL-NEXT: v_mul_lo_u32 v13, v0, v9
; GISEL-NEXT: v_mul_lo_u32 v14, v1, v9
; GISEL-NEXT: v_mul_hi_u32 v15, v0, v9
; GISEL-NEXT: v_mul_hi_u32 v9, v1, v9
; GISEL-NEXT: v_mul_lo_u32 v16, v2, v5
; GISEL-NEXT: v_mul_lo_u32 v17, v3, v5
; GISEL-NEXT: v_mul_hi_u32 v18, v2, v5
; GISEL-NEXT: v_mul_hi_u32 v5, v3, v5
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v13
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v14, v11
; GISEL-NEXT: v_cndmask_b32_e64 v14, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v7, v16
; GISEL-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v17, v6
; GISEL-NEXT: v_cndmask_b32_e64 v17, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v10, v12
; GISEL-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v11, v15
; GISEL-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v7, v8
; GISEL-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v18
; GISEL-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v13, v10
; GISEL-NEXT: v_add_i32_e32 v12, vcc, v14, v12
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v16, v7
; GISEL-NEXT: v_add_i32_e32 v8, vcc, v17, v8
; GISEL-NEXT: v_add_i32_e32 v10, vcc, v11, v10
; GISEL-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v6, vcc, v6, v7
; GISEL-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; GISEL-NEXT: v_add_i32_e32 v11, vcc, v12, v11
; GISEL-NEXT: v_mul_lo_u32 v12, v10, v4
; GISEL-NEXT: v_mul_hi_u32 v10, v10, v4
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; GISEL-NEXT: v_mul_lo_u32 v8, v6, v4
; GISEL-NEXT: v_mul_hi_u32 v6, v6, v4
; GISEL-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; GISEL-NEXT: v_mul_lo_u32 v7, v9, v4
; GISEL-NEXT: v_mul_lo_u32 v5, v5, v4
; GISEL-NEXT: v_add_i32_e32 v7, vcc, v7, v10
; GISEL-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; GISEL-NEXT: v_sub_i32_e64 v0, s[4:5], v0, v12
; GISEL-NEXT: v_subb_u32_e64 v6, vcc, v1, v7, s[4:5]
; GISEL-NEXT: v_sub_i32_e32 v1, vcc, v1, v7
; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v0, v4
; GISEL-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
; GISEL-NEXT: v_sub_i32_e64 v2, s[6:7], v2, v8
; GISEL-NEXT: v_subb_u32_e64 v8, vcc, v3, v5, s[6:7]
; GISEL-NEXT: v_sub_i32_e32 v3, vcc, v3, v5
; GISEL-NEXT: v_cmp_ge_u32_e32 vcc, v2, v4
; GISEL-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; GISEL-NEXT: v_sub_i32_e32 v9, vcc, v2, v4
; GISEL-NEXT: v_cmp_eq_u32_e64 s[8:9], 0, v6
; GISEL-NEXT: v_cndmask_b32_e64 v7, -1, v7, s[8:9]
; GISEL-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v8
; GISEL-NEXT: v_cndmask_b32_e64 v5, -1, v5, s[4:5]
; GISEL-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[6:7]
; GISEL-NEXT: v_cmp_ge_u32_e64 s[4:5], v9, v4
; GISEL-NEXT: v_cndmask_b32_e64 v10, 0, -1, s[4:5]
; GISEL-NEXT: s_mov_b64 s[4:5], vcc
; GISEL-NEXT: v_subrev_i32_e32 v11, vcc, 0x12d8fb, v9
; GISEL-NEXT: v_sub_i32_e64 v12, s[6:7], v0, v4
; GISEL-NEXT: v_subbrev_u32_e64 v1, s[6:7], 0, v1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u32_e64 s[6:7], v12, v4
; GISEL-NEXT: v_cndmask_b32_e64 v13, 0, -1, s[6:7]
; GISEL-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v1
; GISEL-NEXT: v_cndmask_b32_e64 v13, -1, v13, s[4:5]
; GISEL-NEXT: v_sub_i32_e64 v4, s[4:5], v12, v4
; GISEL-NEXT: v_subbrev_u32_e64 v14, s[4:5], 0, v1, s[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v3
; GISEL-NEXT: v_cndmask_b32_e64 v10, -1, v10, s[4:5]
; GISEL-NEXT: v_subbrev_u32_e32 v15, vcc, 0, v3, vcc
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v13
; GISEL-NEXT: v_cndmask_b32_e32 v4, v12, v4, vcc
; GISEL-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v10
; GISEL-NEXT: v_cndmask_b32_e64 v9, v9, v11, s[4:5]
; GISEL-NEXT: v_cndmask_b32_e32 v1, v1, v14, vcc
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, v15, s[4:5]
; GISEL-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v5
; GISEL-NEXT: v_cndmask_b32_e64 v2, v2, v9, s[4:5]
; GISEL-NEXT: v_cndmask_b32_e32 v1, v6, v1, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v3, v8, v3, s[4:5]
; GISEL-NEXT: s_setpc_b64 s[30:31]
;
; CGP-LABEL: v_urem_v2i64_oddk_denom:
; CGP: ; %bb.0:
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: v_mov_b32_e32 v4, 0x12d8fb
; CGP-NEXT: v_cvt_f32_u32_e32 v5, 0x12d8fb
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v6, 0
; CGP-NEXT: v_mov_b32_e32 v7, 0xffed2705
; CGP-NEXT: v_mac_f32_e32 v5, 0x4f800000, v6
; CGP-NEXT: v_rcp_iflag_f32_e32 v5, v5
; CGP-NEXT: v_mul_f32_e32 v5, 0x5f7ffffc, v5
; CGP-NEXT: v_mul_f32_e32 v6, 0x2f800000, v5
; CGP-NEXT: v_trunc_f32_e32 v6, v6
; CGP-NEXT: v_mac_f32_e32 v5, 0xcf800000, v6
; CGP-NEXT: v_cvt_u32_f32_e32 v6, v6
; CGP-NEXT: v_cvt_u32_f32_e32 v5, v5
; CGP-NEXT: v_mul_lo_u32 v8, v6, v7
; CGP-NEXT: v_mul_lo_u32 v9, v5, v7
; CGP-NEXT: v_mul_hi_u32 v10, v5, v7
; CGP-NEXT: v_sub_i32_e32 v8, vcc, v8, v5
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v10
; CGP-NEXT: v_mul_lo_u32 v10, v6, v9
; CGP-NEXT: v_mul_hi_u32 v11, v5, v9
; CGP-NEXT: v_mul_hi_u32 v9, v6, v9
; CGP-NEXT: v_mul_lo_u32 v12, v5, v8
; CGP-NEXT: v_mul_lo_u32 v13, v6, v8
; CGP-NEXT: v_mul_hi_u32 v14, v5, v8
; CGP-NEXT: v_mul_hi_u32 v8, v6, v8
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v12
; CGP-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v13, v9
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v11
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v14
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v12, v10
; CGP-NEXT: v_add_i32_e32 v11, vcc, v13, v11
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v10
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v11, v10
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v10
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v9
; CGP-NEXT: v_addc_u32_e32 v6, vcc, v6, v8, vcc
; CGP-NEXT: v_mul_lo_u32 v8, v5, v7
; CGP-NEXT: v_mul_hi_u32 v9, v5, v7
; CGP-NEXT: v_mul_lo_u32 v7, v6, v7
; CGP-NEXT: v_mul_lo_u32 v10, v6, v8
; CGP-NEXT: v_mul_hi_u32 v11, v5, v8
; CGP-NEXT: v_mul_hi_u32 v8, v6, v8
; CGP-NEXT: v_sub_i32_e32 v7, vcc, v7, v5
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v9
; CGP-NEXT: v_mul_lo_u32 v9, v5, v7
; CGP-NEXT: v_mul_lo_u32 v12, v6, v7
; CGP-NEXT: v_mul_hi_u32 v13, v5, v7
; CGP-NEXT: v_mul_hi_u32 v7, v6, v7
; CGP-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v12, v8
; CGP-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v13
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CGP-NEXT: v_add_i32_e32 v10, vcc, v12, v11
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v9
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v8
; CGP-NEXT: v_addc_u32_e32 v6, vcc, v6, v7, vcc
; CGP-NEXT: v_mul_lo_u32 v7, v1, v5
; CGP-NEXT: v_mul_hi_u32 v8, v0, v5
; CGP-NEXT: v_mul_hi_u32 v9, v1, v5
; CGP-NEXT: v_mul_lo_u32 v10, v3, v5
; CGP-NEXT: v_mul_hi_u32 v11, v2, v5
; CGP-NEXT: v_mul_hi_u32 v5, v3, v5
; CGP-NEXT: v_mul_lo_u32 v12, v0, v6
; CGP-NEXT: v_mul_lo_u32 v13, v1, v6
; CGP-NEXT: v_mul_hi_u32 v14, v0, v6
; CGP-NEXT: v_mul_hi_u32 v15, v1, v6
; CGP-NEXT: v_mul_lo_u32 v16, v2, v6
; CGP-NEXT: v_mul_lo_u32 v17, v3, v6
; CGP-NEXT: v_mul_hi_u32 v18, v2, v6
; CGP-NEXT: v_mul_hi_u32 v6, v3, v6
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v12
; CGP-NEXT: v_cndmask_b32_e64 v12, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v13, v9
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v16
; CGP-NEXT: v_cndmask_b32_e64 v16, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v17, v5
; CGP-NEXT: v_cndmask_b32_e64 v17, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v8
; CGP-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v9, v14
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v10, vcc, v10, v11
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v18
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v7, vcc, v12, v7
; CGP-NEXT: v_add_i32_e32 v9, vcc, v13, v9
; CGP-NEXT: v_add_i32_e32 v10, vcc, v16, v10
; CGP-NEXT: v_add_i32_e32 v11, vcc, v17, v11
; CGP-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CGP-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v5, v10
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v9, v8
; CGP-NEXT: v_mul_lo_u32 v9, v7, v4
; CGP-NEXT: v_mul_hi_u32 v7, v7, v4
; CGP-NEXT: v_add_i32_e32 v10, vcc, v11, v10
; CGP-NEXT: v_mul_lo_u32 v11, v5, v4
; CGP-NEXT: v_mul_hi_u32 v5, v5, v4
; CGP-NEXT: v_add_i32_e32 v8, vcc, v15, v8
; CGP-NEXT: v_add_i32_e32 v6, vcc, v6, v10
; CGP-NEXT: v_mul_lo_u32 v8, v8, v4
; CGP-NEXT: v_mul_lo_u32 v6, v6, v4
; CGP-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CGP-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CGP-NEXT: v_sub_i32_e64 v0, s[4:5], v0, v9
; CGP-NEXT: v_subb_u32_e64 v6, vcc, v1, v7, s[4:5]
; CGP-NEXT: v_sub_i32_e32 v1, vcc, v1, v7
; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v0, v4
; CGP-NEXT: v_cndmask_b32_e64 v7, 0, -1, vcc
; CGP-NEXT: v_sub_i32_e64 v2, s[6:7], v2, v11
; CGP-NEXT: v_subb_u32_e64 v8, vcc, v3, v5, s[6:7]
; CGP-NEXT: v_sub_i32_e32 v3, vcc, v3, v5
; CGP-NEXT: v_cmp_ge_u32_e32 vcc, v2, v4
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, -1, vcc
; CGP-NEXT: v_sub_i32_e32 v9, vcc, v2, v4
; CGP-NEXT: v_cmp_eq_u32_e64 s[8:9], 0, v6
; CGP-NEXT: v_cndmask_b32_e64 v7, -1, v7, s[8:9]
; CGP-NEXT: v_subbrev_u32_e64 v1, s[4:5], 0, v1, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v8
; CGP-NEXT: v_cndmask_b32_e64 v5, -1, v5, s[4:5]
; CGP-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[6:7]
; CGP-NEXT: v_cmp_ge_u32_e64 s[4:5], v9, v4
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, -1, s[4:5]
; CGP-NEXT: s_mov_b64 s[4:5], vcc
; CGP-NEXT: v_subrev_i32_e32 v11, vcc, 0x12d8fb, v9
; CGP-NEXT: v_sub_i32_e64 v12, s[6:7], v0, v4
; CGP-NEXT: v_subbrev_u32_e64 v1, s[6:7], 0, v1, s[6:7]
; CGP-NEXT: v_cmp_ge_u32_e64 s[6:7], v12, v4
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, -1, s[6:7]
; CGP-NEXT: v_subbrev_u32_e64 v3, s[4:5], 0, v3, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v1
; CGP-NEXT: v_cndmask_b32_e64 v13, -1, v13, s[4:5]
; CGP-NEXT: v_sub_i32_e64 v4, s[4:5], v12, v4
; CGP-NEXT: v_subbrev_u32_e64 v14, s[4:5], 0, v1, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v3
; CGP-NEXT: v_cndmask_b32_e64 v10, -1, v10, s[4:5]
; CGP-NEXT: v_subbrev_u32_e32 v15, vcc, 0, v3, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v13
; CGP-NEXT: v_cndmask_b32_e32 v4, v12, v4, vcc
; CGP-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v10
; CGP-NEXT: v_cndmask_b32_e64 v9, v9, v11, s[4:5]
; CGP-NEXT: v_cndmask_b32_e32 v1, v1, v14, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
; CGP-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc
; CGP-NEXT: v_cndmask_b32_e64 v3, v3, v15, s[4:5]
; CGP-NEXT: v_cmp_ne_u32_e64 s[4:5], 0, v5
; CGP-NEXT: v_cndmask_b32_e64 v2, v2, v9, s[4:5]
; CGP-NEXT: v_cndmask_b32_e32 v1, v6, v1, vcc
; CGP-NEXT: v_cndmask_b32_e64 v3, v8, v3, s[4:5]
; CGP-NEXT: s_setpc_b64 s[30:31]
; CHECK-LABEL: v_urem_v2i64_oddk_denom:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: v_mov_b32_e32 v4, 0x1fb03c31
; CHECK-NEXT: v_mov_b32_e32 v5, 0xd9528440
; CHECK-NEXT: v_mov_b32_e32 v8, 0x12d8fb
; CHECK-NEXT: v_mul_lo_u32 v6, v1, v4
; CHECK-NEXT: v_mul_lo_u32 v7, v0, v5
; CHECK-NEXT: v_mul_hi_u32 v9, v0, v4
; CHECK-NEXT: v_mul_lo_u32 v10, v1, v5
; CHECK-NEXT: v_mul_hi_u32 v11, v1, v4
; CHECK-NEXT: v_mul_hi_u32 v12, v0, v5
; CHECK-NEXT: v_mul_hi_u32 v13, v1, v5
; CHECK-NEXT: v_mul_lo_u32 v14, v3, v4
; CHECK-NEXT: v_mul_lo_u32 v15, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v16, v2, v4
; CHECK-NEXT: v_mul_lo_u32 v17, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v4, v3, v4
; CHECK-NEXT: v_mul_hi_u32 v18, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v19, v3, v5
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v7
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v10, v11
; CHECK-NEXT: v_cndmask_b32_e64 v10, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v11, vcc, v14, v15
; CHECK-NEXT: v_cndmask_b32_e64 v14, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v17, v4
; CHECK-NEXT: v_cndmask_b32_e64 v15, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v9
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v12
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v11, vcc, v11, v16
; CHECK-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v12, vcc, v4, v18
; CHECK-NEXT: v_cndmask_b32_e64 v4, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_add_i32_e32 v9, vcc, v10, v9
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v14, v11
; CHECK-NEXT: v_add_i32_e32 v10, vcc, v15, v4
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v7, v5
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v12, v6
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v10, v7
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v13, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v19, v7
; CHECK-NEXT: v_lshr_b64 v[4:5], v[4:5], 20
; CHECK-NEXT: v_lshr_b64 v[6:7], v[6:7], 20
; CHECK-NEXT: v_mul_lo_u32 v9, v4, v8
; CHECK-NEXT: v_mul_lo_u32 v5, v5, v8
; CHECK-NEXT: v_mul_hi_u32 v4, v4, v8
; CHECK-NEXT: v_mul_lo_u32 v10, v6, v8
; CHECK-NEXT: v_mul_lo_u32 v7, v7, v8
; CHECK-NEXT: v_mul_hi_u32 v6, v6, v8
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v5, v4
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v7, v6
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v9
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_sub_i32_e32 v2, vcc, v2, v10
; CHECK-NEXT: v_subb_u32_e32 v3, vcc, v3, v5, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %result = urem <2 x i64> %num, <i64 1235195, i64 1235195>
  ret <2 x i64> %result
}
|
|
|
|
|
|