clang-p2996/llvm/test/CodeGen/AMDGPU/si-spill-sgpr-stack.ll
Commit e0919b189b by Jay Foad: [CodeGen] Renumber slot indexes before register allocation (#66334)
RegAllocGreedy uses SlotIndexes::getApproxInstrDistance to approximate
the length of a live range for its heuristics. Renumbering all slot
indexes with the default instruction distance ensures that this estimate
will be as accurate as possible, and will not depend on the history of
how instructions have been added to and removed from SlotIndexes's maps.

This also means that enabling -early-live-intervals, which runs the
SlotIndexes analysis earlier, will not cause large amounts of churn due
to different register allocator decisions.
2023-09-19 11:18:12 +01:00
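
To make the commit message concrete, here is a minimal standalone sketch, not LLVM code, of the distance estimate it describes. SlotIndexes::getApproxInstrDistance divides an index difference by a nominal per-instruction stride; when indexes carry uneven gaps left by a history of insertions and deletions, that quotient drifts away from the real instruction count, and renumbering everything at the default stride makes it track again. The constant InstrDist and the helper approxInstrDistance below are illustrative stand-ins modeled on the names in the message, not LLVM's actual declarations.

// Standalone illustration (not LLVM code): model slot indexes as plain
// integers and show why renumbering at a fixed stride makes an
// index-difference-based distance estimate track the real instruction count.
#include <cstdio>
#include <vector>

// Stride between consecutive instructions after renumbering. LLVM uses a
// similar per-instruction spacing; the value here is only for illustration.
constexpr unsigned InstrDist = 16;

// Approximate the number of instructions between two indexes the way a
// heuristic might: divide the index difference by the nominal stride.
unsigned approxInstrDistance(unsigned A, unsigned B) {
  return (B - A) / InstrDist;
}

int main() {
  // Indexes left behind by a history of insertions and deletions: the gaps
  // are uneven, so index differences no longer reflect instruction counts.
  std::vector<unsigned> Stale = {0, 16, 24, 28, 120, 200};

  // Renumbered indexes: every instruction is exactly InstrDist apart.
  std::vector<unsigned> Fresh(Stale.size());
  for (unsigned I = 0; I < Fresh.size(); ++I)
    Fresh[I] = I * InstrDist;

  unsigned Real = Stale.size() - 1; // actual end-to-end instruction distance
  std::printf("real distance:       %u\n", Real);
  std::printf("stale approximation: %u\n",
              approxInstrDistance(Stale.front(), Stale.back()));
  std::printf("fresh approximation: %u\n",
              approxInstrDistance(Fresh.front(), Fresh.back()));
  return 0;
}

Running the sketch prints a real distance of 5, a stale approximation of 12, and a fresh approximation of 5, which is the kind of drift the renumbering removes. Presumably the CHECK lines in the test below were regenerated because the allocator's decisions shifted once the estimates became consistent.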

; RUN: llc -march=amdgcn -mcpu=fiji -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefix=ALL -check-prefix=SGPR %s
; Make sure this doesn't crash.
; ALL-LABEL: {{^}}test:
; ALL: s_mov_b32 s[[LO:[0-9]+]], SCRATCH_RSRC_DWORD0
; ALL: s_mov_b32 s[[HI:[0-9]+]], 0xe80000
; Make sure we are handling hazards correctly.
; SGPR: v_mov_b32_e32 v0, vcc_lo
; SGPR-NEXT: s_or_saveexec_b64 [[EXEC_COPY:s\[[0-9]+:[0-9]+\]]], -1
; SGPR-NEXT: buffer_load_dword [[VHI:v[0-9]+]], off, s[{{[0-9]+:[0-9]+}}], 0 offset:4 ; 4-byte Folded Reload
; SGPR-NEXT: s_mov_b64 exec, [[EXEC_COPY]]
; SGPR-NEXT: s_waitcnt vmcnt(0)
; SGPR-NEXT: v_readlane_b32 s{{[0-9]+}}, [[VHI]], 0
; SGPR-NEXT: v_readlane_b32 s{{[0-9]+}}, [[VHI]], 1
; SGPR-NEXT: v_readlane_b32 s{{[0-9]+}}, [[VHI]], 2
; SGPR-NEXT: v_readlane_b32 s[[HI:[0-9]+]], [[VHI]], 3
; SGPR-NEXT: s_or_saveexec_b64 s[100:101], -1
; SGPR-NEXT: s_mov_b64 exec, s[100:101]
; SGPR-NEXT: s_nop 2
; SGPR-NEXT: buffer_store_dword v0, off, s[{{[0-9]+}}:[[HI]]], 0
; SGPR-NEXT: ; kill: killed $vgpr1
; ALL: s_endpgm
define amdgpu_kernel void @test(ptr addrspace(1) %out, i32 %in) {
call void asm sideeffect "", "~{s[0:7]}" ()
call void asm sideeffect "", "~{s[8:15]}" ()
call void asm sideeffect "", "~{s[16:23]}" ()
call void asm sideeffect "", "~{s[24:31]}" ()
call void asm sideeffect "", "~{s[32:39]}" ()
call void asm sideeffect "", "~{s[40:47]}" ()
call void asm sideeffect "", "~{s[48:55]}" ()
call void asm sideeffect "", "~{s[56:63]}" ()
call void asm sideeffect "", "~{s[64:71]}" ()
call void asm sideeffect "", "~{s[72:79]}" ()
call void asm sideeffect "", "~{s[80:87]}" ()
call void asm sideeffect "", "~{s[88:95]}" ()
call void asm sideeffect "", "~{v[0:7]}" ()
call void asm sideeffect "", "~{v[8:15]}" ()
call void asm sideeffect "", "~{v[16:23]}" ()
call void asm sideeffect "", "~{v[24:31]}" ()
call void asm sideeffect "", "~{v[32:39]}" ()
call void asm sideeffect "", "~{v[40:47]}" ()
call void asm sideeffect "", "~{v[48:55]}" ()
call void asm sideeffect "", "~{v[56:63]}" ()
call void asm sideeffect "", "~{v[64:71]}" ()
call void asm sideeffect "", "~{v[72:79]}" ()
call void asm sideeffect "", "~{v[80:87]}" ()
call void asm sideeffect "", "~{v[88:95]}" ()
call void asm sideeffect "", "~{v[96:103]}" ()
call void asm sideeffect "", "~{v[104:111]}" ()
call void asm sideeffect "", "~{v[112:119]}" ()
call void asm sideeffect "", "~{v[120:127]}" ()
call void asm sideeffect "", "~{v[128:135]}" ()
call void asm sideeffect "", "~{v[136:143]}" ()
call void asm sideeffect "", "~{v[144:151]}" ()
call void asm sideeffect "", "~{v[152:159]}" ()
call void asm sideeffect "", "~{v[160:167]}" ()
call void asm sideeffect "", "~{v[168:175]}" ()
call void asm sideeffect "", "~{v[176:183]}" ()
call void asm sideeffect "", "~{v[184:191]}" ()
call void asm sideeffect "", "~{v[192:199]}" ()
call void asm sideeffect "", "~{v[200:207]}" ()
call void asm sideeffect "", "~{v[208:215]}" ()
call void asm sideeffect "", "~{v[216:223]}" ()
call void asm sideeffect "", "~{v[224:231]}" ()
call void asm sideeffect "", "~{v[232:239]}" ()
call void asm sideeffect "", "~{v[240:247]}" ()
call void asm sideeffect "", "~{v[248:255]}" ()
store i32 %in, ptr addrspace(1) %out
ret void
}