; PR context: This change removes the old `nocapture` attribute, replacing it
; with the new `captures` attribute introduced in #116990. It is intended to be
; essentially NFC, replacing existing uses of `nocapture` with `captures(none)`
; without adding any new analysis capabilities. Making use of non-`none` values
; is left for a followup. Notes:
;  * `nocapture` is upgraded to `captures(none)` by the bitcode reader.
;  * `nocapture` is also upgraded by the textual IR reader, to make it easier
;    to use old IR files and reduce test churn.
;  * Helper APIs like `doesNotCapture()` check for `captures(none)`.
;  * MLIR import converts `captures(none)` into an `llvm.nocapture` attribute;
;    the representation in the LLVM IR dialect should be updated separately.
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5
; RUN: opt -S -mtriple=amdgcn-amd-amdhsa -passes=infer-address-spaces %s | FileCheck %s

; Make sure memory instructions where the pointer appears in both a
; pointer and value operand work correctly.

declare void @user(ptr)
|
; Make sure only the pointer operand use of the store is replaced
define void @store_flat_pointer_to_self() {
; CHECK-LABEL: define void @store_flat_pointer_to_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    ret void
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  ; %flat is both the stored value and the store destination. Per the CHECK
  ; lines, only the pointer operand is rewritten to addrspace(5); the value
  ; operand remains the flat pointer.
  store ptr %flat, ptr %flat, align 8
  call void @user(ptr %flat)
  ret void
}
|
|
|
|
; Volatile variant: the CHECK lines show the volatile store is left
; untouched — neither operand is rewritten to addrspace(5).
define void @store_volatile_flat_pointer_to_self() {
; CHECK-LABEL: define void @store_volatile_flat_pointer_to_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    store volatile ptr [[FLAT]], ptr [[FLAT]], align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    ret void
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  store volatile ptr %flat, ptr %flat, align 8
  call void @user(ptr %flat)
  ret void
}
|
|
|
|
; atomicrmw xchg where %flat is both the pointer operand and the exchanged
; value. Per the CHECK lines, the pointer operand is rewritten to
; addrspace(5) while the value operand stays flat.
define ptr @atomicrmw_xchg_flat_pointer_to_self() {
; CHECK-LABEL: define ptr @atomicrmw_xchg_flat_pointer_to_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT1:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[XCHG:%.*]] = atomicrmw xchg ptr addrspace(5) [[ALLOCA]], ptr [[FLAT1]] seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT1]])
; CHECK-NEXT:    ret ptr [[XCHG]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %xchg = atomicrmw xchg ptr %flat, ptr %flat seq_cst, align 8
  call void @user(ptr %flat)
  ret ptr %xchg
}
|
|
|
|
; Volatile atomicrmw variant: per the CHECK lines, the volatile operation is
; left untouched — the pointer operand is not rewritten to addrspace(5).
define ptr @atomicrmw_volatile_xchg_flat_pointer_to_self() {
; CHECK-LABEL: define ptr @atomicrmw_volatile_xchg_flat_pointer_to_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[XCHG:%.*]] = atomicrmw volatile xchg ptr [[FLAT]], ptr [[FLAT]] seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    ret ptr [[XCHG]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %xchg = atomicrmw volatile xchg ptr %flat, ptr %flat seq_cst, align 8
  call void @user(ptr %flat)
  ret ptr %xchg
}
|
|
|
|
; cmpxchg where %flat is both the pointer operand and the new value. Per the
; CHECK lines, only the pointer operand is rewritten to addrspace(5); the
; new-value operand stays flat.
define { ptr, i1 } @cmpxchg_flat_pointer_new_to_self(ptr %cmp) {
; CHECK-LABEL: define { ptr, i1 } @cmpxchg_flat_pointer_new_to_self(
; CHECK-SAME: ptr [[CMP:%.*]]) {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT1:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[CMPX:%.*]] = cmpxchg ptr addrspace(5) [[ALLOCA]], ptr [[CMP]], ptr [[FLAT1]] seq_cst seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT1]])
; CHECK-NEXT:    ret { ptr, i1 } [[CMPX]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %cmpx = cmpxchg ptr %flat, ptr %cmp, ptr %flat seq_cst seq_cst, align 8
  call void @user(ptr %flat)
  ret { ptr, i1 } %cmpx
}
|
|
|
|
; Volatile cmpxchg variant: per the CHECK lines, the volatile operation is
; left untouched — no operand is rewritten to addrspace(5).
define { ptr, i1 } @cmpxchg_volatile_flat_pointer_new_to_self(ptr %cmp) {
; CHECK-LABEL: define { ptr, i1 } @cmpxchg_volatile_flat_pointer_new_to_self(
; CHECK-SAME: ptr [[CMP:%.*]]) {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[CMPX:%.*]] = cmpxchg volatile ptr [[FLAT]], ptr [[CMP]], ptr [[FLAT]] seq_cst seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    ret { ptr, i1 } [[CMPX]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %cmpx = cmpxchg volatile ptr %flat, ptr %cmp, ptr %flat seq_cst seq_cst, align 8
  call void @user(ptr %flat)
  ret { ptr, i1 } %cmpx
}
|
|
|
|
; NOTE(review): this test body is identical to
; @cmpxchg_volatile_flat_pointer_new_to_self above (only the name differs) —
; possibly a redundant duplicate; confirm whether both are intended.
define { ptr, i1 } @volatile_cmpxchg_flat_pointer_new_to_self(ptr %cmp) {
; CHECK-LABEL: define { ptr, i1 } @volatile_cmpxchg_flat_pointer_new_to_self(
; CHECK-SAME: ptr [[CMP:%.*]]) {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[CMPX:%.*]] = cmpxchg volatile ptr [[FLAT]], ptr [[CMP]], ptr [[FLAT]] seq_cst seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    ret { ptr, i1 } [[CMPX]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %cmpx = cmpxchg volatile ptr %flat, ptr %cmp, ptr %flat seq_cst seq_cst, align 8
  call void @user(ptr %flat)
  ret { ptr, i1 } %cmpx
}
|
|
|
|
; cmpxchg where %flat is both the pointer operand and the compare value. Per
; the CHECK lines, only the pointer operand is rewritten to addrspace(5); the
; compare operand stays flat.
define { ptr, i1 } @cmpxchg_flat_pointer_cmp_to_self(ptr %new) {
; CHECK-LABEL: define { ptr, i1 } @cmpxchg_flat_pointer_cmp_to_self(
; CHECK-SAME: ptr [[NEW:%.*]]) {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT1:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[CMPX:%.*]] = cmpxchg ptr addrspace(5) [[ALLOCA]], ptr [[FLAT1]], ptr [[NEW]] seq_cst seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT1]])
; CHECK-NEXT:    ret { ptr, i1 } [[CMPX]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %cmpx = cmpxchg ptr %flat, ptr %flat, ptr %new seq_cst seq_cst, align 8
  call void @user(ptr %flat)
  ret { ptr, i1 } %cmpx
}
|
|
|
|
; cmpxchg where %flat is the pointer operand AND both value operands. Per the
; CHECK lines, only the pointer operand is rewritten to addrspace(5); both
; the compare and new-value operands stay flat.
define { ptr, i1 } @cmpxchg_flat_pointer_cmp_new_self() {
; CHECK-LABEL: define { ptr, i1 } @cmpxchg_flat_pointer_cmp_new_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    [[CMPX:%.*]] = cmpxchg ptr addrspace(5) [[ALLOCA]], ptr [[FLAT]], ptr [[FLAT]] seq_cst seq_cst, align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    ret { ptr, i1 } [[CMPX]]
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  %cmpx = cmpxchg ptr %flat, ptr %flat, ptr %flat seq_cst seq_cst, align 8
  call void @user(ptr %flat)
  ret { ptr, i1 } %cmpx
}
|
|
|
|
; Multiple self-referential stores: per the CHECK lines, every pointer
; operand is rewritten to addrspace(5) while stored flat values stay flat,
; and an addrspace(5) value operand is left as-is.
define void @multi_store_flat_pointer_to_self() {
; CHECK-LABEL: define void @multi_store_flat_pointer_to_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    store ptr addrspace(5) [[ALLOCA]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    ret void
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  store ptr %flat, ptr %flat, align 8
  store ptr %flat, ptr %flat, align 8
  call void @user(ptr %flat)
  ; Pointer operand already in addrspace(5) in the input.
  store ptr %flat, ptr addrspace(5) %alloca, align 8
  ; Value operand is the addrspace(5) alloca itself; only the pointer
  ; operand needs rewriting.
  store ptr addrspace(5) %alloca, ptr %flat, align 8
  ret void
}
|
|
|
|
; Mix of volatile and non-volatile self-referential stores: per the CHECK
; lines, the non-volatile stores get their pointer operands rewritten to
; addrspace(5) while the volatile stores are left untouched.
define void @mixed_volatile_multi_store_flat_pointer_to_self() {
; CHECK-LABEL: define void @mixed_volatile_multi_store_flat_pointer_to_self() {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca ptr, align 8, addrspace(5)
; CHECK-NEXT:    [[FLAT:%.*]] = addrspacecast ptr addrspace(5) [[ALLOCA]] to ptr
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    store volatile ptr [[FLAT]], ptr [[FLAT]], align 8
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    call void @user(ptr [[FLAT]])
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    store ptr addrspace(5) [[ALLOCA]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    store volatile ptr [[FLAT]], ptr [[FLAT]], align 8
; CHECK-NEXT:    store ptr [[FLAT]], ptr addrspace(5) [[ALLOCA]], align 8
; CHECK-NEXT:    ret void
;
  %alloca = alloca ptr, align 8, addrspace(5)
  %flat = addrspacecast ptr addrspace(5) %alloca to ptr
  store ptr %flat, ptr %flat, align 8
  store volatile ptr %flat, ptr %flat, align 8
  store ptr %flat, ptr %flat, align 8
  call void @user(ptr %flat)
  store ptr %flat, ptr addrspace(5) %alloca, align 8
  store ptr addrspace(5) %alloca, ptr %flat, align 8
  store volatile ptr %flat, ptr %flat, align 8
  store ptr %flat, ptr %flat, align 8
  ret void
}
|
|
|
|
; Use-list regression test: the round-trip cast of %Arg (flat -> global ->
; flat) and the volatile load in between must not confuse the pass's
; use-list walk. Per the CHECK lines, the GEP is performed on the
; addrspace(1) pointer and the final store goes to addrspace(1).
; Note: the input still spells `nocapture`, while CHECK-SAME expects the
; upgraded `captures(none)` form produced by the textual IR reader.
define amdgpu_kernel void @uselist_regression_skipped_load(ptr nocapture readonly %Arg, i32 %i) {
; CHECK-LABEL: define amdgpu_kernel void @uselist_regression_skipped_load(
; CHECK-SAME: ptr readonly captures(none) [[ARG:%.*]], i32 [[I:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    [[ARG_GLOBAL:%.*]] = addrspacecast ptr [[ARG]] to ptr addrspace(1)
; CHECK-NEXT:    [[P1:%.*]] = getelementptr inbounds ptr, ptr addrspace(1) [[ARG_GLOBAL]], i32 [[I]]
; CHECK-NEXT:    [[TMP0:%.*]] = addrspacecast ptr addrspace(1) [[P1]] to ptr
; CHECK-NEXT:    [[P2:%.*]] = load volatile ptr, ptr [[TMP0]], align 8
; CHECK-NEXT:    [[P2_GLOBAL:%.*]] = addrspacecast ptr [[P2]] to ptr addrspace(1)
; CHECK-NEXT:    store float 0.000000e+00, ptr addrspace(1) [[P2_GLOBAL]], align 4
; CHECK-NEXT:    ret void
;
entry:
  %Arg.global = addrspacecast ptr %Arg to ptr addrspace(1)
  %Arg.flat = addrspacecast ptr addrspace(1) %Arg.global to ptr
  %p1 = getelementptr inbounds ptr, ptr %Arg.flat, i32 %i
  ; Volatile load: not itself rewritten, but its result feeds further casts.
  %p2 = load volatile ptr, ptr %p1, align 8
  %p2.global = addrspacecast ptr %p2 to ptr addrspace(1)
  %p2.flat = addrspacecast ptr addrspace(1) %p2.global to ptr
  store float 0.000000e+00, ptr %p2.flat, align 4
  ret void
}
|