From: Matt Arsenault
Date: Tue, 31 Jan 2017 01:30:16 +0000 (+0000)
Subject: InferAddressSpaces: Don't replace volatile users
X-Git-Tag: android-x86-7.1-r4~21145
X-Git-Url: http://git.osdn.net/view?a=commitdiff_plain;h=de6cb7e695112e5f7a4b08cf9b48e7766f6803cf;p=android-x86%2Fexternal-llvm.git

InferAddressSpaces: Don't replace volatile users

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@293582 91177308-0d34-0410-b5e6-96231b3b80d8
---

diff --git a/lib/Transforms/Scalar/InferAddressSpaces.cpp b/lib/Transforms/Scalar/InferAddressSpaces.cpp
index 8ca35c0061f..46f06090f96 100644
--- a/lib/Transforms/Scalar/InferAddressSpaces.cpp
+++ b/lib/Transforms/Scalar/InferAddressSpaces.cpp
@@ -570,8 +570,11 @@ bool InferAddressSpaces::rewriteWithNewAddressSpaces(
                << "\n with\n " << *NewV << '\n');
 
   for (Use *U : Uses) {
-    if (isa<LoadInst>(U->getUser()) ||
-        (isa<StoreInst>(U->getUser()) &&
+    LoadInst *LI = dyn_cast<LoadInst>(U->getUser());
+    StoreInst *SI = dyn_cast<StoreInst>(U->getUser());
+
+    if ((LI && !LI->isVolatile()) ||
+        (SI && !SI->isVolatile() &&
          U->getOperandNo() == StoreInst::getPointerOperandIndex())) {
       // If V is used as the pointer operand of a load/store, sets the pointer
       // operand to NewV. This replacement does not change the element type,
diff --git a/test/Transforms/InferAddressSpaces/AMDGPU/volatile.ll b/test/Transforms/InferAddressSpaces/AMDGPU/volatile.ll
new file mode 100644
index 00000000000..57dff1fea2f
--- /dev/null
+++ b/test/Transforms/InferAddressSpaces/AMDGPU/volatile.ll
@@ -0,0 +1,82 @@
+; RUN: opt -S -mtriple=amdgcn-amd-amdhsa -infer-address-spaces %s | FileCheck %s
+
+; Check that volatile users of addrspacecast are not replaced.
+
+; CHECK-LABEL: @volatile_load_flat_from_global(
+; CHECK: load volatile i32, i32 addrspace(4)*
+; CHECK: store i32 %val, i32 addrspace(1)*
+define void @volatile_load_flat_from_global(i32 addrspace(1)* nocapture %input, i32 addrspace(1)* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32 addrspace(1)* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32 addrspace(1)* %output to i32 addrspace(4)*
+  %val = load volatile i32, i32 addrspace(4)* %tmp0, align 4
+  store i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+; CHECK-LABEL: @volatile_load_flat_from_constant(
+; CHECK: load volatile i32, i32 addrspace(4)*
+; CHECK: store i32 %val, i32 addrspace(1)*
+define void @volatile_load_flat_from_constant(i32 addrspace(2)* nocapture %input, i32 addrspace(1)* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32 addrspace(2)* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32 addrspace(1)* %output to i32 addrspace(4)*
+  %val = load volatile i32, i32 addrspace(4)* %tmp0, align 4
+  store i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+; CHECK-LABEL: @volatile_load_flat_from_group(
+; CHECK: load volatile i32, i32 addrspace(4)*
+; CHECK: store i32 %val, i32 addrspace(3)*
+define void @volatile_load_flat_from_group(i32 addrspace(3)* nocapture %input, i32 addrspace(3)* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32 addrspace(3)* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32 addrspace(3)* %output to i32 addrspace(4)*
+  %val = load volatile i32, i32 addrspace(4)* %tmp0, align 4
+  store i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+; CHECK-LABEL: @volatile_load_flat_from_private(
+; CHECK: load volatile i32, i32 addrspace(4)*
+; CHECK: store i32 %val, i32*
+define void @volatile_load_flat_from_private(i32* nocapture %input, i32* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32* %output to i32 addrspace(4)*
+  %val = load volatile i32, i32 addrspace(4)* %tmp0, align 4
+  store i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+; CHECK-LABEL: @volatile_store_flat_to_global(
+; CHECK: load i32, i32 addrspace(1)*
+; CHECK: store volatile i32 %val, i32 addrspace(4)*
+define void @volatile_store_flat_to_global(i32 addrspace(1)* nocapture %input, i32 addrspace(1)* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32 addrspace(1)* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32 addrspace(1)* %output to i32 addrspace(4)*
+  %val = load i32, i32 addrspace(4)* %tmp0, align 4
+  store volatile i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+; CHECK-LABEL: @volatile_store_flat_to_group(
+; CHECK: load i32, i32 addrspace(3)*
+; CHECK: store volatile i32 %val, i32 addrspace(4)*
+define void @volatile_store_flat_to_group(i32 addrspace(3)* nocapture %input, i32 addrspace(3)* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32 addrspace(3)* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32 addrspace(3)* %output to i32 addrspace(4)*
+  %val = load i32, i32 addrspace(4)* %tmp0, align 4
+  store volatile i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+; CHECK-LABEL: @volatile_store_flat_to_private(
+; CHECK: load i32, i32*
+; CHECK: store volatile i32 %val, i32 addrspace(4)*
+define void @volatile_store_flat_to_private(i32* nocapture %input, i32* nocapture %output) #0 {
+  %tmp0 = addrspacecast i32* %input to i32 addrspace(4)*
+  %tmp1 = addrspacecast i32* %output to i32 addrspace(4)*
+  %val = load i32, i32 addrspace(4)* %tmp0, align 4
+  store volatile i32 %val, i32 addrspace(4)* %tmp1, align 4
+  ret void
+}
+
+attributes #0 = { nounwind }
\ No newline at end of file