; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1250 < %s | FileCheck -check-prefixes=GCN,GCN-SDAG %s
; RUN: llc -global-isel -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1250 < %s | FileCheck -check-prefixes=GCN,GCN-GISEL %s

; Test that stores that may hit scratch are correctly promoted to SCOPE_SE.

; Store directly to addrspace(5): selected as scratch_store and promoted to SCOPE_SE.
define void @test_scratch_store(ptr addrspace(5) %ptr, i32 %val) {
; GCN-LABEL: test_scratch_store:
; GCN:       ; %bb.0:
; GCN-NEXT:    s_wait_loadcnt_dscnt 0x0
; GCN-NEXT:    s_wait_kmcnt 0x0
; GCN-NEXT:    scratch_store_b32 v0, v1, off scope:SCOPE_SE
; GCN-NEXT:    s_set_pc_i64 s[30:31]
  store i32 %val, ptr addrspace(5) %ptr
  ret void
}

; A generic (flat) store may alias scratch, so it is also promoted to SCOPE_SE.
define void @test_unknown_flat_store(ptr %ptr, i32 %val) {
; GCN-LABEL: test_unknown_flat_store:
; GCN:       ; %bb.0:
; GCN-NEXT:    s_wait_loadcnt_dscnt 0x0
; GCN-NEXT:    s_wait_kmcnt 0x0
; GCN-NEXT:    flat_store_b32 v[0:1], v2 scope:SCOPE_SE
; GCN-NEXT:    s_wait_dscnt 0x0
; GCN-NEXT:    s_set_pc_i64 s[30:31]
  store i32 %val, ptr %ptr
  ret void
}

; With "amdgpu-no-flat-scratch-init" (attributes #0) the flat store cannot hit
; scratch, so no SCOPE_SE promotion is emitted.
define void @test_flat_store_no_scratch_alloc(ptr %ptr, i32 %val) #0 {
; GCN-LABEL: test_flat_store_no_scratch_alloc:
; GCN:       ; %bb.0:
; GCN-NEXT:    s_wait_loadcnt_dscnt 0x0
; GCN-NEXT:    s_wait_kmcnt 0x0
; GCN-NEXT:    flat_store_b32 v[0:1], v2
; GCN-NEXT:    s_wait_dscnt 0x0
; GCN-NEXT:    s_set_pc_i64 s[30:31]
  store i32 %val, ptr %ptr
  ret void
}

; TODO: handle
; NOTE(review): the !noalias.addrspace range appears to exclude addrspace(5)
; (private/scratch), so the SCOPE_SE promotion below could presumably be
; dropped once the metadata is honored — confirm against the pass.
define void @test_flat_store_noalias_addrspace(ptr %ptr, i32 %val) {
; GCN-LABEL: test_flat_store_noalias_addrspace:
; GCN:       ; %bb.0:
; GCN-NEXT:    s_wait_loadcnt_dscnt 0x0
; GCN-NEXT:    s_wait_kmcnt 0x0
; GCN-NEXT:    flat_store_b32 v[0:1], v2 scope:SCOPE_SE
; GCN-NEXT:    s_wait_dscnt 0x0
; GCN-NEXT:    s_set_pc_i64 s[30:31]
  store i32 %val, ptr %ptr, !noalias.addrspace !{i32 5, i32 6}
  ret void
}

; TODO: would be nice to handle too
; The pointer is a select of addrspacecasts from addrspace(1) and addrspace(3),
; neither of which is scratch, yet the store is still promoted to SCOPE_SE.
define void @test_flat_store_select(ptr addrspace(1) %a, ptr addrspace(3) %b, i1 %cond, i32 %val) {
; GCN-SDAG-LABEL: test_flat_store_select:
; GCN-SDAG:       ; %bb.0:
; GCN-SDAG-NEXT:    s_wait_loadcnt_dscnt 0x0
; GCN-SDAG-NEXT:    s_wait_kmcnt 0x0
; GCN-SDAG-NEXT:    v_cmp_ne_u32_e32 vcc_lo, -1, v2
; GCN-SDAG-NEXT:    v_and_b32_e32 v3, 1, v3
; GCN-SDAG-NEXT:    s_mov_b64 s[0:1], src_shared_base
; GCN-SDAG-NEXT:    v_cndmask_b32_e32 v2, 0, v2, vcc_lo
; GCN-SDAG-NEXT:    v_cndmask_b32_e64 v5, 0, s1, vcc_lo
; GCN-SDAG-NEXT:    s_delay_alu instid0(VALU_DEP_3) | instskip(NEXT) | instid1(VALU_DEP_2)
; GCN-SDAG-NEXT:    v_cmp_eq_u32_e32 vcc_lo, 1, v3
; GCN-SDAG-NEXT:    v_dual_cndmask_b32 v1, v5, v1 :: v_dual_cndmask_b32 v0, v2, v0
; GCN-SDAG-NEXT:    flat_store_b32 v[0:1], v4 scope:SCOPE_SE
; GCN-SDAG-NEXT:    s_wait_dscnt 0x0
; GCN-SDAG-NEXT:    s_set_pc_i64 s[30:31]
;
; GCN-GISEL-LABEL: test_flat_store_select:
; GCN-GISEL:       ; %bb.0:
; GCN-GISEL-NEXT:    s_wait_loadcnt_dscnt 0x0
; GCN-GISEL-NEXT:    s_wait_kmcnt 0x0
; GCN-GISEL-NEXT:    v_cmp_ne_u32_e32 vcc_lo, -1, v2
; GCN-GISEL-NEXT:    v_and_b32_e32 v3, 1, v3
; GCN-GISEL-NEXT:    s_mov_b64 s[0:1], src_shared_base
; GCN-GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v2, vcc_lo
; GCN-GISEL-NEXT:    v_cndmask_b32_e64 v5, 0, s1, vcc_lo
; GCN-GISEL-NEXT:    s_delay_alu instid0(VALU_DEP_3) | instskip(NEXT) | instid1(VALU_DEP_2)
; GCN-GISEL-NEXT:    v_cmp_ne_u32_e32 vcc_lo, 0, v3
; GCN-GISEL-NEXT:    v_dual_cndmask_b32 v0, v2, v0 :: v_dual_cndmask_b32 v1, v5, v1
; GCN-GISEL-NEXT:    flat_store_b32 v[0:1], v4 scope:SCOPE_SE
; GCN-GISEL-NEXT:    s_wait_dscnt 0x0
; GCN-GISEL-NEXT:    s_set_pc_i64 s[30:31]
  %a.ascast = addrspacecast ptr addrspace(1) %a to ptr
  %b.ascast = addrspacecast ptr addrspace(3) %b to ptr
  %ptr = select i1 %cond, ptr %a.ascast, ptr %b.ascast
  store i32 %val, ptr %ptr
  ret void
}

attributes #0 = { "amdgpu-no-flat-scratch-init" }