diff options
author | Jakub Jelinek <jakub@redhat.com> | 2019-06-27 13:13:10 +0200 |
---|---|---|
committer | Jakub Jelinek <jakub@gcc.gnu.org> | 2019-06-27 13:13:10 +0200 |
commit | d2d604d83edb86ce3f492d03900fb29dea97725d (patch) | |
tree | 57818937b504189e908dd1ee12d60648c3ce0356 /gcc/config | |
parent | 45309d286c80ecad8b7a4efba0e9aba35d847af6 (diff) | |
download | gcc-d2d604d83edb86ce3f492d03900fb29dea97725d.zip gcc-d2d604d83edb86ce3f492d03900fb29dea97725d.tar.gz gcc-d2d604d83edb86ce3f492d03900fb29dea97725d.tar.bz2 |
re PR target/90991 (_mm_loadu_ps intrinsic translates to vmovaps in combination with _mm512_insertf32x4)
PR target/90991
* config/i386/sse.md (avx_vec_concat<mode>): Use nonimmediate_operand
instead of register_operand for operands[1], add m to its constraints
if operands[2] uses "C" constraint. Ensure in condition that if
operands[2] is not 0, then operands[1] is not a MEM. For last two
alternatives, use unaligned loads instead of aligned if operands[1] is
misaligned_operand.
* gcc.target/i386/avx2-pr90991-1.c: New test.
* gcc.target/i386/avx512dq-pr90991-2.c: New test.
From-SVN: r272745
Diffstat (limited to 'gcc/config')
-rw-r--r-- | gcc/config/i386/sse.md | 70 |
1 file changed, 54 insertions, 16 deletions
diff --git a/gcc/config/i386/sse.md b/gcc/config/i386/sse.md index 29f16bc..8b4f6c1 100644 --- a/gcc/config/i386/sse.md +++ b/gcc/config/i386/sse.md @@ -20743,9 +20743,11 @@ (define_insn "avx_vec_concat<mode>" [(set (match_operand:V_256_512 0 "register_operand" "=x,v,x,Yv") (vec_concat:V_256_512 - (match_operand:<ssehalfvecmode> 1 "register_operand" "x,v,x,v") + (match_operand:<ssehalfvecmode> 1 "nonimmediate_operand" "x,v,xm,vm") (match_operand:<ssehalfvecmode> 2 "nonimm_or_0_operand" "xm,vm,C,C")))] - "TARGET_AVX" + "TARGET_AVX + && (operands[2] == CONST0_RTX (<ssehalfvecmode>mode) + || !MEM_P (operands[1]))" { switch (which_alternative) { @@ -20771,27 +20773,63 @@ switch (get_attr_mode (insn)) { case MODE_V16SF: - return "vmovaps\t{%1, %t0|%t0, %1}"; + if (misaligned_operand (operands[1], <ssehalfvecmode>mode)) + return "vmovups\t{%1, %t0|%t0, %1}"; + else + return "vmovaps\t{%1, %t0|%t0, %1}"; case MODE_V8DF: - return "vmovapd\t{%1, %t0|%t0, %1}"; + if (misaligned_operand (operands[1], <ssehalfvecmode>mode)) + return "vmovupd\t{%1, %t0|%t0, %1}"; + else + return "vmovapd\t{%1, %t0|%t0, %1}"; case MODE_V8SF: - return "vmovaps\t{%1, %x0|%x0, %1}"; + if (misaligned_operand (operands[1], <ssehalfvecmode>mode)) + return "vmovups\t{%1, %x0|%x0, %1}"; + else + return "vmovaps\t{%1, %x0|%x0, %1}"; case MODE_V4DF: - return "vmovapd\t{%1, %x0|%x0, %1}"; + if (misaligned_operand (operands[1], <ssehalfvecmode>mode)) + return "vmovupd\t{%1, %x0|%x0, %1}"; + else + return "vmovapd\t{%1, %x0|%x0, %1}"; case MODE_XI: - if (which_alternative == 2) - return "vmovdqa\t{%1, %t0|%t0, %1}"; - else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8) - return "vmovdqa64\t{%1, %t0|%t0, %1}"; + if (misaligned_operand (operands[1], <ssehalfvecmode>mode)) + { + if (which_alternative == 2) + return "vmovdqu\t{%1, %t0|%t0, %1}"; + else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8) + return "vmovdqu64\t{%1, %t0|%t0, %1}"; + else + return "vmovdqu32\t{%1, %t0|%t0, %1}"; + } else - return 
"vmovdqa32\t{%1, %t0|%t0, %1}"; + { + if (which_alternative == 2) + return "vmovdqa\t{%1, %t0|%t0, %1}"; + else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8) + return "vmovdqa64\t{%1, %t0|%t0, %1}"; + else + return "vmovdqa32\t{%1, %t0|%t0, %1}"; + } case MODE_OI: - if (which_alternative == 2) - return "vmovdqa\t{%1, %x0|%x0, %1}"; - else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8) - return "vmovdqa64\t{%1, %x0|%x0, %1}"; + if (misaligned_operand (operands[1], <ssehalfvecmode>mode)) + { + if (which_alternative == 2) + return "vmovdqu\t{%1, %x0|%x0, %1}"; + else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8) + return "vmovdqu64\t{%1, %x0|%x0, %1}"; + else + return "vmovdqu32\t{%1, %x0|%x0, %1}"; + } else - return "vmovdqa32\t{%1, %x0|%x0, %1}"; + { + if (which_alternative == 2) + return "vmovdqa\t{%1, %x0|%x0, %1}"; + else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8) + return "vmovdqa64\t{%1, %x0|%x0, %1}"; + else + return "vmovdqa32\t{%1, %x0|%x0, %1}"; + } default: gcc_unreachable (); } |