author     Christophe Lyon <christophe.lyon@linaro.org>   2017-02-10 13:18:16 +0000
committer  Christophe Lyon <clyon@gcc.gnu.org>            2017-02-10 14:18:16 +0100
commit     052ef81d2dbef6727f9684733cfbb70f7adcf066 (patch)
tree       7ffc88a2f6899035ca19a30bbe57754aae6ab45e /gcc/config/aarch64/arm_neon.h
parent     a7c8ed0c361b549da67f9b83f91725fe1c84dbdb (diff)
[ARM,AArch64] more poly64 intrinsics and tests
2017-02-06  Christophe Lyon  <christophe.lyon@linaro.org>

	gcc/
	* config/aarch64/arm_neon.h (vtst_p8): Rewrite without asm.
	(vtst_p16): Likewise.
	(vtstq_p8): Likewise.
	(vtstq_p16): Likewise.
	(vtst_p64): New.
	(vtstq_p64): Likewise.
	* config/arm/arm_neon.h (vgetq_lane_p64): New.
	(vset_lane_p64): New.
	(vsetq_lane_p64): New.

2017-02-06  Christophe Lyon  <christophe.lyon@linaro.org>

	gcc/testsuite/
	* gcc.target/aarch64/advsimd-intrinsics/p64_p128.c
	(vget_lane_expected, vset_lane_expected, vtst_expected_poly64x1): New.
	(vmov_n_expected0, vmov_n_expected1, vmov_n_expected2)
	(expected_vld_st2_0, expected_vld_st2_1, expected_vld_st3_0)
	(expected_vld_st3_1, expected_vld_st3_2, expected_vld_st4_0)
	(expected_vld_st4_1, expected_vld_st4_2, expected_vld_st4_3)
	(vtst_expected_poly64x2): Move to aarch64-only section.
	(vget_lane_p64, vgetq_lane_p64, vset_lane_p64, vsetq_lane_p64)
	(vtst_p64, vtstq_p64): New tests.

From-SVN: r245328
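For the 32-bit ARM side of the change, the new poly64 lane intrinsics behave like their existing counterparts for other element types. A minimal usage sketch (not part of the patch; the function name and values are illustrative only, and the poly64 types assume a target with the crypto extension enabled):

    /* Usage sketch for the lane intrinsics named in the ChangeLog.  */
    #include <arm_neon.h>

    poly64_t
    swap_in_lane (poly64x2_t v, poly64_t x)
    {
      /* Overwrite lane 1 of v with x, then read lane 0 back unchanged.  */
      poly64x2_t w = vsetq_lane_p64 (x, v, 1);
      return vgetq_lane_p64 (w, 0);
    }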
Diffstat (limited to 'gcc/config/aarch64/arm_neon.h')
-rw-r--r--  gcc/config/aarch64/arm_neon.h | 47
1 file changed, 23 insertions, 24 deletions
diff --git a/gcc/config/aarch64/arm_neon.h b/gcc/config/aarch64/arm_neon.h
index a54c0be..0753da3 100644
--- a/gcc/config/aarch64/arm_neon.h
+++ b/gcc/config/aarch64/arm_neon.h
@@ -10862,48 +10862,47 @@ __extension__ extern __inline uint8x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtst_p8 (poly8x8_t a, poly8x8_t b)
{
- uint8x8_t result;
- __asm__ ("cmtst %0.8b, %1.8b, %2.8b"
- : "=w"(result)
- : "w"(a), "w"(b)
- : /* No clobbers */);
- return result;
+ return (uint8x8_t) ((((uint8x8_t) a) & ((uint8x8_t) b))
+ != 0);
}

__extension__ extern __inline uint16x4_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtst_p16 (poly16x4_t a, poly16x4_t b)
{
- uint16x4_t result;
- __asm__ ("cmtst %0.4h, %1.4h, %2.4h"
- : "=w"(result)
- : "w"(a), "w"(b)
- : /* No clobbers */);
- return result;
+ return (uint16x4_t) ((((uint16x4_t) a) & ((uint16x4_t) b))
+ != 0);
+}
+
+__extension__ extern __inline uint64x1_t
+__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
+vtst_p64 (poly64x1_t a, poly64x1_t b)
+{
+ return (uint64x1_t) ((a & b) != __AARCH64_INT64_C (0));
}

__extension__ extern __inline uint8x16_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtstq_p8 (poly8x16_t a, poly8x16_t b)
{
- uint8x16_t result;
- __asm__ ("cmtst %0.16b, %1.16b, %2.16b"
- : "=w"(result)
- : "w"(a), "w"(b)
- : /* No clobbers */);
- return result;
+ return (uint8x16_t) ((((uint8x16_t) a) & ((uint8x16_t) b))
+ != 0);
}

__extension__ extern __inline uint16x8_t
__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
vtstq_p16 (poly16x8_t a, poly16x8_t b)
{
- uint16x8_t result;
- __asm__ ("cmtst %0.8h, %1.8h, %2.8h"
- : "=w"(result)
- : "w"(a), "w"(b)
- : /* No clobbers */);
- return result;
+ return (uint16x8_t) ((((uint16x8_t) a) & ((uint16x8_t) b))
+ != 0);
+}
+
+__extension__ extern __inline uint64x2_t
+__attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
+vtstq_p64 (poly64x2_t a, poly64x2_t b)
+{
+ return (uint64x2_t) ((((uint64x2_t) a) & ((uint64x2_t) b))
+ != __AARCH64_INT64_C (0));
}

/* End of temporary inline asm implementations. */
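The rewritten and newly added vtst intrinsics all implement the same test-bits semantics: each result lane is all ones when the bitwise AND of the corresponding input lanes is non-zero, and all zeros otherwise. A minimal usage sketch of the new poly64 variants (not part of the patch; the values and printing are illustrative only, and the poly64 types assume an AArch64 target with the crypto extension):

    #include <arm_neon.h>
    #include <stdio.h>

    int
    main (void)
    {
      poly64x1_t a = vcreate_p64 (0x00000000000000f0ULL);
      poly64x1_t b = vcreate_p64 (0x000000000000000fULL);
      poly64x1_t c = vcreate_p64 (0x0000000000000018ULL);

      /* a & b == 0, so the result lane is all zeros.  */
      uint64x1_t r0 = vtst_p64 (a, b);
      /* a & c != 0, so the result lane is all ones.  */
      uint64x1_t r1 = vtst_p64 (a, c);

      printf ("%016llx %016llx\n",
              (unsigned long long) vget_lane_u64 (r0, 0),
              (unsigned long long) vget_lane_u64 (r1, 0));

      /* The q variant tests each 64-bit lane independently.  */
      poly64x2_t x = vcombine_p64 (a, a);
      poly64x2_t y = vcombine_p64 (b, c);
      uint64x2_t rq = vtstq_p64 (x, y);
      printf ("%016llx %016llx\n",
              (unsigned long long) vgetq_lane_u64 (rq, 0),
              (unsigned long long) vgetq_lane_u64 (rq, 1));
      return 0;
    }

With the asm blocks gone, the compiler can see through these operations and fold or vectorize them like any other C expression, which is the point of the rewrite.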