about · summary · refs · log · tree · commit · diff
path: root/gcc/config
diff options
context:
space:
mode:
author	Julia Koval <julia.koval@intel.com>	2017-12-07 07:28:55 +0100
committer	Kirill Yukhin <kyukhin@gcc.gnu.org>	2017-12-07 06:28:55 +0000
commit	69ddc3e5832aa384ffca1d0edfb22a3175bd9dc9 (patch)
tree	3113c79cb8d9f05cf7489d725dfe7fac8133f852 /gcc/config
parent	38fed05af6a991405af02319a95a036df2bbaa07 (diff)
download	gcc-69ddc3e5832aa384ffca1d0edfb22a3175bd9dc9.zip
	gcc-69ddc3e5832aa384ffca1d0edfb22a3175bd9dc9.tar.gz
	gcc-69ddc3e5832aa384ffca1d0edfb22a3175bd9dc9.tar.bz2
Enable VAES support [2/5]
gcc/
	* config.gcc: Add vaesintrin.h.
	* config/i386/i386-builtin-types.def (V64QI_FTYPE_V64QI_V64QI): New
	type.
	* config/i386/i386-builtin.def (__builtin_ia32_vaesdec_v16qi,
	__builtin_ia32_vaesdec_v32qi, __builtin_ia32_vaesdec_v64qi): New
	builtins.
	* config/i386/i386.c (ix86_expand_args_builtin): Handle new type.
	* config/i386/immintrin.h: Include vaesintrin.h.
	* config/i386/sse.md (vaesdec_<mode>): New pattern.
	* config/i386/vaesintrin.h (_mm256_aesdec_epi128,
	_mm512_aesdec_epi128, _mm_aesdec_epi128): New intrinsics.

gcc/testsuite/
	* gcc.target/i386/avx512-check.h: Handle bit_VAES.
	* gcc.target/i386/avx512f-aesdec-2.c: New test.
	* gcc.target/i386/avx512fvl-vaes-1.c: Ditto.
	* gcc.target/i386/avx512vl-aesdec-2.c: Ditto.
	* gcc.target/i386/i386.exp (check_effective_target_avx512vaes): New.

From-SVN: r255461
Diffstat (limited to 'gcc/config')
-rw-r--r--	gcc/config/i386/avx512vnniintrin.h	66
-rw-r--r--	gcc/config/i386/avx512vnnivlintrin.h	89
-rw-r--r--	gcc/config/i386/i386-builtin.def	42
-rw-r--r--	gcc/config/i386/immintrin.h	4
-rw-r--r--	gcc/config/i386/sse.md	221
5 files changed, 422 insertions, 0 deletions
diff --git a/gcc/config/i386/avx512vnniintrin.h b/gcc/config/i386/avx512vnniintrin.h
new file mode 100644
index 0000000..9a48e28
--- /dev/null
+++ b/gcc/config/i386/avx512vnniintrin.h
@@ -0,0 +1,66 @@
+/* Copyright (C) 2013-2017 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ Under Section 7 of GPL version 3, you are granted additional
+ permissions described in the GCC Runtime Library Exception, version
+ 3.1, as published by the Free Software Foundation.
+
+ You should have received a copy of the GNU General Public License and
+ a copy of the GCC Runtime Library Exception along with this program;
+ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+ <http://www.gnu.org/licenses/>. */
+
+#ifndef _IMMINTRIN_H_INCLUDED
+#error "Never use <avx512vnniintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifndef __AVX512VNNIINTRIN_H_INCLUDED
+#define __AVX512VNNIINTRIN_H_INCLUDED
+
+#if !defined(__AVX512VNNI__) || !defined(__AVX512F__)
+#pragma GCC push_options
+#pragma GCC target("avx512vnni,avx512f")
+#define __DISABLE_AVX512VNNI__
+#endif /* __AVX512VNNI__ */
+
+extern __inline __m512i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm512_dpbusd_epi32 (__m512i __A, __m512i __B, __m512i __C)
+{
+ return (__m512i) __builtin_ia32_vpdpbusd_v16si ((__v16si)__A, (__v16si) __B,
+ (__v16si) __C);
+}
+
+extern __inline __m512i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm512_mask_dpbusd_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
+{
+ return (__m512i)__builtin_ia32_vpdpbusd_v16si_mask ((__v16si)__A,
+ (__v16si) __C, (__v16si) __D, (__mmask16)__B);
+}
+
+extern __inline __m512i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm512_maskz_dpbusd_epi32 (__mmask16 __A, __m512i __B, __m512i __C,
+ __m512i __D)
+{
+ return (__m512i)__builtin_ia32_vpdpbusd_v16si_maskz ((__v16si)__B,
+ (__v16si) __C, (__v16si) __D, (__mmask16)__A);
+}
+#ifdef __DISABLE_AVX512VNNI__
+#undef __DISABLE_AVX512VNNI__
+#pragma GCC pop_options
+#endif /* __DISABLE_AVX512VNNI__ */
+
+#endif /* __AVX512VNNIINTRIN_H_INCLUDED */
diff --git a/gcc/config/i386/avx512vnnivlintrin.h b/gcc/config/i386/avx512vnnivlintrin.h
new file mode 100644
index 0000000..b5fea90
--- /dev/null
+++ b/gcc/config/i386/avx512vnnivlintrin.h
@@ -0,0 +1,89 @@
+/* Copyright (C) 2013-2017 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ Under Section 7 of GPL version 3, you are granted additional
+ permissions described in the GCC Runtime Library Exception, version
+ 3.1, as published by the Free Software Foundation.
+
+ You should have received a copy of the GNU General Public License and
+ a copy of the GCC Runtime Library Exception along with this program;
+ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+ <http://www.gnu.org/licenses/>. */
+
+#ifndef _IMMINTRIN_H_INCLUDED
+#error "Never use <avx512vnnivlintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifndef _AVX512VNNIVLINTRIN_H_INCLUDED
+#define _AVX512VNNIVLINTRIN_H_INCLUDED
+
+#if !defined(__AVX512VL__) || !defined(__AVX512VNNI__)
+#pragma GCC push_options
+#pragma GCC target("avx512vnni,avx512vl")
+#define __DISABLE_AVX512VNNIVL__
+#endif /* __AVX512VNNIVL__ */
+
+extern __inline __m256i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm256_dpbusd_epi32 (__m256i __A, __m256i __B, __m256i __C)
+{
+ return (__m256i) __builtin_ia32_vpdpbusd_v8si ((__v8si)__A, (__v8si) __B,
+ (__v8si) __C);
+}
+
+extern __inline __m256i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm256_mask_dpbusd_epi32 (__m256i __A, __mmask8 __B, __m256i __C, __m256i __D)
+{
+ return (__m256i)__builtin_ia32_vpdpbusd_v8si_mask ((__v8si)__A, (__v8si) __C,
+ (__v8si) __D, (__mmask8)__B);
+}
+
+extern __inline __m256i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm256_maskz_dpbusd_epi32 (__mmask8 __A, __m256i __B, __m256i __C, __m256i __D)
+{
+ return (__m256i)__builtin_ia32_vpdpbusd_v8si_maskz ((__v8si)__B,
+ (__v8si) __C, (__v8si) __D, (__mmask8)__A);
+}
+
+extern __inline __m128i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm_dpbusd_epi32 (__m128i __A, __m128i __B, __m128i __C)
+{
+ return (__m128i) __builtin_ia32_vpdpbusd_v4si ((__v4si)__A, (__v4si) __B,
+ (__v4si) __C);
+}
+
+extern __inline __m128i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm_mask_dpbusd_epi32 (__m128i __A, __mmask8 __B, __m128i __C, __m128i __D)
+{
+ return (__m128i)__builtin_ia32_vpdpbusd_v4si_mask ((__v4si)__A, (__v4si) __C,
+ (__v4si) __D, (__mmask8)__B);
+}
+
+extern __inline __m128i
+__attribute__((__gnu_inline__, __always_inline__, __artificial__))
+_mm_maskz_dpbusd_epi32 (__mmask8 __A, __m128i __B, __m128i __C, __m128i __D)
+{
+ return (__m128i)__builtin_ia32_vpdpbusd_v4si_maskz ((__v4si)__B,
+ (__v4si) __C, (__v4si) __D, (__mmask8)__A);
+}
+
+#ifdef __DISABLE_AVX512VNNIVL__
+#undef __DISABLE_AVX512VNNIVL__
+#pragma GCC pop_options
+#endif /* __DISABLE_AVX512VNNIVL__ */
+#endif /* _AVX512VNNIVLINTRIN_H_INCLUDED */
diff --git a/gcc/config/i386/i386-builtin.def b/gcc/config/i386/i386-builtin.def
index c9b0a70..02a2b7a 100644
--- a/gcc/config/i386/i386-builtin.def
+++ b/gcc/config/i386/i386-builtin.def
@@ -2721,6 +2721,48 @@ BDESC (OPTION_MASK_ISA_AVX512VBMI2, CODE_FOR_vpshldv_v2di, "__builtin_ia32_vpshl
BDESC (OPTION_MASK_ISA_AVX512VBMI2, CODE_FOR_vpshldv_v2di_mask, "__builtin_ia32_vpshldv_v2di_mask", IX86_BUILTIN_VPSHLDVV2DI_MASK, UNKNOWN, (int) V2DI_FTYPE_V2DI_V2DI_V2DI_INT)
BDESC (OPTION_MASK_ISA_AVX512VBMI2, CODE_FOR_vpshldv_v2di_maskz, "__builtin_ia32_vpshldv_v2di_maskz", IX86_BUILTIN_VPSHLDVV2DI_MASKZ, UNKNOWN, (int) V2DI_FTYPE_V2DI_V2DI_V2DI_INT)
+/* VNNI */
+
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v16si, "__builtin_ia32_vpdpbusd_v16si", IX86_BUILTIN_VPDPBUSDV16SI, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v16si_mask, "__builtin_ia32_vpdpbusd_v16si_mask", IX86_BUILTIN_VPDPBUSDV16SI_MASK, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v16si_maskz, "__builtin_ia32_vpdpbusd_v16si_maskz", IX86_BUILTIN_VPDPBUSDV16SI_MASKZ, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v8si, "__builtin_ia32_vpdpbusd_v8si", IX86_BUILTIN_VPDPBUSDV8SI, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v8si_mask, "__builtin_ia32_vpdpbusd_v8si_mask", IX86_BUILTIN_VPDPBUSDV8SI_MASK, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v8si_maskz, "__builtin_ia32_vpdpbusd_v8si_maskz", IX86_BUILTIN_VPDPBUSDV8SI_MASKZ, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v4si, "__builtin_ia32_vpdpbusd_v4si", IX86_BUILTIN_VPDPBUSDV4SI, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v4si_mask, "__builtin_ia32_vpdpbusd_v4si_mask", IX86_BUILTIN_VPDPBUSDV4SI_MASK, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusd_v4si_maskz, "__builtin_ia32_vpdpbusd_v4si_maskz", IX86_BUILTIN_VPDPBUSDV4SI_MASKZ, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v16si, "__builtin_ia32_vpdpbusds_v16si", IX86_BUILTIN_VPDPBUSDSV16SI, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v16si_mask, "__builtin_ia32_vpdpbusds_v16si_mask", IX86_BUILTIN_VPDPBUSDSV16SI_MASK, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v16si_maskz, "__builtin_ia32_vpdpbusds_v16si_maskz", IX86_BUILTIN_VPDPBUSDSV16SI_MASKZ, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v8si, "__builtin_ia32_vpdpbusds_v8si", IX86_BUILTIN_VPDPBUSDSV8SI, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v8si_mask, "__builtin_ia32_vpdpbusds_v8si_mask", IX86_BUILTIN_VPDPBUSDSV8SI_MASK, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v8si_maskz, "__builtin_ia32_vpdpbusds_v8si_maskz", IX86_BUILTIN_VPDPBUSDSV8SI_MASKZ, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v4si, "__builtin_ia32_vpdpbusds_v4si", IX86_BUILTIN_VPDPBUSDSV4SI, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v4si_mask, "__builtin_ia32_vpdpbusds_v4si_mask", IX86_BUILTIN_VPDPBUSDSV4SI_MASK, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpbusds_v4si_maskz, "__builtin_ia32_vpdpbusds_v4si_maskz", IX86_BUILTIN_VPDPBUSDSV4SI_MASKZ, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v16si, "__builtin_ia32_vpdpwssd_v16si", IX86_BUILTIN_VPDPWSSDV16SI, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v16si_mask, "__builtin_ia32_vpdpwssd_v16si_mask", IX86_BUILTIN_VPDPWSSDV16SI_MASK, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v16si_maskz, "__builtin_ia32_vpdpwssd_v16si_maskz", IX86_BUILTIN_VPDPWSSDV16SI_MASKZ, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v8si, "__builtin_ia32_vpdpwssd_v8si", IX86_BUILTIN_VPDPWSSDV8SI, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v8si_mask, "__builtin_ia32_vpdpwssd_v8si_mask", IX86_BUILTIN_VPDPWSSDV8SI_MASK, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v8si_maskz, "__builtin_ia32_vpdpwssd_v8si_maskz", IX86_BUILTIN_VPDPWSSDV8SI_MASKZ, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v4si, "__builtin_ia32_vpdpwssd_v4si", IX86_BUILTIN_VPDPWSSDV4SI, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v4si_mask, "__builtin_ia32_vpdpwssd_v4si_mask", IX86_BUILTIN_VPDPWSSDV4SI_MASK, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssd_v4si_maskz, "__builtin_ia32_vpdpwssd_v4si_maskz", IX86_BUILTIN_VPDPWSSDV4SI_MASKZ, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v16si, "__builtin_ia32_vpdpwssds_v16si", IX86_BUILTIN_VPDPWSSDSV16SI, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v16si_mask, "__builtin_ia32_vpdpwssds_v16si_mask", IX86_BUILTIN_VPDPWSSDSV16SI_MASK, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v16si_maskz, "__builtin_ia32_vpdpwssds_v16si_maskz", IX86_BUILTIN_VPDPWSSDSV16SI_MASKZ, UNKNOWN, (int) V16SI_FTYPE_V16SI_V16SI_V16SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v8si, "__builtin_ia32_vpdpwssds_v8si", IX86_BUILTIN_VPDPWSSDSV8SI, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v8si_mask, "__builtin_ia32_vpdpwssds_v8si_mask", IX86_BUILTIN_VPDPWSSDSV8SI_MASK, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v8si_maskz, "__builtin_ia32_vpdpwssds_v8si_maskz", IX86_BUILTIN_VPDPWSSDSV8SI_MASKZ, UNKNOWN, (int) V8SI_FTYPE_V8SI_V8SI_V8SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v4si, "__builtin_ia32_vpdpwssds_v4si", IX86_BUILTIN_VPDPWSSDSV4SI, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v4si_mask, "__builtin_ia32_vpdpwssds_v4si_mask", IX86_BUILTIN_VPDPWSSDSV4SI_MASK, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+BDESC (OPTION_MASK_ISA_AVX512VNNI, CODE_FOR_vpdpwssds_v4si_maskz, "__builtin_ia32_vpdpwssds_v4si_maskz", IX86_BUILTIN_VPDPWSSDSV4SI_MASKZ, UNKNOWN, (int) V4SI_FTYPE_V4SI_V4SI_V4SI_INT)
+
BDESC_END (ARGS2, SPECIAL_ARGS2)
BDESC_FIRST (special_args2, SPECIAL_ARGS2, OPTION_MASK_ISA_AVX512VBMI2, CODE_FOR_compressstorev64qi_mask, "__builtin_ia32_compressstoreuqi512_mask", IX86_BUILTIN_PCOMPRESSBSTORE512, UNKNOWN, (int) VOID_FTYPE_PV64QI_V64QI_UDI)
diff --git a/gcc/config/i386/immintrin.h b/gcc/config/i386/immintrin.h
index 2cafd75..abc1d31 100644
--- a/gcc/config/i386/immintrin.h
+++ b/gcc/config/i386/immintrin.h
@@ -78,6 +78,10 @@
#include <avx512vbmi2vlintrin.h>
+#include <avx512vnniintrin.h>
+
+#include <avx512vnnivlintrin.h>
+
#include <shaintrin.h>
#include <lzcntintrin.h>
diff --git a/gcc/config/i386/sse.md b/gcc/config/i386/sse.md
index 1f785b75..6ec97e1 100644
--- a/gcc/config/i386/sse.md
+++ b/gcc/config/i386/sse.md
@@ -166,6 +166,12 @@
UNSPEC_VPSHRD
UNSPEC_VPSHRDV
UNSPEC_VPSHLDV
+
+ ;; For AVX512VNNI support
+ UNSPEC_VPMADDUBSWACCD
+ UNSPEC_VPMADDUBSWACCSSD
+ UNSPEC_VPMADDWDACCD
+ UNSPEC_VPMADDWDACCSSD
])
(define_c_enum "unspecv" [
@@ -20228,3 +20234,218 @@
"vpshldv<ssemodesuffix>\t{%3, %2, %0%{%5%}%{z%}|%0%{%5%}%{z%}, %2, %3 }"
[(set_attr ("prefix") ("evex"))
(set_attr "mode" "<sseinsnmode>")])
+
+(define_insn "vpdpbusd_<mode>"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+] UNSPEC_VPMADDUBSWACCD))]
+ "TARGET_AVX512VNNI"
+ "vpdpbusd\t{%3, %2, %0|%0, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_insn "vpdpbusd_<mode>_mask"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDUBSWACCD)
+ (match_dup 1)
+ (match_operand:<avx512fmaskmode> 4 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpbusd\t{%3, %2, %0%{%4%}|%0%{%4%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_expand "vpdpbusd_<mode>_maskz"
+ [(match_operand:VI4_AVX512VL 0 "register_operand")
+ (match_operand:VI4_AVX512VL 1 "register_operand")
+ (match_operand:VI4_AVX512VL 2 "register_operand")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand")
+ (match_operand:<avx512fmaskmode> 4 "register_operand")]
+ "TARGET_AVX512VNNI"
+{
+ emit_insn (gen_vpdpbusd_<mode>_maskz_1 (
+ operands[0], operands[1], operands[2], operands[3],
+ CONST0_RTX (<MODE>mode), operands[4]));
+ DONE;
+})
+
+(define_insn "vpdpbusd_<mode>_maskz_1"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDUBSWACCD)
+ (match_operand:VI4_AVX512VL 4 "const0_operand" "C")
+ (match_operand:<avx512fmaskmode> 5 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpbusd\t{%3, %2, %0%{%5%}%{z%}|%0%{%5%}%{z%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+
+(define_insn "vpdpbusds_<mode>"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+] UNSPEC_VPMADDUBSWACCSSD))]
+ "TARGET_AVX512VNNI"
+ "vpdpbusds\t{%3, %2, %0|%0, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_insn "vpdpbusds_<mode>_mask"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDUBSWACCSSD)
+ (match_dup 1)
+ (match_operand:<avx512fmaskmode> 4 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpbusds\t{%3, %2, %0%{%4%}|%0%{%4%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_expand "vpdpbusds_<mode>_maskz"
+ [(match_operand:VI4_AVX512VL 0 "register_operand")
+ (match_operand:VI4_AVX512VL 1 "register_operand")
+ (match_operand:VI4_AVX512VL 2 "register_operand")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand")
+ (match_operand:<avx512fmaskmode> 4 "register_operand")]
+ "TARGET_AVX512VNNI"
+{
+ emit_insn (gen_vpdpbusds_<mode>_maskz_1 (
+ operands[0], operands[1], operands[2], operands[3],
+ CONST0_RTX (<MODE>mode), operands[4]));
+ DONE;
+})
+
+(define_insn "vpdpbusds_<mode>_maskz_1"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDUBSWACCSSD)
+ (match_operand:VI4_AVX512VL 4 "const0_operand" "C")
+ (match_operand:<avx512fmaskmode> 5 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpbusds\t{%3, %2, %0%{%5%}%{z%}|%0%{%5%}%{z%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+
+(define_insn "vpdpwssd_<mode>"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+] UNSPEC_VPMADDWDACCD))]
+ "TARGET_AVX512VNNI"
+ "vpdpwssd\t{%3, %2, %0|%0, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_insn "vpdpwssd_<mode>_mask"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDWDACCD)
+ (match_dup 1)
+ (match_operand:<avx512fmaskmode> 4 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpwssd\t{%3, %2, %0%{%4%}|%0%{%4%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_expand "vpdpwssd_<mode>_maskz"
+ [(match_operand:VI4_AVX512VL 0 "register_operand")
+ (match_operand:VI4_AVX512VL 1 "register_operand")
+ (match_operand:VI4_AVX512VL 2 "register_operand")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand")
+ (match_operand:<avx512fmaskmode> 4 "register_operand")]
+ "TARGET_AVX512VNNI"
+{
+ emit_insn (gen_vpdpwssd_<mode>_maskz_1 (
+ operands[0], operands[1], operands[2], operands[3],
+ CONST0_RTX (<MODE>mode), operands[4]));
+ DONE;
+})
+
+(define_insn "vpdpwssd_<mode>_maskz_1"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDWDACCD)
+ (match_operand:VI4_AVX512VL 4 "const0_operand" "C")
+ (match_operand:<avx512fmaskmode> 5 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpwssd\t{%3, %2, %0%{%5%}%{z%}|%0%{%5%}%{z%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+
+(define_insn "vpdpwssds_<mode>"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+] UNSPEC_VPMADDWDACCSSD))]
+ "TARGET_AVX512VNNI"
+ "vpdpwssds\t{%3, %2, %0|%0, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_insn "vpdpwssds_<mode>_mask"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDWDACCSSD)
+ (match_dup 1)
+ (match_operand:<avx512fmaskmode> 4 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpwssds\t{%3, %2, %0%{%4%}|%0%{%4%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])
+
+(define_expand "vpdpwssds_<mode>_maskz"
+ [(match_operand:VI4_AVX512VL 0 "register_operand")
+ (match_operand:VI4_AVX512VL 1 "register_operand")
+ (match_operand:VI4_AVX512VL 2 "register_operand")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand")
+ (match_operand:<avx512fmaskmode> 4 "register_operand")]
+ "TARGET_AVX512VNNI"
+{
+ emit_insn (gen_vpdpwssds_<mode>_maskz_1 (
+ operands[0], operands[1], operands[2], operands[3],
+ CONST0_RTX (<MODE>mode), operands[4]));
+ DONE;
+})
+
+(define_insn "vpdpwssds_<mode>_maskz_1"
+ [(set (match_operand:VI4_AVX512VL 0 "register_operand" "=v")
+ (vec_merge:VI4_AVX512VL (unspec:VI4_AVX512VL
+ [(match_operand:VI4_AVX512VL 1 "register_operand" "0")
+ (match_operand:VI4_AVX512VL 2 "register_operand" "v")
+ (match_operand:VI4_AVX512VL 3 "nonimmediate_operand" "vm")
+ ] UNSPEC_VPMADDWDACCSSD)
+ (match_operand:VI4_AVX512VL 4 "const0_operand" "C")
+ (match_operand:<avx512fmaskmode> 5 "register_operand" "Yk"))
+)]
+ "TARGET_AVX512VNNI"
+ "vpdpwssds\t{%3, %2, %0%{%5%}%{z%}|%0%{%5%}%{z%}, %2, %3 }"
+ [(set_attr ("prefix") ("evex"))])