path: root/gcc
authorLiao Shihua <shihua@iscas.ac.cn>2024-01-15 16:31:34 +0800
committerChristoph Müllner <christoph.muellner@vrull.eu>2024-01-15 16:39:53 +0100
commitc7ad6bf5b9e478d87294230318db675d7c8f3ce4 (patch)
tree9d68a8dba33be55fb4413c0a8e5d3afd4756885b /gcc
parent4d7c4288a90ab949f70c47a4b3e97c52f4f39f11 (diff)
downloadgcc-c7ad6bf5b9e478d87294230318db675d7c8f3ce4.zip
gcc-c7ad6bf5b9e478d87294230318db675d7c8f3ce4.tar.gz
gcc-c7ad6bf5b9e478d87294230318db675d7c8f3ce4.tar.bz2
RISC-V: Add C intrinsics for the Scalar Crypto Extension
This patch adds C intrinsics for the Scalar Crypto Extension.

gcc/ChangeLog:

	* config.gcc: Include riscv_crypto.h.
	* config/riscv/riscv_crypto.h: New file.

gcc/testsuite/ChangeLog:

	* gcc.target/riscv/scalar_crypto_intrinsic-32.c: New test.
	* gcc.target/riscv/scalar_crypto_intrinsic-64.c: New test.
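For context, user code gains access to these intrinsics by including <riscv_crypto.h> and compiling with a -march string that enables the corresponding extensions. The sketch below is illustrative only, is not part of the patch, and assumes a compiler carrying this change invoked with e.g. -march=rv32gc_zkne_zknh -mabi=ilp32d; the wrapper function names are hypothetical.

/* Hypothetical usage example, not part of the patch.  */
#include <stdint.h>
#include <riscv_crypto.h>

/* Zknh: SHA-256 sigma0 transform of a 32-bit word.  */
uint32_t
example_sha256_sigma0 (uint32_t x)
{
  return __riscv_sha256sig0 (x);
}

/* Zkne (RV32): AES middle-round encryption step; the byte-select
   argument must be a compile-time constant in the range 0..3.  */
uint32_t
example_aes32_round (uint32_t rs1, uint32_t rs2)
{
  return __riscv_aes32esmi (rs1, rs2, 0);
}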
Diffstat (limited to 'gcc')
-rw-r--r--gcc/config.gcc2
-rw-r--r--gcc/config/riscv/riscv_crypto.h309
-rw-r--r--gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-32.c115
-rw-r--r--gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-64.c123
4 files changed, 548 insertions(+), 1 deletion(-)
diff --git a/gcc/config.gcc b/gcc/config.gcc
index d17787b..11c3a64 100644
--- a/gcc/config.gcc
+++ b/gcc/config.gcc
@@ -549,7 +549,7 @@ riscv*)
extra_objs="${extra_objs} riscv-vector-builtins.o riscv-vector-builtins-shapes.o riscv-vector-builtins-bases.o"
extra_objs="${extra_objs} thead.o riscv-target-attr.o"
d_target_objs="riscv-d.o"
- extra_headers="riscv_vector.h"
+ extra_headers="riscv_vector.h riscv_crypto.h"
target_gtfiles="$target_gtfiles \$(srcdir)/config/riscv/riscv-vector-builtins.cc"
target_gtfiles="$target_gtfiles \$(srcdir)/config/riscv/riscv-vector-builtins.h"
;;
diff --git a/gcc/config/riscv/riscv_crypto.h b/gcc/config/riscv/riscv_crypto.h
new file mode 100644
index 0000000..1bfe3d7
--- /dev/null
+++ b/gcc/config/riscv/riscv_crypto.h
@@ -0,0 +1,309 @@
+/* RISC-V 'Scalar Crypto' Extension intrinsics include file.
+ Copyright (C) 2024 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published
+ by the Free Software Foundation; either version 3, or (at your
+ option) any later version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ Under Section 7 of GPL version 3, you are granted additional
+ permissions described in the GCC Runtime Library Exception, version
+ 3.1, as published by the Free Software Foundation.
+
+ You should have received a copy of the GNU General Public License and
+ a copy of the GCC Runtime Library Exception along with this program;
+ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+ <http://www.gnu.org/licenses/>. */
+
+#ifndef __RISCV_SCALAR_CRYPTO_H
+#define __RISCV_SCALAR_CRYPTO_H
+
+#include <stdint.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#if defined (__riscv_zknd)
+
+#if __riscv_xlen == 32
+
+#ifdef __OPTIMIZE__
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes32dsi (uint32_t rs1, uint32_t rs2, const int bs)
+{
+ return __builtin_riscv_aes32dsi (rs1,rs2,bs);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes32dsmi (uint32_t rs1, uint32_t rs2, const int bs)
+{
+ return __builtin_riscv_aes32dsmi (rs1,rs2,bs);
+}
+
+#else
+#define __riscv_aes32dsi(x, y, bs) __builtin_riscv_aes32dsi (x, y, bs)
+#define __riscv_aes32dsmi(x, y, bs) __builtin_riscv_aes32dsmi (x, y, bs)
+#endif
+
+#endif
+
+#if __riscv_xlen == 64
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64ds (uint64_t rs1, uint64_t rs2)
+{
+ return __builtin_riscv_aes64ds (rs1,rs2);
+}
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64dsm (uint64_t rs1, uint64_t rs2)
+{
+ return __builtin_riscv_aes64dsm (rs1,rs2);
+}
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64im (uint64_t rs1)
+{
+ return __builtin_riscv_aes64im (rs1);
+}
+#endif
+#endif // __riscv_zknd
+
+#if (defined (__riscv_zknd) || defined (__riscv_zkne)) && (__riscv_xlen == 64)
+
+#ifdef __OPTIMIZE__
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64ks1i (uint64_t rs1, const int rnum)
+{
+ return __builtin_riscv_aes64ks1i (rs1,rnum);
+}
+
+#else
+#define __riscv_aes64ks1i(x, rnum) __builtin_riscv_aes64ks1i (x, rnum)
+#endif
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64ks2 (uint64_t rs1, uint64_t rs2)
+{
+ return __builtin_riscv_aes64ks2 (rs1,rs2);
+}
+
+#endif // __riscv_zknd || __riscv_zkne
+
+#if defined (__riscv_zkne)
+
+#if __riscv_xlen == 32
+
+#ifdef __OPTIMIZE__
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes32esi (uint32_t rs1, uint32_t rs2, const int bs)
+{
+ return __builtin_riscv_aes32esi (rs1,rs2,bs);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes32esmi (uint32_t rs1, uint32_t rs2, const int bs)
+{
+ return __builtin_riscv_aes32esmi (rs1,rs2,bs);
+}
+
+#else
+#define __riscv_aes32esi(x, y, bs) __builtin_riscv_aes32esi (x, y, bs)
+#define __riscv_aes32esmi(x, y, bs) __builtin_riscv_aes32esmi (x, y, bs)
+#endif
+
+#endif
+
+#if __riscv_xlen == 64
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64es (uint64_t rs1, uint64_t rs2)
+{
+ return __builtin_riscv_aes64es (rs1,rs2);
+}
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_aes64esm (uint64_t rs1, uint64_t rs2)
+{
+ return __builtin_riscv_aes64esm (rs1,rs2);
+}
+#endif
+#endif // __riscv_zkne
+
+#if defined (__riscv_zknh)
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha256sig0 (uint32_t rs1)
+{
+ return __builtin_riscv_sha256sig0 (rs1);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha256sig1 (uint32_t rs1)
+{
+ return __builtin_riscv_sha256sig1 (rs1);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha256sum0 (uint32_t rs1)
+{
+ return __builtin_riscv_sha256sum0 (rs1);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha256sum1 (uint32_t rs1)
+{
+ return __builtin_riscv_sha256sum1 (rs1);
+}
+
+#if __riscv_xlen == 32
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sig0h (uint32_t rs1, uint32_t rs2)
+{
+ return __builtin_riscv_sha512sig0h (rs1,rs2);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sig0l (uint32_t rs1, uint32_t rs2)
+{
+ return __builtin_riscv_sha512sig0l (rs1,rs2);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sig1h (uint32_t rs1, uint32_t rs2)
+{
+ return __builtin_riscv_sha512sig1h (rs1,rs2);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sig1l (uint32_t rs1, uint32_t rs2)
+{
+ return __builtin_riscv_sha512sig1l (rs1,rs2);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sum0r (uint32_t rs1, uint32_t rs2)
+{
+ return __builtin_riscv_sha512sum0r (rs1,rs2);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sum1r (uint32_t rs1, uint32_t rs2)
+{
+ return __builtin_riscv_sha512sum1r (rs1,rs2);
+}
+
+#endif
+
+#if __riscv_xlen == 64
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sig0 (uint64_t rs1)
+{
+ return __builtin_riscv_sha512sig0 (rs1);
+}
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sig1 (uint64_t rs1)
+{
+ return __builtin_riscv_sha512sig1 (rs1);
+}
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sum0 (uint64_t rs1)
+{
+ return __builtin_riscv_sha512sum0 (rs1);
+}
+
+extern __inline uint64_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sha512sum1 (uint64_t rs1)
+{
+ return __builtin_riscv_sha512sum1 (rs1);
+}
+#endif
+#endif // __riscv_zknh
+
+#if defined (__riscv_zksh)
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sm3p0 (uint32_t rs1)
+{
+ return __builtin_riscv_sm3p0 (rs1);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sm3p1 (uint32_t rs1)
+{
+ return __builtin_riscv_sm3p1 (rs1);
+}
+
+#endif // __riscv_zksh
+
+#if defined (__riscv_zksed)
+
+#ifdef __OPTIMIZE__
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sm4ed (uint32_t rs1, uint32_t rs2, const int bs)
+{
+ return __builtin_riscv_sm4ed (rs1,rs2,bs);
+}
+
+extern __inline uint32_t
+__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
+__riscv_sm4ks (uint32_t rs1, uint32_t rs2, const int bs)
+{
+ return __builtin_riscv_sm4ks (rs1,rs2,bs);
+}
+
+#else
+#define __riscv_sm4ed(x, y, bs) __builtin_riscv_sm4ed (x, y, bs)
+#define __riscv_sm4ks(x, y, bs) __builtin_riscv_sm4ks (x, y, bs)
+#endif
+
+#endif // __riscv_zksed
+
+#if defined (__cplusplus)
+}
+#endif // __cplusplus
+#endif // __RISCV_SCALAR_CRYPTO_H
diff --git a/gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-32.c b/gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-32.c
new file mode 100644
index 0000000..87d576a
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-32.c
@@ -0,0 +1,115 @@
+/* { dg-do compile } */
+/* { dg-require-effective-target rv32 } */
+/* { dg-options "-march=rv32gc_zknd_zkne_zknh_zksed_zksh -mabi=ilp32d" } */
+/* { dg-skip-if "" { *-*-* } { "-g" "-flto"} } */
+
+#include "riscv_crypto.h"
+
+uint32_t foo1 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_aes32dsi (rs1,rs2,1);
+}
+
+uint32_t foo2 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_aes32dsmi (rs1,rs2,1);
+}
+
+uint32_t foo3 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_aes32esi (rs1,rs2,1);
+}
+
+uint32_t foo4 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_aes32esmi (rs1,rs2,1);
+}
+
+uint32_t foo5 (uint32_t rs1)
+{
+ return __riscv_sha256sig0 (rs1);
+}
+
+uint32_t foo6 (uint32_t rs1)
+{
+ return __riscv_sha256sig1 (rs1);
+}
+
+uint32_t foo7 (uint32_t rs1)
+{
+ return __riscv_sha256sum0 (rs1);
+}
+
+uint32_t foo8 (uint32_t rs1)
+{
+ return __riscv_sha256sum1 (rs1);
+}
+
+uint32_t foo9 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sha512sig0h (rs1,rs2);
+}
+
+uint32_t foo10 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sha512sig0l (rs1,rs2);
+}
+
+uint32_t foo11 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sha512sig1h (rs1,rs2);
+}
+
+uint32_t foo12 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sha512sig1l (rs1,rs2);
+}
+
+uint32_t foo13 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sha512sum0r (rs1,rs2);
+}
+
+uint32_t foo14 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sha512sum1r (rs1,rs2);
+}
+
+uint32_t foo15 (uint32_t rs1)
+{
+ return __riscv_sm3p0 (rs1);
+}
+
+uint32_t foo16 (uint32_t rs1)
+{
+ return __riscv_sm3p1 (rs1);
+}
+
+uint32_t foo17 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sm4ed (rs1,rs2,1);
+}
+
+uint32_t foo18 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sm4ks (rs1,rs2,1);
+}
+
+/* { dg-final { scan-assembler-times "aes32dsi" 1 } } */
+/* { dg-final { scan-assembler-times "aes32dsmi" 1 } } */
+/* { dg-final { scan-assembler-times "aes32esi" 1 } } */
+/* { dg-final { scan-assembler-times "aes32esmi" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sig0" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sig1" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sum0" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sum1" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sig0h" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sig0l" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sig1h" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sig1l" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sum0r" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sum1r" 1 } } */
+/* { dg-final { scan-assembler-times "sm3p0" 1 } } */
+/* { dg-final { scan-assembler-times "sm3p1" 1 } } */
+/* { dg-final { scan-assembler-times "sm4ks" 1 } } */
+/* { dg-final { scan-assembler-times "sm4ed" 1 } } */
diff --git a/gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-64.c b/gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-64.c
new file mode 100644
index 0000000..01c4a4e
--- /dev/null
+++ b/gcc/testsuite/gcc.target/riscv/scalar_crypto_intrinsic-64.c
@@ -0,0 +1,123 @@
+/* { dg-do compile } */
+/* { dg-require-effective-target rv64 } */
+/* { dg-options "-march=rv64gc_zknd_zkne_zknh_zksed_zksh -mabi=lp64d" } */
+/* { dg-skip-if "" { *-*-* } { "-g" "-flto"} } */
+
+#include "riscv_crypto.h"
+
+uint64_t foo1 (uint64_t rs1, uint64_t rs2)
+{
+ return __riscv_aes64ds (rs1,rs2);
+}
+
+uint64_t foo2 (uint64_t rs1, uint64_t rs2)
+{
+ return __riscv_aes64dsm (rs1,rs2);
+}
+
+uint64_t foo3 (uint64_t rs1)
+{
+ return __riscv_aes64im (rs1);
+}
+
+uint64_t foo4 (uint64_t rs1)
+{
+ return __riscv_aes64ks1i (rs1,1);
+}
+
+uint64_t foo5 (uint64_t rs1, uint64_t rs2)
+{
+ return __riscv_aes64ks2 (rs1,rs2);
+}
+
+uint64_t foo6 (uint64_t rs1, uint64_t rs2)
+{
+ return __riscv_aes64es (rs1,rs2);
+}
+
+uint64_t foo7 (uint64_t rs1, uint64_t rs2)
+{
+ return __riscv_aes64esm (rs1,rs2);
+}
+
+uint64_t foo8 (uint64_t rs1)
+{
+ return __riscv_sha512sig0 (rs1);
+}
+
+uint64_t foo9 (uint64_t rs1)
+{
+ return __riscv_sha512sig1 (rs1);
+}
+
+uint64_t foo10 (uint64_t rs1)
+{
+ return __riscv_sha512sum0 (rs1);
+}
+
+uint64_t foo11 (uint64_t rs1)
+{
+ return __riscv_sha512sum1 (rs1);
+}
+
+uint32_t foo12 (uint32_t rs1)
+{
+ return __riscv_sha256sig0 (rs1);
+}
+
+uint32_t foo13 (uint32_t rs1)
+{
+ return __riscv_sha256sig1 (rs1);
+}
+
+uint32_t foo14 (uint32_t rs1)
+{
+ return __riscv_sha256sum0 (rs1);
+}
+
+uint32_t foo15 (uint32_t rs1)
+{
+ return __riscv_sha256sum1 (rs1);
+}
+
+uint32_t foo16 (uint32_t rs1)
+{
+ return __riscv_sm3p0 (rs1);
+}
+
+uint32_t foo17 (uint32_t rs1)
+{
+ return __riscv_sm3p1 (rs1);
+}
+
+uint32_t foo18 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sm4ed (rs1,rs2,1);
+}
+
+uint32_t foo19 (uint32_t rs1, uint32_t rs2)
+{
+ return __riscv_sm4ks (rs1,rs2,1);
+}
+
+/* { dg-final { scan-assembler-times "aes64ds\t" 1 } } */
+/* { dg-final { scan-assembler-times "aes64dsm" 1 } } */
+/* { dg-final { scan-assembler-times "aes64ks1i" 1 } } */
+/* { dg-final { scan-assembler-times "aes64ks2" 1 } } */
+/* { dg-final { scan-assembler-times "aes64im" 1 } } */
+/* { dg-final { scan-assembler-times "aes64es\t" 1 } } */
+/* { dg-final { scan-assembler-times "aes64esm" 1 } } */
+/* { dg-final { scan-assembler-times "aes64ks1i" 1 } } */
+/* { dg-final { scan-assembler-times "aes64ks2" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sig0" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sig1" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sum0" 1 } } */
+/* { dg-final { scan-assembler-times "sha512sum1" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sig0" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sig1" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sum0" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sum1" 1 } } */
+/* { dg-final { scan-assembler-times "sm3p0" 1 } } */
+/* { dg-final { scan-assembler-times "sm3p1" 1 } } */
+/* { dg-final { scan-assembler-times "sm4ks" 1 } } */
+/* { dg-final { scan-assembler-times "sm4ed" 1 } } */