Commit 30442682 authored by Tejas Belagod, committed by Tejas Belagod

Implement support for AArch64 Crypto SHA1.

gcc/
	* config/aarch64/aarch64-simd-builtins.def: Update builtins table.
	* config/aarch64/aarch64-builtins.c (aarch64_types_ternopu_qualifiers,
	TYPES_TERNOPU): New.
	* config/aarch64/aarch64-simd.md (aarch64_crypto_sha1hsi,
	aarch64_crypto_sha1su1v4si, aarch64_crypto_sha1<sha1_op>v4si,
	aarch64_crypto_sha1su0v4si): New.
	* config/aarch64/arm_neon.h (vsha1cq_u32, vsha1mq_u32, vsha1pq_u32,
	vsha1h_u32, vsha1su0q_u32, vsha1su1q_u32): New.
	* config/aarch64/iterators.md (UNSPEC_SHA1<CPMH>, UNSPEC_SHA1SU<01>):
	New.
	(CRYPTO_SHA1): New int iterator.
	(sha1_op): New int attribute.

testsuite/
	* gcc.target/aarch64/sha1_1.c: New.

From-SVN: r206118
parent 5a7a4e80
gcc/ChangeLog
+2013-12-19  Tejas Belagod  <tejas.belagod@arm.com>
+
+	* config/aarch64/aarch64-simd-builtins.def: Update builtins table.
+	* config/aarch64/aarch64-builtins.c (aarch64_types_ternopu_qualifiers,
+	TYPES_TERNOPU): New.
+	* config/aarch64/aarch64-simd.md (aarch64_crypto_sha1hsi,
+	aarch64_crypto_sha1su1v4si, aarch64_crypto_sha1<sha1_op>v4si,
+	aarch64_crypto_sha1su0v4si): New.
+	* config/aarch64/arm_neon.h (vsha1cq_u32, vsha1mq_u32, vsha1pq_u32,
+	vsha1h_u32, vsha1su0q_u32, vsha1su1q_u32): New.
+	* config/aarch64/iterators.md (UNSPEC_SHA1<CPMH>, UNSPEC_SHA1SU<01>):
+	New.
+	(CRYPTO_SHA1): New int iterator.
+	(sha1_op): New int attribute.
+
 2013-12-19  Tejas Belagod  <tejas.belagod@arm.com>

 	* config/aarch64/aarch64-simd-builtins.def: Update builtins table.
 	* config/aarch64/aarch64-builtins.c (aarch64_types_binopu_qualifiers,
 	TYPES_BINOPU): New.
 	* config/aarch64/aarch64-simd.md (aarch64_crypto_aes<aes_op>v16qi,
gcc/config/aarch64/aarch64-builtins.c
@@ -161,6 +161,12 @@ aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
 #define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
 static enum aarch64_type_qualifiers
+aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
+  = { qualifier_unsigned, qualifier_unsigned,
+      qualifier_unsigned, qualifier_unsigned };
+#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
+
+static enum aarch64_type_qualifiers
 aarch64_types_quadop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
   = { qualifier_none, qualifier_none, qualifier_none,
       qualifier_none, qualifier_none };
gcc/config/aarch64/aarch64-simd-builtins.def
@@ -372,3 +372,11 @@
 VAR1 (BINOPU, crypto_aesd, 0, v16qi)
 VAR1 (UNOPU, crypto_aesmc, 0, v16qi)
 VAR1 (UNOPU, crypto_aesimc, 0, v16qi)
+
+/* Implemented by aarch64_crypto_sha1<op><mode>.  */
+VAR1 (UNOPU, crypto_sha1h, 0, si)
+VAR1 (BINOPU, crypto_sha1su1, 0, v4si)
+VAR1 (TERNOPU, crypto_sha1c, 0, v4si)
+VAR1 (TERNOPU, crypto_sha1m, 0, v4si)
+VAR1 (TERNOPU, crypto_sha1p, 0, v4si)
+VAR1 (TERNOPU, crypto_sha1su0, 0, v4si)
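Each VAR1 entry above expands to a builtin whose name is formed from the pattern name, the mode, and a suffix derived from the qualifier list (_uu for UNOPU, _uuu for BINOPU, _uuuu for TERNOPU), which is why the arm_neon.h hunk further down calls names such as __builtin_aarch64_crypto_sha1cv4si_uuuu. A minimal sketch of invoking one of these builtins directly, assuming a compiler built with this patch and -march=armv8-a+crypto (the arm_neon.h intrinsics below are the intended interface):

#include <arm_neon.h>

/* Illustrative only: call the raw builtin generated by the TERNOPU
   entry for crypto_sha1c; user code would normally go through the
   vsha1cq_u32 wrapper instead.  */
uint32x4_t
sha1c_direct (uint32x4_t abcd, uint32_t e, uint32x4_t wk)
{
  return __builtin_aarch64_crypto_sha1cv4si_uuuu (abcd, e, wk);
}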
gcc/config/aarch64/aarch64-simd.md
@@ -4096,3 +4096,46 @@
   [(set_attr "type" "crypto_aes")]
 )
+
+;; sha1
+
+(define_insn "aarch64_crypto_sha1hsi"
+  [(set (match_operand:SI 0 "register_operand" "=w")
+        (unspec:SI [(match_operand:SI 1
+                       "register_operand" "w")]
+         UNSPEC_SHA1H))]
+  "TARGET_SIMD && TARGET_CRYPTO"
+  "sha1h\\t%s0, %s1"
+  [(set_attr "type" "crypto_sha1_fast")]
+)
+
+(define_insn "aarch64_crypto_sha1su1v4si"
+  [(set (match_operand:V4SI 0 "register_operand" "=w")
+        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "0")
+                      (match_operand:V4SI 2 "register_operand" "w")]
+         UNSPEC_SHA1SU1))]
+  "TARGET_SIMD && TARGET_CRYPTO"
+  "sha1su1\\t%0.4s, %2.4s"
+  [(set_attr "type" "crypto_sha1_fast")]
+)
+
+(define_insn "aarch64_crypto_sha1<sha1_op>v4si"
+  [(set (match_operand:V4SI 0 "register_operand" "=w")
+        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "0")
+                      (match_operand:SI 2 "register_operand" "w")
+                      (match_operand:V4SI 3 "register_operand" "w")]
+         CRYPTO_SHA1))]
+  "TARGET_SIMD && TARGET_CRYPTO"
+  "sha1<sha1_op>\\t%q0, %s2, %3.4s"
+  [(set_attr "type" "crypto_sha1_slow")]
+)
+
+(define_insn "aarch64_crypto_sha1su0v4si"
+  [(set (match_operand:V4SI 0 "register_operand" "=w")
+        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "0")
+                      (match_operand:V4SI 2 "register_operand" "w")
+                      (match_operand:V4SI 3 "register_operand" "w")]
+         UNSPEC_SHA1SU0))]
+  "TARGET_SIMD && TARGET_CRYPTO"
+  "sha1su0\\t%0.4s, %2.4s, %3.4s"
+  [(set_attr "type" "crypto_sha1_xor")]
+)
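Because the third pattern uses the CRYPTO_SHA1 int iterator together with the sha1_op attribute (see the iterators.md hunk below), it expands into three insns, aarch64_crypto_sha1cv4si, aarch64_crypto_sha1mv4si and aarch64_crypto_sha1pv4si, which emit sha1c, sha1m and sha1p respectively. A hedged sketch of the codegen the sha1h pattern implies, going by its "sha1h\\t%s0, %s1" template (exact registers depend on allocation):

#include <arm_neon.h>

/* Expected to compile to a single sha1h instruction, e.g.
   "sha1h s0, s1", when built with -march=armv8-a+crypto.  SHA1H
   performs the fixed rotate ROR (hash_e, 2) used between SHA-1
   quad-rounds.  */
uint32_t
sha1_fixed_rotate (uint32_t hash_e)
{
  return vsha1h_u32 (hash_e);
}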
gcc/config/aarch64/arm_neon.h
@@ -22952,6 +22952,46 @@ vrsrad_n_u64 (uint64x1_t __a, uint64x1_t __b, const int __c)
   return (uint64x1_t) __builtin_aarch64_ursra_ndi (__a, __b, __c);
 }

+#ifdef __ARM_FEATURE_CRYPTO
+
+/* vsha1  */
+
+static __inline uint32x4_t
+vsha1cq_u32 (uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk)
+{
+  return __builtin_aarch64_crypto_sha1cv4si_uuuu (hash_abcd, hash_e, wk);
+}
+
+static __inline uint32x4_t
+vsha1mq_u32 (uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk)
+{
+  return __builtin_aarch64_crypto_sha1mv4si_uuuu (hash_abcd, hash_e, wk);
+}
+
+static __inline uint32x4_t
+vsha1pq_u32 (uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk)
+{
+  return __builtin_aarch64_crypto_sha1pv4si_uuuu (hash_abcd, hash_e, wk);
+}
+
+static __inline uint32_t
+vsha1h_u32 (uint32_t hash_e)
+{
+  return __builtin_aarch64_crypto_sha1hsi_uu (hash_e);
+}
+
+static __inline uint32x4_t
+vsha1su0q_u32 (uint32x4_t w0_3, uint32x4_t w4_7, uint32x4_t w8_11)
+{
+  return __builtin_aarch64_crypto_sha1su0v4si_uuuu (w0_3, w4_7, w8_11);
+}
+
+static __inline uint32x4_t
+vsha1su1q_u32 (uint32x4_t tw0_3, uint32x4_t w12_15)
+{
+  return __builtin_aarch64_crypto_sha1su1v4si_uuu (tw0_3, w12_15);
+}
+
+#endif
+
 /* vshl */

 __extension__ static __inline int8x8_t __attribute__ ((__always_inline__))
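Taken together, the six intrinsics cover one quad-round of the SHA-1 compression function plus the message-schedule extension. A sketch of how they compose, with hypothetical variable and function names and assuming the usual SHA-1 data flow (rounds 0-3 use the "choose" rounds via vsha1cq_u32; rounds based on the parity and majority functions would use vsha1pq_u32 and vsha1mq_u32 with their own round constants):

#include <arm_neon.h>

/* Rounds 0-3 of a SHA-1 block, plus one schedule extension.
   abcd holds working variables A..D, *e holds E; w0_3 .. w12_15 are
   message-schedule words W[0..15].  Illustrative, not from the patch.  */
static inline uint32x4_t
sha1_rounds_0_3 (uint32x4_t abcd, uint32_t *e,
                 uint32x4_t w0_3, uint32x4_t w4_7,
                 uint32x4_t w8_11, uint32x4_t w12_15,
                 uint32x4_t *w16_19)
{
  /* Add the round constant for rounds 0-19.  */
  uint32x4_t wk = vaddq_u32 (w0_3, vdupq_n_u32 (0x5a827999));
  /* vsha1h_u32 gives ROR (A, 2), which becomes E for the next step.  */
  uint32_t e_next = vsha1h_u32 (vgetq_lane_u32 (abcd, 0));
  /* Four rounds with the "choose" function.  */
  abcd = vsha1cq_u32 (abcd, *e, wk);
  *e = e_next;
  /* W[16..19] = sha1su1 (sha1su0 (W[0..3], W[4..7], W[8..11]), W[12..15]).  */
  *w16_19 = vsha1su1q_u32 (vsha1su0q_u32 (w0_3, w4_7, w8_11), w12_15);
  return abcd;
}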
gcc/config/aarch64/iterators.md
@@ -271,6 +271,12 @@
     UNSPEC_AESD        ; Used in aarch64-simd.md.
     UNSPEC_AESMC       ; Used in aarch64-simd.md.
     UNSPEC_AESIMC      ; Used in aarch64-simd.md.
+    UNSPEC_SHA1C       ; Used in aarch64-simd.md.
+    UNSPEC_SHA1M       ; Used in aarch64-simd.md.
+    UNSPEC_SHA1P       ; Used in aarch64-simd.md.
+    UNSPEC_SHA1H       ; Used in aarch64-simd.md.
+    UNSPEC_SHA1SU0     ; Used in aarch64-simd.md.
+    UNSPEC_SHA1SU1     ; Used in aarch64-simd.md.
 ])

 ;; -------------------------------------------------------------------
@@ -855,6 +861,8 @@
 (define_int_iterator CRYPTO_AES [UNSPEC_AESE UNSPEC_AESD])
 (define_int_iterator CRYPTO_AESMC [UNSPEC_AESMC UNSPEC_AESIMC])
+
+(define_int_iterator CRYPTO_SHA1 [UNSPEC_SHA1C UNSPEC_SHA1M UNSPEC_SHA1P])

 ;; -------------------------------------------------------------------
 ;; Int Iterators Attributes.
 ;; -------------------------------------------------------------------
@@ -974,3 +982,6 @@
 (define_int_attr aes_op [(UNSPEC_AESE "e") (UNSPEC_AESD "d")])
 (define_int_attr aesmc_op [(UNSPEC_AESMC "mc") (UNSPEC_AESIMC "imc")])
+
+(define_int_attr sha1_op [(UNSPEC_SHA1C "c") (UNSPEC_SHA1P "p")
+                          (UNSPEC_SHA1M "m")])
gcc/testsuite/ChangeLog
+2013-12-19  Tejas Belagod  <tejas.belagod@arm.com>
+
+	* gcc.target/aarch64/sha1_1.c: New.
+
 2013-12-19  Tejas Belagod  <tejas.belagod@arm.com>

 	* gcc.target/aarch64/aes_1.c: New.

 2013-12-19  Dominik Vogt  <vogt@linux.vnet.ibm.com>
gcc/testsuite/gcc.target/aarch64/sha1_1.c (new file)

/* { dg-do compile } */
/* { dg-options "-march=armv8-a+crypto" } */

#include "arm_neon.h"

uint32x4_t
test_vsha1cq_u32 (uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk)
{
  return vsha1cq_u32 (hash_abcd, hash_e, wk);
}

/* { dg-final { scan-assembler-times "sha1c\\tq" 1 } } */

uint32x4_t
test_vsha1mq_u32 (uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk)
{
  return vsha1mq_u32 (hash_abcd, hash_e, wk);
}

/* { dg-final { scan-assembler-times "sha1m\\tq" 1 } } */

uint32x4_t
test_vsha1pq_u32 (uint32x4_t hash_abcd, uint32_t hash_e, uint32x4_t wk)
{
  return vsha1pq_u32 (hash_abcd, hash_e, wk);
}

/* { dg-final { scan-assembler-times "sha1p\\tq" 1 } } */

uint32_t
test_vsha1h_u32 (uint32_t hash_e)
{
  return vsha1h_u32 (hash_e);
}

/* { dg-final { scan-assembler-times "sha1h\\ts" 1 } } */

uint32x4_t
test_vsha1su0q_u32 (uint32x4_t w0_3, uint32x4_t w4_7, uint32x4_t w8_11)
{
  return vsha1su0q_u32 (w0_3, w4_7, w8_11);
}

/* { dg-final { scan-assembler-times "sha1su0\\tv" 1 } } */

uint32x4_t
test_vsha1su1q_u32 (uint32x4_t tw0_3, uint32x4_t w12_15)
{
  return vsha1su1q_u32 (tw0_3, w12_15);
}

/* { dg-final { scan-assembler-times "sha1su1\\tv" 1 } } */
/* { dg-final { cleanup-saved-temps } } */