Commit 6901ea62 by Andrew Senkevich Committed by H.J. Lu

Add AVX512 k-mask intrinsics

gcc/

2016-12-05  Andrew Senkevich  <andrew.senkevich@intel.com>

	* config/i386/avx512bwintrin.h: Add new k-mask intrinsics.
	* config/i386/avx512dqintrin.h: Ditto.
	* config/i386/avx512fintrin.h: Ditto.
	* config/i386/i386-builtin-types.def (UCHAR_FTYPE_UQI_UQI_PUCHAR,
	UCHAR_FTYPE_UHI_UHI_PUCHAR, UCHAR_FTYPE_USI_USI_PUCHAR,
	UCHAR_FTYPE_UDI_UDI_PUCHAR, UCHAR_FTYPE_UQI_UQI, UCHAR_FTYPE_UHI_UHI,
	UCHAR_FTYPE_USI_USI, UCHAR_FTYPE_UDI_UDI, UQI_FTYPE_UQI_INT,
	UHI_FTYPE_UHI_INT, USI_FTYPE_USI_INT, UDI_FTYPE_UDI_INT,
	UQI_FTYPE_UQI, USI_FTYPE_USI, UDI_FTYPE_UDI, UQI_FTYPE_UQI_UQI): New
	function types.
	* config/i386/i386-builtin.def (__builtin_ia32_knotqi,
	__builtin_ia32_knotsi, __builtin_ia32_knotdi,
	__builtin_ia32_korqi, __builtin_ia32_korsi, __builtin_ia32_kordi,
	__builtin_ia32_kxnorqi, __builtin_ia32_kxnorsi,
	__builtin_ia32_kxnordi, __builtin_ia32_kxorqi, __builtin_ia32_kxorsi,
	__builtin_ia32_kxordi, __builtin_ia32_kandqi,
	__builtin_ia32_kandsi, __builtin_ia32_kanddi, __builtin_ia32_kandnqi,
	__builtin_ia32_kandnsi, __builtin_ia32_kandndi): New.
	* config/i386/i386.c (ix86_expand_args_builtin): Handle new types.

gcc/testsuite/

2016-12-05  Andrew Senkevich  <andrew.senkevich@intel.com>

	* gcc.target/i386/avx512bw-kandd-1.c: New.
	* gcc.target/i386/avx512bw-kandnd-1.c: Ditto.
	* gcc.target/i386/avx512bw-kandnq-1.c: Ditto.
	* gcc.target/i386/avx512bw-kandq-1.c: Ditto.
	* gcc.target/i386/avx512bw-knotd-1.c: Ditto.
	* gcc.target/i386/avx512bw-knotq-1.c: Ditto.
	* gcc.target/i386/avx512bw-kord-1.c: Ditto.
	* gcc.target/i386/avx512bw-korq-1.c: Ditto.
	* gcc.target/i386/avx512bw-kunpckdq-3.c: Ditto.
	* gcc.target/i386/avx512bw-kunpckwd-3.c: Ditto.
	* gcc.target/i386/avx512bw-kxnord-1.c: Ditto.
	* gcc.target/i386/avx512bw-kxnorq-1.c: Ditto.
	* gcc.target/i386/avx512bw-kxord-1.c: Ditto.
	* gcc.target/i386/avx512bw-kxorq-1.c: Ditto.
	* gcc.target/i386/avx512dq-kandb-1.c: Ditto.
	* gcc.target/i386/avx512dq-kandnb-1.c: Ditto.
	* gcc.target/i386/avx512dq-knotb-1.c: Ditto.
	* gcc.target/i386/avx512dq-korb-1.c: Ditto.
	* gcc.target/i386/avx512dq-kxnorb-1.c: Ditto.
	* gcc.target/i386/avx512dq-kxorb-1.c: Ditto.
	* gcc.target/i386/avx512f-kunpckbw-3.c: Ditto.
	* gcc.target/i386/avx512f-kandnw-1.c: Removed unneeded check.

From-SVN: r243265
parent a80b4f57
2016-12-05 Andrew Senkevich <andrew.senkevich@intel.com>
* config/i386/avx512bwintrin.h: Add new k-mask intrinsics.
* config/i386/avx512dqintrin.h: Ditto.
* config/i386/avx512fintrin.h: Ditto.
* config/i386/i386-builtin-types.def (UCHAR_FTYPE_UQI_UQI_PUCHAR,
UCHAR_FTYPE_UHI_UHI_PUCHAR, UCHAR_FTYPE_USI_USI_PUCHAR,
UCHAR_FTYPE_UDI_UDI_PUCHAR, UCHAR_FTYPE_UQI_UQI, UCHAR_FTYPE_UHI_UHI,
UCHAR_FTYPE_USI_USI, UCHAR_FTYPE_UDI_UDI, UQI_FTYPE_UQI_INT,
UHI_FTYPE_UHI_INT, USI_FTYPE_USI_INT, UDI_FTYPE_UDI_INT,
UQI_FTYPE_UQI, USI_FTYPE_USI, UDI_FTYPE_UDI, UQI_FTYPE_UQI_UQI): New
function types.
* config/i386/i386-builtin.def (__builtin_ia32_knotqi,
__builtin_ia32_knotsi, __builtin_ia32_knotdi,
__builtin_ia32_korqi, __builtin_ia32_korsi, __builtin_ia32_kordi,
__builtin_ia32_kxnorqi, __builtin_ia32_kxnorsi,
__builtin_ia32_kxnordi, __builtin_ia32_kxorqi, __builtin_ia32_kxorsi,
__builtin_ia32_kxordi, __builtin_ia32_kandqi,
__builtin_ia32_kandsi, __builtin_ia32_kanddi, __builtin_ia32_kandnqi,
__builtin_ia32_kandnsi, __builtin_ia32_kandndi): New.
* config/i386/i386.c (ix86_expand_args_builtin): Handle new types.
2016-12-05 Segher Boessenkool <segher@kernel.crashing.org>
* combine.c: Revert r243162.
...@@ -40,6 +40,90 @@ typedef char __v64qi __attribute__ ((__vector_size__ (64))); ...@@ -40,6 +40,90 @@ typedef char __v64qi __attribute__ ((__vector_size__ (64)));
typedef unsigned long long __mmask64; typedef unsigned long long __mmask64;
/* AVX-512BW k-mask intrinsics operating on 32-bit and 64-bit masks.
   Each is a thin always-inline wrapper around the corresponding GCC
   builtin; the test suite checks that each expands to a single
   k-register instruction (KNOTD/KNOTQ, KORD/KORQ, ...).  */

/* KNOTD: bitwise NOT of 32-bit mask __A.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_knot_mask32 (__mmask32 __A)
{
return (__mmask32) __builtin_ia32_knotsi ((__mmask32) __A);
}
/* KNOTQ: bitwise NOT of 64-bit mask __A.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_knot_mask64 (__mmask64 __A)
{
return (__mmask64) __builtin_ia32_knotdi ((__mmask64) __A);
}
/* KORD: bitwise OR of 32-bit masks __A and __B.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kor_mask32 (__mmask32 __A, __mmask32 __B)
{
return (__mmask32) __builtin_ia32_korsi ((__mmask32) __A, (__mmask32) __B);
}
/* KORQ: bitwise OR of 64-bit masks __A and __B.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kor_mask64 (__mmask64 __A, __mmask64 __B)
{
return (__mmask64) __builtin_ia32_kordi ((__mmask64) __A, (__mmask64) __B);
}
/* KXNORD: bitwise XNOR of 32-bit masks __A and __B.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kxnor_mask32 (__mmask32 __A, __mmask32 __B)
{
return (__mmask32) __builtin_ia32_kxnorsi ((__mmask32) __A, (__mmask32) __B);
}
/* KXNORQ: bitwise XNOR of 64-bit masks __A and __B.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kxnor_mask64 (__mmask64 __A, __mmask64 __B)
{
return (__mmask64) __builtin_ia32_kxnordi ((__mmask64) __A, (__mmask64) __B);
}
/* KXORD: bitwise XOR of 32-bit masks __A and __B.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kxor_mask32 (__mmask32 __A, __mmask32 __B)
{
return (__mmask32) __builtin_ia32_kxorsi ((__mmask32) __A, (__mmask32) __B);
}
/* KXORQ: bitwise XOR of 64-bit masks __A and __B.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kxor_mask64 (__mmask64 __A, __mmask64 __B)
{
return (__mmask64) __builtin_ia32_kxordi ((__mmask64) __A, (__mmask64) __B);
}
/* KANDD: bitwise AND of 32-bit masks __A and __B.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kand_mask32 (__mmask32 __A, __mmask32 __B)
{
return (__mmask32) __builtin_ia32_kandsi ((__mmask32) __A, (__mmask32) __B);
}
/* KANDQ: bitwise AND of 64-bit masks __A and __B.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kand_mask64 (__mmask64 __A, __mmask64 __B)
{
return (__mmask64) __builtin_ia32_kanddi ((__mmask64) __A, (__mmask64) __B);
}
/* KANDND: bitwise AND of (NOT __A) with __B, 32-bit masks.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kandn_mask32 (__mmask32 __A, __mmask32 __B)
{
return (__mmask32) __builtin_ia32_kandnsi ((__mmask32) __A, (__mmask32) __B);
}
/* KANDNQ: bitwise AND of (NOT __A) with __B, 64-bit masks.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kandn_mask64 (__mmask64 __A, __mmask64 __B)
{
return (__mmask64) __builtin_ia32_kandndi ((__mmask64) __A, (__mmask64) __B);
}
extern __inline __m512i extern __inline __m512i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_mov_epi16 (__m512i __W, __mmask32 __U, __m512i __A) _mm512_mask_mov_epi16 (__m512i __W, __mmask32 __U, __m512i __A)
...@@ -114,6 +198,14 @@ _mm512_kunpackw (__mmask32 __A, __mmask32 __B) ...@@ -114,6 +198,14 @@ _mm512_kunpackw (__mmask32 __A, __mmask32 __B)
(__mmask32) __B); (__mmask32) __B);
} }
/* KUNPCKWD: interleave the 16-bit masks __A and __B into one 32-bit
   mask.  Same builtin as _mm512_kunpackw, exposed under the
   Intel-documented _kunpackw_mask32 name with __mmask16 operands.  */
extern __inline __mmask32
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kunpackw_mask32 (__mmask16 __A, __mmask16 __B)
{
return (__mmask32) __builtin_ia32_kunpcksi ((__mmask32) __A,
(__mmask32) __B);
}
extern __inline __mmask64 extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm512_kunpackd (__mmask64 __A, __mmask64 __B) _mm512_kunpackd (__mmask64 __A, __mmask64 __B)
...@@ -122,6 +214,14 @@ _mm512_kunpackd (__mmask64 __A, __mmask64 __B) ...@@ -122,6 +214,14 @@ _mm512_kunpackd (__mmask64 __A, __mmask64 __B)
(__mmask64) __B); (__mmask64) __B);
} }
/* KUNPCKDQ: interleave the 32-bit masks __A and __B into one 64-bit
   mask.  Same builtin as _mm512_kunpackd, exposed under the
   Intel-documented _kunpackd_mask64 name with __mmask32 operands.  */
extern __inline __mmask64
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kunpackd_mask64 (__mmask32 __A, __mmask32 __B)
{
return (__mmask64) __builtin_ia32_kunpckdi ((__mmask64) __A,
(__mmask64) __B);
}
extern __inline __m512i extern __inline __m512i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_loadu_epi8 (__m512i __W, __mmask64 __U, void const *__P) _mm512_mask_loadu_epi8 (__m512i __W, __mmask64 __U, void const *__P)
......
...@@ -34,6 +34,48 @@ ...@@ -34,6 +34,48 @@
#define __DISABLE_AVX512DQ__ #define __DISABLE_AVX512DQ__
#endif /* __AVX512DQ__ */ #endif /* __AVX512DQ__ */
/* AVX-512DQ k-mask intrinsics operating on 8-bit masks.  Each wraps
   the corresponding QImode GCC builtin (KNOTB, KORB, KXNORB, KXORB,
   KANDB, KANDNB).  */

/* KNOTB: bitwise NOT of 8-bit mask __A.  */
extern __inline __mmask8
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_knot_mask8 (__mmask8 __A)
{
return (__mmask8) __builtin_ia32_knotqi ((__mmask8) __A);
}
/* KORB: bitwise OR of 8-bit masks __A and __B.  */
extern __inline __mmask8
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kor_mask8 (__mmask8 __A, __mmask8 __B)
{
return (__mmask8) __builtin_ia32_korqi ((__mmask8) __A, (__mmask8) __B);
}
/* KXNORB: bitwise XNOR of 8-bit masks __A and __B.  */
extern __inline __mmask8
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kxnor_mask8 (__mmask8 __A, __mmask8 __B)
{
return (__mmask8) __builtin_ia32_kxnorqi ((__mmask8) __A, (__mmask8) __B);
}
/* KXORB: bitwise XOR of 8-bit masks __A and __B.  */
extern __inline __mmask8
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kxor_mask8 (__mmask8 __A, __mmask8 __B)
{
return (__mmask8) __builtin_ia32_kxorqi ((__mmask8) __A, (__mmask8) __B);
}
/* KANDB: bitwise AND of 8-bit masks __A and __B.  */
extern __inline __mmask8
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kand_mask8 (__mmask8 __A, __mmask8 __B)
{
return (__mmask8) __builtin_ia32_kandqi ((__mmask8) __A, (__mmask8) __B);
}
/* KANDNB: bitwise AND of (NOT __A) with __B, 8-bit masks.  */
extern __inline __mmask8
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kandn_mask8 (__mmask8 __A, __mmask8 __B)
{
return (__mmask8) __builtin_ia32_kandnqi ((__mmask8) __A, (__mmask8) __B);
}
extern __inline __m512d extern __inline __m512d
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm512_broadcast_f64x2 (__m128d __A) _mm512_broadcast_f64x2 (__m128d __A)
......
...@@ -9977,6 +9977,13 @@ _mm512_maskz_expandloadu_epi32 (__mmask16 __U, void const *__P) ...@@ -9977,6 +9977,13 @@ _mm512_maskz_expandloadu_epi32 (__mmask16 __U, void const *__P)
} }
/* Mask arithmetic operations */ /* Mask arithmetic operations */
/* 16-bit mask operations already exist as the AVX-512F _mm512_k*
   intrinsics below, so the Intel-documented _k*_mask16 names are
   provided as plain aliases rather than new builtins.  */
#define _kand_mask16 _mm512_kand
#define _kandn_mask16 _mm512_kandn
#define _knot_mask16 _mm512_knot
#define _kor_mask16 _mm512_kor
#define _kxnor_mask16 _mm512_kxnor
#define _kxor_mask16 _mm512_kxor
extern __inline __mmask16 extern __inline __mmask16
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm512_kand (__mmask16 __A, __mmask16 __B) _mm512_kand (__mmask16 __A, __mmask16 __B)
...@@ -9988,7 +9995,8 @@ extern __inline __mmask16 ...@@ -9988,7 +9995,8 @@ extern __inline __mmask16
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_mm512_kandn (__mmask16 __A, __mmask16 __B) _mm512_kandn (__mmask16 __A, __mmask16 __B)
{ {
return (__mmask16) __builtin_ia32_kandnhi ((__mmask16) __A, (__mmask16) __B); return (__mmask16) __builtin_ia32_kandnhi ((__mmask16) __A,
(__mmask16) __B);
} }
extern __inline __mmask16 extern __inline __mmask16
...@@ -10042,6 +10050,13 @@ _mm512_kunpackb (__mmask16 __A, __mmask16 __B) ...@@ -10042,6 +10050,13 @@ _mm512_kunpackb (__mmask16 __A, __mmask16 __B)
return (__mmask16) __builtin_ia32_kunpckhi ((__mmask16) __A, (__mmask16) __B); return (__mmask16) __builtin_ia32_kunpckhi ((__mmask16) __A, (__mmask16) __B);
} }
/* KUNPCKBW: interleave the 8-bit masks __A and __B into one 16-bit
   mask.  Same builtin as _mm512_kunpackb, exposed under the
   Intel-documented _kunpackb_mask16 name with __mmask8 operands.  */
extern __inline __mmask16
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
_kunpackb_mask16 (__mmask8 __A, __mmask8 __B)
{
return (__mmask16) __builtin_ia32_kunpckhi ((__mmask16) __A, (__mmask16) __B);
}
#ifdef __OPTIMIZE__ #ifdef __OPTIMIZE__
extern __inline __m512i extern __inline __m512i
__attribute__ ((__gnu_inline__, __always_inline__, __artificial__)) __attribute__ ((__gnu_inline__, __always_inline__, __artificial__))
......
...@@ -139,6 +139,12 @@ DEF_POINTER_TYPE (PLONGLONG, LONGLONG) ...@@ -139,6 +139,12 @@ DEF_POINTER_TYPE (PLONGLONG, LONGLONG)
DEF_POINTER_TYPE (PULONGLONG, ULONGLONG) DEF_POINTER_TYPE (PULONGLONG, ULONGLONG)
DEF_POINTER_TYPE (PUNSIGNED, UNSIGNED) DEF_POINTER_TYPE (PUNSIGNED, UNSIGNED)
# Pointer-to-mask and pointer-to-uchar types used by the k-mask
# builtin signatures added for the AVX-512 mask intrinsics.
DEF_POINTER_TYPE (PUQI, UQI)
DEF_POINTER_TYPE (PUHI, UHI)
DEF_POINTER_TYPE (PUSI, USI)
DEF_POINTER_TYPE (PUDI, UDI)
DEF_POINTER_TYPE (PUCHAR, UCHAR)
DEF_POINTER_TYPE (PV2SI, V2SI) DEF_POINTER_TYPE (PV2SI, V2SI)
DEF_POINTER_TYPE (PV2DF, V2DF) DEF_POINTER_TYPE (PV2DF, V2DF)
DEF_POINTER_TYPE (PV2DI, V2DI) DEF_POINTER_TYPE (PV2DI, V2DI)
...@@ -536,7 +542,28 @@ DEF_FUNCTION_TYPE (V16SI, V16SI, V16SI, V16SI, V16SI, V16SI, PCV4SI) ...@@ -536,7 +542,28 @@ DEF_FUNCTION_TYPE (V16SI, V16SI, V16SI, V16SI, V16SI, V16SI, PCV4SI)
# Instructions returning mask # Instructions returning mask
# Signatures for the k-mask builtins: mask widths of 8/16/32/64 bits
# (UQI/UHI/USI/UDI), with and without a PUCHAR out-parameter or an
# INT immediate operand.
DEF_FUNCTION_TYPE (UCHAR, UQI, UQI, PUCHAR)
DEF_FUNCTION_TYPE (UCHAR, UQI, UQI)
DEF_FUNCTION_TYPE (UCHAR, UHI, UHI, PUCHAR)
DEF_FUNCTION_TYPE (UCHAR, UHI, UHI)
DEF_FUNCTION_TYPE (UCHAR, USI, USI, PUCHAR)
DEF_FUNCTION_TYPE (UCHAR, USI, USI)
DEF_FUNCTION_TYPE (UCHAR, UDI, UDI, PUCHAR)
DEF_FUNCTION_TYPE (UCHAR, UDI, UDI)
DEF_FUNCTION_TYPE (USI, UQI)
DEF_FUNCTION_TYPE (USI, UHI)
DEF_FUNCTION_TYPE (UQI, USI)
DEF_FUNCTION_TYPE (UHI, USI)
DEF_FUNCTION_TYPE (UQI, UQI, INT)
DEF_FUNCTION_TYPE (UHI, UHI, INT)
DEF_FUNCTION_TYPE (USI, USI, INT)
DEF_FUNCTION_TYPE (UDI, UDI, INT)
DEF_FUNCTION_TYPE (UQI, UQI)
DEF_FUNCTION_TYPE (UHI, UHI) DEF_FUNCTION_TYPE (UHI, UHI)
DEF_FUNCTION_TYPE (USI, USI)
DEF_FUNCTION_TYPE (UDI, UDI)
DEF_FUNCTION_TYPE (UHI, V16QI) DEF_FUNCTION_TYPE (UHI, V16QI)
DEF_FUNCTION_TYPE (USI, V32QI) DEF_FUNCTION_TYPE (USI, V32QI)
DEF_FUNCTION_TYPE (UDI, V64QI) DEF_FUNCTION_TYPE (UDI, V64QI)
...@@ -549,6 +576,7 @@ DEF_FUNCTION_TYPE (UHI, V16SI) ...@@ -549,6 +576,7 @@ DEF_FUNCTION_TYPE (UHI, V16SI)
DEF_FUNCTION_TYPE (UQI, V2DI) DEF_FUNCTION_TYPE (UQI, V2DI)
DEF_FUNCTION_TYPE (UQI, V4DI) DEF_FUNCTION_TYPE (UQI, V4DI)
DEF_FUNCTION_TYPE (UQI, V8DI) DEF_FUNCTION_TYPE (UQI, V8DI)
DEF_FUNCTION_TYPE (UQI, UQI, UQI)
DEF_FUNCTION_TYPE (UHI, UHI, UHI) DEF_FUNCTION_TYPE (UHI, UHI, UHI)
DEF_FUNCTION_TYPE (USI, USI, USI) DEF_FUNCTION_TYPE (USI, USI, USI)
DEF_FUNCTION_TYPE (UDI, UDI, UDI) DEF_FUNCTION_TYPE (UDI, UDI, UDI)
......
...@@ -1436,15 +1436,33 @@ BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_avx512f_roundpd_vec_pack_sfix512, "__bu ...@@ -1436,15 +1436,33 @@ BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_avx512f_roundpd_vec_pack_sfix512, "__bu
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_avx512f_roundpd_vec_pack_sfix512, "__builtin_ia32_ceilpd_vec_pack_sfix512", IX86_BUILTIN_CEILPD_VEC_PACK_SFIX512, (enum rtx_code) ROUND_CEIL, (int) V16SI_FTYPE_V8DF_V8DF_ROUND) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_avx512f_roundpd_vec_pack_sfix512, "__builtin_ia32_ceilpd_vec_pack_sfix512", IX86_BUILTIN_CEILPD_VEC_PACK_SFIX512, (enum rtx_code) ROUND_CEIL, (int) V16SI_FTYPE_V8DF_V8DF_ROUND)
/* Mask arithmetic operations */ /* Mask arithmetic operations */
BDESC (OPTION_MASK_ISA_AVX512DQ, CODE_FOR_kandqi, "__builtin_ia32_kandqi", IX86_BUILTIN_KAND8, UNKNOWN, (int) UQI_FTYPE_UQI_UQI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kandhi, "__builtin_ia32_kandhi", IX86_BUILTIN_KAND16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kandhi, "__builtin_ia32_kandhi", IX86_BUILTIN_KAND16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kandsi, "__builtin_ia32_kandsi", IX86_BUILTIN_KAND32, UNKNOWN, (int) USI_FTYPE_USI_USI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kanddi, "__builtin_ia32_kanddi", IX86_BUILTIN_KAND64, UNKNOWN, (int) UDI_FTYPE_UDI_UDI)
BDESC (OPTION_MASK_ISA_AVX512DQ, CODE_FOR_kandnqi, "__builtin_ia32_kandnqi", IX86_BUILTIN_KANDN8, UNKNOWN, (int) UQI_FTYPE_UQI_UQI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kandnhi, "__builtin_ia32_kandnhi", IX86_BUILTIN_KANDN16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kandnhi, "__builtin_ia32_kandnhi", IX86_BUILTIN_KANDN16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kandnsi, "__builtin_ia32_kandnsi", IX86_BUILTIN_KANDN32, UNKNOWN, (int) USI_FTYPE_USI_USI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kandndi, "__builtin_ia32_kandndi", IX86_BUILTIN_KANDN64, UNKNOWN, (int) UDI_FTYPE_UDI_UDI)
BDESC (OPTION_MASK_ISA_AVX512DQ, CODE_FOR_knotqi, "__builtin_ia32_knotqi", IX86_BUILTIN_KNOT8, UNKNOWN, (int) UQI_FTYPE_UQI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_knothi, "__builtin_ia32_knothi", IX86_BUILTIN_KNOT16, UNKNOWN, (int) UHI_FTYPE_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_knothi, "__builtin_ia32_knothi", IX86_BUILTIN_KNOT16, UNKNOWN, (int) UHI_FTYPE_UHI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_knotsi, "__builtin_ia32_knotsi", IX86_BUILTIN_KNOT32, UNKNOWN, (int) USI_FTYPE_USI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_knotdi, "__builtin_ia32_knotdi", IX86_BUILTIN_KNOT64, UNKNOWN, (int) UDI_FTYPE_UDI)
BDESC (OPTION_MASK_ISA_AVX512DQ, CODE_FOR_kiorqi, "__builtin_ia32_korqi", IX86_BUILTIN_KOR8, UNKNOWN, (int) UQI_FTYPE_UQI_UQI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kiorhi, "__builtin_ia32_korhi", IX86_BUILTIN_KOR16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kiorhi, "__builtin_ia32_korhi", IX86_BUILTIN_KOR16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kiorsi, "__builtin_ia32_korsi", IX86_BUILTIN_KOR32, UNKNOWN, (int) USI_FTYPE_USI_USI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kiordi, "__builtin_ia32_kordi", IX86_BUILTIN_KOR64, UNKNOWN, (int) UDI_FTYPE_UDI_UDI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kortestchi, "__builtin_ia32_kortestchi", IX86_BUILTIN_KORTESTC16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kortestchi, "__builtin_ia32_kortestchi", IX86_BUILTIN_KORTESTC16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kortestzhi, "__builtin_ia32_kortestzhi", IX86_BUILTIN_KORTESTZ16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kortestzhi, "__builtin_ia32_kortestzhi", IX86_BUILTIN_KORTESTZ16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kunpckhi, "__builtin_ia32_kunpckhi", IX86_BUILTIN_KUNPCKBW, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kunpckhi, "__builtin_ia32_kunpckhi", IX86_BUILTIN_KUNPCKBW, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512DQ, CODE_FOR_kxnorqi, "__builtin_ia32_kxnorqi", IX86_BUILTIN_KXNOR8, UNKNOWN, (int) UQI_FTYPE_UQI_UQI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kxnorhi, "__builtin_ia32_kxnorhi", IX86_BUILTIN_KXNOR16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kxnorhi, "__builtin_ia32_kxnorhi", IX86_BUILTIN_KXNOR16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kxnorsi, "__builtin_ia32_kxnorsi", IX86_BUILTIN_KXNOR32, UNKNOWN, (int) USI_FTYPE_USI_USI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kxnordi, "__builtin_ia32_kxnordi", IX86_BUILTIN_KXNOR64, UNKNOWN, (int) UDI_FTYPE_UDI_UDI)
BDESC (OPTION_MASK_ISA_AVX512DQ, CODE_FOR_kxorqi, "__builtin_ia32_kxorqi", IX86_BUILTIN_KXOR8, UNKNOWN, (int) UQI_FTYPE_UQI_UQI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kxorhi, "__builtin_ia32_kxorhi", IX86_BUILTIN_KXOR16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kxorhi, "__builtin_ia32_kxorhi", IX86_BUILTIN_KXOR16, UNKNOWN, (int) UHI_FTYPE_UHI_UHI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kxorsi, "__builtin_ia32_kxorsi", IX86_BUILTIN_KXOR32, UNKNOWN, (int) USI_FTYPE_USI_USI)
BDESC (OPTION_MASK_ISA_AVX512BW, CODE_FOR_kxordi, "__builtin_ia32_kxordi", IX86_BUILTIN_KXOR64, UNKNOWN, (int) UDI_FTYPE_UDI_UDI)
BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kmovw, "__builtin_ia32_kmov16", IX86_BUILTIN_KMOV16, UNKNOWN, (int) UHI_FTYPE_UHI) BDESC (OPTION_MASK_ISA_AVX512F, CODE_FOR_kmovw, "__builtin_ia32_kmov16", IX86_BUILTIN_KMOV16, UNKNOWN, (int) UHI_FTYPE_UHI)
/* SHA */ /* SHA */
......
...@@ -34842,7 +34842,12 @@ ix86_expand_args_builtin (const struct builtin_description *d, ...@@ -34842,7 +34842,12 @@ ix86_expand_args_builtin (const struct builtin_description *d,
case V4DI_FTYPE_V8HI: case V4DI_FTYPE_V8HI:
case V4DI_FTYPE_V4SI: case V4DI_FTYPE_V4SI:
case V4DI_FTYPE_V2DI: case V4DI_FTYPE_V2DI:
case UQI_FTYPE_UQI:
case UHI_FTYPE_UHI: case UHI_FTYPE_UHI:
case USI_FTYPE_USI:
case USI_FTYPE_UQI:
case USI_FTYPE_UHI:
case UDI_FTYPE_UDI:
case UHI_FTYPE_V16QI: case UHI_FTYPE_V16QI:
case USI_FTYPE_V32QI: case USI_FTYPE_V32QI:
case UDI_FTYPE_V64QI: case UDI_FTYPE_V64QI:
...@@ -34976,6 +34981,7 @@ ix86_expand_args_builtin (const struct builtin_description *d, ...@@ -34976,6 +34981,7 @@ ix86_expand_args_builtin (const struct builtin_description *d,
case UINT_FTYPE_UINT_UCHAR: case UINT_FTYPE_UINT_UCHAR:
case UINT16_FTYPE_UINT16_INT: case UINT16_FTYPE_UINT16_INT:
case UINT8_FTYPE_UINT8_INT: case UINT8_FTYPE_UINT8_INT:
case UQI_FTYPE_UQI_UQI:
case UHI_FTYPE_UHI_UHI: case UHI_FTYPE_UHI_UHI:
case USI_FTYPE_USI_USI: case USI_FTYPE_USI_USI:
case UDI_FTYPE_UDI_UDI: case UDI_FTYPE_UDI_UDI:
...@@ -35023,6 +35029,10 @@ ix86_expand_args_builtin (const struct builtin_description *d, ...@@ -35023,6 +35029,10 @@ ix86_expand_args_builtin (const struct builtin_description *d,
case V4DI_FTYPE_V8DI_INT: case V4DI_FTYPE_V8DI_INT:
case QI_FTYPE_V4SF_INT: case QI_FTYPE_V4SF_INT:
case QI_FTYPE_V2DF_INT: case QI_FTYPE_V2DF_INT:
case UQI_FTYPE_UQI_INT:
case UHI_FTYPE_UHI_INT:
case USI_FTYPE_USI_INT:
case UDI_FTYPE_UDI_INT:
nargs = 2; nargs = 2;
nargs_constant = 1; nargs_constant = 1;
break; break;
2016-12-05 Andrew Senkevich <andrew.senkevich@intel.com>
* gcc.target/i386/avx512bw-kandd-1.c: New.
* gcc.target/i386/avx512bw-kandnd-1.c: Ditto.
* gcc.target/i386/avx512bw-kandnq-1.c: Ditto.
* gcc.target/i386/avx512bw-kandq-1.c: Ditto.
* gcc.target/i386/avx512bw-knotd-1.c: Ditto.
* gcc.target/i386/avx512bw-knotq-1.c: Ditto.
* gcc.target/i386/avx512bw-kord-1.c: Ditto.
* gcc.target/i386/avx512bw-korq-1.c: Ditto.
* gcc.target/i386/avx512bw-kunpckdq-3.c: Ditto.
* gcc.target/i386/avx512bw-kunpckwd-3.c: Ditto.
* gcc.target/i386/avx512bw-kxnord-1.c: Ditto.
* gcc.target/i386/avx512bw-kxnorq-1.c: Ditto.
* gcc.target/i386/avx512bw-kxord-1.c: Ditto.
* gcc.target/i386/avx512bw-kxorq-1.c: Ditto.
* gcc.target/i386/avx512dq-kandb-1.c: Ditto.
* gcc.target/i386/avx512dq-kandnb-1.c: Ditto.
* gcc.target/i386/avx512dq-knotb-1.c: Ditto.
* gcc.target/i386/avx512dq-korb-1.c: Ditto.
* gcc.target/i386/avx512dq-kxnorb-1.c: Ditto.
* gcc.target/i386/avx512dq-kxorb-1.c: Ditto.
* gcc.target/i386/avx512f-kunpckbw-3.c: Ditto.
* gcc.target/i386/avx512f-kandnw-1.c: Removed unneeded check.
2016-12-05 Paolo Bonzini <bonzini@gnu.org> 2016-12-05 Paolo Bonzini <bonzini@gnu.org>
* gcc.dg/fold-and-lshift.c, gcc.dg/fold-and-rshift-1.c, * gcc.dg/fold-and-lshift.c, gcc.dg/fold-and-rshift-1.c,
......
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kandd\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kand_mask32 compiles to exactly one KANDD
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask32 k1, k2, k3;
/* Use the generic zero intrinsic for consistency with the sibling
   k-mask tests (was _mm512_setzero_epi32).  */
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovd %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kand_mask32 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi16 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kandnd\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kandn_mask32 compiles to exactly one KANDND
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask32 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovd %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kandn_mask32 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi16 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kandnq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kandn_mask64 compiles to exactly one KANDNQ
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask64 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovq %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovq %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kandn_mask64 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi8 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kandq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kand_mask64 compiles to exactly one KANDQ
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask64 k1, k2, k3;
/* Use the generic zero intrinsic for consistency with the sibling
   k-mask tests (was _mm512_setzero_epi32).  */
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovq %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovq %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kand_mask64 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi8 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "knotd\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _knot_mask32 compiles to exactly one KNOTD
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask32 k1, k2;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load a test value into a mask register via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (45) );
k2 = _knot_mask32 (k1);
/* Consume both masks so source and result stay live.  */
x = _mm512_mask_add_epi16 (x, k1, x, x);
x = _mm512_mask_add_epi16 (x, k2, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "knotq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _knot_mask64 compiles to exactly one KNOTQ
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask64 k1, k2;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load a test value into a mask register via inline asm.  */
__asm__( "kmovq %1, %0" : "=k" (k1) : "r" (45) );
k2 = _knot_mask64 (k1);
/* Consume both masks so source and result stay live.  */
x = _mm512_mask_add_epi8 (x, k1, x, x);
x = _mm512_mask_add_epi8 (x, k2, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kord\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kor_mask32 compiles to exactly one KORD
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask32 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovd %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kor_mask32 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi16 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "korq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kor_mask64 compiles to exactly one KORQ
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask64 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovq %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovq %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kor_mask64 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi8 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kunpckdq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kunpackd_mask64 compiles to exactly one KUNPCKDQ
   k-register instruction; the volatile result keeps it live.  */
#include <immintrin.h>
void
avx512bw_test () {
volatile __mmask64 k3;
__mmask32 k1, k2;
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovd %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kunpackd_mask64 (k1, k2);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kunpckwd\[ \\t\]+\[^\{\n\]*%k\[1-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kunpackw_mask32 compiles to exactly one KUNPCKWD
   k-register instruction; the volatile result keeps it live.
   NOTE(review): the scan above only accepts %k1-%k7, unlike the
   sibling kunpckdq test which accepts %k0-%k7 -- confirm whether
   that restriction is intentional.  */
#include <immintrin.h>
void
avx512bw_test () {
volatile __mmask32 k3;
__mmask16 k1, k2;
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovw %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovw %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kunpackw_mask32 (k1, k2);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kxnord\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kxnor_mask32 compiles to exactly one KXNORD
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask32 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovd %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kxnor_mask32 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi16 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kxnorq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kxnor_mask64 compiles to exactly one KXNORQ
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask64 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovq %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovq %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kxnor_mask64 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi8 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kxord\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kxor_mask32 compiles to exactly one KXORD
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask32 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovd %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovd %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kxor_mask32 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi16 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512bw -O2" } */
/* { dg-final { scan-assembler-times "kxorq\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kxor_mask64 compiles to exactly one KXORQ
   k-register instruction.  */
#include <immintrin.h>
void
avx512bw_test ()
{
__mmask64 k1, k2, k3;
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovq %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovq %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kxor_mask64 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi8 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512dq -O2" } */
/* { dg-final { scan-assembler-times "kandb\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kand_mask8 compiles to exactly one KANDB
   k-register instruction.  */
#include <immintrin.h>
void
avx512dq_test ()
{
__mmask8 k1, k2, k3;
/* Use the generic zero intrinsic for consistency with the sibling
   k-mask tests (was _mm512_setzero_epi32).  */
volatile __m512i x = _mm512_setzero_si512 ();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovb %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kand_mask8 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_epi64 (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512dq -O2" } */
/* { dg-final { scan-assembler-times "kandnb\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kandn_mask8 compiles to exactly one KANDNB
   k-register instruction.  */
#include <immintrin.h>
void
avx512dq_test ()
{
__mmask8 k1, k2, k3;
volatile __m512d x = _mm512_setzero_pd();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovb %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kandn_mask8 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_pd (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512dq -O2" } */
/* { dg-final { scan-assembler-times "knotb\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _knot_mask8 compiles to exactly one KNOTB
   k-register instruction.  */
#include <immintrin.h>
void
avx512dq_test ()
{
__mmask8 k1, k2;
volatile __m512d x = _mm512_setzero_pd();
/* Load a test value into a mask register via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (45) );
k2 = _knot_mask8 (k1);
/* Consume both masks so source and result stay live.  */
x = _mm512_mask_add_pd (x, k1, x, x);
x = _mm512_mask_add_pd (x, k2, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512dq -O2" } */
/* { dg-final { scan-assembler-times "korb\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kor_mask8 compiles to exactly one KORB
   k-register instruction.  */
#include <immintrin.h>
void
avx512dq_test ()
{
__mmask8 k1, k2, k3;
volatile __m512d x = _mm512_setzero_pd();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovb %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kor_mask8 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_pd (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512dq -O2" } */
/* { dg-final { scan-assembler-times "kxnorb\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kxnor_mask8 compiles to exactly one KXNORB
   k-register instruction.  */
#include <immintrin.h>
void
avx512dq_test ()
{
__mmask8 k1, k2, k3;
volatile __m512d x = _mm512_setzero_pd();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovb %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kxnor_mask8 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_pd (x, k3, x, x);
}
/* { dg-do compile } */
/* { dg-options "-mavx512dq -O2" } */
/* { dg-final { scan-assembler-times "kxorb\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kxor_mask8 compiles to exactly one KXORB
   k-register instruction.  */
#include <immintrin.h>
void
avx512dq_test ()
{
__mmask8 k1, k2, k3;
volatile __m512d x = _mm512_setzero_pd();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovb %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kxor_mask8 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_pd (x, k3, x, x);
}
/* { dg-do compile } */ /* { dg-do compile } */
/* { dg-options "-mavx512f -O2" } */ /* { dg-options "-mavx512f -O2" } */
/* { dg-final { scan-assembler-times "kandnw\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */ /* { dg-final { scan-assembler-times "kandnw\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* { dg-final { scan-assembler-times "kmovw" 2 } } */
#include <immintrin.h> #include <immintrin.h>
......
/* { dg-do compile } */
/* { dg-options "-mavx512f -O2" } */
/* { dg-final { scan-assembler-times "kunpckbw\[ \\t\]+\[^\{\n\]*%k\[0-7\](?:\n|\[ \\t\]+#)" 1 } } */
/* Verify that _kunpackb_mask16 compiles to exactly one KUNPCKBW
   k-register instruction.  */
#include <immintrin.h>
void
avx512f_test () {
__mmask8 k1, k2;
__mmask16 k3;
volatile __m512 x = _mm512_setzero_ps();
/* Load test values into mask registers via inline asm.  */
__asm__( "kmovb %1, %0" : "=k" (k1) : "r" (1) );
__asm__( "kmovb %1, %0" : "=k" (k2) : "r" (2) );
k3 = _kunpackb_mask16 (k1, k2);
/* Consume k3 in a masked op so the result is used.  */
x = _mm512_mask_add_ps (x, k3, x, x);
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment