Commit f9355dee by Srinath Parvathaneni, committed by Kyrylo Tkachov

[ARM][GCC][5/2x]: MVE intrinsics with binary operands.

This patch supports the following MVE ACLE intrinsics with binary operands.

vqmovntq_u16, vqmovnbq_u16, vmulltq_poly_p8, vmullbq_poly_p8, vmovntq_u16, vmovnbq_u16, vmlaldavxq_u16, vmlaldavq_u16, vqmovuntq_s16, vqmovunbq_s16, vshlltq_n_u8, vshllbq_n_u8, vorrq_n_u16, vbicq_n_u16, vcmpneq_n_f16, vcmpneq_f16, vcmpltq_n_f16, vcmpltq_f16, vcmpleq_n_f16, vcmpleq_f16, vcmpgtq_n_f16, vcmpgtq_f16, vcmpgeq_n_f16, vcmpgeq_f16, vcmpeqq_n_f16, vcmpeqq_f16, vsubq_f16, vqmovntq_s16, vqmovnbq_s16, vqdmulltq_s16, vqdmulltq_n_s16, vqdmullbq_s16, vqdmullbq_n_s16, vorrq_f16, vornq_f16, vmulq_n_f16, vmulq_f16, vmovntq_s16, vmovnbq_s16, vmlsldavxq_s16, vmlsldavq_s16, vmlaldavxq_s16, vmlaldavq_s16, vminnmvq_f16, vminnmq_f16, vminnmavq_f16, vminnmaq_f16, vmaxnmvq_f16, vmaxnmq_f16, vmaxnmavq_f16, vmaxnmaq_f16, veorq_f16, vcmulq_rot90_f16, vcmulq_rot270_f16, vcmulq_rot180_f16, vcmulq_f16, vcaddq_rot90_f16, vcaddq_rot270_f16, vbicq_f16, vandq_f16, vaddq_n_f16, vabdq_f16, vshlltq_n_s8, vshllbq_n_s8, vorrq_n_s16, vbicq_n_s16, vqmovntq_u32, vqmovnbq_u32, vmulltq_poly_p16, vmullbq_poly_p16, vmovntq_u32, vmovnbq_u32, vmlaldavxq_u32, vmlaldavq_u32, vqmovuntq_s32, vqmovunbq_s32, vshlltq_n_u16, vshllbq_n_u16, vorrq_n_u32, vbicq_n_u32, vcmpneq_n_f32, vcmpneq_f32, vcmpltq_n_f32, vcmpltq_f32, vcmpleq_n_f32, vcmpleq_f32, vcmpgtq_n_f32, vcmpgtq_f32, vcmpgeq_n_f32, vcmpgeq_f32, vcmpeqq_n_f32, vcmpeqq_f32, vsubq_f32, vqmovntq_s32, vqmovnbq_s32, vqdmulltq_s32, vqdmulltq_n_s32, vqdmullbq_s32, vqdmullbq_n_s32, vorrq_f32, vornq_f32, vmulq_n_f32, vmulq_f32, vmovntq_s32, vmovnbq_s32, vmlsldavxq_s32, vmlsldavq_s32, vmlaldavxq_s32, vmlaldavq_s32, vminnmvq_f32, vminnmq_f32, vminnmavq_f32, vminnmaq_f32, vmaxnmvq_f32, vmaxnmq_f32, vmaxnmavq_f32, vmaxnmaq_f32, veorq_f32, vcmulq_rot90_f32, vcmulq_rot270_f32, vcmulq_rot180_f32, vcmulq_f32, vcaddq_rot90_f32, vcaddq_rot270_f32, vbicq_f32, vandq_f32, vaddq_n_f32, vabdq_f32, vshlltq_n_s16, vshllbq_n_s16, vorrq_n_s32, vbicq_n_s32, vrmlaldavhq_u32, vctp8q_m, vctp64q_m, vctp32q_m, vctp16q_m, vaddlvaq_u32, vrmlsldavhxq_s32, vrmlsldavhq_s32, vrmlaldavhxq_s32, vrmlaldavhq_s32, vcvttq_f16_f32, vcvtbq_f16_f32, vaddlvaq_s32.

Please refer to the M-profile Vector Extension (MVE) intrinsics [1] for more details.
[1] https://developer.arm.com/architectures/instruction-sets/simd-isas/helium/mve-intrinsics
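
As a quick illustration of the user-facing API, the following sketch calls a few of
the intrinsics listed above from C.  It is a hypothetical example (function and
variable names are invented), assuming a compiler configured for an MVE target such
as -march=armv8.1-m.main+mve.fp -mfloat-abi=hard:

  /* Hypothetical usage sketch; assumes an MVE-enabled target, e.g.
     -march=armv8.1-m.main+mve.fp -mfloat-abi=hard.  */
  #include <arm_mve.h>

  int64_t
  dot_s16 (int16x8_t a, int16x8_t b)
  {
    /* Widening multiply-accumulate across lanes.  */
    return vmlaldavq_s16 (a, b);
  }

  float32x4_t
  add_bias_f32 (float32x4_t v, float32_t bias)
  {
    /* Vector plus scalar.  */
    return vaddq_n_f32 (v, bias);
  }

  mve_pred16_t
  eq_f16 (float16x8_t a, float16x8_t b)
  {
    /* Lane-wise floating-point compare producing a predicate.  */
    return vcmpeqq_f16 (a, b);
  }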

The above intrinsics are defined using the already-defined builtin qualifiers BINOP_NONE_NONE_IMM, BINOP_NONE_NONE_NONE, BINOP_UNONE_NONE_NONE, BINOP_UNONE_UNONE_IMM, BINOP_UNONE_UNONE_NONE and BINOP_UNONE_UNONE_UNONE.
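
For context, the shape such a definition takes in arm_mve.h is roughly as below.
This is an illustrative sketch rather than the verbatim header text: each "_<type>"
intrinsic is a macro over an always-inline "__arm_" function wrapping the builtin
generated from the corresponding qualifier/mode entry in arm_mve_builtins.def (here
assumed to be VAR2 (BINOP_NONE_NONE_NONE, vabdq_f, v8hf, v4sf)):

  /* Sketch only; not a verbatim copy of arm_mve.h.  The builtin name is
     derived from the arm_mve_builtins.def entry, giving
     __builtin_mve_vabdq_fv8hf for the v8hf mode.  */
  __extension__ extern __inline float16x8_t
  __attribute__ ((__always_inline__, __gnu_inline__, __artificial__))
  __arm_vabdq_f16 (float16x8_t __a, float16x8_t __b)
  {
    return __builtin_mve_vabdq_fv8hf (__a, __b);
  }

  /* User-visible name, as per the "Define macro" entries below.  */
  #define vabdq_f16(__a, __b) __arm_vabdq_f16 (__a, __b)

The polymorphic variants (e.g. vabdq) then dispatch on the argument types to the
matching "_<type>" macro.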

gcc/ChangeLog:

2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
            Mihail Ionescu  <mihail.ionescu@arm.com>
            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>

	* config/arm/arm_mve.h (vqmovntq_u16): Define macro.
	(vqmovnbq_u16): Likewise.
	(vmulltq_poly_p8): Likewise.
	(vmullbq_poly_p8): Likewise.
	(vmovntq_u16): Likewise.
	(vmovnbq_u16): Likewise.
	(vmlaldavxq_u16): Likewise.
	(vmlaldavq_u16): Likewise.
	(vqmovuntq_s16): Likewise.
	(vqmovunbq_s16): Likewise.
	(vshlltq_n_u8): Likewise.
	(vshllbq_n_u8): Likewise.
	(vorrq_n_u16): Likewise.
	(vbicq_n_u16): Likewise.
	(vcmpneq_n_f16): Likewise.
	(vcmpneq_f16): Likewise.
	(vcmpltq_n_f16): Likewise.
	(vcmpltq_f16): Likewise.
	(vcmpleq_n_f16): Likewise.
	(vcmpleq_f16): Likewise.
	(vcmpgtq_n_f16): Likewise.
	(vcmpgtq_f16): Likewise.
	(vcmpgeq_n_f16): Likewise.
	(vcmpgeq_f16): Likewise.
	(vcmpeqq_n_f16): Likewise.
	(vcmpeqq_f16): Likewise.
	(vsubq_f16): Likewise.
	(vqmovntq_s16): Likewise.
	(vqmovnbq_s16): Likewise.
	(vqdmulltq_s16): Likewise.
	(vqdmulltq_n_s16): Likewise.
	(vqdmullbq_s16): Likewise.
	(vqdmullbq_n_s16): Likewise.
	(vorrq_f16): Likewise.
	(vornq_f16): Likewise.
	(vmulq_n_f16): Likewise.
	(vmulq_f16): Likewise.
	(vmovntq_s16): Likewise.
	(vmovnbq_s16): Likewise.
	(vmlsldavxq_s16): Likewise.
	(vmlsldavq_s16): Likewise.
	(vmlaldavxq_s16): Likewise.
	(vmlaldavq_s16): Likewise.
	(vminnmvq_f16): Likewise.
	(vminnmq_f16): Likewise.
	(vminnmavq_f16): Likewise.
	(vminnmaq_f16): Likewise.
	(vmaxnmvq_f16): Likewise.
	(vmaxnmq_f16): Likewise.
	(vmaxnmavq_f16): Likewise.
	(vmaxnmaq_f16): Likewise.
	(veorq_f16): Likewise.
	(vcmulq_rot90_f16): Likewise.
	(vcmulq_rot270_f16): Likewise.
	(vcmulq_rot180_f16): Likewise.
	(vcmulq_f16): Likewise.
	(vcaddq_rot90_f16): Likewise.
	(vcaddq_rot270_f16): Likewise.
	(vbicq_f16): Likewise.
	(vandq_f16): Likewise.
	(vaddq_n_f16): Likewise.
	(vabdq_f16): Likewise.
	(vshlltq_n_s8): Likewise.
	(vshllbq_n_s8): Likewise.
	(vorrq_n_s16): Likewise.
	(vbicq_n_s16): Likewise.
	(vqmovntq_u32): Likewise.
	(vqmovnbq_u32): Likewise.
	(vmulltq_poly_p16): Likewise.
	(vmullbq_poly_p16): Likewise.
	(vmovntq_u32): Likewise.
	(vmovnbq_u32): Likewise.
	(vmlaldavxq_u32): Likewise.
	(vmlaldavq_u32): Likewise.
	(vqmovuntq_s32): Likewise.
	(vqmovunbq_s32): Likewise.
	(vshlltq_n_u16): Likewise.
	(vshllbq_n_u16): Likewise.
	(vorrq_n_u32): Likewise.
	(vbicq_n_u32): Likewise.
	(vcmpneq_n_f32): Likewise.
	(vcmpneq_f32): Likewise.
	(vcmpltq_n_f32): Likewise.
	(vcmpltq_f32): Likewise.
	(vcmpleq_n_f32): Likewise.
	(vcmpleq_f32): Likewise.
	(vcmpgtq_n_f32): Likewise.
	(vcmpgtq_f32): Likewise.
	(vcmpgeq_n_f32): Likewise.
	(vcmpgeq_f32): Likewise.
	(vcmpeqq_n_f32): Likewise.
	(vcmpeqq_f32): Likewise.
	(vsubq_f32): Likewise.
	(vqmovntq_s32): Likewise.
	(vqmovnbq_s32): Likewise.
	(vqdmulltq_s32): Likewise.
	(vqdmulltq_n_s32): Likewise.
	(vqdmullbq_s32): Likewise.
	(vqdmullbq_n_s32): Likewise.
	(vorrq_f32): Likewise.
	(vornq_f32): Likewise.
	(vmulq_n_f32): Likewise.
	(vmulq_f32): Likewise.
	(vmovntq_s32): Likewise.
	(vmovnbq_s32): Likewise.
	(vmlsldavxq_s32): Likewise.
	(vmlsldavq_s32): Likewise.
	(vmlaldavxq_s32): Likewise.
	(vmlaldavq_s32): Likewise.
	(vminnmvq_f32): Likewise.
	(vminnmq_f32): Likewise.
	(vminnmavq_f32): Likewise.
	(vminnmaq_f32): Likewise.
	(vmaxnmvq_f32): Likewise.
	(vmaxnmq_f32): Likewise.
	(vmaxnmavq_f32): Likewise.
	(vmaxnmaq_f32): Likewise.
	(veorq_f32): Likewise.
	(vcmulq_rot90_f32): Likewise.
	(vcmulq_rot270_f32): Likewise.
	(vcmulq_rot180_f32): Likewise.
	(vcmulq_f32): Likewise.
	(vcaddq_rot90_f32): Likewise.
	(vcaddq_rot270_f32): Likewise.
	(vbicq_f32): Likewise.
	(vandq_f32): Likewise.
	(vaddq_n_f32): Likewise.
	(vabdq_f32): Likewise.
	(vshlltq_n_s16): Likewise.
	(vshllbq_n_s16): Likewise.
	(vorrq_n_s32): Likewise.
	(vbicq_n_s32): Likewise.
	(vrmlaldavhq_u32): Likewise.
	(vctp8q_m): Likewise.
	(vctp64q_m): Likewise.
	(vctp32q_m): Likewise.
	(vctp16q_m): Likewise.
	(vaddlvaq_u32): Likewise.
	(vrmlsldavhxq_s32): Likewise.
	(vrmlsldavhq_s32): Likewise.
	(vrmlaldavhxq_s32): Likewise.
	(vrmlaldavhq_s32): Likewise.
	(vcvttq_f16_f32): Likewise.
	(vcvtbq_f16_f32): Likewise.
	(vaddlvaq_s32): Likewise.
	(__arm_vqmovntq_u16): Define intrinsic.
	(__arm_vqmovnbq_u16): Likewise.
	(__arm_vmulltq_poly_p8): Likewise.
	(__arm_vmullbq_poly_p8): Likewise.
	(__arm_vmovntq_u16): Likewise.
	(__arm_vmovnbq_u16): Likewise.
	(__arm_vmlaldavxq_u16): Likewise.
	(__arm_vmlaldavq_u16): Likewise.
	(__arm_vqmovuntq_s16): Likewise.
	(__arm_vqmovunbq_s16): Likewise.
	(__arm_vshlltq_n_u8): Likewise.
	(__arm_vshllbq_n_u8): Likewise.
	(__arm_vorrq_n_u16): Likewise.
	(__arm_vbicq_n_u16): Likewise.
	(__arm_vcmpneq_n_f16): Likewise.
	(__arm_vcmpneq_f16): Likewise.
	(__arm_vcmpltq_n_f16): Likewise.
	(__arm_vcmpltq_f16): Likewise.
	(__arm_vcmpleq_n_f16): Likewise.
	(__arm_vcmpleq_f16): Likewise.
	(__arm_vcmpgtq_n_f16): Likewise.
	(__arm_vcmpgtq_f16): Likewise.
	(__arm_vcmpgeq_n_f16): Likewise.
	(__arm_vcmpgeq_f16): Likewise.
	(__arm_vcmpeqq_n_f16): Likewise.
	(__arm_vcmpeqq_f16): Likewise.
	(__arm_vsubq_f16): Likewise.
	(__arm_vqmovntq_s16): Likewise.
	(__arm_vqmovnbq_s16): Likewise.
	(__arm_vqdmulltq_s16): Likewise.
	(__arm_vqdmulltq_n_s16): Likewise.
	(__arm_vqdmullbq_s16): Likewise.
	(__arm_vqdmullbq_n_s16): Likewise.
	(__arm_vorrq_f16): Likewise.
	(__arm_vornq_f16): Likewise.
	(__arm_vmulq_n_f16): Likewise.
	(__arm_vmulq_f16): Likewise.
	(__arm_vmovntq_s16): Likewise.
	(__arm_vmovnbq_s16): Likewise.
	(__arm_vmlsldavxq_s16): Likewise.
	(__arm_vmlsldavq_s16): Likewise.
	(__arm_vmlaldavxq_s16): Likewise.
	(__arm_vmlaldavq_s16): Likewise.
	(__arm_vminnmvq_f16): Likewise.
	(__arm_vminnmq_f16): Likewise.
	(__arm_vminnmavq_f16): Likewise.
	(__arm_vminnmaq_f16): Likewise.
	(__arm_vmaxnmvq_f16): Likewise.
	(__arm_vmaxnmq_f16): Likewise.
	(__arm_vmaxnmavq_f16): Likewise.
	(__arm_vmaxnmaq_f16): Likewise.
	(__arm_veorq_f16): Likewise.
	(__arm_vcmulq_rot90_f16): Likewise.
	(__arm_vcmulq_rot270_f16): Likewise.
	(__arm_vcmulq_rot180_f16): Likewise.
	(__arm_vcmulq_f16): Likewise.
	(__arm_vcaddq_rot90_f16): Likewise.
	(__arm_vcaddq_rot270_f16): Likewise.
	(__arm_vbicq_f16): Likewise.
	(__arm_vandq_f16): Likewise.
	(__arm_vaddq_n_f16): Likewise.
	(__arm_vabdq_f16): Likewise.
	(__arm_vshlltq_n_s8): Likewise.
	(__arm_vshllbq_n_s8): Likewise.
	(__arm_vorrq_n_s16): Likewise.
	(__arm_vbicq_n_s16): Likewise.
	(__arm_vqmovntq_u32): Likewise.
	(__arm_vqmovnbq_u32): Likewise.
	(__arm_vmulltq_poly_p16): Likewise.
	(__arm_vmullbq_poly_p16): Likewise.
	(__arm_vmovntq_u32): Likewise.
	(__arm_vmovnbq_u32): Likewise.
	(__arm_vmlaldavxq_u32): Likewise.
	(__arm_vmlaldavq_u32): Likewise.
	(__arm_vqmovuntq_s32): Likewise.
	(__arm_vqmovunbq_s32): Likewise.
	(__arm_vshlltq_n_u16): Likewise.
	(__arm_vshllbq_n_u16): Likewise.
	(__arm_vorrq_n_u32): Likewise.
	(__arm_vbicq_n_u32): Likewise.
	(__arm_vcmpneq_n_f32): Likewise.
	(__arm_vcmpneq_f32): Likewise.
	(__arm_vcmpltq_n_f32): Likewise.
	(__arm_vcmpltq_f32): Likewise.
	(__arm_vcmpleq_n_f32): Likewise.
	(__arm_vcmpleq_f32): Likewise.
	(__arm_vcmpgtq_n_f32): Likewise.
	(__arm_vcmpgtq_f32): Likewise.
	(__arm_vcmpgeq_n_f32): Likewise.
	(__arm_vcmpgeq_f32): Likewise.
	(__arm_vcmpeqq_n_f32): Likewise.
	(__arm_vcmpeqq_f32): Likewise.
	(__arm_vsubq_f32): Likewise.
	(__arm_vqmovntq_s32): Likewise.
	(__arm_vqmovnbq_s32): Likewise.
	(__arm_vqdmulltq_s32): Likewise.
	(__arm_vqdmulltq_n_s32): Likewise.
	(__arm_vqdmullbq_s32): Likewise.
	(__arm_vqdmullbq_n_s32): Likewise.
	(__arm_vorrq_f32): Likewise.
	(__arm_vornq_f32): Likewise.
	(__arm_vmulq_n_f32): Likewise.
	(__arm_vmulq_f32): Likewise.
	(__arm_vmovntq_s32): Likewise.
	(__arm_vmovnbq_s32): Likewise.
	(__arm_vmlsldavxq_s32): Likewise.
	(__arm_vmlsldavq_s32): Likewise.
	(__arm_vmlaldavxq_s32): Likewise.
	(__arm_vmlaldavq_s32): Likewise.
	(__arm_vminnmvq_f32): Likewise.
	(__arm_vminnmq_f32): Likewise.
	(__arm_vminnmavq_f32): Likewise.
	(__arm_vminnmaq_f32): Likewise.
	(__arm_vmaxnmvq_f32): Likewise.
	(__arm_vmaxnmq_f32): Likewise.
	(__arm_vmaxnmavq_f32): Likewise.
	(__arm_vmaxnmaq_f32): Likewise.
	(__arm_veorq_f32): Likewise.
	(__arm_vcmulq_rot90_f32): Likewise.
	(__arm_vcmulq_rot270_f32): Likewise.
	(__arm_vcmulq_rot180_f32): Likewise.
	(__arm_vcmulq_f32): Likewise.
	(__arm_vcaddq_rot90_f32): Likewise.
	(__arm_vcaddq_rot270_f32): Likewise.
	(__arm_vbicq_f32): Likewise.
	(__arm_vandq_f32): Likewise.
	(__arm_vaddq_n_f32): Likewise.
	(__arm_vabdq_f32): Likewise.
	(__arm_vshlltq_n_s16): Likewise.
	(__arm_vshllbq_n_s16): Likewise.
	(__arm_vorrq_n_s32): Likewise.
	(__arm_vbicq_n_s32): Likewise.
	(__arm_vrmlaldavhq_u32): Likewise.
	(__arm_vctp8q_m): Likewise.
	(__arm_vctp64q_m): Likewise.
	(__arm_vctp32q_m): Likewise.
	(__arm_vctp16q_m): Likewise.
	(__arm_vaddlvaq_u32): Likewise.
	(__arm_vrmlsldavhxq_s32): Likewise.
	(__arm_vrmlsldavhq_s32): Likewise.
	(__arm_vrmlaldavhxq_s32): Likewise.
	(__arm_vrmlaldavhq_s32): Likewise.
	(__arm_vcvttq_f16_f32): Likewise.
	(__arm_vcvtbq_f16_f32): Likewise.
	(__arm_vaddlvaq_s32): Likewise.
	(vst4q): Define polymorphic variant.
	(vrndxq): Likewise.
	(vrndq): Likewise.
	(vrndpq): Likewise.
	(vrndnq): Likewise.
	(vrndmq): Likewise.
	(vrndaq): Likewise.
	(vrev64q): Likewise.
	(vnegq): Likewise.
	(vdupq_n): Likewise.
	(vabsq): Likewise.
	(vrev32q): Likewise.
	(vcvtbq_f32): Likewise.
	(vcvttq_f32): Likewise.
	(vcvtq): Likewise.
	(vsubq_n): Likewise.
	(vbrsrq_n): Likewise.
	(vcvtq_n): Likewise.
	(vsubq): Likewise.
	(vorrq): Likewise.
	(vabdq): Likewise.
	(vaddq_n): Likewise.
	(vandq): Likewise.
	(vbicq): Likewise.
	(vornq): Likewise.
	(vmulq_n): Likewise.
	(vmulq): Likewise.
	(vcaddq_rot270): Likewise.
	(vcmpeqq_n): Likewise.
	(vcmpeqq): Likewise.
	(vcaddq_rot90): Likewise.
	(vcmpgeq_n): Likewise.
	(vcmpgeq): Likewise.
	(vcmpgtq_n): Likewise.
	(vcmpgtq): Likewise.
	(vcmpgtq): Likewise.
	(vcmpleq_n): Likewise.
	(vcmpleq_n): Likewise.
	(vcmpleq): Likewise.
	(vcmpleq): Likewise.
	(vcmpltq_n): Likewise.
	(vcmpltq_n): Likewise.
	(vcmpltq): Likewise.
	(vcmpltq): Likewise.
	(vcmpneq_n): Likewise.
	(vcmpneq_n): Likewise.
	(vcmpneq): Likewise.
	(vcmpneq): Likewise.
	(vcmulq): Likewise.
	(vcmulq): Likewise.
	(vcmulq_rot180): Likewise.
	(vcmulq_rot180): Likewise.
	(vcmulq_rot270): Likewise.
	(vcmulq_rot270): Likewise.
	(vcmulq_rot90): Likewise.
	(vcmulq_rot90): Likewise.
	(veorq): Likewise.
	(veorq): Likewise.
	(vmaxnmaq): Likewise.
	(vmaxnmaq): Likewise.
	(vmaxnmavq): Likewise.
	(vmaxnmavq): Likewise.
	(vmaxnmq): Likewise.
	(vmaxnmq): Likewise.
	(vmaxnmvq): Likewise.
	(vmaxnmvq): Likewise.
	(vminnmaq): Likewise.
	(vminnmaq): Likewise.
	(vminnmavq): Likewise.
	(vminnmavq): Likewise.
	(vminnmq): Likewise.
	(vminnmq): Likewise.
	(vminnmvq): Likewise.
	(vminnmvq): Likewise.
	(vbicq_n): Likewise.
	(vqmovntq): Likewise.
	(vqmovntq): Likewise.
	(vqmovnbq): Likewise.
	(vqmovnbq): Likewise.
	(vmulltq_poly): Likewise.
	(vmulltq_poly): Likewise.
	(vmullbq_poly): Likewise.
	(vmullbq_poly): Likewise.
	(vmovntq): Likewise.
	(vmovntq): Likewise.
	(vmovnbq): Likewise.
	(vmovnbq): Likewise.
	(vmlaldavxq): Likewise.
	(vmlaldavxq): Likewise.
	(vqmovuntq): Likewise.
	(vqmovuntq): Likewise.
	(vshlltq_n): Likewise.
	(vshlltq_n): Likewise.
	(vshllbq_n): Likewise.
	(vshllbq_n): Likewise.
	(vorrq_n): Likewise.
	(vorrq_n): Likewise.
	(vmlaldavq): Likewise.
	(vmlaldavq): Likewise.
	(vqmovunbq): Likewise.
	(vqmovunbq): Likewise.
	(vqdmulltq_n): Likewise.
	(vqdmulltq_n): Likewise.
	(vqdmulltq): Likewise.
	(vqdmulltq): Likewise.
	(vqdmullbq_n): Likewise.
	(vqdmullbq_n): Likewise.
	(vqdmullbq): Likewise.
	(vqdmullbq): Likewise.
	(vaddlvaq): Likewise.
	(vaddlvaq): Likewise.
	(vrmlaldavhq): Likewise.
	(vrmlaldavhq): Likewise.
	(vrmlaldavhxq): Likewise.
	(vrmlaldavhxq): Likewise.
	(vrmlsldavhq): Likewise.
	(vrmlsldavhq): Likewise.
	(vrmlsldavhxq): Likewise.
	(vrmlsldavhxq): Likewise.
	(vmlsldavxq): Likewise.
	(vmlsldavxq): Likewise.
	(vmlsldavq): Likewise.
	(vmlsldavq): Likewise.
	* config/arm/arm_mve_builtins.def (BINOP_NONE_NONE_IMM): Use it.
	(BINOP_NONE_NONE_NONE): Likewise.
	(BINOP_UNONE_NONE_NONE): Likewise.
	(BINOP_UNONE_UNONE_IMM): Likewise.
	(BINOP_UNONE_UNONE_NONE): Likewise.
	(BINOP_UNONE_UNONE_UNONE): Likewise.
	* config/arm/mve.md (mve_vabdq_f<mode>): Define RTL pattern.
	(mve_vaddlvaq_<supf>v4si): Likewise.
	(mve_vaddq_n_f<mode>): Likewise.
	(mve_vandq_f<mode>): Likewise.
	(mve_vbicq_f<mode>): Likewise.
	(mve_vbicq_n_<supf><mode>): Likewise.
	(mve_vcaddq_rot270_f<mode>): Likewise.
	(mve_vcaddq_rot90_f<mode>): Likewise.
	(mve_vcmpeqq_f<mode>): Likewise.
	(mve_vcmpeqq_n_f<mode>): Likewise.
	(mve_vcmpgeq_f<mode>): Likewise.
	(mve_vcmpgeq_n_f<mode>): Likewise.
	(mve_vcmpgtq_f<mode>): Likewise.
	(mve_vcmpgtq_n_f<mode>): Likewise.
	(mve_vcmpleq_f<mode>): Likewise.
	(mve_vcmpleq_n_f<mode>): Likewise.
	(mve_vcmpltq_f<mode>): Likewise.
	(mve_vcmpltq_n_f<mode>): Likewise.
	(mve_vcmpneq_f<mode>): Likewise.
	(mve_vcmpneq_n_f<mode>): Likewise.
	(mve_vcmulq_f<mode>): Likewise.
	(mve_vcmulq_rot180_f<mode>): Likewise.
	(mve_vcmulq_rot270_f<mode>): Likewise.
	(mve_vcmulq_rot90_f<mode>): Likewise.
	(mve_vctp<mode1>q_mhi): Likewise.
	(mve_vcvtbq_f16_f32v8hf): Likewise.
	(mve_vcvttq_f16_f32v8hf): Likewise.
	(mve_veorq_f<mode>): Likewise.
	(mve_vmaxnmaq_f<mode>): Likewise.
	(mve_vmaxnmavq_f<mode>): Likewise.
	(mve_vmaxnmq_f<mode>): Likewise.
	(mve_vmaxnmvq_f<mode>): Likewise.
	(mve_vminnmaq_f<mode>): Likewise.
	(mve_vminnmavq_f<mode>): Likewise.
	(mve_vminnmq_f<mode>): Likewise.
	(mve_vminnmvq_f<mode>): Likewise.
	(mve_vmlaldavq_<supf><mode>): Likewise.
	(mve_vmlaldavxq_<supf><mode>): Likewise.
	(mve_vmlsldavq_s<mode>): Likewise.
	(mve_vmlsldavxq_s<mode>): Likewise.
	(mve_vmovnbq_<supf><mode>): Likewise.
	(mve_vmovntq_<supf><mode>): Likewise.
	(mve_vmulq_f<mode>): Likewise.
	(mve_vmulq_n_f<mode>): Likewise.
	(mve_vornq_f<mode>): Likewise.
	(mve_vorrq_f<mode>): Likewise.
	(mve_vorrq_n_<supf><mode>): Likewise.
	(mve_vqdmullbq_n_s<mode>): Likewise.
	(mve_vqdmullbq_s<mode>): Likewise.
	(mve_vqdmulltq_n_s<mode>): Likewise.
	(mve_vqdmulltq_s<mode>): Likewise.
	(mve_vqmovnbq_<supf><mode>): Likewise.
	(mve_vqmovntq_<supf><mode>): Likewise.
	(mve_vqmovunbq_s<mode>): Likewise.
	(mve_vqmovuntq_s<mode>): Likewise.
	(mve_vrmlaldavhxq_sv4si): Likewise.
	(mve_vrmlsldavhq_sv4si): Likewise.
	(mve_vrmlsldavhxq_sv4si): Likewise.
	(mve_vshllbq_n_<supf><mode>): Likewise.
	(mve_vshlltq_n_<supf><mode>): Likewise.
	(mve_vsubq_f<mode>): Likewise.
	(mve_vmulltq_poly_p<mode>): Likewise.
	(mve_vmullbq_poly_p<mode>): Likewise.
	(mve_vrmlaldavhq_<supf>v4si): Likewise.

gcc/testsuite/ChangeLog:

2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
            Mihail Ionescu  <mihail.ionescu@arm.com>
            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>

	* gcc.target/arm/mve/intrinsics/vabdq_f16.c: New test.
	* gcc.target/arm/mve/intrinsics/vabdq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vaddlvaq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vaddlvaq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vaddq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vaddq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vandq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vandq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vbicq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vbicq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vbicq_n_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vbicq_n_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vbicq_n_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vbicq_n_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcaddq_rot270_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcaddq_rot270_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcaddq_rot90_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcaddq_rot90_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpeqq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpeqq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpeqq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpeqq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgeq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgeq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgeq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgeq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgtq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgtq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgtq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpgtq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpleq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpleq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpleq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpleq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpltq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpltq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpltq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpltq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpneq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpneq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpneq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmpneq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_rot180_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_rot180_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_rot270_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_rot270_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_rot90_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcmulq_rot90_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vctp16q_m.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vctp32q_m.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vctp64q_m.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vctp8q_m.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcvtbq_f16_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vcvttq_f16_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/veorq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/veorq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmaq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmaq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmavq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmavq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmvq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmaxnmvq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmaq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmaq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmavq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmavq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmvq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vminnmvq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavq_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavxq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavxq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavxq_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlaldavxq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlsldavq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlsldavq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlsldavxq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmlsldavxq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovnbq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovnbq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovnbq_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovnbq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovntq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovntq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovntq_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmovntq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmullbq_poly_p16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmullbq_poly_p8.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmulltq_poly_p16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmulltq_poly_p8.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmulq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmulq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmulq_n_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vmulq_n_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vornq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vornq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vorrq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vorrq_f32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vorrq_n_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vorrq_n_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vorrq_n_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vorrq_n_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmullbq_n_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmullbq_n_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmullbq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmullbq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmulltq_n_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmulltq_n_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmulltq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqdmulltq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovnbq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovnbq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovnbq_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovnbq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovntq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovntq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovntq_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovntq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovunbq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovunbq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovuntq_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vqmovuntq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vrmlaldavhq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vrmlaldavhq_u32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vrmlaldavhxq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vrmlsldavhq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vrmlsldavhxq_s32.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshllbq_n_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshllbq_n_s8.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshllbq_n_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshllbq_n_u8.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshlltq_n_s16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshlltq_n_s8.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshlltq_n_u16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vshlltq_n_u8.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vsubq_f16.c: Likewise.
	* gcc.target/arm/mve/intrinsics/vsubq_f32.c: Likewise.
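
The new tests all follow the usual MVE intrinsics test pattern: call a single
intrinsic and scan the generated assembly for the expected instruction.  A
representative sketch (illustrative; the exact dg directives in the committed
tests may differ) looks like:

  /* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
  /* { dg-add-options arm_v8_1m_mve_fp } */
  /* { dg-additional-options "-O2" } */

  #include "arm_mve.h"

  float16x8_t
  foo (float16x8_t a, float16x8_t b)
  {
    return vabdq_f16 (a, b);
  }

  /* { dg-final { scan-assembler "vabd.f16" } } */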
--- a/gcc/config/arm/arm_mve_builtins.def
+++ b/gcc/config/arm/arm_mve_builtins.def
@@ -18,198 +18,276 @@
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
VAR5 (STORE1, vst4q, v16qi, v8hi, v4si, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrndxq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrndq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrndpq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrndnq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrndmq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrndaq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vrev64q_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vnegq_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vdupq_n_f, v8hf, v4sf)
VAR2 (UNOP_NONE_NONE, vabsq_f, v8hf, v4sf)
VAR1 (UNOP_NONE_NONE, vrev32q_f, v8hf)
VAR1 (UNOP_NONE_NONE, vcvttq_f32_f16, v4sf)
VAR1 (UNOP_NONE_NONE, vcvtbq_f32_f16, v4sf)
VAR2 (UNOP_NONE_SNONE, vcvtq_to_f_s, v8hf, v4sf)
VAR2 (UNOP_NONE_UNONE, vcvtq_to_f_u, v8hf, v4sf)
VAR3 (UNOP_SNONE_SNONE, vrev64q_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vqnegq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vqabsq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vnegq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vmvnq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vdupq_n_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vclzq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vclsq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vaddvq_s, v16qi, v8hi, v4si)
VAR3 (UNOP_SNONE_SNONE, vabsq_s, v16qi, v8hi, v4si)
VAR2 (UNOP_SNONE_SNONE, vrev32q_s, v16qi, v8hi)
VAR2 (UNOP_SNONE_SNONE, vmovltq_s, v16qi, v8hi)
VAR2 (UNOP_SNONE_SNONE, vmovlbq_s, v16qi, v8hi)
VAR2 (UNOP_SNONE_NONE, vcvtq_from_f_s, v8hi, v4si)
VAR2 (UNOP_SNONE_NONE, vcvtpq_s, v8hi, v4si)
VAR2 (UNOP_SNONE_NONE, vcvtnq_s, v8hi, v4si)
VAR2 (UNOP_SNONE_NONE, vcvtmq_s, v8hi, v4si)
VAR2 (UNOP_SNONE_NONE, vcvtaq_s, v8hi, v4si)
VAR2 (UNOP_SNONE_IMM, vmvnq_n_s, v8hi, v4si)
VAR1 (UNOP_SNONE_SNONE, vrev16q_s, v16qi)
VAR1 (UNOP_SNONE_SNONE, vaddlvq_s, v4si)
VAR3 (UNOP_UNONE_UNONE, vrev64q_u, v16qi, v8hi, v4si)
VAR3 (UNOP_UNONE_UNONE, vmvnq_u, v16qi, v8hi, v4si)
VAR3 (UNOP_UNONE_UNONE, vdupq_n_u, v16qi, v8hi, v4si)
VAR3 (UNOP_UNONE_UNONE, vclzq_u, v16qi, v8hi, v4si)
VAR3 (UNOP_UNONE_UNONE, vaddvq_u, v16qi, v8hi, v4si)
VAR2 (UNOP_UNONE_UNONE, vrev32q_u, v16qi, v8hi)
VAR2 (UNOP_UNONE_UNONE, vmovltq_u, v16qi, v8hi)
VAR2 (UNOP_UNONE_UNONE, vmovlbq_u, v16qi, v8hi)
VAR2 (UNOP_UNONE_NONE, vcvtq_from_f_u, v8hi, v4si)
VAR2 (UNOP_UNONE_NONE, vcvtpq_u, v8hi, v4si)
VAR2 (UNOP_UNONE_NONE, vcvtnq_u, v8hi, v4si)
VAR2 (UNOP_UNONE_NONE, vcvtmq_u, v8hi, v4si)
VAR2 (UNOP_UNONE_NONE, vcvtaq_u, v8hi, v4si)
VAR2 (UNOP_UNONE_IMM, vmvnq_n_u, v8hi, v4si)
VAR1 (UNOP_UNONE_UNONE, vrev16q_u, v16qi)
VAR1 (UNOP_UNONE_UNONE, vaddlvq_u, v4si)
VAR1 (UNOP_UNONE_UNONE, vctp16q, hi)
VAR1 (UNOP_UNONE_UNONE, vctp32q, hi)
VAR1 (UNOP_UNONE_UNONE, vctp64q, hi)
VAR1 (UNOP_UNONE_UNONE, vctp8q, hi)
VAR1 (UNOP_UNONE_UNONE, vpnot, hi)
VAR2 (BINOP_NONE_NONE_NONE, vsubq_n_f, v8hf, v4sf)
VAR2 (BINOP_NONE_NONE_NONE, vbrsrq_n_f, v8hf, v4sf)
VAR2 (BINOP_NONE_NONE_IMM, vcvtq_n_to_f_s, v8hf, v4sf)
VAR2 (BINOP_NONE_UNONE_IMM, vcvtq_n_to_f_u, v8hf, v4sf)
VAR2 (BINOP_NONE_UNONE_UNONE, vcreateq_f, v8hf, v4sf)
VAR2 (BINOP_UNONE_NONE_IMM, vcvtq_n_from_f_u, v8hi, v4si)
VAR2 (BINOP_NONE_NONE_IMM, vcvtq_n_from_f_s, v8hi, v4si)
VAR4 (BINOP_UNONE_UNONE_UNONE, vcreateq_u, v16qi, v8hi, v4si, v2di)
VAR4 (BINOP_NONE_UNONE_UNONE, vcreateq_s, v16qi, v8hi, v4si, v2di)
VAR3 (BINOP_UNONE_UNONE_IMM, vshrq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_IMM, vshrq_n_s, v16qi, v8hi, v4si)
VAR1 (BINOP_NONE_NONE_UNONE, vaddlvq_p_s, v4si)
VAR1 (BINOP_UNONE_UNONE_UNONE, vaddlvq_p_u, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpneq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpneq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vshlq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vshlq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vsubq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vsubq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vrmulhq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vrhaddq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vqsubq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vqsubq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vqaddq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vqaddq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vorrq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vornq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmulq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmulq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmulltq_int_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmullbq_int_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmulhq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmladavq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vminvq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vminq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmaxvq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vmaxq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vhsubq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vhsubq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vhaddq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vhaddq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, veorq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpneq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmphiq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmphiq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpeqq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpeqq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpcsq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcmpcsq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcaddq_rot90_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vcaddq_rot270_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vbicq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vandq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vaddvq_p_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vaddvaq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vaddq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_UNONE, vabdq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vshlq_r_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vrshlq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vrshlq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vqshlq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vqshlq_r_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vqrshlq_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vqrshlq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vminavq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vminaq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vmaxavq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vmaxaq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_NONE, vbrsrq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_IMM, vshlq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_IMM, vrshrq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_UNONE_IMM, vqshlq_n_u, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpneq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpltq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpltq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpleq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpleq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpgtq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpgtq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpgeq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpgeq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpeqq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_NONE, vcmpeqq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_UNONE_NONE_IMM, vqshluq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_UNONE, vaddvq_p_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vsubq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vsubq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vshlq_r_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vrshlq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vrshlq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vrmulhq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vrhaddq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqsubq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqsubq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqshlq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqshlq_r_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqrshlq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqrshlq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqrdmulhq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqrdmulhq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqdmulhq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqdmulhq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqaddq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vqaddq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vorrq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vornq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmulq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmulq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmulltq_int_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmullbq_int_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmulhq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmlsdavxq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmlsdavq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmladavxq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmladavq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vminvq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vminq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmaxvq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vmaxq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vhsubq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vhsubq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vhcaddq_rot90_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vhcaddq_rot270_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vhaddq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vhaddq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, veorq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vcaddq_rot90_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vcaddq_rot270_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vbrsrq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vbicq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vandq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vaddvaq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vaddq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_NONE, vabdq_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_IMM, vshlq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_IMM, vrshrq_n_s, v16qi, v8hi, v4si)
VAR3 (BINOP_NONE_NONE_IMM, vqshlq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vmladavxq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vmladavq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vminvq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vminq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vmaxvq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vmaxq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vhsubq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vhsubq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vhcaddq_rot90_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vhcaddq_rot270_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vhaddq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vhaddq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, veorq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vcaddq_rot90_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vcaddq_rot270_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vbrsrq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vbicq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vandq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vaddvaq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vaddq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_NONE, vabdq_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_IMM, vshlq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_IMM, vrshrq_n_s, v16qi, v8hi, v4si)
VAR3(BINOP_NONE_NONE_IMM, vqshlq_n_s, v16qi, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_UNONE, vqmovntq_u, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_UNONE, vqmovnbq_u, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_UNONE, vmulltq_poly_p, v16qi, v8hi)
VAR2(BINOP_UNONE_UNONE_UNONE, vmullbq_poly_p, v16qi, v8hi)
VAR2(BINOP_UNONE_UNONE_UNONE, vmovntq_u, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_UNONE, vmovnbq_u, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_UNONE, vmlaldavq_u, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_NONE, vqmovuntq_s, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_NONE, vqmovunbq_s, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_IMM, vshlltq_n_u, v16qi, v8hi)
VAR2(BINOP_UNONE_UNONE_IMM, vshllbq_n_u, v16qi, v8hi)
VAR2(BINOP_UNONE_UNONE_IMM, vorrq_n_u, v8hi, v4si)
VAR2(BINOP_UNONE_UNONE_IMM, vbicq_n_u, v8hi, v4si)
VAR2(BINOP_UNONE_NONE_NONE, vcmpneq_n_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpneq_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpltq_n_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpltq_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpleq_n_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpleq_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpgtq_n_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpgtq_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpgeq_n_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpgeq_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpeqq_n_f, v8hf, v4sf)
VAR2(BINOP_UNONE_NONE_NONE, vcmpeqq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vsubq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vqmovntq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vqmovnbq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vqdmulltq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vqdmulltq_n_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vqdmullbq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vqdmullbq_n_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vorrq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vornq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmulq_n_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmulq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmovntq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vmovnbq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vmlsldavxq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vmlsldavq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vmlaldavxq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vmlaldavq_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_NONE, vminnmvq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vminnmq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vminnmavq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vminnmaq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmaxnmvq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmaxnmq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmaxnmavq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vmaxnmaq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, veorq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vcmulq_rot90_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vcmulq_rot270_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vcmulq_rot180_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vcmulq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vcaddq_rot90_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vcaddq_rot270_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vbicq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vandq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vaddq_n_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_NONE, vabdq_f, v8hf, v4sf)
VAR2(BINOP_NONE_NONE_IMM, vshlltq_n_s, v16qi, v8hi)
VAR2(BINOP_NONE_NONE_IMM, vshllbq_n_s, v16qi, v8hi)
VAR2(BINOP_NONE_NONE_IMM, vorrq_n_s, v8hi, v4si)
VAR2(BINOP_NONE_NONE_IMM, vbicq_n_s, v8hi, v4si)
VAR1(BINOP_UNONE_UNONE_UNONE, vrmlaldavhq_u, v4si)
VAR1(BINOP_UNONE_UNONE_UNONE, vctp8q_m, hi)
VAR1(BINOP_UNONE_UNONE_UNONE, vctp64q_m, hi)
VAR1(BINOP_UNONE_UNONE_UNONE, vctp32q_m, hi)
VAR1(BINOP_UNONE_UNONE_UNONE, vctp16q_m, hi)
VAR1(BINOP_UNONE_UNONE_UNONE, vaddlvaq_u, v4si)
VAR1(BINOP_NONE_NONE_NONE, vrmlsldavhxq_s, v4si)
VAR1(BINOP_NONE_NONE_NONE, vrmlsldavhq_s, v4si)
VAR1(BINOP_NONE_NONE_NONE, vrmlaldavhxq_s, v4si)
VAR1(BINOP_NONE_NONE_NONE, vrmlaldavhq_s, v4si)
VAR1(BINOP_NONE_NONE_NONE, vcvttq_f16_f32, v8hf)
VAR1(BINOP_NONE_NONE_NONE, vcvtbq_f16_f32, v8hf)
VAR1(BINOP_NONE_NONE_NONE, vaddlvaq_s, v4si)
@@ -66,7 +66,26 @@
VMLADAVXQ_S VMLSDAVQ_S VMLSDAVXQ_S VQDMULHQ_N_S
VQDMULHQ_S VQRDMULHQ_N_S VQRDMULHQ_S VQSHLUQ_N_S
VCMPCSQ_N_U VCMPCSQ_U VCMPHIQ_N_U VCMPHIQ_U VABDQ_M_S
VABDQ_M_U])
VABDQ_M_U VABDQ_F VADDQ_N_F VANDQ_F VBICQ_F
VCADDQ_ROT270_F VCADDQ_ROT90_F VCMPEQQ_F VCMPEQQ_N_F
VCMPGEQ_F VCMPGEQ_N_F VCMPGTQ_F VCMPGTQ_N_F VCMPLEQ_F
VCMPLEQ_N_F VCMPLTQ_F VCMPLTQ_N_F VCMPNEQ_F VCMPNEQ_N_F
VCMULQ_F VCMULQ_ROT180_F VCMULQ_ROT270_F VCMULQ_ROT90_F
VEORQ_F VMAXNMAQ_F VMAXNMAVQ_F VMAXNMQ_F VMAXNMVQ_F
VMINNMAQ_F VMINNMAVQ_F VMINNMQ_F VMINNMVQ_F VMULQ_F
VMULQ_N_F VORNQ_F VORRQ_F VSUBQ_F VADDLVAQ_U
VADDLVAQ_S VBICQ_N_U VBICQ_N_S VCTP8Q_M VCTP16Q_M
VCTP32Q_M VCTP64Q_M VCVTBQ_F16_F32 VCVTTQ_F16_F32
VMLALDAVQ_U VMLALDAVXQ_U VMLALDAVXQ_S VMLALDAVQ_S
VMLSLDAVQ_S VMLSLDAVXQ_S VMOVNBQ_U VMOVNBQ_S
VMOVNTQ_U VMOVNTQ_S VORRQ_N_S VORRQ_N_U VQDMULLBQ_N_S
VQDMULLBQ_S VQDMULLTQ_N_S VQDMULLTQ_S VQMOVNBQ_U
VQMOVNBQ_S VQMOVUNBQ_S VQMOVUNTQ_S VRMLALDAVHXQ_S
VRMLSLDAVHQ_S VRMLSLDAVHXQ_S VSHLLBQ_S
VSHLLBQ_U VSHLLTQ_U VSHLLTQ_S VQMOVNTQ_U VQMOVNTQ_S
VSHLLBQ_N_S VSHLLBQ_N_U VSHLLTQ_N_U VSHLLTQ_N_S
VRMLALDAVHQ_U VRMLALDAVHQ_S VMULLTQ_POLY_P
VMULLBQ_POLY_P])
(define_mode_attr MVE_CNVT [(V8HI "V8HF") (V4SI "V4SF")
(V8HF "V8HI") (V4SF "V4SI")])
@@ -119,10 +138,19 @@
(VSHLQ_N_S "s") (VSHLQ_N_U "u") (VSHLQ_R_S "s")
(VSHLQ_R_U "u") (VSUBQ_N_S "s") (VSUBQ_N_U "u")
(VSUBQ_S "s") (VSUBQ_U "u") (VADDVAQ_S "s")
(VADDVAQ_U "u")])
(VADDVAQ_U "u") (VADDLVAQ_S "s") (VADDLVAQ_U "u")
(VBICQ_N_S "s") (VBICQ_N_U "u") (VMLALDAVQ_U "u")
(VMLALDAVQ_S "s") (VMLALDAVXQ_U "u") (VMLALDAVXQ_S "s")
(VMOVNBQ_U "u") (VMOVNBQ_S "s") (VMOVNTQ_U "u")
(VMOVNTQ_S "s") (VORRQ_N_S "s") (VORRQ_N_U "u")
(VQMOVNBQ_U "u") (VQMOVNBQ_S "s") (VQMOVNTQ_S "s")
(VQMOVNTQ_U "u") (VSHLLBQ_N_U "u") (VSHLLBQ_N_S "s")
(VSHLLTQ_N_U "u") (VSHLLTQ_N_S "s") (VRMLALDAVHQ_U "u")
(VRMLALDAVHQ_S "s")])
(define_int_attr mode1 [(VCTP8Q "8") (VCTP16Q "16") (VCTP32Q "32")
(VCTP64Q "64")])
(VCTP64Q "64") (VCTP8Q_M "8") (VCTP16Q_M "16")
(VCTP32Q_M "32") (VCTP64Q_M "64")])
(define_mode_attr MVE_pred2 [(V16QI "mve_imm_8") (V8HI "mve_imm_16")
(V4SI "mve_imm_32")])
(define_mode_attr MVE_constraint2 [(V16QI "Rb") (V8HI "Rd") (V4SI "Rf")])
@@ -146,6 +174,7 @@
(define_int_iterator VCVTMQ [VCVTMQ_S VCVTMQ_U])
(define_int_iterator VADDLVQ [VADDLVQ_U VADDLVQ_S])
(define_int_iterator VCTPQ [VCTP8Q VCTP16Q VCTP32Q VCTP64Q])
(define_int_iterator VCTPQ_M [VCTP8Q_M VCTP16Q_M VCTP32Q_M VCTP64Q_M])
(define_int_iterator VCVTQ_N_TO_F [VCVTQ_N_TO_F_S VCVTQ_N_TO_F_U])
(define_int_iterator VCREATEQ [VCREATEQ_U VCREATEQ_S])
(define_int_iterator VSHRQ_N [VSHRQ_N_S VSHRQ_N_U])
@@ -200,7 +229,18 @@
(define_int_iterator VSHLQ_R [VSHLQ_R_S VSHLQ_R_U])
(define_int_iterator VSUBQ [VSUBQ_S VSUBQ_U])
(define_int_iterator VSUBQ_N [VSUBQ_N_S VSUBQ_N_U])
(define_int_iterator VADDLVAQ [VADDLVAQ_S VADDLVAQ_U])
(define_int_iterator VBICQ_N [VBICQ_N_S VBICQ_N_U])
(define_int_iterator VMLALDAVQ [VMLALDAVQ_U VMLALDAVQ_S])
(define_int_iterator VMLALDAVXQ [VMLALDAVXQ_U VMLALDAVXQ_S])
(define_int_iterator VMOVNBQ [VMOVNBQ_U VMOVNBQ_S])
(define_int_iterator VMOVNTQ [VMOVNTQ_S VMOVNTQ_U])
(define_int_iterator VORRQ_N [VORRQ_N_U VORRQ_N_S])
(define_int_iterator VQMOVNBQ [VQMOVNBQ_U VQMOVNBQ_S])
(define_int_iterator VQMOVNTQ [VQMOVNTQ_U VQMOVNTQ_S])
(define_int_iterator VSHLLBQ_N [VSHLLBQ_N_S VSHLLBQ_N_U])
(define_int_iterator VSHLLTQ_N [VSHLLTQ_N_U VSHLLTQ_N_S])
(define_int_iterator VRMLALDAVHQ [VRMLALDAVHQ_U VRMLALDAVHQ_S])
(define_insn "*mve_mov<mode>"
[(set (match_operand:MVE_types 0 "nonimmediate_operand" "=w,w,r,w,w,r,w,Us")
@@ -2057,3 +2097,963 @@
"vsub.i%#<V_sz_elem>\t%q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vabdq_f])
;;
(define_insn "mve_vabdq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VABDQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vabd.f%#<V_sz_elem> %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vaddlvaq_s vaddlvaq_u])
;;
(define_insn "mve_vaddlvaq_<supf>v4si"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:DI 1 "s_register_operand" "0")
(match_operand:V4SI 2 "s_register_operand" "w")]
VADDLVAQ))
]
"TARGET_HAVE_MVE"
"vaddlva.<supf>32 %Q0, %R0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vaddq_n_f])
;;
(define_insn "mve_vaddq_n_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VADDQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vadd.f%#<V_sz_elem> %q0, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vandq_f])
;;
(define_insn "mve_vandq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VANDQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vand %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vbicq_f])
;;
(define_insn "mve_vbicq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VBICQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vbic %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vbicq_n_s, vbicq_n_u])
;;
(define_insn "mve_vbicq_n_<supf><mode>"
[
(set (match_operand:MVE_5 0 "s_register_operand" "=w")
(unspec:MVE_5 [(match_operand:MVE_5 1 "s_register_operand" "0")
(match_operand:SI 2 "immediate_operand" "i")]
VBICQ_N))
]
"TARGET_HAVE_MVE"
"vbic.i%#<V_sz_elem> %q0, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcaddq_rot270_f])
;;
(define_insn "mve_vcaddq_rot270_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCADDQ_ROT270_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcadd.f%#<V_sz_elem> %q0, %q1, %q2, #270"
[(set_attr "type" "mve_move")
])
;;
;; [vcaddq_rot90_f])
;;
(define_insn "mve_vcaddq_rot90_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCADDQ_ROT90_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcadd.f%#<V_sz_elem> %q0, %q1, %q2, #90"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpeqq_f])
;;
(define_insn "mve_vcmpeqq_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMPEQQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> eq, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpeqq_n_f])
;;
(define_insn "mve_vcmpeqq_n_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VCMPEQQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> eq, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpgeq_f])
;;
(define_insn "mve_vcmpgeq_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMPGEQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> ge, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpgeq_n_f])
;;
(define_insn "mve_vcmpgeq_n_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VCMPGEQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> ge, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpgtq_f])
;;
(define_insn "mve_vcmpgtq_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMPGTQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> gt, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpgtq_n_f])
;;
(define_insn "mve_vcmpgtq_n_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VCMPGTQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> gt, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpleq_f])
;;
(define_insn "mve_vcmpleq_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMPLEQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> le, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpleq_n_f])
;;
(define_insn "mve_vcmpleq_n_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VCMPLEQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> le, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpltq_f])
;;
(define_insn "mve_vcmpltq_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMPLTQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> lt, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpltq_n_f])
;;
(define_insn "mve_vcmpltq_n_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VCMPLTQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> lt, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpneq_f])
;;
(define_insn "mve_vcmpneq_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMPNEQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> ne, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmpneq_n_f])
;;
(define_insn "mve_vcmpneq_n_f<mode>"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VCMPNEQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmp.f%#<V_sz_elem> ne, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vcmulq_f])
;;
(define_insn "mve_vcmulq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMULQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmul.f%#<V_sz_elem> %q0, %q1, %q2, #0"
[(set_attr "type" "mve_move")
])
;;
;; [vcmulq_rot180_f])
;;
(define_insn "mve_vcmulq_rot180_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMULQ_ROT180_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmul.f%#<V_sz_elem> %q0, %q1, %q2, #180"
[(set_attr "type" "mve_move")
])
;;
;; [vcmulq_rot270_f])
;;
(define_insn "mve_vcmulq_rot270_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMULQ_ROT270_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmul.f%#<V_sz_elem> %q0, %q1, %q2, #270"
[(set_attr "type" "mve_move")
])
;;
;; [vcmulq_rot90_f])
;;
(define_insn "mve_vcmulq_rot90_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VCMULQ_ROT90_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcmul.f%#<V_sz_elem> %q0, %q1, %q2, #90"
[(set_attr "type" "mve_move")
])
;;
;; [vctp8q_m vctp16q_m vctp32q_m vctp64q_m])
;;
(define_insn "mve_vctp<mode1>q_mhi"
[
(set (match_operand:HI 0 "vpr_register_operand" "=Up")
(unspec:HI [(match_operand:SI 1 "s_register_operand" "r")
(match_operand:HI 2 "vpr_register_operand" "Up")]
VCTPQ_M))
]
"TARGET_HAVE_MVE"
"vpst\;vctpt.<mode1> %1"
[(set_attr "type" "mve_move")
(set_attr "length""8")])
;;
;; [vcvtbq_f16_f32])
;;
(define_insn "mve_vcvtbq_f16_f32v8hf"
[
(set (match_operand:V8HF 0 "s_register_operand" "=w")
(unspec:V8HF [(match_operand:V8HF 1 "s_register_operand" "0")
(match_operand:V4SF 2 "s_register_operand" "w")]
VCVTBQ_F16_F32))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtb.f16.f32 %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vcvttq_f16_f32])
;;
(define_insn "mve_vcvttq_f16_f32v8hf"
[
(set (match_operand:V8HF 0 "s_register_operand" "=w")
(unspec:V8HF [(match_operand:V8HF 1 "s_register_operand" "0")
(match_operand:V4SF 2 "s_register_operand" "w")]
VCVTTQ_F16_F32))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vcvtt.f16.f32 %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [veorq_f])
;;
(define_insn "mve_veorq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VEORQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"veor %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmaxnmaq_f])
;;
(define_insn "mve_vmaxnmaq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "0")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMAXNMAQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vmaxnma.f%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmaxnmavq_f])
;;
(define_insn "mve_vmaxnmavq_f<mode>"
[
(set (match_operand:<V_elem> 0 "s_register_operand" "=r")
(unspec:<V_elem> [(match_operand:<V_elem> 1 "s_register_operand" "0")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMAXNMAVQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vmaxnmav.f%#<V_sz_elem> %0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmaxnmq_f])
;;
(define_insn "mve_vmaxnmq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMAXNMQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vmaxnm.f%#<V_sz_elem> %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmaxnmvq_f])
;;
(define_insn "mve_vmaxnmvq_f<mode>"
[
(set (match_operand:<V_elem> 0 "s_register_operand" "=r")
(unspec:<V_elem> [(match_operand:<V_elem> 1 "s_register_operand" "0")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMAXNMVQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vmaxnmv.f%#<V_sz_elem> %0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vminnmaq_f])
;;
(define_insn "mve_vminnmaq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "0")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMINNMAQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vminnma.f%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vminnmavq_f])
;;
(define_insn "mve_vminnmavq_f<mode>"
[
(set (match_operand:<V_elem> 0 "s_register_operand" "=r")
(unspec:<V_elem> [(match_operand:<V_elem> 1 "s_register_operand" "0")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMINNMAVQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vminnmav.f%#<V_sz_elem> %0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vminnmq_f])
;;
(define_insn "mve_vminnmq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMINNMQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vminnm.f%#<V_sz_elem> %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vminnmvq_f])
;;
(define_insn "mve_vminnmvq_f<mode>"
[
(set (match_operand:<V_elem> 0 "s_register_operand" "=r")
(unspec:<V_elem> [(match_operand:<V_elem> 1 "s_register_operand" "0")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMINNMVQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vminnmv.f%#<V_sz_elem> %0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmlaldavq_u, vmlaldavq_s])
;;
(define_insn "mve_vmlaldavq_<supf><mode>"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VMLALDAVQ))
]
"TARGET_HAVE_MVE"
"vmlaldav.<supf>%#<V_sz_elem> %Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmlaldavxq_s])
;;
(define_insn "mve_vmlaldavxq_s<mode>"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VMLALDAVXQ_S))
]
"TARGET_HAVE_MVE"
"vmlaldavx.s%#<V_sz_elem> %Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmlsldavq_s])
;;
(define_insn "mve_vmlsldavq_s<mode>"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VMLSLDAVQ_S))
]
"TARGET_HAVE_MVE"
"vmlsldav.s%#<V_sz_elem> %Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmlsldavxq_s])
;;
(define_insn "mve_vmlsldavxq_s<mode>"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VMLSLDAVXQ_S))
]
"TARGET_HAVE_MVE"
"vmlsldavx.s%#<V_sz_elem> %Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmovnbq_u, vmovnbq_s])
;;
(define_insn "mve_vmovnbq_<supf><mode>"
[
(set (match_operand:<V_narrow_pack> 0 "s_register_operand" "=w")
(unspec:<V_narrow_pack> [(match_operand:<V_narrow_pack> 1 "s_register_operand" "0")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VMOVNBQ))
]
"TARGET_HAVE_MVE"
"vmovnb.i%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmovntq_s, vmovntq_u])
;;
(define_insn "mve_vmovntq_<supf><mode>"
[
(set (match_operand:<V_narrow_pack> 0 "s_register_operand" "=w")
(unspec:<V_narrow_pack> [(match_operand:<V_narrow_pack> 1 "s_register_operand" "0")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VMOVNTQ))
]
"TARGET_HAVE_MVE"
"vmovnt.i%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmulq_f])
;;
(define_insn "mve_vmulq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VMULQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vmul.f%#<V_sz_elem> %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmulq_n_f])
;;
(define_insn "mve_vmulq_n_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VMULQ_N_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vmul.f%#<V_sz_elem> %q0, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vornq_f])
;;
(define_insn "mve_vornq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VORNQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vorn %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vorrq_f])
;;
(define_insn "mve_vorrq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VORRQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vorr %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vorrq_n_u, vorrq_n_s])
;;
(define_insn "mve_vorrq_n_<supf><mode>"
[
(set (match_operand:MVE_5 0 "s_register_operand" "=w")
(unspec:MVE_5 [(match_operand:MVE_5 1 "s_register_operand" "0")
(match_operand:SI 2 "immediate_operand" "i")]
VORRQ_N))
]
"TARGET_HAVE_MVE"
"vorr.i%#<V_sz_elem> %q0, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vqdmullbq_n_s])
;;
(define_insn "mve_vqdmullbq_n_s<mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VQDMULLBQ_N_S))
]
"TARGET_HAVE_MVE"
"vqdmullb.s%#<V_sz_elem> %q0, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vqdmullbq_s])
;;
(define_insn "mve_vqdmullbq_s<mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VQDMULLBQ_S))
]
"TARGET_HAVE_MVE"
"vqdmullb.s%#<V_sz_elem> %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vqdmulltq_n_s])
;;
(define_insn "mve_vqdmulltq_n_s<mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:<V_elem> 2 "s_register_operand" "r")]
VQDMULLTQ_N_S))
]
"TARGET_HAVE_MVE"
"vqdmullt.s%#<V_sz_elem> %q0, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vqdmulltq_s])
;;
(define_insn "mve_vqdmulltq_s<mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_5 1 "s_register_operand" "w")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VQDMULLTQ_S))
]
"TARGET_HAVE_MVE"
"vqdmullt.s%#<V_sz_elem> %q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vqmovnbq_u, vqmovnbq_s])
;;
(define_insn "mve_vqmovnbq_<supf><mode>"
[
(set (match_operand:<V_narrow_pack> 0 "s_register_operand" "=w")
(unspec:<V_narrow_pack> [(match_operand:<V_narrow_pack> 1 "s_register_operand" "0")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VQMOVNBQ))
]
"TARGET_HAVE_MVE"
"vqmovnb.<supf>%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vqmovntq_u, vqmovntq_s])
;;
(define_insn "mve_vqmovntq_<supf><mode>"
[
(set (match_operand:<V_narrow_pack> 0 "s_register_operand" "=w")
(unspec:<V_narrow_pack> [(match_operand:<V_narrow_pack> 1 "s_register_operand" "0")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VQMOVNTQ))
]
"TARGET_HAVE_MVE"
"vqmovnt.<supf>%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vqmovunbq_s])
;;
(define_insn "mve_vqmovunbq_s<mode>"
[
(set (match_operand:<V_narrow_pack> 0 "s_register_operand" "=w")
(unspec:<V_narrow_pack> [(match_operand:<V_narrow_pack> 1 "s_register_operand" "0")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VQMOVUNBQ_S))
]
"TARGET_HAVE_MVE"
"vqmovunb.s%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vqmovuntq_s])
;;
(define_insn "mve_vqmovuntq_s<mode>"
[
(set (match_operand:<V_narrow_pack> 0 "s_register_operand" "=w")
(unspec:<V_narrow_pack> [(match_operand:<V_narrow_pack> 1 "s_register_operand" "0")
(match_operand:MVE_5 2 "s_register_operand" "w")]
VQMOVUNTQ_S))
]
"TARGET_HAVE_MVE"
"vqmovunt.s%#<V_sz_elem> %q0, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vrmlaldavhxq_s])
;;
(define_insn "mve_vrmlaldavhxq_sv4si"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:V4SI 1 "s_register_operand" "w")
(match_operand:V4SI 2 "s_register_operand" "w")]
VRMLALDAVHXQ_S))
]
"TARGET_HAVE_MVE"
"vrmlaldavhx.s32 %Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vrmlsldavhq_s])
;;
(define_insn "mve_vrmlsldavhq_sv4si"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:V4SI 1 "s_register_operand" "w")
(match_operand:V4SI 2 "s_register_operand" "w")]
VRMLSLDAVHQ_S))
]
"TARGET_HAVE_MVE"
"vrmlsldavh.s32\t%Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vrmlsldavhxq_s])
;;
(define_insn "mve_vrmlsldavhxq_sv4si"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:V4SI 1 "s_register_operand" "w")
(match_operand:V4SI 2 "s_register_operand" "w")]
VRMLSLDAVHXQ_S))
]
"TARGET_HAVE_MVE"
"vrmlsldavhx.s32\t%Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vshllbq_n_s, vshllbq_n_u])
;;
(define_insn "mve_vshllbq_n_<supf><mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_3 1 "s_register_operand" "w")
(match_operand:SI 2 "immediate_operand" "i")]
VSHLLBQ_N))
]
"TARGET_HAVE_MVE"
"vshllb.<supf>%#<V_sz_elem>\t%q0, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vshlltq_n_u, vshlltq_n_s])
;;
(define_insn "mve_vshlltq_n_<supf><mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_3 1 "s_register_operand" "w")
(match_operand:SI 2 "immediate_operand" "i")]
VSHLLTQ_N))
]
"TARGET_HAVE_MVE"
"vshllt.<supf>%#<V_sz_elem>\t%q0, %q1, %2"
[(set_attr "type" "mve_move")
])
;;
;; [vsubq_f])
;;
(define_insn "mve_vsubq_f<mode>"
[
(set (match_operand:MVE_0 0 "s_register_operand" "=w")
(unspec:MVE_0 [(match_operand:MVE_0 1 "s_register_operand" "w")
(match_operand:MVE_0 2 "s_register_operand" "w")]
VSUBQ_F))
]
"TARGET_HAVE_MVE && TARGET_HAVE_MVE_FLOAT"
"vsub.f%#<V_sz_elem>\t%q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmulltq_poly_p])
;;
(define_insn "mve_vmulltq_poly_p<mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_3 1 "s_register_operand" "w")
(match_operand:MVE_3 2 "s_register_operand" "w")]
VMULLTQ_POLY_P))
]
"TARGET_HAVE_MVE"
"vmullt.p%#<V_sz_elem>\t%q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vmullbq_poly_p])
;;
(define_insn "mve_vmullbq_poly_p<mode>"
[
(set (match_operand:<V_double_width> 0 "s_register_operand" "=w")
(unspec:<V_double_width> [(match_operand:MVE_3 1 "s_register_operand" "w")
(match_operand:MVE_3 2 "s_register_operand" "w")]
VMULLBQ_POLY_P))
]
"TARGET_HAVE_MVE"
"vmullb.p%#<V_sz_elem>\t%q0, %q1, %q2"
[(set_attr "type" "mve_move")
])
;;
;; [vrmlaldavhq_u vrmlaldavhq_s])
;;
(define_insn "mve_vrmlaldavhq_<supf>v4si"
[
(set (match_operand:DI 0 "s_register_operand" "=r")
(unspec:DI [(match_operand:V4SI 1 "s_register_operand" "w")
(match_operand:V4SI 2 "s_register_operand" "w")]
VRMLALDAVHQ))
]
"TARGET_HAVE_MVE"
"vrmlaldavh.<supf>32 %Q0, %R0, %q1, %q2"
[(set_attr "type" "mve_move")
])
@@ -2,6 +2,156 @@
Mihail Ionescu <mihail.ionescu@arm.com>
Srinath Parvathaneni <srinath.parvathaneni@arm.com>
* gcc.target/arm/mve/intrinsics/vabdq_f16.c: New test.
* gcc.target/arm/mve/intrinsics/vabdq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vaddlvaq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vaddlvaq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vaddq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vaddq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vandq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vandq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vbicq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vbicq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vbicq_n_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vbicq_n_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vbicq_n_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vbicq_n_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcaddq_rot270_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcaddq_rot270_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcaddq_rot90_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcaddq_rot90_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpeqq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpeqq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpeqq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpeqq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgeq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgeq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgeq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgeq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgtq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgtq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgtq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpgtq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpleq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpleq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpleq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpleq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpltq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpltq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpltq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpltq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpneq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpneq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpneq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmpneq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_rot180_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_rot180_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_rot270_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_rot270_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_rot90_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcmulq_rot90_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vctp16q_m.c: Likewise.
* gcc.target/arm/mve/intrinsics/vctp32q_m.c: Likewise.
* gcc.target/arm/mve/intrinsics/vctp64q_m.c: Likewise.
* gcc.target/arm/mve/intrinsics/vctp8q_m.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcvtbq_f16_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vcvttq_f16_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/veorq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/veorq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmaq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmaq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmavq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmavq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmvq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmaxnmvq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmaq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmaq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmavq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmavq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmvq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vminnmvq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavq_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavxq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavxq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavxq_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlaldavxq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlsldavq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlsldavq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlsldavxq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmlsldavxq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovnbq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovnbq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovnbq_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovnbq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovntq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovntq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovntq_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmovntq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmullbq_poly_p16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmullbq_poly_p8.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmulltq_poly_p16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmulltq_poly_p8.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmulq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmulq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmulq_n_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vmulq_n_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vornq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vornq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vorrq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vorrq_f32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vorrq_n_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vorrq_n_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vorrq_n_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vorrq_n_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmullbq_n_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmullbq_n_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmullbq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmullbq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmulltq_n_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmulltq_n_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmulltq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqdmulltq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovnbq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovnbq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovnbq_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovnbq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovntq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovntq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovntq_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovntq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovunbq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovunbq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovuntq_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vqmovuntq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vrmlaldavhq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vrmlaldavhq_u32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vrmlaldavhxq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vrmlsldavhq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vrmlsldavhxq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshllbq_n_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshllbq_n_s8.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshllbq_n_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshllbq_n_u8.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshlltq_n_s16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshlltq_n_s8.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshlltq_n_u16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vshlltq_n_u8.c: Likewise.
* gcc.target/arm/mve/intrinsics/vsubq_f16.c: Likewise.
* gcc.target/arm/mve/intrinsics/vsubq_f32.c: Likewise.
2020-03-17  Andre Vieira  <andre.simoesdiasvieira@arm.com>
            Mihail Ionescu  <mihail.ionescu@arm.com>
            Srinath Parvathaneni  <srinath.parvathaneni@arm.com>
* gcc.target/arm/mve/intrinsics/vabdq_s16.c: New test.
* gcc.target/arm/mve/intrinsics/vabdq_s32.c: Likewise.
* gcc.target/arm/mve/intrinsics/vabdq_s8.c: Likewise.
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vabdq_f16 (a, b);
}
/* { dg-final { scan-assembler "vabd.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vabdq (a, b);
}
/* { dg-final { scan-assembler "vabd.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vabdq_f32 (a, b);
}
/* { dg-final { scan-assembler "vabd.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vabdq (a, b);
}
/* { dg-final { scan-assembler "vabd.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int64_t a, int32x4_t b)
{
return vaddlvaq_s32 (a, b);
}
/* { dg-final { scan-assembler "vaddlva.s32" } } */
int64_t
foo1 (int64_t a, int32x4_t b)
{
return vaddlvaq (a, b);
}
/* { dg-final { scan-assembler "vaddlva.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint64_t
foo (uint64_t a, uint32x4_t b)
{
return vaddlvaq_u32 (a, b);
}
/* { dg-final { scan-assembler "vaddlva.u32" } } */
uint64_t
foo1 (uint64_t a, uint32x4_t b)
{
return vaddlvaq (a, b);
}
/* { dg-final { scan-assembler "vaddlva.u32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16_t b)
{
return vaddq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vadd.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16_t b)
{
return vaddq (a, b);
}
/* { dg-final { scan-assembler "vadd.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32_t b)
{
return vaddq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vadd.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32_t b)
{
return vaddq (a, b);
}
/* { dg-final { scan-assembler "vadd.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vandq_f16 (a, b);
}
/* { dg-final { scan-assembler "vand" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vandq (a, b);
}
/* { dg-final { scan-assembler "vand" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vandq_f32 (a, b);
}
/* { dg-final { scan-assembler "vand" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vandq (a, b);
}
/* { dg-final { scan-assembler "vand" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vbicq_f16 (a, b);
}
/* { dg-final { scan-assembler "vbic" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vbicq (a, b);
}
/* { dg-final { scan-assembler "vbic" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vbicq_f32 (a, b);
}
/* { dg-final { scan-assembler "vbic" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vbicq (a, b);
}
/* { dg-final { scan-assembler "vbic" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int16x8_t a)
{
return vbicq_n_s16 (a, 1);
}
/* { dg-final { scan-assembler "vbic.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int32x4_t a)
{
return vbicq_n_s32 (a, 1);
}
/* { dg-final { scan-assembler "vbic.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a)
{
return vbicq_n_u16 (a, 1);
}
/* { dg-final { scan-assembler "vbic.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint32x4_t
foo (uint32x4_t a)
{
return vbicq_n_u32 (a, 1);
}
/* { dg-final { scan-assembler "vbic.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vcaddq_rot270_f16 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcaddq_rot270 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vcaddq_rot270_f32 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcaddq_rot270 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vcaddq_rot90_f16 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcaddq_rot90 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vcaddq_rot90_f32 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcaddq_rot90 (a, b);
}
/* { dg-final { scan-assembler "vcadd.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16x8_t b)
{
return vcmpeqq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmpeqq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32x4_t b)
{
return vcmpeqq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmpeqq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16_t b)
{
return vcmpeqq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16_t b)
{
return vcmpeqq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32_t b)
{
return vcmpeqq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32_t b)
{
return vcmpeqq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16x8_t b)
{
return vcmpgeq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmpgeq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32x4_t b)
{
return vcmpgeq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmpgeq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16_t b)
{
return vcmpgeq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16_t b)
{
return vcmpgeq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32_t b)
{
return vcmpgeq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32_t b)
{
return vcmpgeq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16x8_t b)
{
return vcmpgtq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmpgtq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32x4_t b)
{
return vcmpgtq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmpgtq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16_t b)
{
return vcmpgtq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16_t b)
{
return vcmpgtq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32_t b)
{
return vcmpgtq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32_t b)
{
return vcmpgtq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16x8_t b)
{
return vcmpleq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmpleq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32x4_t b)
{
return vcmpleq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmpleq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16_t b)
{
return vcmpleq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16_t b)
{
return vcmpleq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32_t b)
{
return vcmpleq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32_t b)
{
return vcmpleq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16x8_t b)
{
return vcmpltq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmpltq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32x4_t b)
{
return vcmpltq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmpltq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16_t b)
{
return vcmpltq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16_t b)
{
return vcmpltq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32_t b)
{
return vcmpltq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32_t b)
{
return vcmpltq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16x8_t b)
{
return vcmpneq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmpneq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32x4_t b)
{
return vcmpneq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmpneq (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float16x8_t a, float16_t b)
{
return vcmpneq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
mve_pred16_t
foo1 (float16x8_t a, float16_t b)
{
return vcmpneq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (float32x4_t a, float32_t b)
{
return vcmpneq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
mve_pred16_t
foo1 (float32x4_t a, float32_t b)
{
return vcmpneq_n (a, b);
}
/* { dg-final { scan-assembler "vcmp.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vcmulq_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmulq (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vcmulq_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmulq (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vcmulq_rot180_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmulq_rot180 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vcmulq_rot180_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmulq_rot180 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vcmulq_rot270_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmulq_rot270 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vcmulq_rot270_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmulq_rot270 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vcmulq_rot90_f16 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vcmulq_rot90 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vcmulq_rot90_f32 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vcmulq_rot90 (a, b);
}
/* { dg-final { scan-assembler "vcmul.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (uint32_t a, mve_pred16_t p)
{
return vctp16q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-final { scan-assembler "vctpt.16" } } */
mve_pred16_t
foo1 (uint32_t a, mve_pred16_t p)
{
return vctp16q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (uint32_t a, mve_pred16_t p)
{
return vctp32q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-final { scan-assembler "vctpt.32" } } */
mve_pred16_t
foo1 (uint32_t a, mve_pred16_t p)
{
return vctp32q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (uint32_t a, mve_pred16_t p)
{
return vctp64q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-final { scan-assembler "vctpt.64" } } */
mve_pred16_t
foo1 (uint32_t a, mve_pred16_t p)
{
return vctp64q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
mve_pred16_t
foo (uint32_t a, mve_pred16_t p)
{
return vctp8q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-final { scan-assembler "vctpt.8" } } */
mve_pred16_t
foo1 (uint32_t a, mve_pred16_t p)
{
return vctp8q_m (a, p);
}
/* { dg-final { scan-assembler "vpst" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float32x4_t b)
{
return vcvtbq_f16_f32 (a, b);
}
/* { dg-final { scan-assembler "vcvtb.f16.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float32x4_t b)
{
return vcvttq_f16_f32 (a, b);
}
/* { dg-final { scan-assembler "vcvtt.f16.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return veorq_f16 (a, b);
}
/* { dg-final { scan-assembler "veor" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return veorq (a, b);
}
/* { dg-final { scan-assembler "veor" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return veorq_f32 (a, b);
}
/* { dg-final { scan-assembler "veor" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return veorq (a, b);
}
/* { dg-final { scan-assembler "veor" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vhaddq_n (a, b);
return vhaddq (a, b);
}
/* { dg-final { scan-assembler "vhadd.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vhaddq_n (a, b);
return vhaddq (a, b);
}
/* { dg-final { scan-assembler "vhadd.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vhaddq_n (a, b);
return vhaddq (a, b);
}
/* { dg-final { scan-assembler "vhadd.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a, uint16_t b)
uint16x8_t
foo1 (uint16x8_t a, uint16_t b)
{
return vhaddq_n (a, b);
return vhaddq (a, b);
}
/* { dg-final { scan-assembler "vhadd.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a, uint32_t b)
uint32x4_t
foo1 (uint32x4_t a, uint32_t b)
{
return vhaddq_n (a, b);
return vhaddq (a, b);
}
/* { dg-final { scan-assembler "vhadd.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a, uint8_t b)
uint8x16_t
foo1 (uint8x16_t a, uint8_t b)
{
return vhaddq_n (a, b);
return vhaddq (a, b);
}
/* { dg-final { scan-assembler "vhadd.u8" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vhsubq_n (a, b);
return vhsubq (a, b);
}
/* { dg-final { scan-assembler "vhsub.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vhsubq_n (a, b);
return vhsubq (a, b);
}
/* { dg-final { scan-assembler "vhsub.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vhsubq_n (a, b);
return vhsubq (a, b);
}
/* { dg-final { scan-assembler "vhsub.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a, uint16_t b)
uint16x8_t
foo1 (uint16x8_t a, uint16_t b)
{
return vhsubq_n (a, b);
return vhsubq (a, b);
}
/* { dg-final { scan-assembler "vhsub.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a, uint32_t b)
uint32x4_t
foo1 (uint32x4_t a, uint32_t b)
{
return vhsubq_n (a, b);
return vhsubq (a, b);
}
/* { dg-final { scan-assembler "vhsub.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a, uint8_t b)
uint8x16_t
foo1 (uint8x16_t a, uint8_t b)
{
return vhsubq_n (a, b);
return vhsubq (a, b);
}
/* { dg-final { scan-assembler "vhsub.u8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vmaxnmaq_f16 (a, b);
}
/* { dg-final { scan-assembler "vmaxnma.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vmaxnmaq (a, b);
}
/* { dg-final { scan-assembler "vmaxnma.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vmaxnmaq_f32 (a, b);
}
/* { dg-final { scan-assembler "vmaxnma.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vmaxnmaq (a, b);
}
/* { dg-final { scan-assembler "vmaxnma.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16_t
foo (float16_t a, float16x8_t b)
{
return vmaxnmavq_f16 (a, b);
}
/* { dg-final { scan-assembler "vmaxnmav.f16" } } */
float16_t
foo1 (float16_t a, float16x8_t b)
{
return vmaxnmavq (a, b);
}
/* { dg-final { scan-assembler "vmaxnmav.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32_t
foo (float32_t a, float32x4_t b)
{
return vmaxnmavq_f32 (a, b);
}
/* { dg-final { scan-assembler "vmaxnmav.f32" } } */
float32_t
foo1 (float32_t a, float32x4_t b)
{
return vmaxnmavq (a, b);
}
/* { dg-final { scan-assembler "vmaxnmav.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vmaxnmq_f16 (a, b);
}
/* { dg-final { scan-assembler "vmaxnm.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vmaxnmq (a, b);
}
/* { dg-final { scan-assembler "vmaxnm.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vmaxnmq_f32 (a, b);
}
/* { dg-final { scan-assembler "vmaxnm.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vmaxnmq (a, b);
}
/* { dg-final { scan-assembler "vmaxnm.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16_t
foo (float16_t a, float16x8_t b)
{
return vmaxnmvq_f16 (a, b);
}
/* { dg-final { scan-assembler "vmaxnmv.f16" } } */
float16_t
foo1 (float16_t a, float16x8_t b)
{
return vmaxnmvq (a, b);
}
/* { dg-final { scan-assembler "vmaxnmv.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32_t
foo (float32_t a, float32x4_t b)
{
return vmaxnmvq_f32 (a, b);
}
/* { dg-final { scan-assembler "vmaxnmv.f32" } } */
float32_t
foo1 (float32_t a, float32x4_t b)
{
return vmaxnmvq (a, b);
}
/* { dg-final { scan-assembler "vmaxnmv.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vminnmaq_f16 (a, b);
}
/* { dg-final { scan-assembler "vminnma.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vminnmaq (a, b);
}
/* { dg-final { scan-assembler "vminnma.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vminnmaq_f32 (a, b);
}
/* { dg-final { scan-assembler "vminnma.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vminnmaq (a, b);
}
/* { dg-final { scan-assembler "vminnma.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16_t
foo (float16_t a, float16x8_t b)
{
return vminnmavq_f16 (a, b);
}
/* { dg-final { scan-assembler "vminnmav.f16" } } */
float16_t
foo1 (float16_t a, float16x8_t b)
{
return vminnmavq (a, b);
}
/* { dg-final { scan-assembler "vminnmav.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32_t
foo (float32_t a, float32x4_t b)
{
return vminnmavq_f32 (a, b);
}
/* { dg-final { scan-assembler "vminnmav.f32" } } */
float32_t
foo1 (float32_t a, float32x4_t b)
{
return vminnmavq (a, b);
}
/* { dg-final { scan-assembler "vminnmav.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vminnmq_f16 (a, b);
}
/* { dg-final { scan-assembler "vminnm.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vminnmq (a, b);
}
/* { dg-final { scan-assembler "vminnm.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vminnmq_f32 (a, b);
}
/* { dg-final { scan-assembler "vminnm.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vminnmq (a, b);
}
/* { dg-final { scan-assembler "vminnm.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16_t
foo (float16_t a, float16x8_t b)
{
return vminnmvq_f16 (a, b);
}
/* { dg-final { scan-assembler "vminnmv.f16" } } */
float16_t
foo1 (float16_t a, float16x8_t b)
{
return vminnmvq (a, b);
}
/* { dg-final { scan-assembler "vminnmv.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32_t
foo (float32_t a, float32x4_t b)
{
return vminnmvq_f32 (a, b);
}
/* { dg-final { scan-assembler "vminnmv.f32" } } */
float32_t
foo1 (float32_t a, float32x4_t b)
{
return vminnmvq (a, b);
}
/* { dg-final { scan-assembler "vminnmv.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int16x8_t a, int16x8_t b)
{
return vmlaldavq_s16 (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.s16" } } */
int64_t
foo1 (int16x8_t a, int16x8_t b)
{
return vmlaldavq (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vmlaldavq_s32 (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vmlaldavq (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint64_t
foo (uint16x8_t a, uint16x8_t b)
{
return vmlaldavq_u16 (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.u16" } } */
uint64_t
foo1 (uint16x8_t a, uint16x8_t b)
{
return vmlaldavq (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.u16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint64_t
foo (uint32x4_t a, uint32x4_t b)
{
return vmlaldavq_u32 (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.u32" } } */
uint64_t
foo1 (uint32x4_t a, uint32x4_t b)
{
return vmlaldavq (a, b);
}
/* { dg-final { scan-assembler "vmlaldav.u32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int16x8_t a, int16x8_t b)
{
return vmlaldavxq_s16 (a, b);
}
/* { dg-final { scan-assembler "vmlaldavx.s16" } } */
int64_t
foo1 (int16x8_t a, int16x8_t b)
{
return vmlaldavxq (a, b);
}
/* { dg-final { scan-assembler "vmlaldavx.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vmlaldavxq_s32 (a, b);
}
/* { dg-final { scan-assembler "vmlaldavx.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vmlaldavxq (a, b);
}
/* { dg-final { scan-assembler "vmlaldavx.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int16x8_t a, int16x8_t b)
{
return vmlsldavq_s16 (a, b);
}
/* { dg-final { scan-assembler "vmlsldav.s16" } } */
int64_t
foo1 (int16x8_t a, int16x8_t b)
{
return vmlsldavq (a, b);
}
/* { dg-final { scan-assembler "vmlsldav.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vmlsldavq_s32 (a, b);
}
/* { dg-final { scan-assembler "vmlsldav.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vmlsldavq (a, b);
}
/* { dg-final { scan-assembler "vmlsldav.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int16x8_t a, int16x8_t b)
{
return vmlsldavxq_s16 (a, b);
}
/* { dg-final { scan-assembler "vmlsldavx.s16" } } */
int64_t
foo1 (int16x8_t a, int16x8_t b)
{
return vmlsldavxq (a, b);
}
/* { dg-final { scan-assembler "vmlsldavx.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vmlsldavxq_s32 (a, b);
}
/* { dg-final { scan-assembler "vmlsldavx.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vmlsldavxq (a, b);
}
/* { dg-final { scan-assembler "vmlsldavx.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int8x16_t
foo (int8x16_t a, int16x8_t b)
{
return vmovnbq_s16 (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i16" } } */
int8x16_t
foo1 (int8x16_t a, int16x8_t b)
{
return vmovnbq (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int16x8_t a, int32x4_t b)
{
return vmovnbq_s32 (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i32" } } */
int16x8_t
foo1 (int16x8_t a, int32x4_t b)
{
return vmovnbq (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint8x16_t
foo (uint8x16_t a, uint16x8_t b)
{
return vmovnbq_u16 (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i16" } } */
uint8x16_t
foo1 (uint8x16_t a, uint16x8_t b)
{
return vmovnbq (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a, uint32x4_t b)
{
return vmovnbq_u32 (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i32" } } */
uint16x8_t
foo1 (uint16x8_t a, uint32x4_t b)
{
return vmovnbq (a, b);
}
/* { dg-final { scan-assembler "vmovnb.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int8x16_t
foo (int8x16_t a, int16x8_t b)
{
return vmovntq_s16 (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i16" } } */
int8x16_t
foo1 (int8x16_t a, int16x8_t b)
{
return vmovntq (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int16x8_t a, int32x4_t b)
{
return vmovntq_s32 (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i32" } } */
int16x8_t
foo1 (int16x8_t a, int32x4_t b)
{
return vmovntq (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint8x16_t
foo (uint8x16_t a, uint16x8_t b)
{
return vmovntq_u16 (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i16" } } */
uint8x16_t
foo1 (uint8x16_t a, uint16x8_t b)
{
return vmovntq (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a, uint32x4_t b)
{
return vmovntq_u32 (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i32" } } */
uint16x8_t
foo1 (uint16x8_t a, uint32x4_t b)
{
return vmovntq (a, b);
}
/* { dg-final { scan-assembler "vmovnt.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint32x4_t
foo (uint16x8_t a, uint16x8_t b)
{
return vmullbq_poly_p16 (a, b);
}
/* { dg-final { scan-assembler "vmullb.p16" } } */
uint32x4_t
foo1 (uint16x8_t a, uint16x8_t b)
{
return vmullbq_poly (a, b);
}
/* { dg-final { scan-assembler "vmullb.p16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint8x16_t a, uint8x16_t b)
{
return vmullbq_poly_p8 (a, b);
}
/* { dg-final { scan-assembler "vmullb.p8" } } */
uint16x8_t
foo1 (uint8x16_t a, uint8x16_t b)
{
return vmullbq_poly (a, b);
}
/* { dg-final { scan-assembler "vmullb.p8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint32x4_t
foo (uint16x8_t a, uint16x8_t b)
{
return vmulltq_poly_p16 (a, b);
}
/* { dg-final { scan-assembler "vmullt.p16" } } */
uint32x4_t
foo1 (uint16x8_t a, uint16x8_t b)
{
return vmulltq_poly (a, b);
}
/* { dg-final { scan-assembler "vmullt.p16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint8x16_t a, uint8x16_t b)
{
return vmulltq_poly_p8 (a, b);
}
/* { dg-final { scan-assembler "vmullt.p8" } } */
uint16x8_t
foo1 (uint8x16_t a, uint8x16_t b)
{
return vmulltq_poly (a, b);
}
/* { dg-final { scan-assembler "vmullt.p8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vmulq_f16 (a, b);
}
/* { dg-final { scan-assembler "vmul.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vmulq (a, b);
}
/* { dg-final { scan-assembler "vmul.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vmulq_f32 (a, b);
}
/* { dg-final { scan-assembler "vmul.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vmulq (a, b);
}
/* { dg-final { scan-assembler "vmul.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16_t b)
{
return vmulq_n_f16 (a, b);
}
/* { dg-final { scan-assembler "vmul.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16_t b)
{
return vmulq_n (a, b);
}
/* { dg-final { scan-assembler "vmul.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32_t b)
{
return vmulq_n_f32 (a, b);
}
/* { dg-final { scan-assembler "vmul.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32_t b)
{
return vmulq_n (a, b);
}
/* { dg-final { scan-assembler "vmul.f32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vornq_f16 (a, b);
}
/* { dg-final { scan-assembler "vorn" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vornq (a, b);
}
/* { dg-final { scan-assembler "vorn" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vornq_f32 (a, b);
}
/* { dg-final { scan-assembler "vorn" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vornq (a, b);
}
/* { dg-final { scan-assembler "vorn" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vorrq_f16 (a, b);
}
/* { dg-final { scan-assembler "vorr" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vorrq (a, b);
}
/* { dg-final { scan-assembler "vorr" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vorrq_f32 (a, b);
}
/* { dg-final { scan-assembler "vorr" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vorrq (a, b);
}
/* { dg-final { scan-assembler "vorr" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int16x8_t a)
{
return vorrq_n_s16 (a, 1);
}
/* { dg-final { scan-assembler "vorr.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int32x4_t a)
{
return vorrq_n_s32 (a, 1);
}
/* { dg-final { scan-assembler "vorr.i32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a)
{
return vorrq_n_u16 (a, 1);
}
/* { dg-final { scan-assembler "vorr.i16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint32x4_t
foo (uint32x4_t a)
{
return vorrq_n_u32 (a, 44);
}
/* { dg-final { scan-assembler "vorr.i32" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vqaddq_n (a, b);
return vqaddq (a, b);
}
/* { dg-final { scan-assembler "vqadd.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vqaddq_n (a, b);
return vqaddq (a, b);
}
/* { dg-final { scan-assembler "vqadd.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vqaddq_n (a, b);
return vqaddq (a, b);
}
/* { dg-final { scan-assembler "vqadd.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a, uint16_t b)
uint16x8_t
foo1 (uint16x8_t a, uint16_t b)
{
return vqaddq_n (a, b);
return vqaddq (a, b);
}
/* { dg-final { scan-assembler "vqadd.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a, uint32_t b)
uint32x4_t
foo1 (uint32x4_t a, uint32_t b)
{
return vqaddq_n (a, b);
return vqaddq (a, b);
}
/* { dg-final { scan-assembler "vqadd.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a, uint8_t b)
uint8x16_t
foo1 (uint8x16_t a, uint8_t b)
{
return vqaddq_n (a, b);
return vqaddq (a, b);
}
/* { dg-final { scan-assembler "vqadd.u8" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vqdmulhq_n (a, b);
return vqdmulhq (a, b);
}
/* { dg-final { scan-assembler "vqdmulh.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vqdmulhq_n (a, b);
return vqdmulhq (a, b);
}
/* { dg-final { scan-assembler "vqdmulh.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vqdmulhq_n (a, b);
return vqdmulhq (a, b);
}
/* { dg-final { scan-assembler "vqdmulh.s8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int16x8_t a, int16_t b)
{
return vqdmullbq_n_s16 (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s16" } } */
int32x4_t
foo1 (int16x8_t a, int16_t b)
{
return vqdmullbq (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64x2_t
foo (int32x4_t a, int32_t b)
{
return vqdmullbq_n_s32 (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s32" } } */
int64x2_t
foo1 (int32x4_t a, int32_t b)
{
return vqdmullbq (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int16x8_t a, int16x8_t b)
{
return vqdmullbq_s16 (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s16" } } */
int32x4_t
foo1 (int16x8_t a, int16x8_t b)
{
return vqdmullbq (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64x2_t
foo (int32x4_t a, int32x4_t b)
{
return vqdmullbq_s32 (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s32" } } */
int64x2_t
foo1 (int32x4_t a, int32x4_t b)
{
return vqdmullbq (a, b);
}
/* { dg-final { scan-assembler "vqdmullb.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int16x8_t a, int16_t b)
{
return vqdmulltq_n_s16 (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s16" } } */
int32x4_t
foo1 (int16x8_t a, int16_t b)
{
return vqdmulltq (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64x2_t
foo (int32x4_t a, int32_t b)
{
return vqdmulltq_n_s32 (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s32" } } */
int64x2_t
foo1 (int32x4_t a, int32_t b)
{
return vqdmulltq (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int16x8_t a, int16x8_t b)
{
return vqdmulltq_s16 (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s16" } } */
int32x4_t
foo1 (int16x8_t a, int16x8_t b)
{
return vqdmulltq (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64x2_t
foo (int32x4_t a, int32x4_t b)
{
return vqdmulltq_s32 (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s32" } } */
int64x2_t
foo1 (int32x4_t a, int32x4_t b)
{
return vqdmulltq (a, b);
}
/* { dg-final { scan-assembler "vqdmullt.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int8x16_t
foo (int8x16_t a, int16x8_t b)
{
return vqmovnbq_s16 (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.s16" } } */
int8x16_t
foo1 (int8x16_t a, int16x8_t b)
{
return vqmovnbq (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int16x8_t a, int32x4_t b)
{
return vqmovnbq_s32 (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.s32" } } */
int16x8_t
foo1 (int16x8_t a, int32x4_t b)
{
return vqmovnbq (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint8x16_t
foo (uint8x16_t a, uint16x8_t b)
{
return vqmovnbq_u16 (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.u16" } } */
uint8x16_t
foo1 (uint8x16_t a, uint16x8_t b)
{
return vqmovnbq (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.u16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a, uint32x4_t b)
{
return vqmovnbq_u32 (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.u32" } } */
uint16x8_t
foo1 (uint16x8_t a, uint32x4_t b)
{
return vqmovnbq (a, b);
}
/* { dg-final { scan-assembler "vqmovnb.u32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int8x16_t
foo (int8x16_t a, int16x8_t b)
{
return vqmovntq_s16 (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.s16" } } */
int8x16_t
foo1 (int8x16_t a, int16x8_t b)
{
return vqmovntq (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int16x8_t a, int32x4_t b)
{
return vqmovntq_s32 (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.s32" } } */
int16x8_t
foo1 (int16x8_t a, int32x4_t b)
{
return vqmovntq (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint8x16_t
foo (uint8x16_t a, uint16x8_t b)
{
return vqmovntq_u16 (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.u16" } } */
uint8x16_t
foo1 (uint8x16_t a, uint16x8_t b)
{
return vqmovntq (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.u16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a, uint32x4_t b)
{
return vqmovntq_u32 (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.u32" } } */
uint16x8_t
foo1 (uint16x8_t a, uint32x4_t b)
{
return vqmovntq (a, b);
}
/* { dg-final { scan-assembler "vqmovnt.u32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint8x16_t
foo (uint8x16_t a, int16x8_t b)
{
return vqmovunbq_s16 (a, b);
}
/* { dg-final { scan-assembler "vqmovunb.s16" } } */
uint8x16_t
foo1 (uint8x16_t a, int16x8_t b)
{
return vqmovunbq (a, b);
}
/* { dg-final { scan-assembler "vqmovunb.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a, int32x4_t b)
{
return vqmovunbq_s32 (a, b);
}
/* { dg-final { scan-assembler "vqmovunb.s32" } } */
uint16x8_t
foo1 (uint16x8_t a, int32x4_t b)
{
return vqmovunbq (a, b);
}
/* { dg-final { scan-assembler "vqmovunb.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint8x16_t
foo (uint8x16_t a, int16x8_t b)
{
return vqmovuntq_s16 (a, b);
}
/* { dg-final { scan-assembler "vqmovunt.s16" } } */
uint8x16_t
foo1 (uint8x16_t a, int16x8_t b)
{
return vqmovuntq (a, b);
}
/* { dg-final { scan-assembler "vqmovunt.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint16x8_t a, int32x4_t b)
{
return vqmovuntq_s32 (a, b);
}
/* { dg-final { scan-assembler "vqmovunt.s32" } } */
uint16x8_t
foo1 (uint16x8_t a, int32x4_t b)
{
return vqmovuntq (a, b);
}
/* { dg-final { scan-assembler "vqmovunt.s32" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vqrdmulhq_n (a, b);
return vqrdmulhq (a, b);
}
/* { dg-final { scan-assembler "vqrdmulh.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vqrdmulhq_n (a, b);
return vqrdmulhq (a, b);
}
/* { dg-final { scan-assembler "vqrdmulh.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vqrdmulhq_n (a, b);
return vqrdmulhq (a, b);
}
/* { dg-final { scan-assembler "vqrdmulh.s8" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int32_t b)
int16x8_t
foo1 (int16x8_t a, int32_t b)
{
return vqrshlq_n (a, b);
return vqrshlq (a, b);
}
/* { dg-final { scan-assembler "vqrshl.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vqrshlq_n (a, b);
return vqrshlq (a, b);
}
/* { dg-final { scan-assembler "vqrshl.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int32_t b)
int8x16_t
foo1 (int8x16_t a, int32_t b)
{
return vqrshlq_n (a, b);
return vqrshlq (a, b);
}
/* { dg-final { scan-assembler "vqrshl.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a, int32_t b)
uint16x8_t
foo1 (uint16x8_t a, int32_t b)
{
return vqrshlq_n (a, b);
return vqrshlq (a, b);
}
/* { dg-final { scan-assembler "vqrshl.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a, int32_t b)
uint32x4_t
foo1 (uint32x4_t a, int32_t b)
{
return vqrshlq_n (a, b);
return vqrshlq (a, b);
}
/* { dg-final { scan-assembler "vqrshl.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a, int32_t b)
uint8x16_t
foo1 (uint8x16_t a, int32_t b)
{
return vqrshlq_n (a, b);
return vqrshlq (a, b);
}
/* { dg-final { scan-assembler "vqrshl.u8" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a)
uint16x8_t
foo1 (int16x8_t a)
{
return vqshluq_n (a, 7);
return vqshluq (a, 7);
}
/* { dg-final { scan-assembler "vqshlu.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a)
uint32x4_t
foo1 (int32x4_t a)
{
return vqshluq_n (a, 7);
return vqshluq (a, 7);
}
/* { dg-final { scan-assembler "vqshlu.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a)
uint8x16_t
foo1 (int8x16_t a)
{
return vqshluq_n (a, 7);
return vqshluq (a, 7);
}
/* { dg-final { scan-assembler "vqshlu.s8" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vqsubq_n (a, b);
return vqsubq (a, b);
}
/* { dg-final { scan-assembler "vqsub.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vqsubq_n (a, b);
return vqsubq (a, b);
}
/* { dg-final { scan-assembler "vqsub.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vqsubq_n (a, b);
return vqsubq (a, b);
}
/* { dg-final { scan-assembler "vqsub.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a, uint16_t b)
uint16x8_t
foo1 (uint16x8_t a, uint16_t b)
{
return vqsubq_n (a, b);
return vqsubq (a, b);
}
/* { dg-final { scan-assembler "vqsub.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a, uint32_t b)
uint32x4_t
foo1 (uint32x4_t a, uint32_t b)
{
return vqsubq_n (a, b);
return vqsubq (a, b);
}
/* { dg-final { scan-assembler "vqsub.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a, uint8_t b)
uint8x16_t
foo1 (uint8x16_t a, uint8_t b)
{
return vqsubq_n (a, b);
return vqsubq (a, b);
}
/* { dg-final { scan-assembler "vqsub.u8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vrmlaldavhq_s32 (a, b);
}
/* { dg-final { scan-assembler "vrmlaldavh.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vrmlaldavhq (a, b);
}
/* { dg-final { scan-assembler "vrmlaldavh.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint64_t
foo (uint32x4_t a, uint32x4_t b)
{
return vrmlaldavhq_u32 (a, b);
}
/* { dg-final { scan-assembler "vrmlaldavh.u32" } } */
uint64_t
foo1 (uint32x4_t a, uint32x4_t b)
{
return vrmlaldavhq (a, b);
}
/* { dg-final { scan-assembler "vrmlaldavh.u32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vrmlaldavhxq_s32 (a, b);
}
/* { dg-final { scan-assembler "vrmlaldavhx.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vrmlaldavhxq (a, b);
}
/* { dg-final { scan-assembler "vrmlaldavhx.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vrmlsldavhq_s32 (a, b);
}
/* { dg-final { scan-assembler "vrmlsldavh.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vrmlsldavhq (a, b);
}
/* { dg-final { scan-assembler "vrmlsldavh.s32" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int64_t
foo (int32x4_t a, int32x4_t b)
{
return vrmlsldavhxq_s32 (a, b);
}
/* { dg-final { scan-assembler "vrmlsldavhx.s32" } } */
int64_t
foo1 (int32x4_t a, int32x4_t b)
{
return vrmlsldavhxq (a, b);
}
/* { dg-final { scan-assembler "vrmlsldavhx.s32" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a, int32_t b)
int16x8_t
foo1 (int16x8_t a, int32_t b)
{
return vrshlq_n (a, b);
return vrshlq (a, b);
}
/* { dg-final { scan-assembler "vrshl.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vrshlq_n (a, b);
return vrshlq (a, b);
}
/* { dg-final { scan-assembler "vrshl.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a, int32_t b)
int8x16_t
foo1 (int8x16_t a, int32_t b)
{
return vrshlq_n (a, b);
return vrshlq (a, b);
}
/* { dg-final { scan-assembler "vrshl.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a, int32_t b)
uint16x8_t
foo1 (uint16x8_t a, int32_t b)
{
return vrshlq_n (a, b);
return vrshlq (a, b);
}
/* { dg-final { scan-assembler "vrshl.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a, int32_t b)
uint32x4_t
foo1 (uint32x4_t a, int32_t b)
{
return vrshlq_n (a, b);
return vrshlq (a, b);
}
/* { dg-final { scan-assembler "vrshl.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a, int32_t b)
uint8x16_t
foo1 (uint8x16_t a, int32_t b)
{
return vrshlq_n (a, b);
return vrshlq (a, b);
}
/* { dg-final { scan-assembler "vrshl.u8" } } */
@@ -16,7 +16,7 @@ foo (int16x8_t a)
int16x8_t
foo1 (int16x8_t a)
{
return vrshrq_n (a, 16);
return vrshrq (a, 16);
}
/* { dg-final { scan-assembler "vrshr.s16" } } */
@@ -16,7 +16,7 @@ foo (int32x4_t a)
int32x4_t
foo1 (int32x4_t a)
{
return vrshrq_n (a, 32);
return vrshrq (a, 32);
}
/* { dg-final { scan-assembler "vrshr.s32" } } */
@@ -16,7 +16,7 @@ foo (int8x16_t a)
int8x16_t
foo1 (int8x16_t a)
{
return vrshrq_n (a, 8);
return vrshrq (a, 8);
}
/* { dg-final { scan-assembler "vrshr.s8" } } */
@@ -16,7 +16,7 @@ foo (uint16x8_t a)
uint16x8_t
foo1 (uint16x8_t a)
{
return vrshrq_n (a, 16);
return vrshrq (a, 16);
}
/* { dg-final { scan-assembler "vrshr.u16" } } */
@@ -16,7 +16,7 @@ foo (uint32x4_t a)
uint32x4_t
foo1 (uint32x4_t a)
{
return vrshrq_n (a, 32);
return vrshrq (a, 32);
}
/* { dg-final { scan-assembler "vrshr.u32" } } */
@@ -16,7 +16,7 @@ foo (uint8x16_t a)
uint8x16_t
foo1 (uint8x16_t a)
{
return vrshrq_n (a, 8);
return vrshrq (a, 8);
}
/* { dg-final { scan-assembler "vrshr.u8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int16x8_t a)
{
return vshllbq_n_s16 (a, 1);
}
/* { dg-final { scan-assembler "vshllb.s16" } } */
int32x4_t
foo1 (int16x8_t a)
{
return vshllbq (a, 1);
}
/* { dg-final { scan-assembler "vshllb.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int8x16_t a)
{
return vshllbq_n_s8 (a, 1);
}
/* { dg-final { scan-assembler "vshllb.s8" } } */
int16x8_t
foo1 (int8x16_t a)
{
return vshllbq (a, 1);
}
/* { dg-final { scan-assembler "vshllb.s8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint32x4_t
foo (uint16x8_t a)
{
return vshllbq_n_u16 (a, 1);
}
/* { dg-final { scan-assembler "vshllb.u16" } } */
uint32x4_t
foo1 (uint16x8_t a)
{
return vshllbq (a, 1);
}
/* { dg-final { scan-assembler "vshllb.u16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint8x16_t a)
{
return vshllbq_n_u8 (a, 1);
}
/* { dg-final { scan-assembler "vshllb.u8" } } */
uint16x8_t
foo1 (uint8x16_t a)
{
return vshllbq (a, 1);
}
/* { dg-final { scan-assembler "vshllb.u8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int32x4_t
foo (int16x8_t a)
{
return vshlltq_n_s16 (a, 1);
}
/* { dg-final { scan-assembler "vshllt.s16" } } */
int32x4_t
foo1 (int16x8_t a)
{
return vshlltq (a, 1);
}
/* { dg-final { scan-assembler "vshllt.s16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
int16x8_t
foo (int8x16_t a)
{
return vshlltq_n_s8 (a, 1);
}
/* { dg-final { scan-assembler "vshllt.s8" } } */
int16x8_t
foo1 (int8x16_t a)
{
return vshlltq (a, 1);
}
/* { dg-final { scan-assembler "vshllt.s8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint32x4_t
foo (uint16x8_t a)
{
return vshlltq_n_u16 (a, 1);
}
/* { dg-final { scan-assembler "vshllt.u16" } } */
uint32x4_t
foo1 (uint16x8_t a)
{
return vshlltq (a, 1);
}
/* { dg-final { scan-assembler "vshllt.u16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
uint16x8_t
foo (uint8x16_t a)
{
return vshlltq_n_u8 (a, 1);
}
/* { dg-final { scan-assembler "vshllt.u8" } } */
uint16x8_t
foo1 (uint8x16_t a)
{
return vshlltq (a, 1);
}
/* { dg-final { scan-assembler "vshllt.u8" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float16x8_t
foo (float16x8_t a, float16x8_t b)
{
return vsubq_f16 (a, b);
}
/* { dg-final { scan-assembler "vsub.f16" } } */
float16x8_t
foo1 (float16x8_t a, float16x8_t b)
{
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.f16" } } */
/* { dg-do compile } */
/* { dg-require-effective-target arm_v8_1m_mve_fp_ok } */
/* { dg-add-options arm_v8_1m_mve_fp } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
float32x4_t
foo (float32x4_t a, float32x4_t b)
{
return vsubq_f32 (a, b);
}
/* { dg-final { scan-assembler "vsub.f32" } } */
float32x4_t
foo1 (float32x4_t a, float32x4_t b)
{
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.f32" } } */
@@ -2,6 +2,7 @@
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
@@ -16,7 +17,7 @@ foo (int16x8_t a, int16_t b)
int16x8_t
foo1 (int16x8_t a, int16_t b)
{
return vsubq_n (a, b);
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.i16" } } */
@@ -2,6 +2,7 @@
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
@@ -16,7 +17,7 @@ foo (int32x4_t a, int32_t b)
int32x4_t
foo1 (int32x4_t a, int32_t b)
{
return vsubq_n (a, b);
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.i32" } } */
@@ -2,6 +2,7 @@
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
@@ -16,7 +17,7 @@ foo (int8x16_t a, int8_t b)
int8x16_t
foo1 (int8x16_t a, int8_t b)
{
return vsubq_n (a, b);
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.i8" } } */
@@ -2,6 +2,7 @@
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
@@ -16,7 +17,7 @@ foo (uint16x8_t a, uint16_t b)
uint16x8_t
foo1 (uint16x8_t a, uint16_t b)
{
return vsubq_n (a, b);
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.i16" } } */
@@ -2,6 +2,7 @@
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
@@ -16,7 +17,7 @@ foo (uint32x4_t a, uint32_t b)
uint32x4_t
foo1 (uint32x4_t a, uint32_t b)
{
return vsubq_n (a, b);
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.i32" } } */
@@ -2,6 +2,7 @@
/* { dg-require-effective-target arm_v8_1m_mve_ok } */
/* { dg-add-options arm_v8_1m_mve } */
/* { dg-additional-options "-O2" } */
/* { dg-additional-options "-O2" } */
#include "arm_mve.h"
@@ -16,7 +17,7 @@ foo (uint8x16_t a, uint8_t b)
uint8x16_t
foo1 (uint8x16_t a, uint8_t b)
{
return vsubq_n (a, b);
return vsubq (a, b);
}
/* { dg-final { scan-assembler "vsub.i8" } } */