Commit 7744bbe3 by H.J. Lu Committed by H.J. Lu

emmintrin.h (_mm_cvtsd_f64): Add missing Intel intrinsic.

2006-01-27  H.J. Lu  <hongjiu.lu@intel.com>

	* config/i386/emmintrin.h (_mm_cvtsd_f64): Add missing Intel
	intrinsic.
	(_mm_cvtsi128_si64): Likewise.
	(_mm_cvtsd_si64): Likewise.
	(_mm_cvttsd_si64): Likewise.
	(_mm_cvtsi64_sd): Likewise.
	(_mm_cvtsi64_si128): Likewise.
	* config/i386/mmintrin.h (_m_from_int64): Likewise.
	(_mm_cvtsi64_m64): Likewise.
	(_m_to_int64): Likewise.
	(_mm_cvtm64_si64): Likewise.
	* config/i386/xmmintrin.h (_mm_cvtss_si64): Likewise.
	(_mm_cvttss_si64): Likewise.
	(_mm_cvtsi64_ss): Likewise.
	(_mm_cvtss_f32): Likewise.

From-SVN: r110311
parent c1b67b82
2006-01-27 H.J. Lu <hongjiu.lu@intel.com>
* config/i386/emmintrin.h (_mm_cvtsd_f64): Add missing Intel
intrinsic.
(_mm_cvtsi128_si64): Likewise.
(_mm_cvtsd_si64): Likewise.
(_mm_cvttsd_si64): Likewise.
(_mm_cvtsi64_sd): Likewise.
(_mm_cvtsi64_si128): Likewise.
* config/i386/mmintrin.h (_m_from_int64): Likewise.
(_mm_cvtsi64_m64): Likewise.
(_m_to_int64): Likewise.
(_mm_cvtm64_si64): Likewise.
* config/i386/xmmintrin.h (_mm_cvtss_si64): Likewise.
(_mm_cvttss_si64): Likewise.
(_mm_cvtsi64_ss): Likewise.
(_mm_cvtss_f32): Likewise.
2006-01-27 Carlos O'Donell <carlos@codesourcery.com>
* c-pragma.c (init_pragma): Call c_register_pragma_with_expansion
......
/* Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc. /* Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
This file is part of GCC. This file is part of GCC.
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
Public License. */ Public License. */
/* Implemented from the specification included in the Intel C++ Compiler /* Implemented from the specification included in the Intel C++ Compiler
User Guide and Reference, version 8.0. */ User Guide and Reference, version 9.0. */
#ifndef _EMMINTRIN_H_INCLUDED #ifndef _EMMINTRIN_H_INCLUDED
#define _EMMINTRIN_H_INCLUDED #define _EMMINTRIN_H_INCLUDED
...@@ -158,6 +158,12 @@ _mm_store_sd (double *__P, __m128d __A) ...@@ -158,6 +158,12 @@ _mm_store_sd (double *__P, __m128d __A)
*__P = __builtin_ia32_vec_ext_v2df (__A, 0); *__P = __builtin_ia32_vec_ext_v2df (__A, 0);
} }
/* Extract the lower double-precision value of __A as a scalar double.
   Intel intrinsic added in ICC 9.0; implemented with the same vector
   element extraction builtin used by _mm_store_sd above.  */
static __inline double __attribute__((__always_inline__))
_mm_cvtsd_f64 (__m128d __A)
{
return __builtin_ia32_vec_ext_v2df (__A, 0);
}
static __inline void __attribute__((__always_inline__)) static __inline void __attribute__((__always_inline__))
_mm_storel_pd (double *__P, __m128d __A) _mm_storel_pd (double *__P, __m128d __A)
{ {
...@@ -199,6 +205,14 @@ _mm_cvtsi128_si32 (__m128i __A) ...@@ -199,6 +205,14 @@ _mm_cvtsi128_si32 (__m128i __A)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Intel intrinsic. */
/* Copy the low 64 bits of the 128-bit integer __A to a long long.
   Same operation as the Microsoft-named _mm_cvtsi128_si64x.  */
static __inline long long __attribute__((__always_inline__))
_mm_cvtsi128_si64 (__m128i __A)
{
return __builtin_ia32_vec_ext_v2di ((__v2di)__A, 0);
}
/* Microsoft intrinsic. */
static __inline long long __attribute__((__always_inline__)) static __inline long long __attribute__((__always_inline__))
_mm_cvtsi128_si64x (__m128i __A) _mm_cvtsi128_si64x (__m128i __A)
{ {
...@@ -789,6 +803,14 @@ _mm_cvtsd_si32 (__m128d __A) ...@@ -789,6 +803,14 @@ _mm_cvtsd_si32 (__m128d __A)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Intel intrinsic. */
/* Convert the lower double-precision value of __A to a 64-bit integer
   (cvtsd2si); Intel-named twin of _mm_cvtsd_si64x.  */
static __inline long long __attribute__((__always_inline__))
_mm_cvtsd_si64 (__m128d __A)
{
return __builtin_ia32_cvtsd2si64 ((__v2df) __A);
}
/* Microsoft intrinsic. */
static __inline long long __attribute__((__always_inline__)) static __inline long long __attribute__((__always_inline__))
_mm_cvtsd_si64x (__m128d __A) _mm_cvtsd_si64x (__m128d __A)
{ {
...@@ -803,6 +825,14 @@ _mm_cvttsd_si32 (__m128d __A) ...@@ -803,6 +825,14 @@ _mm_cvttsd_si32 (__m128d __A)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Intel intrinsic. */
/* Convert the lower double-precision value of __A to a 64-bit integer
   with truncation (cvttsd2si); Intel-named twin of _mm_cvttsd_si64x.  */
static __inline long long __attribute__((__always_inline__))
_mm_cvttsd_si64 (__m128d __A)
{
return __builtin_ia32_cvttsd2si64 ((__v2df) __A);
}
/* Microsoft intrinsic. */
static __inline long long __attribute__((__always_inline__)) static __inline long long __attribute__((__always_inline__))
_mm_cvttsd_si64x (__m128d __A) _mm_cvttsd_si64x (__m128d __A)
{ {
...@@ -823,6 +853,14 @@ _mm_cvtsi32_sd (__m128d __A, int __B) ...@@ -823,6 +853,14 @@ _mm_cvtsi32_sd (__m128d __A, int __B)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Intel intrinsic. */
/* Convert the 64-bit integer __B to a double and insert it as the
   lower element of __A (cvtsi2sd); Intel-named twin of
   _mm_cvtsi64x_sd.  */
static __inline __m128d __attribute__((__always_inline__))
_mm_cvtsi64_sd (__m128d __A, long long __B)
{
return (__m128d)__builtin_ia32_cvtsi642sd ((__v2df) __A, __B);
}
/* Microsoft intrinsic. */
static __inline __m128d __attribute__((__always_inline__)) static __inline __m128d __attribute__((__always_inline__))
_mm_cvtsi64x_sd (__m128d __A, long long __B) _mm_cvtsi64x_sd (__m128d __A, long long __B)
{ {
...@@ -1379,6 +1417,14 @@ _mm_cvtsi32_si128 (int __A) ...@@ -1379,6 +1417,14 @@ _mm_cvtsi32_si128 (int __A)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Intel intrinsic. */
/* Move the 64-bit integer __A into the low half of a 128-bit vector,
   zeroing the upper half; Intel-named twin of _mm_cvtsi64x_si128.  */
static __inline __m128i __attribute__((__always_inline__))
_mm_cvtsi64_si128 (long long __A)
{
return _mm_set_epi64x (0, __A);
}
/* Microsoft intrinsic. */
static __inline __m128i __attribute__((__always_inline__)) static __inline __m128i __attribute__((__always_inline__))
_mm_cvtsi64x_si128 (long long __A) _mm_cvtsi64x_si128 (long long __A)
{ {
......
/* Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc. /* Copyright (C) 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
This file is part of GCC. This file is part of GCC.
...@@ -25,7 +26,7 @@ ...@@ -25,7 +26,7 @@
Public License. */ Public License. */
/* Implemented from the specification included in the Intel C++ Compiler /* Implemented from the specification included in the Intel C++ Compiler
User Guide and Reference, version 8.0. */ User Guide and Reference, version 9.0. */
#ifndef _MMINTRIN_H_INCLUDED #ifndef _MMINTRIN_H_INCLUDED
#define _MMINTRIN_H_INCLUDED #define _MMINTRIN_H_INCLUDED
...@@ -69,13 +70,27 @@ _m_from_int (int __i) ...@@ -69,13 +70,27 @@ _m_from_int (int __i)
#ifdef __x86_64__ #ifdef __x86_64__
/* Convert I to a __m64 object. */ /* Convert I to a __m64 object. */
/* Intel intrinsic. */
/* Reinterpret the 64-bit integer __i as a __m64 object (bit copy,
   no conversion instruction).  */
static __inline __m64 __attribute__((__always_inline__))
_m_from_int64 (long long __i)
{
return (__m64) __i;
}
/* Reinterpret the 64-bit integer __i as a __m64 object; Intel-named
   alias for the same cast performed by _m_from_int64 and the
   Microsoft-named _mm_cvtsi64x_si64.  */
static __inline __m64 __attribute__((__always_inline__))
_mm_cvtsi64_m64 (long long __i)
{
return (__m64) __i;
}
/* Microsoft intrinsic. */
static __inline __m64 __attribute__((__always_inline__)) static __inline __m64 __attribute__((__always_inline__))
_mm_cvtsi64x_si64 (long long __i) _mm_cvtsi64x_si64 (long long __i)
{ {
return (__m64) __i; return (__m64) __i;
} }
/* Convert I to a __m64 object. */
static __inline __m64 __attribute__((__always_inline__)) static __inline __m64 __attribute__((__always_inline__))
_mm_set_pi64x (long long __i) _mm_set_pi64x (long long __i)
{ {
...@@ -97,7 +112,22 @@ _m_to_int (__m64 __i) ...@@ -97,7 +112,22 @@ _m_to_int (__m64 __i)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Convert the lower 32 bits of the __m64 object into an integer. */ /* Convert the __m64 object to a 64bit integer. */
/* Intel intrinsic. */
/* Reinterpret the __m64 object __i as a 64-bit integer (bit copy,
   no conversion instruction).  */
static __inline long long __attribute__((__always_inline__))
_m_to_int64 (__m64 __i)
{
return (long long)__i;
}
/* Reinterpret the __m64 object __i as a 64-bit integer; Intel-named
   alias for the same cast performed by _m_to_int64 and the
   Microsoft-named _mm_cvtsi64_si64x.  */
static __inline long long __attribute__((__always_inline__))
_mm_cvtm64_si64 (__m64 __i)
{
return (long long)__i;
}
/* Microsoft intrinsic. */
static __inline long long __attribute__((__always_inline__)) static __inline long long __attribute__((__always_inline__))
_mm_cvtsi64_si64x (__m64 __i) _mm_cvtsi64_si64x (__m64 __i)
{ {
......
/* Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc. /* Copyright (C) 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
This file is part of GCC. This file is part of GCC.
...@@ -25,7 +26,7 @@ ...@@ -25,7 +26,7 @@
Public License. */ Public License. */
/* Implemented from the specification included in the Intel C++ Compiler /* Implemented from the specification included in the Intel C++ Compiler
User Guide and Reference, version 8.0. */ User Guide and Reference, version 9.0. */
#ifndef _XMMINTRIN_H_INCLUDED #ifndef _XMMINTRIN_H_INCLUDED
#define _XMMINTRIN_H_INCLUDED #define _XMMINTRIN_H_INCLUDED
...@@ -491,8 +492,17 @@ _mm_cvt_ss2si (__m128 __A) ...@@ -491,8 +492,17 @@ _mm_cvt_ss2si (__m128 __A)
} }
#ifdef __x86_64__ #ifdef __x86_64__
/* Convert the lower SPFP value to a 32-bit integer according to the current /* Convert the lower SPFP value to a 32-bit integer according to the
rounding mode. */ current rounding mode. */
/* Intel intrinsic. */
/* Convert the lower SPFP value of __A to a 64-bit integer according
   to the current rounding mode (cvtss2si); Intel-named twin of
   _mm_cvtss_si64x.  */
static __inline long long __attribute__((__always_inline__))
_mm_cvtss_si64 (__m128 __A)
{
return __builtin_ia32_cvtss2si64 ((__v4sf) __A);
}
/* Microsoft intrinsic. */
static __inline long long __attribute__((__always_inline__)) static __inline long long __attribute__((__always_inline__))
_mm_cvtss_si64x (__m128 __A) _mm_cvtss_si64x (__m128 __A)
{ {
...@@ -529,6 +539,15 @@ _mm_cvtt_ss2si (__m128 __A) ...@@ -529,6 +539,15 @@ _mm_cvtt_ss2si (__m128 __A)
#ifdef __x86_64__ #ifdef __x86_64__
/* Truncate the lower SPFP value to a 32-bit integer. */ /* Truncate the lower SPFP value to a 32-bit integer. */
/* Intel intrinsic. */
/* Convert the lower SPFP value of __A to a 64-bit integer with
   truncation (cvttss2si); Intel-named twin of _mm_cvttss_si64x.  */
static __inline long long __attribute__((__always_inline__))
_mm_cvttss_si64 (__m128 __A)
{
return __builtin_ia32_cvttss2si64 ((__v4sf) __A);
}
/* Microsoft intrinsic. */
static __inline long long __attribute__((__always_inline__)) static __inline long long __attribute__((__always_inline__))
_mm_cvttss_si64x (__m128 __A) _mm_cvttss_si64x (__m128 __A)
{ {
...@@ -565,6 +584,15 @@ _mm_cvt_si2ss (__m128 __A, int __B) ...@@ -565,6 +584,15 @@ _mm_cvt_si2ss (__m128 __A, int __B)
#ifdef __x86_64__ #ifdef __x86_64__
/* Convert B to a SPFP value and insert it as element zero in A. */ /* Convert B to a SPFP value and insert it as element zero in A. */
/* Intel intrinsic. */
/* Convert the 64-bit integer __B to an SPFP value and insert it as
   element zero of __A (cvtsi2ss); Intel-named twin of
   _mm_cvtsi64x_ss.  */
static __inline __m128 __attribute__((__always_inline__))
_mm_cvtsi64_ss (__m128 __A, long long __B)
{
return (__m128) __builtin_ia32_cvtsi642ss ((__v4sf) __A, __B);
}
/* Microsoft intrinsic. */
static __inline __m128 __attribute__((__always_inline__)) static __inline __m128 __attribute__((__always_inline__))
_mm_cvtsi64x_ss (__m128 __A, long long __B) _mm_cvtsi64x_ss (__m128 __A, long long __B)
{ {
...@@ -911,6 +939,12 @@ _mm_store_ss (float *__P, __m128 __A) ...@@ -911,6 +939,12 @@ _mm_store_ss (float *__P, __m128 __A)
*__P = __builtin_ia32_vec_ext_v4sf ((__v4sf)__A, 0); *__P = __builtin_ia32_vec_ext_v4sf ((__v4sf)__A, 0);
} }
/* Extract the lower SPFP value of __A as a scalar float.  Intel
   intrinsic added in ICC 9.0; implemented with the same vector
   element extraction builtin used by _mm_store_ss above.  */
static __inline float __attribute__((__always_inline__))
_mm_cvtss_f32 (__m128 __A)
{
return __builtin_ia32_vec_ext_v4sf ((__v4sf)__A, 0);
}
/* Store four SPFP values. The address must be 16-byte aligned. */ /* Store four SPFP values. The address must be 16-byte aligned. */
static __inline void __attribute__((__always_inline__)) static __inline void __attribute__((__always_inline__))
_mm_store_ps (float *__P, __m128 __A) _mm_store_ps (float *__P, __m128 __A)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment