Commit 490350a1 by Jonathan Wakely

libstdc++: Remove __memmove wrapper for constexpr algorithms

The mutating sequence algorithms std::copy, std::copy_backward,
std::move and std::move_backward conditionally use __builtin_memmove
for trivially copyable types. However, because memmove isn't usable in
constant expressions the use of __builtin_memmove is wrapped in a
__memmove function which replaces __builtin_memmove with a handwritten
loop when std::is_constant_evaluated() is true.

This means we have a manual loop for non-trivially copyable cases, and a
different manual loop for trivially copyable but constexpr cases. The
latter loop has incorrect semantics for the {copy,move}_backward cases
and so isn't used for them. Until earlier today the latter loop also had
incorrect semantics for the std::move cases, trying to move from const
rvalues.

The approach taken by this patch is to remove the __memmove function
entirely and use the original (and correct) manual loops for the
constexpr cases as well as the non-trivially copyable cases. This was
already done for move_backward and copy_backward, but was incorrectly
turning copy_backward into move_backward, by failing to use the _IsMove
constant to select the right specialization. This patch also fixes that.

	* include/bits/ranges_algobase.h (__copy_or_move): Do not use memmove
	during constant evaluation. Call __builtin_memmove directly instead of
	__memmove.
	(__copy_or_move_backward): Likewise.
	* include/bits/stl_algobase.h (__memmove): Remove.
	(__copy_move<M, true, random_access_iterator_tag>::__copy_m)
	(__copy_move_backward<M, true, random_access_iterator_tag>::__copy_m):
	Use __builtin_memmove directly instead of __memmove.
	(__copy_move_a2): Do not use memmove during constant evaluation.
	(__copy_move_backward_a2): Use _IsMove constant to select correct
	__copy_move_backward specialization.
	* testsuite/25_algorithms/copy_backward/constexpr.cc: Check for copies
being turned into moves during constant evaluation.
parent dfb93d05
2020-02-25 Jonathan Wakely <jwakely@redhat.com>
* include/bits/ranges_algobase.h (__copy_or_move): Do not use memmove
during constant evaluation. Call __builtin_memmove directly instead of
__memmove.
(__copy_or_move_backward): Likewise.
* include/bits/stl_algobase.h (__memmove): Remove.
(__copy_move<M, true, random_access_iterator_tag>::__copy_m)
(__copy_move_backward<M, true, random_access_iterator_tag>::__copy_m):
Use __builtin_memmove directly instead of __memmove.
(__copy_move_a2): Do not use memmove during constant evaluation.
(__copy_move_backward_a2): Use _IsMove constant to select correct
__copy_move_backward specialization.
* testsuite/25_algorithms/copy_backward/constexpr.cc: Check for copies
being turned into moves during constant evaluation.
* testsuite/25_algorithms/move_backward/93872.cc: Add test left out of
previous commit.
......
......@@ -248,37 +248,41 @@ namespace ranges
}
else if constexpr (sized_sentinel_for<_Sent, _Iter>)
{
using _ValueTypeI = iter_value_t<_Iter>;
using _ValueTypeO = typename iterator_traits<_Out>::value_type;
constexpr bool __use_memmove
= (is_trivially_copyable_v<_ValueTypeI>
&& is_same_v<_ValueTypeI, _ValueTypeO>
&& is_pointer_v<_Iter>
&& is_pointer_v<_Out>);
if constexpr (__use_memmove)
#ifdef __cpp_lib_is_constant_evaluated
if (!std::is_constant_evaluated())
#endif
{
static_assert(_IsMove
? is_move_assignable_v<_ValueTypeI>
: is_copy_assignable_v<_ValueTypeI>);
auto __num = __last - __first;
if (__num)
std::__memmove<_IsMove>(__result, __first, __num);
return {__first + __num, __result + __num};
}
else
{
for (auto __n = __last - __first; __n > 0; --__n)
using _ValueTypeI = iter_value_t<_Iter>;
using _ValueTypeO = typename iterator_traits<_Out>::value_type;
constexpr bool __use_memmove
= (is_trivially_copyable_v<_ValueTypeI>
&& is_same_v<_ValueTypeI, _ValueTypeO>
&& is_pointer_v<_Iter>
&& is_pointer_v<_Out>);
if constexpr (__use_memmove)
{
if constexpr (_IsMove)
*__result = std::move(*__first);
else
*__result = *__first;
++__first;
++__result;
static_assert(_IsMove
? is_move_assignable_v<_ValueTypeI>
: is_copy_assignable_v<_ValueTypeI>);
auto __num = __last - __first;
if (__num)
__builtin_memmove(__result, __first,
sizeof(_ValueTypeI) * __num);
return {__first + __num, __result + __num};
}
return {std::move(__first), std::move(__result)};
}
for (auto __n = __last - __first; __n > 0; --__n)
{
if constexpr (_IsMove)
*__result = std::move(*__first);
else
*__result = *__first;
++__first;
++__result;
}
return {std::move(__first), std::move(__result)};
}
else
{
......@@ -385,39 +389,43 @@ namespace ranges
}
else if constexpr (sized_sentinel_for<_Sent, _Iter>)
{
using _ValueTypeI = iter_value_t<_Iter>;
using _ValueTypeO = typename iterator_traits<_Out>::value_type;
constexpr bool __use_memmove
= (is_trivially_copyable_v<_ValueTypeI>
&& is_same_v<_ValueTypeI, _ValueTypeO>
&& is_pointer_v<_Iter>
&& is_pointer_v<_Out>);
if constexpr (__use_memmove)
{
static_assert(_IsMove
? is_move_assignable_v<_ValueTypeI>
: is_copy_assignable_v<_ValueTypeI>);
auto __num = __last - __first;
if (__num)
std::__memmove<_IsMove>(__result - __num, __first, __num);
return {__first + __num, __result - __num};
}
else
#ifdef __cpp_lib_is_constant_evaluated
if (!std::is_constant_evaluated())
#endif
{
auto __lasti = ranges::next(__first, __last);
auto __tail = __lasti;
for (auto __n = __last - __first; __n > 0; --__n)
using _ValueTypeI = iter_value_t<_Iter>;
using _ValueTypeO = typename iterator_traits<_Out>::value_type;
constexpr bool __use_memmove
= (is_trivially_copyable_v<_ValueTypeI>
&& is_same_v<_ValueTypeI, _ValueTypeO>
&& is_pointer_v<_Iter>
&& is_pointer_v<_Out>);
if constexpr (__use_memmove)
{
--__tail;
--__result;
if constexpr (_IsMove)
*__result = std::move(*__tail);
else
*__result = *__tail;
static_assert(_IsMove
? is_move_assignable_v<_ValueTypeI>
: is_copy_assignable_v<_ValueTypeI>);
auto __num = __last - __first;
if (__num)
__builtin_memmove(__result - __num, __first,
sizeof(_ValueTypeI) * __num);
return {__first + __num, __result - __num};
}
return {std::move(__lasti), std::move(__result)};
}
auto __lasti = ranges::next(__first, __last);
auto __tail = __lasti;
for (auto __n = __last - __first; __n > 0; --__n)
{
--__tail;
--__result;
if constexpr (_IsMove)
*__result = std::move(*__tail);
else
*__result = *__tail;
}
return {std::move(__lasti), std::move(__result)};
}
else
{
......
......@@ -81,39 +81,6 @@ namespace std _GLIBCXX_VISIBILITY(default)
_GLIBCXX_BEGIN_NAMESPACE_VERSION
/*
* A constexpr wrapper for __builtin_memmove.
* @param __num The number of elements of type _Tp (not bytes).
*/
template<bool _IsMove, typename _Tp>
_GLIBCXX14_CONSTEXPR
inline void*
__memmove(_Tp* __dst, const _Tp* __src, size_t __num)
{
#ifdef __cpp_lib_is_constant_evaluated
if (std::is_constant_evaluated())
{
for(; __num > 0; --__num)
{
if constexpr (_IsMove)
// This const_cast looks unsafe, but we only use this function
// for trivially-copyable types, which means this assignment
// is trivial and so doesn't alter the source anyway.
// See PR 93872 for why it's needed.
*__dst = std::move(*const_cast<_Tp*>(__src));
else
*__dst = *__src;
++__src;
++__dst;
}
return __dst;
}
else
#endif
return __builtin_memmove(__dst, __src, sizeof(_Tp) * __num);
return __dst;
}
/*
* A constexpr wrapper for __builtin_memcmp.
* @param __num The number of elements of type _Tp (not bytes).
*/
......@@ -453,7 +420,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
#endif
const ptrdiff_t _Num = __last - __first;
if (_Num)
std::__memmove<_IsMove>(__result, __first, _Num);
__builtin_memmove(__result, __first, sizeof(_Tp) * _Num);
return __result + _Num;
}
};
......@@ -492,9 +459,14 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
inline _OI
__copy_move_a2(_II __first, _II __last, _OI __result)
{
typedef typename iterator_traits<_II>::iterator_category _Category;
#ifdef __cpp_lib_is_constant_evaluated
if (std::is_constant_evaluated())
return std::__copy_move<_IsMove, false, _Category>::
__copy_m(__first, __last, __result);
#endif
typedef typename iterator_traits<_II>::value_type _ValueTypeI;
typedef typename iterator_traits<_OI>::value_type _ValueTypeO;
typedef typename iterator_traits<_II>::iterator_category _Category;
const bool __simple = (__is_trivially_copyable(_ValueTypeI)
&& __is_pointer<_II>::__value
&& __is_pointer<_OI>::__value
......@@ -719,7 +691,7 @@ _GLIBCXX_END_NAMESPACE_CONTAINER
#endif
const ptrdiff_t _Num = __last - __first;
if (_Num)
std::__memmove<_IsMove>(__result - _Num, __first, _Num);
__builtin_memmove(__result - _Num, __first, sizeof(_Tp) * _Num);
return __result - _Num;
}
};
......@@ -729,20 +701,19 @@ _GLIBCXX_END_NAMESPACE_CONTAINER
inline _BI2
__copy_move_backward_a2(_BI1 __first, _BI1 __last, _BI2 __result)
{
typedef typename iterator_traits<_BI1>::iterator_category _Category;
#ifdef __cpp_lib_is_constant_evaluated
if (std::is_constant_evaluated())
return std::__copy_move_backward<_IsMove, false, _Category>::
__copy_move_b(__first, __last, __result);
#endif
typedef typename iterator_traits<_BI1>::value_type _ValueType1;
typedef typename iterator_traits<_BI2>::value_type _ValueType2;
typedef typename iterator_traits<_BI1>::iterator_category _Category;
const bool __simple = (__is_trivially_copyable(_ValueType1)
&& __is_pointer<_BI1>::__value
&& __is_pointer<_BI2>::__value
&& __are_same<_ValueType1, _ValueType2>::__value);
#ifdef __cpp_lib_is_constant_evaluated
if (std::is_constant_evaluated())
return std::__copy_move_backward<true, false,
_Category>::__copy_move_b(__first, __last,
__result);
#endif
return std::__copy_move_backward<_IsMove, __simple,
_Category>::__copy_move_b(__first,
__last,
......
......@@ -34,3 +34,21 @@ test()
}
static_assert(test());
constexpr bool
test02()
{
struct X
{
X() = default;
X& operator=(const X&) = default;
constexpr X& operator=(X&& x) { i = x.i; x.i = 0; return *this; }
int i = 1;
};
X from[1], to[1];
std::copy_backward(std::begin(from), std::end(from), std::end(to));
return from[0].i == 1;
}
static_assert(test02());
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment