libstdc++: Do not use memmove for 1-element ranges [PR108846,PR116471]

This commit ports the fixes already applied by r13-6372-g822a11a1e642e0
to the range-based versions of copy/move algorithms.

When doing so, a further bug (PR116471) was discovered in the
implementation of the range-based algorithms: although the algorithms
are already constrained by the indirectly_copyable/movable concepts,
there was a failing static_assert in the memmove path.

This static_assert checked that the iterator's value type was assignable,
using the is_copy_assignable (resp. is_move_assignable) type traits.
However, these traits are too strict when it comes to constness; a type
like

  struct S { S& operator=(S &) = default; };

is trivially copyable (and thus could benefit from the memmove path),
but it does not satisfy is_copy_assignable, because its assignment
operator takes its argument by non-const reference.
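
To illustrate the mismatch (a minimal sketch using the standard traits,
not part of the patch):

  #include <type_traits>

  struct S { S& operator=(S&) = default; };

  // S is eligible for the memmove fast path...
  static_assert(std::is_trivially_copyable_v<S>);
  // ...but the old static_assert would have rejected it, because
  // is_copy_assignable tests assignment from a const lvalue:
  static_assert(!std::is_copy_assignable_v<S>);
  // Assignment from a non-const lvalue is still fine:
  static_assert(std::is_assignable_v<S&, S&>);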

Now, the reason the check was there in the first place is that a type
with a deleted assignment operator, like

  struct E { E& operator=(const E&) = delete; };

is still trivially copyable, but not assignable. We don't want
algorithms like std::ranges::copy to compile for such a type merely
because they end up selecting the memmove path, "ignoring" the fact
that E isn't even copy assignable.
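
As an aside (a minimal sketch, not part of the patch), the existing
constraints already reject E before the memmove path is ever reached:

  #include <iterator>
  #include <type_traits>

  struct E { E& operator=(const E&) = delete; };

  // E could legally be copied with memmove...
  static_assert(std::is_trivially_copyable_v<E>);
  // ...but it is not assignable at all...
  static_assert(!std::is_copy_assignable_v<E>);
  // ...so the constraint on the ranges algorithms is not satisfied:
  static_assert(!std::indirectly_copyable<E*, E*>);

A call to std::ranges::copy over a range of E is therefore diagnosed as
an unsatisfied constraint rather than through the static_assert.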

But the static_assert isn't needed here any longer: as noted above, the
ranges algorithms already have the appropriate constraints; and even if
they didn't, there is now a non-discarded codepath for ranges of length
1 that performs an explicit assignment.

Therefore, this commit removes it. (In fact, r13-6372-g822a11a1e642e0
removed the same static_assert from the non-ranges algorithms.)
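
As a usage-level sketch of the effect of this change (modelled on, but
not copied from, the new tests):

  #include <algorithm>

  struct S { S& operator=(S&) = default; int v; };

  int main()
  {
    S src[2]{{1}, {2}};
    S dst[2]{};
    // Length 1: previously a hard error in the memmove path (PR116471),
    // now performed as a single explicit assignment.
    std::ranges::copy(src, src + 1, dst);
    // Length > 1: may still be optimized to memmove.
    std::ranges::copy(src, src + 2, dst);
    return dst[1].v == 2 ? 0 : 1;
  }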

libstdc++-v3/ChangeLog:

	PR libstdc++/108846
	PR libstdc++/116471
	* include/bits/ranges_algobase.h (__assign_one): New helper
	function.
	(__copy_or_move): Remove a spurious static_assert; use
	__assign_one for memcpyable ranges of length 1.
	(__copy_or_move_backward): Likewise.
	* testsuite/25_algorithms/copy/108846.cc: Extend to range-based
	algorithms, and cover both memcpyable and non-memcpyable
	cases.
	* testsuite/25_algorithms/copy_backward/108846.cc: Likewise.
	* testsuite/25_algorithms/copy_n/108846.cc: Likewise.
	* testsuite/25_algorithms/move/108846.cc: Likewise.
	* testsuite/25_algorithms/move_backward/108846.cc: Likewise.

Signed-off-by: Giuseppe D'Angelo <giuseppe.dangelo@kdab.com>

libstdc++-v3/include/bits/ranges_algobase.h

@@ -225,6 +225,16 @@ namespace ranges
       copy_backward_result<_Iter, _Out>>
     __copy_or_move_backward(_Iter __first, _Sent __last, _Out __result);

+  template<bool _IsMove, typename _Iter, typename _Out>
+    constexpr void
+    __assign_one(_Iter& __iter, _Out& __result)
+    {
+      if constexpr (_IsMove)
+        *__result = std::move(*__iter);
+      else
+        *__result = *__iter;
+    }
+
   template<bool _IsMove,
            input_iterator _Iter, sentinel_for<_Iter> _Sent,
            weakly_incrementable _Out>
@@ -279,23 +289,19 @@ namespace ranges
           if constexpr (__memcpyable<_Iter, _Out>::__value)
             {
               using _ValueTypeI = iter_value_t<_Iter>;
-              static_assert(_IsMove
-                  ? is_move_assignable_v<_ValueTypeI>
-                  : is_copy_assignable_v<_ValueTypeI>);
               auto __num = __last - __first;
-              if (__num)
+              if (__num > 1) [[likely]]
                 __builtin_memmove(__result, __first,
-                                  sizeof(_ValueTypeI) * __num);
+                                    sizeof(_ValueTypeI) * __num);
+              else if (__num == 1)
+                ranges::__assign_one<_IsMove>(__first, __result);
               return {__first + __num, __result + __num};
             }
         }

       for (auto __n = __last - __first; __n > 0; --__n)
         {
-          if constexpr (_IsMove)
-            *__result = std::move(*__first);
-          else
-            *__result = *__first;
+          ranges::__assign_one<_IsMove>(__first, __result);
           ++__first;
           ++__result;
         }
@@ -305,10 +311,7 @@ namespace ranges
         {
           while (__first != __last)
             {
-              if constexpr (_IsMove)
-                *__result = std::move(*__first);
-              else
-                *__result = *__first;
+              ranges::__assign_one<_IsMove>(__first, __result);
               ++__first;
               ++__result;
             }
@@ -414,13 +417,12 @@ namespace ranges
           if constexpr (__memcpyable<_Out, _Iter>::__value)
             {
               using _ValueTypeI = iter_value_t<_Iter>;
-              static_assert(_IsMove
-                  ? is_move_assignable_v<_ValueTypeI>
-                  : is_copy_assignable_v<_ValueTypeI>);
               auto __num = __last - __first;
-              if (__num)
+              if (__num > 1) [[likely]]
                 __builtin_memmove(__result - __num, __first,
                                   sizeof(_ValueTypeI) * __num);
+              else if (__num == 1)
+                ranges::__assign_one<_IsMove>(__first, __result);
               return {__first + __num, __result - __num};
             }
         }
@@ -432,10 +434,7 @@ namespace ranges
         {
           --__tail;
           --__result;
-          if constexpr (_IsMove)
-            *__result = std::move(*__tail);
-          else
-            *__result = *__tail;
+          ranges::__assign_one<_IsMove>(__tail, __result);
         }
       return {std::move(__lasti), std::move(__result)};
     }
@@ -448,10 +447,7 @@ namespace ranges
         {
           --__tail;
           --__result;
-          if constexpr (_IsMove)
-            *__result = std::move(*__tail);
-          else
-            *__result = *__tail;
+          ranges::__assign_one<_IsMove>(__tail, __result);
         }
       return {std::move(__lasti), std::move(__result)};
     }

libstdc++-v3/testsuite/25_algorithms/copy/108846.cc

@@ -26,6 +26,10 @@ test_pr108846()
// If this is optimized to memmove it will overwrite tail padding.
std::copy(src, src+1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy(src, src+1, dst);
VERIFY(ddst.x == 3);
#endif
}
struct B2 {
@@ -49,10 +53,44 @@ test_non_const_copy_assign()
// Ensure the not-taken trivial copy path works for this type.
std::copy(src, src+1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy(src, src+1, dst);
VERIFY(ddst.x == 3);
#endif
}
struct B3 {
B3(int i, short j) : i(i), j(j) {}
#if __cplusplus >= 201103L
B3& operator=(B3& b) = default;
#endif
int i;
short j;
};
struct D3 : B3 {
D3(int i, short j, short x) : B3(i, j), x(x) {}
short x; // Stored in tail padding of B3
};
void
test_non_const_copy_assign_trivial()
{
D3 ddst(1, 2, 3);
D3 dsrc(4, 5, 6);
B3 *dst = &ddst;
B3 *src = &dsrc;
// If this is optimized to memmove it will overwrite tail padding.
std::copy(src, src+1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy(src, src+1, dst);
VERIFY(ddst.x == 3);
#endif
}
int main()
{
test_pr108846();
test_non_const_copy_assign();
test_non_const_copy_assign_trivial();
}

libstdc++-v3/testsuite/25_algorithms/copy_backward/108846.cc

@@ -26,6 +26,10 @@ test_pr108846()
// If this is optimized to memmove it will overwrite tail padding.
std::copy_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#endif
}
struct B2 {
@@ -49,10 +53,44 @@ test_non_const_copy_assign()
// Ensure the not-taken trivial copy path works for this type.
std::copy_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#endif
}
struct B3 {
B3(int i, short j) : i(i), j(j) {}
#if __cplusplus >= 201103L
B3& operator=(B3& b) = default;
#endif
int i;
short j;
};
struct D3 : B3 {
D3(int i, short j, short x) : B3(i, j), x(x) {}
short x; // Stored in tail padding of B3
};
void
test_non_const_copy_assign_trivial()
{
D3 ddst(1, 2, 3);
D3 dsrc(4, 5, 6);
B3 *dst = &ddst;
B3 *src = &dsrc;
// If this is optimized to memmove it will overwrite tail padding.
std::copy_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#endif
}
int main()
{
test_pr108846();
test_non_const_copy_assign();
test_non_const_copy_assign_trivial();
}

libstdc++-v3/testsuite/25_algorithms/copy_n/108846.cc

@@ -26,11 +26,15 @@ test_pr108846()
// If this is optimized to memmove it will overwrite tail padding.
std::copy_n(src, 1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy_n(src, 1, dst);
VERIFY(ddst.x == 3);
#endif
}
struct B2 {
B2(int i, short j) : i(i), j(j) {}
B2& operator=(B2&) = default;
B2& operator=(B2& b) { i = b.i; j = b.j; return *this; }
int i;
short j;
};
@@ -49,10 +53,42 @@ test_non_const_copy_assign()
// Ensure the not-taken trivial copy path works for this type.
std::copy_n(src, 1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy_n(src, 1, dst);
VERIFY(ddst.x == 3);
#endif
}
struct B3 {
B3(int i, short j) : i(i), j(j) {}
B3& operator=(B3& b) = default;
int i;
short j;
};
struct D3 : B3 {
D3(int i, short j, short x) : B3(i, j), x(x) {}
short x; // Stored in tail padding of B3
};
void
test_non_const_copy_assign_trivial()
{
D3 ddst(1, 2, 3);
D3 dsrc(4, 5, 6);
B3 *dst = &ddst;
B3 *src = &dsrc;
// If this is optimized to memmove it will overwrite tail padding.
std::copy_n(src, 1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::copy_n(src, 1, dst);
VERIFY(ddst.x == 3);
#endif
}
int main()
{
test_pr108846();
test_non_const_copy_assign();
test_non_const_copy_assign_trivial();
}

libstdc++-v3/testsuite/25_algorithms/move/108846.cc

@@ -26,6 +26,37 @@ test_pr108846()
// If this is optimized to memmove it will overwrite tail padding.
std::move(src, src+1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::move(src, src+1, dst);
VERIFY(ddst.x == 3);
#endif
}
struct B2 {
B2(int i, short j) : i(i), j(j) {}
B2& operator=(B2&& b) { i = b.i; j = b.j; return *this; }
int i;
short j;
};
struct D2 : B2 {
D2(int i, short j, short x) : B2(i, j), x(x) {}
short x; // Stored in tail padding of B2
};
void
test_move_only()
{
D2 ddst(1, 2, 3);
D2 dsrc(4, 5, 6);
B2 *dst = &ddst;
B2 *src = &dsrc;
// Ensure the not-taken trivial copy path works for this type.
std::move(src, src+1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::move(src, src+1, dst);
VERIFY(ddst.x == 3);
#endif
}
struct B3 {
@@ -40,19 +71,24 @@ struct D3 : B3 {
};
void
test_move_only()
test_move_only_trivial()
{
D3 ddst(1, 2, 3);
D3 dsrc(4, 5, 6);
B3 *dst = &ddst;
B3 *src = &dsrc;
// Ensure the not-taken trivial copy path works for this type.
// If this is optimized to memmove it will overwrite tail padding.
std::move(src, src+1, dst);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::move(src, src+1, dst);
VERIFY(ddst.x == 3);
#endif
}
int main()
{
test_pr108846();
test_move_only();
test_move_only_trivial();
}

libstdc++-v3/testsuite/25_algorithms/move_backward/108846.cc

@@ -26,6 +26,37 @@ test_pr108846()
// If this is optimized to memmove it will overwrite tail padding.
std::move_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::move_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#endif
}
struct B2 {
B2(int i, short j) : i(i), j(j) {}
B2& operator=(B2&& b) { i = b.i; j = b.j; return *this; }
int i;
short j;
};
struct D2 : B2 {
D2(int i, short j, short x) : B2(i, j), x(x) {}
short x; // Stored in tail padding of B2
};
void
test_move_only()
{
D2 ddst(1, 2, 3);
D2 dsrc(4, 5, 6);
B2 *dst = &ddst;
B2 *src = &dsrc;
// Ensure the not-taken trivial copy path works for this type.
std::move_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::move_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#endif
}
struct B3 {
@@ -40,7 +71,7 @@ struct D3 : B3 {
};
void
test_move_only()
test_move_only_trivial()
{
D3 ddst(1, 2, 3);
D3 dsrc(4, 5, 6);
@@ -49,10 +80,15 @@ test_move_only()
// Ensure the not-taken trivial copy path works for this type.
std::move_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#if __cpp_lib_ranges >= 201911L
std::ranges::move_backward(src, src+1, dst+1);
VERIFY(ddst.x == 3);
#endif
}
int main()
{
test_pr108846();
test_move_only();
test_move_only_trivial();
}