mirror of
https://sourceware.org/git/glibc.git
synced 2024-11-30 08:40:07 +00:00
5e8c5bb1ac
Since the new SSE2/AVX2 memsets are faster than the previous ones, we can remove the previous SSE2/AVX2 memsets and replace them with the new ones. This reduces the size of libc.so by about 900 bytes. No change in IFUNC selection if SSE2 and AVX2 memsets weren't used before. If SSE2 or AVX2 memset was used, the new SSE2 or AVX2 memset optimized with Enhanced REP STOSB will be used for processors with ERMS. The new AVX512 memset will be used for processors with AVX512 which prefer vzeroupper. [BZ #19881] * sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S: Folded into ... * sysdeps/x86_64/memset.S: This. (__bzero): Removed. (__memset_tail): Likewise. (__memset_chk): Likewise. (memset): Likewise. (MEMSET_CHK_SYMBOL): New. Define only if MEMSET_SYMBOL isn't defined. (MEMSET_SYMBOL): Define only if MEMSET_SYMBOL isn't defined. * sysdeps/x86_64/multiarch/memset-avx2.S: Removed. (__memset_zero_constant_len_parameter): Check SHARED instead of PIC. * sysdeps/x86_64/multiarch/Makefile (sysdep_routines): Remove memset-avx2 and memset-sse2-unaligned-erms. * sysdeps/x86_64/multiarch/ifunc-impl-list.c (__libc_ifunc_impl_list): Remove __memset_chk_sse2, __memset_chk_avx2, __memset_sse2 and __memset_avx2_unaligned. * sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S (__bzero): Enabled. * sysdeps/x86_64/multiarch/memset.S (memset): Replace __memset_sse2 and __memset_avx2 with __memset_sse2_unaligned and __memset_avx2_unaligned. Use __memset_sse2_unaligned_erms or __memset_avx2_unaligned_erms if processor has ERMS. Support __memset_avx512_unaligned_erms and __memset_avx512_unaligned. (memset): Removed. (__memset_chk): Likewise. (MEMSET_SYMBOL): New. (libc_hidden_builtin_def): Replace __memset_sse2 with __memset_sse2_unaligned. * sysdeps/x86_64/multiarch/memset_chk.S (__memset_chk): Replace __memset_chk_sse2 and __memset_chk_avx2 with __memset_chk_sse2_unaligned and __memset_chk_avx2_unaligned_erms. 
Use __memset_chk_sse2_unaligned_erms or __memset_chk_avx2_unaligned_erms if processor has ERMS. Support __memset_chk_avx512_unaligned_erms and __memset_chk_avx512_unaligned.
52 lines
1.7 KiB
x86-64 GAS assembly (page auto-detects it as "ArmAsm", which is incorrect)
/* memset/bzero -- set memory area to CH/0
   Optimized version for x86-64.
   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>

/* x86-64, GAS (AT&T) syntax.  This file configures the generic
   vector-size-agnostic memset template for the baseline SSE2 build:
   it defines the vector parameters, then includes the shared
   implementation from multiarch/.  */

/* Baseline implementation uses 16-byte SSE2 vectors (xmm0-xmm15).  */
#define VEC_SIZE	16
#define VEC(i)		xmm##i

/* Don't use movups and movaps since it will get larger nop paddings for
   alignment.  */
#define VMOVU		movdqu
#define VMOVA		movdqa

/* Broadcast the fill byte (low byte of d) to all 16 bytes of %xmm0:
   movd places the dword in the low lane, then punpcklbw/punpcklwd/pshufd
   progressively replicate the low byte across the register.  Also copy
   the destination pointer r into %rax, since memset returns its first
   argument.  movq r, %rax does not touch flags or %xmm0, so the two
   setups are independent.  */
#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
  movd d, %xmm0; \
  movq r, %rax; \
  punpcklbw %xmm0, %xmm0; \
  punpcklwd %xmm0, %xmm0; \
  pshufd $0, %xmm0, %xmm0

/* The baseline build places the code in the default section unchanged;
   multiarch variants override this to pick per-ISA text sections.  */
#define SECTION(p)		p

#ifndef MEMSET_SYMBOL
/* Non-multiarch build: the template's entry points are plain memset /
   __memset_chk rather than ISA-suffixed names.  */
# define MEMSET_CHK_SYMBOL(p,s)	p
# define MEMSET_SYMBOL(p,s)	memset
#endif

/* Pull in the actual implementation, parameterized by the macros
   above.  */
#include "multiarch/memset-vec-unaligned-erms.S"

/* Allow intra-libc callers to bind directly to this definition.  */
libc_hidden_builtin_def (memset)

#if defined SHARED && IS_IN (libc) && !defined USE_MULTIARCH
/* Emit a link-time warning when __memset_chk is referenced through the
   __memset_zero_constant_len_parameter alias, which the headers use for
   calls whose length argument is a constant zero -- usually a sign the
   value and size arguments were transposed.  */
strong_alias (__memset_chk, __memset_zero_constant_len_parameter)
	.section .gnu.warning.__memset_zero_constant_len_parameter
	.string "memset used with constant zero length parameter; this could be due to transposed parameters"
#endif
|