Fix -D_FORTIFY_SOURCE memmove and bcopy

This commit is contained in:
Jakub Jelinek 2010-12-09 10:38:18 -05:00 committed by Ulrich Drepper
parent a5b913e299
commit 42acbb92c8
3 changed files with 32 additions and 35 deletions

View File

@ -1,3 +1,7 @@
2010-12-09 Jakub Jelinek <jakub@redhat.com>
* string/bits/string3.h (memmove, bcopy): Remove __restrict.
2010-12-03 Ulrich Drepper <drepper@gmail.com> 2010-12-03 Ulrich Drepper <drepper@gmail.com>
* po/it.po: Update from translation team. * po/it.po: Update from translation team.

View File

@ -1,4 +1,4 @@
/* Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc. /* Copyright (C) 2004, 2005, 2007, 2009, 2010 Free Software Foundation, Inc.
This file is part of the GNU C Library. This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or The GNU C Library is free software; you can redistribute it and/or
@ -53,8 +53,7 @@ __NTH (memcpy (void *__restrict __dest, __const void *__restrict __src,
} }
__extern_always_inline void * __extern_always_inline void *
__NTH (memmove (void *__restrict __dest, __const void *__restrict __src, __NTH (memmove (void *__dest, __const void *__src, size_t __len))
size_t __len))
{ {
return __builtin___memmove_chk (__dest, __src, __len, __bos0 (__dest)); return __builtin___memmove_chk (__dest, __src, __len, __bos0 (__dest));
} }
@ -88,8 +87,7 @@ __NTH (memset (void *__dest, int __ch, size_t __len))
#ifdef __USE_BSD #ifdef __USE_BSD
__extern_always_inline void __extern_always_inline void
__NTH (bcopy (__const void *__restrict __src, void *__restrict __dest, __NTH (bcopy (__const void *__src, void *__dest, size_t __len))
size_t __len))
{ {
(void) __builtin___memmove_chk (__dest, __src, __len, __bos0 (__dest)); (void) __builtin___memmove_chk (__dest, __src, __len, __bos0 (__dest));
} }

View File

@ -40,37 +40,32 @@
need strncmp before the initialization happened. */ need strncmp before the initialization happened. */
#if (defined SHARED || !defined USE_AS_STRNCMP) && !defined NOT_IN_libc #if (defined SHARED || !defined USE_AS_STRNCMP) && !defined NOT_IN_libc
# ifdef SHARED # ifdef SHARED
.section .gnu.linkonce.t.__i686.get_pc_thunk.bx,"ax",@progbits .section .gnu.linkonce.t.__i686.get_pc_thunk.dx,"ax",@progbits
.globl __i686.get_pc_thunk.bx .globl __i686.get_pc_thunk.dx
.hidden __i686.get_pc_thunk.bx .hidden __i686.get_pc_thunk.dx
.p2align 4 .p2align 2
.type __i686.get_pc_thunk.bx,@function .type __i686.get_pc_thunk.dx,@function
__i686.get_pc_thunk.bx: __i686.get_pc_thunk.dx:
movl (%esp), %ebx movl (%esp), %edx
ret ret
.size __i686.get_pc_thunk.dx, .-__i686.get_pc_thunk.dx
.text .text
ENTRY(STRCMP) ENTRY(STRCMP)
.type STRCMP, @gnu_indirect_function .type STRCMP, @gnu_indirect_function
pushl %ebx call __i686.get_pc_thunk.dx
cfi_adjust_cfa_offset (4) addl $_GLOBAL_OFFSET_TABLE_, %edx
cfi_rel_offset (ebx, 0) cmpl $0, KIND_OFFSET+__cpu_features@GOTOFF(%edx)
call __i686.get_pc_thunk.bx
addl $_GLOBAL_OFFSET_TABLE_, %ebx
cmpl $0, KIND_OFFSET+__cpu_features@GOTOFF(%ebx)
jne 1f jne 1f
call __init_cpu_features call __init_cpu_features
1: leal __STRCMP_IA32@GOTOFF(%ebx), %eax 1: leal __STRCMP_SSE4_2@GOTOFF(%edx), %eax
testl $bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features@GOTOFF(%ebx) testl $bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features@GOTOFF(%edx)
jz 2f jnz 2f
leal __STRCMP_SSSE3@GOTOFF(%ebx), %eax leal __STRCMP_SSSE3@GOTOFF(%edx), %eax
testl $bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features@GOTOFF(%ebx) testl $bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features@GOTOFF(%edx)
jz 2f jnz 2f
leal __STRCMP_SSE4_2@GOTOFF(%ebx), %eax leal __STRCMP_IA32@GOTOFF(%edx), %ecx
2: popl %ebx 2: ret
cfi_adjust_cfa_offset (-4)
cfi_restore (ebx)
ret
END(STRCMP) END(STRCMP)
# else # else
.text .text
@ -79,13 +74,13 @@ ENTRY(STRCMP)
cmpl $0, KIND_OFFSET+__cpu_features cmpl $0, KIND_OFFSET+__cpu_features
jne 1f jne 1f
call __init_cpu_features call __init_cpu_features
1: leal __STRCMP_IA32, %eax 1: leal __STRCMP_SSE4_2, %eax
testl $bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features
jz 2f
leal __STRCMP_SSSE3, %eax
testl $bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features testl $bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features
jz 2f jnz 2f
leal __STRCMP_SSE4_2, %eax leal __STRCMP_SSSE3, %eax
testl $bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features
jnz 2f
leal __STRCMP_IA32, %eax
2: ret 2: ret
END(STRCMP) END(STRCMP)
# endif # endif