mirror of https://sourceware.org/git/glibc.git
d5b411854f
A number of functions in the sysdeps/powerpc/powerpc64/ tree don't use
or change r2, yet declare a global entry that sets up r2.  This patch
fixes that problem, and consolidates the ENTRY and EALIGN macros.

        * sysdeps/powerpc/powerpc64/sysdep.h: Formatting.
        (NOPS, ENTRY_3): New macros.
        (ENTRY): Rewrite.
        (ENTRY_TOCLESS): Define.
        (EALIGN, EALIGN_W_0, EALIGN_W_1, EALIGN_W_2, EALIGN_W_4,
        EALIGN_W_5, EALIGN_W_6, EALIGN_W_7, EALIGN_W_8): Delete.
        * sysdeps/powerpc/powerpc64/a2/memcpy.S: Replace EALIGN with ENTRY.
        * sysdeps/powerpc/powerpc64/dl-trampoline.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_ceil.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_ceilf.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_floor.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_floorf.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_nearbyint.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_nearbyintf.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_rint.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_rintf.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_round.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_roundf.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_trunc.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_truncf.S: Likewise.
        * sysdeps/powerpc/powerpc64/memset.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/fpu/s_finite.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/fpu/s_isinf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/fpu/s_isnan.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strstr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/e_expf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_cosf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_sinf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strcasestr.S: Likewise.
        * sysdeps/powerpc/powerpc64/addmul_1.S: Use ENTRY_TOCLESS.
        * sysdeps/powerpc/powerpc64/cell/memcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_copysign.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_copysignl.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_fabsl.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_isnan.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_llrint.S: Likewise.
        * sysdeps/powerpc/powerpc64/fpu/s_llrintf.S: Likewise.
        * sysdeps/powerpc/powerpc64/lshift.S: Likewise.
        * sysdeps/powerpc/powerpc64/memcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/mul_1.S: Likewise.
        * sysdeps/powerpc/powerpc64/power4/memcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power4/memcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power4/memset.S: Likewise.
        * sysdeps/powerpc/powerpc64/power4/strncmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_ceil.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_ceilf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_floor.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_floorf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_llround.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_round.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_roundf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_trunc.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5+/fpu/s_truncf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power5/fpu/s_isnan.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6/fpu/s_copysign.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6/fpu/s_isnan.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6/memcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6/memset.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6x/fpu/s_isnan.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6x/fpu/s_llrint.S: Likewise.
        * sysdeps/powerpc/powerpc64/power6x/fpu/s_llround.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/add_n.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/memchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/memcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/memcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/memmove.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/mempcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/memrchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/memset.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/rawmemchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strcasecmp.S (strcasecmp_l):
        Likewise.
        * sysdeps/powerpc/powerpc64/power7/strchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strchrnul.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strlen.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strncmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strncpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strnlen.S: Likewise.
        * sysdeps/powerpc/powerpc64/power7/strrchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_finite.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_isinf.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_isnan.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_llrint.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/fpu/s_llround.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/memcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/memset.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strcpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strlen.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strncmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strncpy.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strnlen.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strrchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/power8/strspn.S: Likewise.
        * sysdeps/powerpc/powerpc64/power9/strcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/power9/strncmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/strchr.S: Likewise.
        * sysdeps/powerpc/powerpc64/strcmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/strlen.S: Likewise.
        * sysdeps/powerpc/powerpc64/strncmp.S: Likewise.
        * sysdeps/powerpc/powerpc64/ppc-mcount.S: Store LR earlier.
        Don't add nop when SHARED.
        * sysdeps/powerpc/powerpc64/start.S: Fix comment.
        * sysdeps/powerpc/powerpc64/multiarch/strrchr-power8.S (ENTRY):
        Don't define.
        (ENTRY_TOCLESS): Define.
        * sysdeps/powerpc/powerpc32/sysdep.h (ENTRY_TOCLESS): Define.
        * sysdeps/powerpc/fpu/s_fma.S: Use ENTRY_TOCLESS.
        * sysdeps/powerpc/fpu/s_fmaf.S: Likewise.
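For context on the "global entry that sets up r2" the log refers to: under the ELFv2 ABI, r12 holds a function's own address at its global entry point, and the usual prologue rebuilds the TOC pointer r2 from it before falling through to the local entry point. The sketch below only illustrates that convention; the real macro bodies live in sysdeps/powerpc/powerpc64/sysdep.h, and toc_user/leaf_func are made-up names. A function declared with ENTRY_TOCLESS, such as the memchr listed below, never touches r2 and so drops the setup entirely.

toc_user:
        addis   r2,r12,(.TOC.-toc_user)@ha    /* Rebuild r2 from the      */
        addi    r2,r2,(.TOC.-toc_user)@l      /* global entry address.    */
        .localentry toc_user,.-toc_user       /* TOC-sharing callers enter here.  */
        /* ... body that uses r2 for @toc/@got accesses or PLT calls ... */
        blr

leaf_func:
        /* ENTRY_TOCLESS style: no r2 setup, no separate local entry,
           r2 left untouched on exit.  */
        blr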
sysdeps/powerpc/powerpc64/power7/memchr.S (200 lines, 5.2 KiB)
/* Optimized memchr implementation for PowerPC64/POWER7 using cmpb insn.
   Copyright (C) 2010-2017 Free Software Foundation, Inc.
   Contributed by Luis Machado <luisgpm@br.ibm.com>.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>

/* int [r3] memchr (char *s [r3], int byte [r4], int size [r5])  */

#ifndef MEMCHR
# define MEMCHR __memchr
#endif
        .machine  power7
ENTRY_TOCLESS (MEMCHR)
        CALL_MCOUNT 3
        dcbt    0,r3
        clrrdi  r8,r3,3
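        /* Replicate the search byte into every byte lane of r4 (this
           insrdi and the two further down): cmpb sets each byte of its
           result to 0xff where the corresponding bytes of its two
           sources are equal, so one compare tests a whole doubleword.  */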
        insrdi  r4,r4,8,48

        /* Calculate the last acceptable address and check for possible
           addition overflow by using saturated math:
           r7 = r3 + r5
           r7 |= -(r7 < x)  */
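        /* Mechanically: if the add wraps past 2^64, subfc borrows and
           leaves CA clear, subfe then writes -1 into r9, and the final
           or saturates r7 (the last acceptable address) to all-ones
           rather than letting it wrap around below r3.  */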
        add     r7,r3,r5
        subfc   r6,r3,r7
        subfe   r9,r9,r9
        extsw   r6,r9
        or      r7,r7,r6

        insrdi  r4,r4,16,32
        cmpldi  r5,32
        li      r9,-1
        rlwinm  r6,r3,3,26,28 /* Calculate padding.  */
        insrdi  r4,r4,32,0
        addi    r7,r7,-1
#ifdef __LITTLE_ENDIAN__
        sld     r9,r9,r6
#else
        srd     r9,r9,r6
#endif
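        /* r9 now has 0x00 in the byte lanes that precede s within its
           first doubleword and 0xff elsewhere, so and'ing it with a
           cmpb result discards any match before the start of the
           buffer.  */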
        ble     L(small_range)

        ld      r12,0(r8)     /* Load doubleword from memory.  */
        cmpb    r3,r12,r4     /* Check for BYTEs in DWORD1.  */
        and     r3,r3,r9
        clrldi  r5,r7,61      /* Byte count - 1 in last dword.  */
        clrrdi  r7,r7,3       /* Address of last doubleword.  */
        cmpldi  cr7,r3,0      /* Does r3 indicate we got a hit?  */
        bne     cr7,L(done)

        mtcrf   0x01,r8
        /* Are we now aligned to a quadword boundary?  If so, skip to
           the main loop.  Otherwise, go through the alignment code.  */
        bt      28,L(loop_setup)

        /* Handle DWORD2 of pair.  */
        ldu     r12,8(r8)
        cmpb    r3,r12,r4
        cmpldi  cr7,r3,0
        bne     cr7,L(done)

L(loop_setup):
        /* The last dword we want to read in the loop below is the one
           containing the last byte of the string, ie. the dword at
           (s + size - 1) & ~7, or r7.  The first dword read is at
           r8 + 8, we read 2 * cnt dwords, so the last dword read will
           be at r8 + 8 + 16 * cnt - 8.  Solving for cnt gives
           cnt = (r7 - r8) / 16  */
        sub     r6,r7,r8
        srdi    r6,r6,4       /* Number of loop iterations.  */
        mtctr   r6            /* Setup the counter.  */

        /* Main loop to look for BYTE in the string.  Since
           it's a small loop (8 instructions), align it to 32-bytes.  */
        .align  5
L(loop):
        /* Load two doublewords, compare and merge in a
           single register for speed.  This is an attempt
           to speed up the byte-checking process for bigger strings.  */
        ld      r12,8(r8)
        ldu     r11,16(r8)
        cmpb    r3,r12,r4
        cmpb    r9,r11,r4
        or      r6,r9,r3      /* Merge everything in one doubleword.  */
        cmpldi  cr7,r6,0
        bne     cr7,L(found)
        bdnz    L(loop)

        /* We may have one more dword to read.  */
        cmpld   r8,r7
        beqlr

        ldu     r12,8(r8)
        cmpb    r3,r12,r4
        cmpldi  cr6,r3,0
        bne     cr6,L(done)
        blr

        .align  4
L(found):
        /* OK, one (or both) of the doublewords contains BYTE.  Check
           the first doubleword and decrement the address in case the first
           doubleword really contains BYTE.  */
        cmpldi  cr6,r3,0
        addi    r8,r8,-8
        bne     cr6,L(done)

        /* BYTE must be in the second doubleword.  Adjust the address
           again and move the result of cmpb to r3 so we can calculate the
           pointer.  */

        mr      r3,r9
        addi    r8,r8,8

        /* r3 has the output of the cmpb instruction, that is, it contains
           0xff in the same position as BYTE in the original
           doubleword from the string.  Use that to calculate the pointer.
           We need to make sure BYTE is *before* the end of the range.  */
L(done):
#ifdef __LITTLE_ENDIAN__
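        /* (r3 - 1) & ~r3 yields a mask of exactly the bits below the
           lowest set bit of the cmpb result, so popcntd of that mask
           is the number of trailing zero bits, i.e. the bit offset of
           the first matching byte on little-endian.  */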
        addi    r0,r3,-1
        andc    r0,r0,r3
        popcntd r0,r0         /* Count trailing zeros.  */
#else
        cntlzd  r0,r3         /* Count leading zeros before the match.  */
#endif
        cmpld   r8,r7         /* Are we on the last dword?  */
        srdi    r0,r0,3       /* Convert leading/trailing zeros to bytes.  */
        add     r3,r8,r0
        cmpld   cr7,r0,r5     /* If on the last dword, check byte offset.  */
        bnelr
        blelr   cr7
        li      r3,0
        blr

        .align  4
L(null):
        li      r3,0
        blr

/* Deals with size <= 32.  */
        .align  4
L(small_range):
        cmpldi  r5,0
        beq     L(null)
        ld      r12,0(r8)     /* Load word from memory.  */
        cmpb    r3,r12,r4     /* Check for BYTE in DWORD1.  */
        and     r3,r3,r9
        cmpldi  cr7,r3,0
        clrldi  r5,r7,61      /* Byte count - 1 in last dword.  */
        clrrdi  r7,r7,3       /* Address of last doubleword.  */
        cmpld   r8,r7         /* Are we done already?  */
        bne     cr7,L(done)
        beqlr

        ldu     r12,8(r8)
        cmpb    r3,r12,r4
        cmpldi  cr6,r3,0
        cmpld   r8,r7
        bne     cr6,L(done)   /* Found something.  */
        beqlr                 /* Hit end of string (length).  */

        ldu     r12,8(r8)
        cmpb    r3,r12,r4
        cmpldi  cr6,r3,0
        cmpld   r8,r7
        bne     cr6,L(done)
        beqlr

        ldu     r12,8(r8)
        cmpb    r3,r12,r4
        cmpldi  cr6,r3,0
        cmpld   r8,r7
        bne     cr6,L(done)
        beqlr

        ldu     r12,8(r8)
        cmpb    r3,r12,r4
        cmpldi  cr6,r3,0
        bne     cr6,L(done)
        blr

END (MEMCHR)
weak_alias (__memchr, memchr)
libc_hidden_builtin_def (memchr)