mirror of
https://sourceware.org/git/glibc.git
synced 2025-01-06 17:40:06 +00:00
4f41c682f3
Replace with !IS_IN (libc). This completes the transition from the IS_IN/NOT_IN macros to the IN_MODULE macro set. The generated code is unchanged on x86_64. * stdlib/isomac.c (fmt): Replace NOT_IN_libc with IN_MODULE. (get_null_defines): Adjust. * sunrpc/Makefile: Adjust comment. * Makerules (CPPFLAGS-nonlib): Remove NOT_IN_libc. * elf/Makefile (CPPFLAGS-sotruss-lib): Likewise. (CFLAGS-interp.c): Likewise. (CFLAGS-ldconfig.c): Likewise. (CPPFLAGS-.os): Likewise. * elf/rtld-Rules (rtld-CPPFLAGS): Likewise. * extra-lib.mk (CPPFLAGS-$(lib)): Likewise. * extra-modules.mk (extra-modules.mk): Likewise. * iconv/Makefile (CPPFLAGS-iconvprogs): Likewise. * locale/Makefile (CPPFLAGS-locale_programs): Likewise. * malloc/Makefile (CPPFLAGS-memusagestat): Likewise. * nscd/Makefile (CPPFLAGS-nscd): Likewise. * nss/Makefile (CPPFLAGS-nss_test1): Likewise. * stdlib/Makefile (CFLAGS-tst-putenvmod.c): Likewise. * sysdeps/gnu/Makefile ($(objpfx)errlist-compat.c): Likewise. * sysdeps/unix/sysv/linux/Makefile (CPPFLAGS-lddlibc4): Likewise. * iconvdata/Makefile (CPPFLAGS): Likewise. (cpp-srcs-left): Add libof for all iconvdata routines. * bits/stdio-lock.h: Replace NOT_IN_libc with IS_IN. * include/assert.h: Likewise. * include/ctype.h: Likewise. * include/errno.h: Likewise. * include/libc-symbols.h: Likewise. * include/math.h: Likewise. * include/netdb.h: Likewise. * include/resolv.h: Likewise. * include/stdio.h: Likewise. * include/stdlib.h: Likewise. * include/string.h: Likewise. * include/sys/stat.h: Likewise. * include/wctype.h: Likewise. * intl/l10nflist.c: Likewise. * libidn/idn-stub.c: Likewise. * libio/libioP.h: Likewise. * nptl/libc_multiple_threads.c: Likewise. * nptl/pthreadP.h: Likewise. * posix/regex_internal.h: Likewise. * resolv/res_hconf.c: Likewise. * sysdeps/arm/armv7/multiarch/memcpy.S: Likewise. * sysdeps/arm/memmove.S: Likewise. * sysdeps/arm/sysdep.h: Likewise. * sysdeps/generic/_itoa.h: Likewise. * sysdeps/generic/symbol-hacks.h: Likewise. * sysdeps/gnu/errlist.awk: Likewise. * sysdeps/gnu/errlist.c: Likewise. * sysdeps/i386/i586/memcpy.S: Likewise. * sysdeps/i386/i586/memset.S: Likewise. * sysdeps/i386/i686/memcpy.S: Likewise. * sysdeps/i386/i686/memmove.S: Likewise. * sysdeps/i386/i686/mempcpy.S: Likewise. * sysdeps/i386/i686/memset.S: Likewise. * sysdeps/i386/i686/multiarch/bcopy.S: Likewise. * sysdeps/i386/i686/multiarch/bzero.S: Likewise. * sysdeps/i386/i686/multiarch/memchr-sse2-bsf.S: Likewise. * sysdeps/i386/i686/multiarch/memchr-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/memchr.S: Likewise. * sysdeps/i386/i686/multiarch/memcmp-sse4.S: Likewise. * sysdeps/i386/i686/multiarch/memcmp-ssse3.S: Likewise. * sysdeps/i386/i686/multiarch/memcmp.S: Likewise. * sysdeps/i386/i686/multiarch/memcpy-ssse3-rep.S: Likewise. * sysdeps/i386/i686/multiarch/memcpy-ssse3.S: Likewise. * sysdeps/i386/i686/multiarch/memcpy.S: Likewise. * sysdeps/i386/i686/multiarch/memcpy_chk.S: Likewise. * sysdeps/i386/i686/multiarch/memmove.S: Likewise. * sysdeps/i386/i686/multiarch/memmove_chk.S: Likewise. * sysdeps/i386/i686/multiarch/mempcpy.S: Likewise. * sysdeps/i386/i686/multiarch/mempcpy_chk.S: Likewise. * sysdeps/i386/i686/multiarch/memrchr-c.c: Likewise. * sysdeps/i386/i686/multiarch/memrchr-sse2-bsf.S: Likewise. * sysdeps/i386/i686/multiarch/memrchr-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/memrchr.S: Likewise. * sysdeps/i386/i686/multiarch/memset-sse2-rep.S: Likewise. * sysdeps/i386/i686/multiarch/memset-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/memset.S: Likewise. 
* sysdeps/i386/i686/multiarch/memset_chk.S: Likewise. * sysdeps/i386/i686/multiarch/rawmemchr.S: Likewise. * sysdeps/i386/i686/multiarch/strcat-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/strcat-ssse3.S: Likewise. * sysdeps/i386/i686/multiarch/strcat.S: Likewise. * sysdeps/i386/i686/multiarch/strchr-sse2-bsf.S: Likewise. * sysdeps/i386/i686/multiarch/strchr-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/strchr.S: Likewise. * sysdeps/i386/i686/multiarch/strcmp-sse4.S: Likewise. * sysdeps/i386/i686/multiarch/strcmp-ssse3.S: Likewise. * sysdeps/i386/i686/multiarch/strcmp.S: Likewise. * sysdeps/i386/i686/multiarch/strcpy-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/strcpy-ssse3.S: Likewise. * sysdeps/i386/i686/multiarch/strcpy.S: Likewise. * sysdeps/i386/i686/multiarch/strcspn.S: Likewise. * sysdeps/i386/i686/multiarch/strlen-sse2-bsf.S: Likewise. * sysdeps/i386/i686/multiarch/strlen-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/strlen.S: Likewise. * sysdeps/i386/i686/multiarch/strnlen.S: Likewise. * sysdeps/i386/i686/multiarch/strrchr-sse2-bsf.S: Likewise. * sysdeps/i386/i686/multiarch/strrchr-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/strrchr.S: Likewise. * sysdeps/i386/i686/multiarch/strspn.S: Likewise. * sysdeps/i386/i686/multiarch/wcschr-c.c: Likewise. * sysdeps/i386/i686/multiarch/wcschr-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/wcschr.S: Likewise. * sysdeps/i386/i686/multiarch/wcscmp-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/wcscmp.S: Likewise. * sysdeps/i386/i686/multiarch/wcscpy-c.c: Likewise. * sysdeps/i386/i686/multiarch/wcscpy-ssse3.S: Likewise. * sysdeps/i386/i686/multiarch/wcscpy.S: Likewise. * sysdeps/i386/i686/multiarch/wcslen-c.c: Likewise. * sysdeps/i386/i686/multiarch/wcslen-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/wcslen.S: Likewise. * sysdeps/i386/i686/multiarch/wcsrchr-c.c: Likewise. * sysdeps/i386/i686/multiarch/wcsrchr-sse2.S: Likewise. * sysdeps/i386/i686/multiarch/wcsrchr.S: Likewise. * sysdeps/i386/i686/multiarch/wmemcmp-c.c: Likewise. * sysdeps/i386/i686/multiarch/wmemcmp.S: Likewise. * sysdeps/ia64/fpu/libm-symbols.h: Likewise. * sysdeps/nptl/bits/libc-lock.h: Likewise. * sysdeps/nptl/bits/libc-lockP.h: Likewise. * sysdeps/nptl/bits/stdio-lock.h: Likewise. * sysdeps/posix/closedir.c: Likewise. * sysdeps/posix/opendir.c: Likewise. * sysdeps/posix/readdir.c: Likewise. * sysdeps/posix/rewinddir.c: Likewise. * sysdeps/powerpc/novmx-sigjmp.c: Likewise. * sysdeps/powerpc/powerpc32/__longjmp.S: Likewise. * sysdeps/powerpc/powerpc32/bsd-_setjmp.S: Likewise. * sysdeps/powerpc/powerpc32/fpu/__longjmp.S: Likewise. * sysdeps/powerpc/powerpc32/fpu/setjmp.S: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/bzero.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memchr.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memcmp-ppc32.S: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memcmp.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memcpy-ppc32.S: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memcpy.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memmove.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/mempcpy.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memrchr-ppc32.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memrchr.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memset-ppc32.S: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/memset.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/rawmemchr.c: Likewise. 
* sysdeps/powerpc/powerpc32/power4/multiarch/strcasecmp.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strcasecmp_l.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strchr.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strchrnul.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strlen-ppc32.S: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strlen.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strncase.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strncase_l.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strncmp-ppc32.S: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strncmp.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/strnlen.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wcschr-ppc32.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wcschr.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wcscpy-ppc32.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wcscpy.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wcsrchr-ppc32.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wcsrchr.c: Likewise. * sysdeps/powerpc/powerpc32/power4/multiarch/wordcopy.c: Likewise. * sysdeps/powerpc/powerpc32/power6/memset.S: Likewise. * sysdeps/powerpc/powerpc32/setjmp.S: Likewise. * sysdeps/powerpc/powerpc64/__longjmp.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/bzero.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memchr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memcmp-ppc64.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memcmp.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memcpy-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memcpy.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memmove-ppc64.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memmove.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/mempcpy.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memrchr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memset-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/memset.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/rawmemchr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/stpcpy-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/stpcpy.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/stpncpy.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcasecmp.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcasecmp_l.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcat.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strchr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strchrnul.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcmp-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcmp.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcpy-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcpy.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strcspn.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strlen-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strlen.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncase.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncase_l.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncat.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncmp-ppc64.S: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncmp.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncpy-ppc64.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strncpy.c: Likewise. 
* sysdeps/powerpc/powerpc64/multiarch/strnlen.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strpbrk.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strrchr-ppc64.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strrchr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strspn-ppc64.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/strspn.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/wcschr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/wcscpy.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/wcsrchr.c: Likewise. * sysdeps/powerpc/powerpc64/multiarch/wordcopy.c: Likewise. * sysdeps/powerpc/powerpc64/setjmp.S: Likewise. * sysdeps/s390/s390-32/multiarch/ifunc-resolve.c: Likewise. * sysdeps/s390/s390-32/multiarch/memcmp.S: Likewise. * sysdeps/s390/s390-32/multiarch/memcpy.S: Likewise. * sysdeps/s390/s390-32/multiarch/memset.S: Likewise. * sysdeps/s390/s390-64/multiarch/ifunc-resolve.c: Likewise. * sysdeps/s390/s390-64/multiarch/memcmp.S: Likewise. * sysdeps/s390/s390-64/multiarch/memcpy.S: Likewise. * sysdeps/s390/s390-64/multiarch/memset.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memcpy-niagara1.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memcpy-niagara2.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memcpy-niagara4.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memcpy-ultra3.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memcpy.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memset-niagara1.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memset-niagara4.S: Likewise. * sysdeps/sparc/sparc64/multiarch/memset.S: Likewise. * sysdeps/unix/alpha/sysdep.S: Likewise. * sysdeps/unix/alpha/sysdep.h: Likewise. * sysdeps/unix/make-syscalls.sh: Likewise. * sysdeps/unix/sysv/linux/aarch64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/aarch64/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/alpha/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/alpha/vfork.S: Likewise. * sysdeps/unix/sysv/linux/arm/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/arm/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/getpid.c: Likewise. * sysdeps/unix/sysv/linux/hppa/nptl/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/hppa/nptl/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/i386/i486/lowlevellock.S: Likewise. * sysdeps/unix/sysv/linux/i386/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/i386/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/i386/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/ia64/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/ia64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/ia64/sysdep.S: Likewise. * sysdeps/unix/sysv/linux/ia64/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/lowlevellock-futex.h: Likewise. * sysdeps/unix/sysv/linux/m68k/bits/m68k-vdso.h: Likewise. * sysdeps/unix/sysv/linux/m68k/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/m68k/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/microblaze/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/microblaze/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/mips/mips64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/mips/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/not-cancel.h: Likewise. * sysdeps/unix/sysv/linux/powerpc/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/powerpc/powerpc32/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/powerpc/powerpc64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/s390/longjmp_chk.c: Likewise. * sysdeps/unix/sysv/linux/s390/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/s390/s390-32/sysdep-cancel.h: Likewise. 
* sysdeps/unix/sysv/linux/s390/s390-32/sysdep.S: Likewise. * sysdeps/unix/sysv/linux/s390/s390-32/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/s390/s390-32/vfork.S: Likewise. * sysdeps/unix/sysv/linux/s390/s390-64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/s390/s390-64/sysdep.S: Likewise. * sysdeps/unix/sysv/linux/s390/s390-64/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/s390/s390-64/vfork.S: Likewise. * sysdeps/unix/sysv/linux/sh/lowlevellock.S: Likewise. * sysdeps/unix/sysv/linux/sh/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/sh/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/sh/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/sh/vfork.S: Likewise. * sysdeps/unix/sysv/linux/sparc/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/sparc/sparc32/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/sparc/sparc32/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/sparc/sparc64/brk.S: Likewise. * sysdeps/unix/sysv/linux/sparc/sparc64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/sparc/sparc64/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/tile/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/tile/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/tile/sysdep.h: Likewise. * sysdeps/unix/sysv/linux/tile/waitpid.S: Likewise. * sysdeps/unix/sysv/linux/x86_64/lowlevellock.S: Likewise. * sysdeps/unix/sysv/linux/x86_64/lowlevellock.h: Likewise. * sysdeps/unix/sysv/linux/x86_64/sysdep-cancel.h: Likewise. * sysdeps/unix/sysv/linux/x86_64/sysdep.h: Likewise. * sysdeps/wordsize-32/symbol-hacks.h: Likewise. * sysdeps/x86_64/memcpy.S: Likewise. * sysdeps/x86_64/memmove.c: Likewise. * sysdeps/x86_64/memset.S: Likewise. * sysdeps/x86_64/multiarch/init-arch.h: Likewise. * sysdeps/x86_64/multiarch/memcmp-sse4.S: Likewise. * sysdeps/x86_64/multiarch/memcmp-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/memcmp.S: Likewise. * sysdeps/x86_64/multiarch/memcpy-avx-unaligned.S: Likewise. * sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Likewise. * sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/memcpy.S: Likewise. * sysdeps/x86_64/multiarch/memcpy_chk.S: Likewise. * sysdeps/x86_64/multiarch/memmove.c: Likewise. * sysdeps/x86_64/multiarch/mempcpy.S: Likewise. * sysdeps/x86_64/multiarch/mempcpy_chk.S: Likewise. * sysdeps/x86_64/multiarch/memset-avx2.S: Likewise. * sysdeps/x86_64/multiarch/memset.S: Likewise. * sysdeps/x86_64/multiarch/memset_chk.S: Likewise. * sysdeps/x86_64/multiarch/strcat-sse2-unaligned.S: Likewise. * sysdeps/x86_64/multiarch/strcat-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/strcat.S: Likewise. * sysdeps/x86_64/multiarch/strchr-sse2-no-bsf.S: Likewise. * sysdeps/x86_64/multiarch/strchr.S: Likewise. * sysdeps/x86_64/multiarch/strcmp-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/strcmp.S: Likewise. * sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S: Likewise. * sysdeps/x86_64/multiarch/strcpy-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/strcpy.S: Likewise. * sysdeps/x86_64/multiarch/strcspn.S: Likewise. * sysdeps/x86_64/multiarch/strspn.S: Likewise. * sysdeps/x86_64/multiarch/wcscpy-c.c: Likewise. * sysdeps/x86_64/multiarch/wcscpy-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/wcscpy.S: Likewise. * sysdeps/x86_64/multiarch/wmemcmp-c.c: Likewise. * sysdeps/x86_64/multiarch/wmemcmp.S: Likewise. * sysdeps/x86_64/strcmp.S: Likewise.
2305 lines
52 KiB
ArmAsm
/* Highly optimized version for x86-64.
   Copyright (C) 1999-2014 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Based on i686 version contributed by Ulrich Drepper
   <drepper@cygnus.com>, 1999.
   Updated with SSE2 support contributed by Intel Corporation.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include "asm-syntax.h"

#undef UPDATE_STRNCMP_COUNTER

#ifndef LABEL
#define LABEL(l) L(l)
#endif

#ifdef USE_AS_STRNCMP
/* The simplified code below is not set up to handle strncmp() so far.
   Should this become necessary it has to be implemented.  For now
   just report the problem.  */
# if !IS_IN (libc)
#  error "strncmp not implemented so far"
# endif

/* Since the counter, %r11, is unsigned, we branch to strcmp_exitz
   if the new counter > the old one or is 0.  */
# define UPDATE_STRNCMP_COUNTER \
        /* calculate left number to compare */ \
        lea     -16(%rcx, %r11), %r9; \
        cmp     %r9, %r11; \
        jb      LABEL(strcmp_exitz); \
        test    %r9, %r9; \
        je      LABEL(strcmp_exitz); \
        mov     %r9, %r11

#elif defined USE_AS_STRCASECMP_L
# include "locale-defines.h"

/* No support for strcasecmp outside libc so far since it is not needed.  */
# if !IS_IN (libc)
#  error "strcasecmp_l not implemented so far"
# endif

# define UPDATE_STRNCMP_COUNTER
#elif defined USE_AS_STRNCASECMP_L
# include "locale-defines.h"

/* No support for strncasecmp outside libc so far since it is not needed.  */
# if !IS_IN (libc)
#  error "strncasecmp_l not implemented so far"
# endif

# define UPDATE_STRNCMP_COUNTER \
        /* calculate left number to compare */ \
        lea     -16(%rcx, %r11), %r9; \
        cmp     %r9, %r11; \
        jb      LABEL(strcmp_exitz); \
        test    %r9, %r9; \
        je      LABEL(strcmp_exitz); \
        mov     %r9, %r11
#else
# define UPDATE_STRNCMP_COUNTER
# ifndef STRCMP
#  define STRCMP strcmp
# endif
#endif

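/* Added commentary: in rough C terms (illustrative only) the strncmp /
   strncasecmp counter update above does

        remaining = count - (16 - offset);        // %r9 = %rcx + %r11 - 16
        if (remaining > count || remaining == 0)  // unsigned wrap: limit reached
                goto strcmp_exitz;
        count = remaining;                        // new value of %r11

   which is exactly the "new counter > the old one or is 0" rule described
   in the comment above the macro.  */
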
#ifndef USE_SSSE3
        .text
#else
        .section .text.ssse3,"ax",@progbits
#endif

#ifdef USE_AS_STRCASECMP_L
# ifndef ENTRY2
#  define ENTRY2(name) ENTRY (name)
#  define END2(name) END (name)
# endif

ENTRY2 (__strcasecmp)
        movq    __libc_tsd_LOCALE@gottpoff(%rip),%rax
        mov     %fs:(%rax),%RDX_LP

        // XXX 5 byte should be before the function
        /* 5-byte NOP.  */
        .byte   0x0f,0x1f,0x44,0x00,0x00
END2 (__strcasecmp)
# ifndef NO_NOLOCALE_ALIAS
weak_alias (__strcasecmp, strcasecmp)
libc_hidden_def (__strcasecmp)
# endif
/* FALLTHROUGH to strcasecmp_l.  */
#elif defined USE_AS_STRNCASECMP_L
# ifndef ENTRY2
#  define ENTRY2(name) ENTRY (name)
#  define END2(name) END (name)
# endif

ENTRY2 (__strncasecmp)
        movq    __libc_tsd_LOCALE@gottpoff(%rip),%rax
        mov     %fs:(%rax),%RCX_LP

        // XXX 5 byte should be before the function
        /* 5-byte NOP.  */
        .byte   0x0f,0x1f,0x44,0x00,0x00
END2 (__strncasecmp)
# ifndef NO_NOLOCALE_ALIAS
weak_alias (__strncasecmp, strncasecmp)
libc_hidden_def (__strncasecmp)
# endif
/* FALLTHROUGH to strncasecmp_l.  */
#endif

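/* Added commentary: the stubs above fetch the calling thread's locale_t
   from the __libc_tsd_LOCALE TLS slot and leave it in the register in
   which the _l variant expects its locale argument (%rdx for
   __strcasecmp_l, %rcx for __strncasecmp_l, whose third argument %rdx
   already carries the length), then fall straight through into the
   shared body below; the .byte sequence encodes a 5-byte NOP used as
   padding between the two entry points (see the XXX note).  */
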
ENTRY (STRCMP)
#if !IS_IN (libc)
/* Simple version since we can't use SSE registers in ld.so.  */
L(oop): movb    (%rdi), %al
        cmpb    (%rsi), %al
        jne     L(neq)
        incq    %rdi
        incq    %rsi
        testb   %al, %al
        jnz     L(oop)

        xorl    %eax, %eax
        ret

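        /* Added commentary: the flags tested by the cmov below are still
           those of the cmpb above (the jne is taken straight from it), so
           CF is set exactly when the byte from (%rdi) is below the byte
           from (%rsi) and the preloaded +1 result is replaced by -1.  */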
L(neq): movl    $1, %eax
        movl    $-1, %ecx
        cmovbl  %ecx, %eax
        ret
END (STRCMP)
#else   /* !IS_IN (libc) */
# ifdef USE_AS_STRCASECMP_L
        /* We have to fall back on the C implementation for locales
           with encodings not matching ASCII for single bytes.  */
#  if LOCALE_T___LOCALES != 0 || LC_CTYPE != 0
        mov     LOCALE_T___LOCALES+LC_CTYPE*LP_SIZE(%rdx), %RAX_LP
#  else
        mov     (%rdx), %RAX_LP
#  endif
        testl   $1, LOCALE_DATA_VALUES+_NL_CTYPE_NONASCII_CASE*SIZEOF_VALUES(%rax)
        jne     __strcasecmp_l_nonascii
# elif defined USE_AS_STRNCASECMP_L
        /* We have to fall back on the C implementation for locales
           with encodings not matching ASCII for single bytes.  */
#  if LOCALE_T___LOCALES != 0 || LC_CTYPE != 0
        mov     LOCALE_T___LOCALES+LC_CTYPE*LP_SIZE(%rcx), %RAX_LP
#  else
        mov     (%rcx), %RAX_LP
#  endif
        testl   $1, LOCALE_DATA_VALUES+_NL_CTYPE_NONASCII_CASE*SIZEOF_VALUES(%rax)
        jne     __strncasecmp_l_nonascii
# endif

/*
 * This implementation uses SSE to compare up to 16 bytes at a time.
 */
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
        test    %rdx, %rdx
        je      LABEL(strcmp_exitz)
        cmp     $1, %rdx
        je      LABEL(Byte0)
        mov     %rdx, %r11
# endif
        mov     %esi, %ecx
        mov     %edi, %eax
        /* Use 64bit AND here to avoid long NOP padding.  */
        and     $0x3f, %rcx             /* rsi alignment in cache line */
        and     $0x3f, %rax             /* rdi alignment in cache line */
# if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
        .section .rodata.cst16,"aM",@progbits,16
        .align 16
.Lbelowupper:
        .quad   0x4040404040404040
        .quad   0x4040404040404040
.Ltopupper:
        .quad   0x5b5b5b5b5b5b5b5b
        .quad   0x5b5b5b5b5b5b5b5b
.Ltouppermask:
        .quad   0x2020202020202020
        .quad   0x2020202020202020
        .previous
        movdqa  .Lbelowupper(%rip), %xmm5
#  define UCLOW_reg %xmm5
        movdqa  .Ltopupper(%rip), %xmm6
#  define UCHIGH_reg %xmm6
        movdqa  .Ltouppermask(%rip), %xmm7
#  define LCQWORD_reg %xmm7
# endif
        cmp     $0x30, %ecx
        ja      LABEL(crosscache)       /* rsi: 16-byte load will cross cache line */
        cmp     $0x30, %eax
        ja      LABEL(crosscache)       /* rdi: 16-byte load will cross cache line */
        movlpd  (%rdi), %xmm1
        movlpd  (%rsi), %xmm2
        movhpd  8(%rdi), %xmm1
        movhpd  8(%rsi), %xmm2
# if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
#  define TOLOWER(reg1, reg2) \
        movdqa  reg1, %xmm8; \
        movdqa  UCHIGH_reg, %xmm9; \
        movdqa  reg2, %xmm10; \
        movdqa  UCHIGH_reg, %xmm11; \
        pcmpgtb UCLOW_reg, %xmm8; \
        pcmpgtb reg1, %xmm9; \
        pcmpgtb UCLOW_reg, %xmm10; \
        pcmpgtb reg2, %xmm11; \
        pand    %xmm9, %xmm8; \
        pand    %xmm11, %xmm10; \
        pand    LCQWORD_reg, %xmm8; \
        pand    LCQWORD_reg, %xmm10; \
        por     %xmm8, reg1; \
        por     %xmm10, reg2
        TOLOWER (%xmm1, %xmm2)
# else
#  define TOLOWER(reg1, reg2)
# endif
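/* Added commentary: the constants behind UCLOW_reg, UCHIGH_reg and
   LCQWORD_reg are '@' (0x40), '[' (0x5b) and the ASCII case bit 0x20.
   The paired pcmpgtb's in TOLOWER build a byte mask that is all-ones
   exactly for bytes in 'A'..'Z', and or-ing 0x20 into those bytes folds
   both operands to lower case before the ordinary byte compares run.  */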
        pxor    %xmm0, %xmm0            /* clear %xmm0 for null char checks */
        pcmpeqb %xmm1, %xmm0            /* Any null chars? */
        pcmpeqb %xmm2, %xmm1            /* compare first 16 bytes for equality */
        psubb   %xmm0, %xmm1            /* packed sub of comparison results */
        pmovmskb %xmm1, %edx
        sub     $0xffff, %edx           /* if first 16 bytes are same, edx == 0xffff */
        jnz     LABEL(less16bytes)      /* If not, find different value or null char */
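        /* Added commentary on the test above: after the two pcmpeqb's a
           byte of %xmm1 is 0xff where the strings match and a byte of
           %xmm0 is 0xff where the byte is NUL; the psubb leaves the high
           bit set only for bytes that match and are not NUL, so pmovmskb
           yields 0xffff exactly when the whole 16-byte chunk is equal and
           unterminated, and the sub reduces that to a zero test.  */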
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
        sub     $16, %r11
        jbe     LABEL(strcmp_exitz)     /* finish comparison */
# endif
        add     $16, %rsi               /* prepare to search next 16 bytes */
        add     $16, %rdi               /* prepare to search next 16 bytes */

/*
 * Determine source and destination string offsets from 16-byte alignment.
 * Use relative offset difference between the two to determine which case
 * below to use.
 */
        .p2align 4
LABEL(crosscache):
        and     $0xfffffffffffffff0, %rsi  /* force %rsi is 16 byte aligned */
        and     $0xfffffffffffffff0, %rdi  /* force %rdi is 16 byte aligned */
        mov     $0xffff, %edx           /* for equivalent offset */
        xor     %r8d, %r8d
        and     $0xf, %ecx              /* offset of rsi */
        and     $0xf, %eax              /* offset of rdi */
        cmp     %eax, %ecx
        je      LABEL(ashr_0)           /* rsi and rdi relative offset same */
        ja      LABEL(bigger)
        mov     %edx, %r8d              /* r8d is offset flag for exit tail */
        xchg    %ecx, %eax
        xchg    %rsi, %rdi
LABEL(bigger):
        lea     15(%rax), %r9
        sub     %rcx, %r9
        lea     LABEL(unaligned_table)(%rip), %r10
        movslq  (%r10, %r9,4), %r9
        lea     (%r10, %r9), %r10
        jmp     *%r10                   /* jump to corresponding case */

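/*
 * Added commentary: %r9 = 15 + (%rax offset) - (%rcx offset) selects an
 * entry of unaligned_table, so the indirect jump above picks the ashr_N
 * case matching the relative misalignment of the two (now 16-byte
 * aligned) pointers.  Each case realigns one operand against the other
 * by merging adjacent 16-byte chunks with a psrldq/pslldq pair, or a
 * single palignr when SSSE3 is available, before running the compare loop.
 */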
/*
|
|
* The following cases will be handled by ashr_0
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(0~15) n(0~15) 15(15+ n-n) ashr_0
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_0):
|
|
|
|
movdqa (%rsi), %xmm1
|
|
pxor %xmm0, %xmm0 /* clear %xmm0 for null char check */
|
|
pcmpeqb %xmm1, %xmm0 /* Any null chars? */
|
|
# if !defined USE_AS_STRCASECMP_L && !defined USE_AS_STRNCASECMP_L
|
|
pcmpeqb (%rdi), %xmm1 /* compare 16 bytes for equality */
|
|
# else
|
|
movdqa (%rdi), %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm2, %xmm1 /* compare 16 bytes for equality */
|
|
# endif
|
|
psubb %xmm0, %xmm1 /* packed sub of comparison results*/
|
|
pmovmskb %xmm1, %r9d
|
|
shr %cl, %edx /* adjust 0xffff for offset */
|
|
shr %cl, %r9d /* adjust for 16-byte offset */
|
|
sub %r9d, %edx
|
|
/*
|
|
* edx must be the same with r9d if in left byte (16-rcx) is equal to
|
|
* the start from (16-rax) and no null char was seen.
|
|
*/
|
|
jne LABEL(less32bytes) /* mismatch or null char */
|
|
UPDATE_STRNCMP_COUNTER
|
|
mov $16, %rcx
|
|
mov $16, %r9
|
|
pxor %xmm0, %xmm0 /* clear xmm0, may have changed above */
|
|
|
|
/*
|
|
* Now both strings are aligned at 16-byte boundary. Loop over strings
|
|
* checking 32-bytes per iteration.
|
|
*/
|
|
.p2align 4
|
|
LABEL(loop_ashr_0):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit) /* mismatch or null char seen */
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
add $16, %rcx
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
add $16, %rcx
|
|
jmp LABEL(loop_ashr_0)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_1
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(15) n -15 0(15 +(n-15) - n) ashr_1
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_1):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0 /* Any null chars? */
|
|
pslldq $15, %xmm2 /* shift first string to align with second */
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2 /* compare 16 bytes for equality */
|
|
psubb %xmm0, %xmm2 /* packed sub of comparison results*/
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx /* adjust 0xffff for offset */
|
|
shr %cl, %r9d /* adjust for 16-byte offset */
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes) /* mismatch or null char seen */
|
|
movdqa (%rdi), %xmm3
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads*/
|
|
mov $1, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 1(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
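/* Added commentary: %r10 now holds a negative count of the bytes left
   before the unaligned operand runs onto the next 4 KiB page; the loop
   below adds 16 per chunk and, once %r10 goes positive, detours through
   nibble_ashr_1, which scans the bytes already loaded in %xmm3 for the
   terminating NUL and rewinds %r10 by 0x1000 before resuming.  */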
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_1):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_1) /* cross page boundary */
|
|
|
|
LABEL(gobble_ashr_1):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4 /* store for next cycle */
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $1, %xmm3
|
|
pslldq $15, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $1, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_1) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4 /* store for next cycle */
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $1, %xmm3
|
|
pslldq $15, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $1, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_1)
|
|
|
|
/*
|
|
* Nibble avoids loads across page boundary. This is to avoid a potential
|
|
* access into unmapped memory.
|
|
*/
|
|
.p2align 4
|
|
LABEL(nibble_ashr_1):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char*/
|
|
pmovmskb %xmm0, %edx
|
|
test $0xfffe, %edx
|
|
jnz LABEL(ashr_1_exittail) /* find null char*/
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $15, %r11
|
|
jbe LABEL(ashr_1_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10 /* subtract 4K from %r10 */
|
|
jmp LABEL(gobble_ashr_1)
|
|
|
|
/*
|
|
* Once the null char is found, determine whether there is a string mismatch
* before the null char.
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_1_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $1, %xmm0
|
|
psrldq $1, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_2
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(14~15) n -14 1(15 +(n-14) - n) ashr_2
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_2):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $14, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $2, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 2(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_2):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_2)
|
|
|
|
LABEL(gobble_ashr_2):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $2, %xmm3
|
|
pslldq $14, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $2, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_2) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $2, %xmm3
|
|
pslldq $14, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $2, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_2)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_2):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xfffc, %edx
|
|
jnz LABEL(ashr_2_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $14, %r11
|
|
jbe LABEL(ashr_2_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_2)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_2_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $2, %xmm0
|
|
psrldq $2, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_3
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(13~15) n -13 2(15 +(n-13) - n) ashr_3
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_3):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $13, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $3, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 3(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_3):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_3)
|
|
|
|
LABEL(gobble_ashr_3):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $3, %xmm3
|
|
pslldq $13, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $3, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_3) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $3, %xmm3
|
|
pslldq $13, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $3, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_3)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_3):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xfff8, %edx
|
|
jnz LABEL(ashr_3_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $13, %r11
|
|
jbe LABEL(ashr_3_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_3)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_3_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $3, %xmm0
|
|
psrldq $3, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_4
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(12~15) n -12 3(15 +(n-12) - n) ashr_4
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_4):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $12, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $4, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 4(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_4):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_4)
|
|
|
|
LABEL(gobble_ashr_4):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $4, %xmm3
|
|
pslldq $12, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $4, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_4) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $4, %xmm3
|
|
pslldq $12, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $4, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_4)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_4):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xfff0, %edx
|
|
jnz LABEL(ashr_4_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $12, %r11
|
|
jbe LABEL(ashr_4_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_4)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_4_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $4, %xmm0
|
|
psrldq $4, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_5
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(11~15) n - 11 4(15 +(n-11) - n) ashr_5
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_5):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $11, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $5, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 5(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_5):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_5)
|
|
|
|
LABEL(gobble_ashr_5):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $5, %xmm3
|
|
pslldq $11, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $5, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_5) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $5, %xmm3
|
|
pslldq $11, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $5, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_5)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_5):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xffe0, %edx
|
|
jnz LABEL(ashr_5_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $11, %r11
|
|
jbe LABEL(ashr_5_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_5)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_5_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $5, %xmm0
|
|
psrldq $5, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_6
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(10~15) n - 10 5(15 +(n-10) - n) ashr_6
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_6):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $10, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $6, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 6(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_6):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_6)
|
|
|
|
LABEL(gobble_ashr_6):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $6, %xmm3
|
|
pslldq $10, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $6, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_6) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $6, %xmm3
|
|
pslldq $10, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $6, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_6)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_6):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xffc0, %edx
|
|
jnz LABEL(ashr_6_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $10, %r11
|
|
jbe LABEL(ashr_6_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_6)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_6_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $6, %xmm0
|
|
psrldq $6, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_7
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(9~15) n - 9 6(15 +(n - 9) - n) ashr_7
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_7):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $9, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $7, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 7(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_7):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_7)
|
|
|
|
LABEL(gobble_ashr_7):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $7, %xmm3
|
|
pslldq $9, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $7, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_7) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $7, %xmm3
|
|
pslldq $9, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $7, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_7)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_7):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xff80, %edx
|
|
jnz LABEL(ashr_7_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $9, %r11
|
|
jbe LABEL(ashr_7_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_7)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_7_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $7, %xmm0
|
|
psrldq $7, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_8
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(8~15) n - 8 7(15 +(n - 8) - n) ashr_8
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_8):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $8, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $8, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 8(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_8):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_8)
|
|
|
|
LABEL(gobble_ashr_8):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $8, %xmm3
|
|
pslldq $8, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $8, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_8) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $8, %xmm3
|
|
pslldq $8, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $8, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
jmp LABEL(loop_ashr_8)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_8):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xff00, %edx
|
|
jnz LABEL(ashr_8_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $8, %r11
|
|
jbe LABEL(ashr_8_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_8)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_8_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $8, %xmm0
|
|
psrldq $8, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
/*
|
|
* The following cases will be handled by ashr_9
|
|
* rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
|
|
* n(7~15) n - 7 8(15 +(n - 7) - n) ashr_9
|
|
*/
|
|
.p2align 4
|
|
LABEL(ashr_9):
|
|
pxor %xmm0, %xmm0
|
|
movdqa (%rdi), %xmm2
|
|
movdqa (%rsi), %xmm1
|
|
pcmpeqb %xmm1, %xmm0
|
|
pslldq $7, %xmm2
|
|
TOLOWER (%xmm1, %xmm2)
|
|
pcmpeqb %xmm1, %xmm2
|
|
psubb %xmm0, %xmm2
|
|
pmovmskb %xmm2, %r9d
|
|
shr %cl, %edx
|
|
shr %cl, %r9d
|
|
sub %r9d, %edx
|
|
jnz LABEL(less32bytes)
|
|
movdqa (%rdi), %xmm3
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
pxor %xmm0, %xmm0
|
|
mov $16, %rcx /* index for loads */
|
|
mov $9, %r9d /* byte position left over from less32bytes case */
|
|
/*
|
|
* Setup %r10 value allows us to detect crossing a page boundary.
|
|
* When %r10 goes positive we have crossed a page boundary and
|
|
* need to do a nibble.
|
|
*/
|
|
lea 9(%rdi), %r10
|
|
and $0xfff, %r10 /* offset into 4K page */
|
|
sub $0x1000, %r10 /* subtract 4K pagesize */
|
|
|
|
.p2align 4
|
|
LABEL(loop_ashr_9):
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_9)
|
|
|
|
LABEL(gobble_ashr_9):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $9, %xmm3
|
|
pslldq $7, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $9, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3
|
|
|
|
add $16, %r10
|
|
jg LABEL(nibble_ashr_9) /* cross page boundary */
|
|
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
movdqa (%rdi, %rcx), %xmm2
|
|
movdqa %xmm2, %xmm4
|
|
|
|
# ifndef USE_SSSE3
|
|
psrldq $9, %xmm3
|
|
pslldq $7, %xmm2
|
|
por %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# else
|
|
palignr $9, %xmm3, %xmm2 /* merge into one 16byte value */
|
|
# endif
|
|
TOLOWER (%xmm1, %xmm2)
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
pcmpeqb %xmm2, %xmm1
|
|
psubb %xmm0, %xmm1
|
|
pmovmskb %xmm1, %edx
|
|
sub $0xffff, %edx
|
|
jnz LABEL(exit)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
sub $16, %r11
|
|
jbe LABEL(strcmp_exitz)
|
|
# endif
|
|
|
|
add $16, %rcx
|
|
movdqa %xmm4, %xmm3 /* store for next cycle */
|
|
jmp LABEL(loop_ashr_9)
|
|
|
|
.p2align 4
|
|
LABEL(nibble_ashr_9):
|
|
pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
|
|
pmovmskb %xmm0, %edx
|
|
test $0xfe00, %edx
|
|
jnz LABEL(ashr_9_exittail)
|
|
|
|
# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
cmp $7, %r11
|
|
jbe LABEL(ashr_9_exittail)
|
|
# endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
sub $0x1000, %r10
|
|
jmp LABEL(gobble_ashr_9)
|
|
|
|
.p2align 4
|
|
LABEL(ashr_9_exittail):
|
|
movdqa (%rsi, %rcx), %xmm1
|
|
psrldq $9, %xmm0
|
|
psrldq $9, %xmm3
|
|
jmp LABEL(aftertail)
|
|
|
|
	/*
	 * The following cases will be handled by ashr_10
	 * rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
	 * n(6~15) n - 6 9(15 +(n - 6) - n) ashr_10
	 */
	.p2align 4
LABEL(ashr_10):
	pxor %xmm0, %xmm0
	movdqa (%rdi), %xmm2
	movdqa (%rsi), %xmm1
	pcmpeqb %xmm1, %xmm0
	pslldq $6, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb %xmm1, %xmm2
	psubb %xmm0, %xmm2
	pmovmskb %xmm2, %r9d
	shr %cl, %edx
	shr %cl, %r9d
	sub %r9d, %edx
	jnz LABEL(less32bytes)
	movdqa (%rdi), %xmm3

	UPDATE_STRNCMP_COUNTER

	pxor %xmm0, %xmm0
	mov $16, %rcx /* index for loads */
	mov $10, %r9d /* byte position left over from less32bytes case */
	/*
	 * Setup %r10 value allows us to detect crossing a page boundary.
	 * When %r10 goes positive we have crossed a page boundary and
	 * need to do a nibble.
	 */
	lea 10(%rdi), %r10
	and $0xfff, %r10 /* offset into 4K page */
	sub $0x1000, %r10 /* subtract 4K pagesize */

	.p2align 4
LABEL(loop_ashr_10):
	add $16, %r10
	jg LABEL(nibble_ashr_10)

LABEL(gobble_ashr_10):
	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $10, %xmm3
	pslldq $6, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $10, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3

	add $16, %r10
	jg LABEL(nibble_ashr_10) /* cross page boundary */

	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $10, %xmm3
	pslldq $6, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $10, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3
	jmp LABEL(loop_ashr_10)

	.p2align 4
LABEL(nibble_ashr_10):
	pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
	pmovmskb %xmm0, %edx
	test $0xfc00, %edx
	jnz LABEL(ashr_10_exittail)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp $6, %r11
	jbe LABEL(ashr_10_exittail)
# endif

	pxor %xmm0, %xmm0
	sub $0x1000, %r10
	jmp LABEL(gobble_ashr_10)

	.p2align 4
LABEL(ashr_10_exittail):
	movdqa (%rsi, %rcx), %xmm1
	psrldq $10, %xmm0
	psrldq $10, %xmm3
	jmp LABEL(aftertail)

	/*
	 * The following cases will be handled by ashr_11
	 * rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
	 * n(5~15) n - 5 10(15 +(n - 5) - n) ashr_11
	 */
	.p2align 4
LABEL(ashr_11):
	pxor %xmm0, %xmm0
	movdqa (%rdi), %xmm2
	movdqa (%rsi), %xmm1
	pcmpeqb %xmm1, %xmm0
	pslldq $5, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb %xmm1, %xmm2
	psubb %xmm0, %xmm2
	pmovmskb %xmm2, %r9d
	shr %cl, %edx
	shr %cl, %r9d
	sub %r9d, %edx
	jnz LABEL(less32bytes)
	movdqa (%rdi), %xmm3

	UPDATE_STRNCMP_COUNTER

	pxor %xmm0, %xmm0
	mov $16, %rcx /* index for loads */
	mov $11, %r9d /* byte position left over from less32bytes case */
	/*
	 * Setup %r10 value allows us to detect crossing a page boundary.
	 * When %r10 goes positive we have crossed a page boundary and
	 * need to do a nibble.
	 */
	lea 11(%rdi), %r10
	and $0xfff, %r10 /* offset into 4K page */
	sub $0x1000, %r10 /* subtract 4K pagesize */

	.p2align 4
LABEL(loop_ashr_11):
	add $16, %r10
	jg LABEL(nibble_ashr_11)

LABEL(gobble_ashr_11):
	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $11, %xmm3
	pslldq $5, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $11, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3

	add $16, %r10
	jg LABEL(nibble_ashr_11) /* cross page boundary */

	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $11, %xmm3
	pslldq $5, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $11, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3
	jmp LABEL(loop_ashr_11)

	.p2align 4
LABEL(nibble_ashr_11):
	pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
	pmovmskb %xmm0, %edx
	test $0xf800, %edx
	jnz LABEL(ashr_11_exittail)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp $5, %r11
	jbe LABEL(ashr_11_exittail)
# endif

	pxor %xmm0, %xmm0
	sub $0x1000, %r10
	jmp LABEL(gobble_ashr_11)

	.p2align 4
LABEL(ashr_11_exittail):
	movdqa (%rsi, %rcx), %xmm1
	psrldq $11, %xmm0
	psrldq $11, %xmm3
	jmp LABEL(aftertail)

	/*
	 * The following cases will be handled by ashr_12
	 * rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
	 * n(4~15) n - 4 11(15 +(n - 4) - n) ashr_12
	 */
	.p2align 4
LABEL(ashr_12):
	pxor %xmm0, %xmm0
	movdqa (%rdi), %xmm2
	movdqa (%rsi), %xmm1
	pcmpeqb %xmm1, %xmm0
	pslldq $4, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb %xmm1, %xmm2
	psubb %xmm0, %xmm2
	pmovmskb %xmm2, %r9d
	shr %cl, %edx
	shr %cl, %r9d
	sub %r9d, %edx
	jnz LABEL(less32bytes)
	movdqa (%rdi), %xmm3

	UPDATE_STRNCMP_COUNTER

	pxor %xmm0, %xmm0
	mov $16, %rcx /* index for loads */
	mov $12, %r9d /* byte position left over from less32bytes case */
	/*
	 * Setup %r10 value allows us to detect crossing a page boundary.
	 * When %r10 goes positive we have crossed a page boundary and
	 * need to do a nibble.
	 */
	lea 12(%rdi), %r10
	and $0xfff, %r10 /* offset into 4K page */
	sub $0x1000, %r10 /* subtract 4K pagesize */

	.p2align 4
LABEL(loop_ashr_12):
	add $16, %r10
	jg LABEL(nibble_ashr_12)

LABEL(gobble_ashr_12):
	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $12, %xmm3
	pslldq $4, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $12, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3

	add $16, %r10
	jg LABEL(nibble_ashr_12) /* cross page boundary */

	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $12, %xmm3
	pslldq $4, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $12, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3
	jmp LABEL(loop_ashr_12)

	.p2align 4
LABEL(nibble_ashr_12):
	pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
	pmovmskb %xmm0, %edx
	test $0xf000, %edx
	jnz LABEL(ashr_12_exittail)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp $4, %r11
	jbe LABEL(ashr_12_exittail)
# endif

	pxor %xmm0, %xmm0
	sub $0x1000, %r10
	jmp LABEL(gobble_ashr_12)

	.p2align 4
LABEL(ashr_12_exittail):
	movdqa (%rsi, %rcx), %xmm1
	psrldq $12, %xmm0
	psrldq $12, %xmm3
	jmp LABEL(aftertail)

	/*
	 * The following cases will be handled by ashr_13
	 * rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
	 * n(3~15) n - 3 12(15 +(n - 3) - n) ashr_13
	 */
	.p2align 4
LABEL(ashr_13):
	pxor %xmm0, %xmm0
	movdqa (%rdi), %xmm2
	movdqa (%rsi), %xmm1
	pcmpeqb %xmm1, %xmm0
	pslldq $3, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb %xmm1, %xmm2
	psubb %xmm0, %xmm2
	pmovmskb %xmm2, %r9d
	shr %cl, %edx
	shr %cl, %r9d
	sub %r9d, %edx
	jnz LABEL(less32bytes)
	movdqa (%rdi), %xmm3

	UPDATE_STRNCMP_COUNTER

	pxor %xmm0, %xmm0
	mov $16, %rcx /* index for loads */
	mov $13, %r9d /* byte position left over from less32bytes case */
	/*
	 * Setup %r10 value allows us to detect crossing a page boundary.
	 * When %r10 goes positive we have crossed a page boundary and
	 * need to do a nibble.
	 */
	lea 13(%rdi), %r10
	and $0xfff, %r10 /* offset into 4K page */
	sub $0x1000, %r10 /* subtract 4K pagesize */

	.p2align 4
LABEL(loop_ashr_13):
	add $16, %r10
	jg LABEL(nibble_ashr_13)

LABEL(gobble_ashr_13):
	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $13, %xmm3
	pslldq $3, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $13, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3

	add $16, %r10
	jg LABEL(nibble_ashr_13) /* cross page boundary */

	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $13, %xmm3
	pslldq $3, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $13, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3
	jmp LABEL(loop_ashr_13)

	.p2align 4
LABEL(nibble_ashr_13):
	pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
	pmovmskb %xmm0, %edx
	test $0xe000, %edx
	jnz LABEL(ashr_13_exittail)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp $3, %r11
	jbe LABEL(ashr_13_exittail)
# endif

	pxor %xmm0, %xmm0
	sub $0x1000, %r10
	jmp LABEL(gobble_ashr_13)

	.p2align 4
LABEL(ashr_13_exittail):
	movdqa (%rsi, %rcx), %xmm1
	psrldq $13, %xmm0
	psrldq $13, %xmm3
	jmp LABEL(aftertail)

	/*
	 * The following cases will be handled by ashr_14
	 * rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
	 * n(2~15) n - 2 13(15 +(n - 2) - n) ashr_14
	 */
	.p2align 4
LABEL(ashr_14):
	pxor %xmm0, %xmm0
	movdqa (%rdi), %xmm2
	movdqa (%rsi), %xmm1
	pcmpeqb %xmm1, %xmm0
	pslldq $2, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb %xmm1, %xmm2
	psubb %xmm0, %xmm2
	pmovmskb %xmm2, %r9d
	shr %cl, %edx
	shr %cl, %r9d
	sub %r9d, %edx
	jnz LABEL(less32bytes)
	movdqa (%rdi), %xmm3

	UPDATE_STRNCMP_COUNTER

	pxor %xmm0, %xmm0
	mov $16, %rcx /* index for loads */
	mov $14, %r9d /* byte position left over from less32bytes case */
	/*
	 * Setup %r10 value allows us to detect crossing a page boundary.
	 * When %r10 goes positive we have crossed a page boundary and
	 * need to do a nibble.
	 */
	lea 14(%rdi), %r10
	and $0xfff, %r10 /* offset into 4K page */
	sub $0x1000, %r10 /* subtract 4K pagesize */

	.p2align 4
LABEL(loop_ashr_14):
	add $16, %r10
	jg LABEL(nibble_ashr_14)

LABEL(gobble_ashr_14):
	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $14, %xmm3
	pslldq $2, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $14, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3

	add $16, %r10
	jg LABEL(nibble_ashr_14) /* cross page boundary */

	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $14, %xmm3
	pslldq $2, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $14, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3
	jmp LABEL(loop_ashr_14)

	.p2align 4
LABEL(nibble_ashr_14):
	pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
	pmovmskb %xmm0, %edx
	test $0xc000, %edx
	jnz LABEL(ashr_14_exittail)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp $2, %r11
	jbe LABEL(ashr_14_exittail)
# endif

	pxor %xmm0, %xmm0
	sub $0x1000, %r10
	jmp LABEL(gobble_ashr_14)

	.p2align 4
LABEL(ashr_14_exittail):
	movdqa (%rsi, %rcx), %xmm1
	psrldq $14, %xmm0
	psrldq $14, %xmm3
	jmp LABEL(aftertail)

	/*
	 * The following cases will be handled by ashr_15
	 * rcx(offset of rsi) rax(offset of rdi) relative offset corresponding case
	 * n(1~15) n - 1 14(15 +(n - 1) - n) ashr_15
	 */
	.p2align 4
LABEL(ashr_15):
	pxor %xmm0, %xmm0
	movdqa (%rdi), %xmm2
	movdqa (%rsi), %xmm1
	pcmpeqb %xmm1, %xmm0
	pslldq $1, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb %xmm1, %xmm2
	psubb %xmm0, %xmm2
	pmovmskb %xmm2, %r9d
	shr %cl, %edx
	shr %cl, %r9d
	sub %r9d, %edx
	jnz LABEL(less32bytes)

	movdqa (%rdi), %xmm3

	UPDATE_STRNCMP_COUNTER

	pxor %xmm0, %xmm0
	mov $16, %rcx /* index for loads */
	mov $15, %r9d /* byte position left over from less32bytes case */
	/*
	 * Setup %r10 value allows us to detect crossing a page boundary.
	 * When %r10 goes positive we have crossed a page boundary and
	 * need to do a nibble.
	 */
	lea 15(%rdi), %r10
	and $0xfff, %r10 /* offset into 4K page */

	sub $0x1000, %r10 /* subtract 4K pagesize */

	.p2align 4
LABEL(loop_ashr_15):
	add $16, %r10
	jg LABEL(nibble_ashr_15)

LABEL(gobble_ashr_15):
	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $15, %xmm3
	pslldq $1, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $15, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3

	add $16, %r10
	jg LABEL(nibble_ashr_15) /* cross page boundary */

	movdqa (%rsi, %rcx), %xmm1
	movdqa (%rdi, %rcx), %xmm2
	movdqa %xmm2, %xmm4

# ifndef USE_SSSE3
	psrldq $15, %xmm3
	pslldq $1, %xmm2
	por %xmm3, %xmm2 /* merge into one 16byte value */
# else
	palignr $15, %xmm3, %xmm2 /* merge into one 16byte value */
# endif
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb %xmm1, %xmm0
	pcmpeqb %xmm2, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	sub $0xffff, %edx
	jnz LABEL(exit)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub $16, %r11
	jbe LABEL(strcmp_exitz)
# endif

	add $16, %rcx
	movdqa %xmm4, %xmm3
	jmp LABEL(loop_ashr_15)

	.p2align 4
LABEL(nibble_ashr_15):
	pcmpeqb %xmm3, %xmm0 /* check nibble for null char */
	pmovmskb %xmm0, %edx
	test $0x8000, %edx
	jnz LABEL(ashr_15_exittail)

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmpq $1, %r11
	jbe LABEL(ashr_15_exittail)
# endif

	pxor %xmm0, %xmm0
	sub $0x1000, %r10
	jmp LABEL(gobble_ashr_15)

	.p2align 4
LABEL(ashr_15_exittail):
	movdqa (%rsi, %rcx), %xmm1
	psrldq $15, %xmm3
	psrldq $15, %xmm0

	.p2align 4
LABEL(aftertail):
	TOLOWER (%xmm1, %xmm3)
	pcmpeqb %xmm3, %xmm1
	psubb %xmm0, %xmm1
	pmovmskb %xmm1, %edx
	not %edx

	.p2align 4
LABEL(exit):
	lea -16(%r9, %rcx), %rax /* locate the exact offset for rdi */
LABEL(less32bytes):
	lea (%rdi, %rax), %rdi /* locate the exact address for first operand(rdi) */
	lea (%rsi, %rcx), %rsi /* locate the exact address for second operand(rsi) */
	test %r8d, %r8d
	jz LABEL(ret)
	xchg %rsi, %rdi /* recover original order according to flag(%r8d) */

	.p2align 4
LABEL(ret):
LABEL(less16bytes):
	bsf %rdx, %rdx /* find and store bit index in %rdx */

# if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	sub %rdx, %r11
	jbe LABEL(strcmp_exitz)
# endif
	movzbl (%rsi, %rdx), %ecx
	movzbl (%rdi, %rdx), %eax

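	/* For the case-insensitive variants, fold both bytes through the
	   tolower table before subtracting.  The table is laid out so that
	   int32 entries for character values -128..255 are valid, which is
	   why the base address is biased by 128*4 bytes below.  */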
# if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	leaq _nl_C_LC_CTYPE_tolower+128*4(%rip), %rdx
	movl (%rdx,%rcx,4), %ecx
	movl (%rdx,%rax,4), %eax
# endif

	sub %ecx, %eax
	ret

LABEL(strcmp_exitz):
	xor %eax, %eax
	ret

	.p2align 4
LABEL(Byte0):
	movzx (%rsi), %ecx
	movzx (%rdi), %eax

# if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	leaq _nl_C_LC_CTYPE_tolower+128*4(%rip), %rdx
	movl (%rdx,%rcx,4), %ecx
	movl (%rdx,%rax,4), %eax
# endif

	sub %ecx, %eax
	ret
END (STRCMP)

	.section .rodata,"a",@progbits
	.p2align 3
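	/* Dispatch table for the unaligned cases above: each entry is a
	   32-bit offset of the corresponding ashr_* entry point relative to
	   the start of the table, so the dispatcher can form the target
	   address with a single add and stay position-independent.  */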
LABEL(unaligned_table):
	.int LABEL(ashr_1) - LABEL(unaligned_table)
	.int LABEL(ashr_2) - LABEL(unaligned_table)
	.int LABEL(ashr_3) - LABEL(unaligned_table)
	.int LABEL(ashr_4) - LABEL(unaligned_table)
	.int LABEL(ashr_5) - LABEL(unaligned_table)
	.int LABEL(ashr_6) - LABEL(unaligned_table)
	.int LABEL(ashr_7) - LABEL(unaligned_table)
	.int LABEL(ashr_8) - LABEL(unaligned_table)
	.int LABEL(ashr_9) - LABEL(unaligned_table)
	.int LABEL(ashr_10) - LABEL(unaligned_table)
	.int LABEL(ashr_11) - LABEL(unaligned_table)
	.int LABEL(ashr_12) - LABEL(unaligned_table)
	.int LABEL(ashr_13) - LABEL(unaligned_table)
	.int LABEL(ashr_14) - LABEL(unaligned_table)
	.int LABEL(ashr_15) - LABEL(unaligned_table)
	.int LABEL(ashr_0) - LABEL(unaligned_table)
#endif /* !IS_IN (libc) */

libc_hidden_builtin_def (STRCMP)