/* strcmp with SSSE3
   Copyright (C) 2010-2019 Free Software Foundation, Inc.
   Contributed by Intel Corporation.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */
#if IS_IN (libc)

#include <sysdep.h>
#include "asm-syntax.h"

#define CFI_PUSH(REG)	\
	cfi_adjust_cfa_offset (4);	\
	cfi_rel_offset (REG, 0)

#define CFI_POP(REG)	\
	cfi_adjust_cfa_offset (-4);	\
	cfi_restore (REG)

#define PUSH(REG)	pushl REG; CFI_PUSH (REG)
#define POP(REG)	popl REG; CFI_POP (REG)

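/* Each variant built from this file (strcmp, strncmp, strcasecmp_l,
   strncasecmp_l) selects its own parameters below: STR1/STR2/CNT/LOCALE
   are the stack byte offsets of the incoming arguments (they grow when
   %ebx or REM has been pushed first), REM is the register that holds
   the remaining strncmp/strncasecmp byte count, FLAGS records whether
   the two pointers were swapped and by how much they are misaligned,
   and RETURN pops whatever the entry path pushed.  */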
#ifdef USE_AS_STRNCMP
# ifndef STRCMP
#  define STRCMP	__strncmp_ssse3
# endif
# define STR1		8
# define STR2		STR1+4
# define CNT		STR2+4
# define RETURN		POP (REM); ret; .p2align 4; CFI_PUSH (REM)
# define UPDATE_STRNCMP_COUNTER				\
	/* calculate left number to compare */		\
	mov	$16, %esi;				\
	sub	%ecx, %esi;				\
	cmp	%esi, REM;				\
	jbe	L(more8byteseq);			\
	sub	%esi, REM
# define FLAGS		%ebx
# define REM		%ebp
#elif defined USE_AS_STRCASECMP_L
# include "locale-defines.h"
# ifndef STRCMP
#  define STRCMP	__strcasecmp_l_ssse3
# endif
# ifdef PIC
#  define STR1		8
# else
#  define STR1		4
# endif
# define STR2		STR1+4
# define LOCALE		12	/* Loaded before the adjustment.  */
# ifdef PIC
#  define RETURN	POP (%ebx); ret; .p2align 4; CFI_PUSH (%ebx)
# else
#  define RETURN	ret; .p2align 4
# endif
# define UPDATE_STRNCMP_COUNTER
# define FLAGS		(%esp)
# define NONASCII	__strcasecmp_nonascii
#elif defined USE_AS_STRNCASECMP_L
# include "locale-defines.h"
# ifndef STRCMP
#  define STRCMP	__strncasecmp_l_ssse3
# endif
# ifdef PIC
#  define STR1		12
# else
#  define STR1		8
# endif
# define STR2		STR1+4
# define CNT		STR2+4
# define LOCALE		16	/* Loaded before the adjustment.  */
# ifdef PIC
#  define RETURN	POP (REM); POP (%ebx); ret; \
			.p2align 4; CFI_PUSH (%ebx); CFI_PUSH (REM)
# else
#  define RETURN	POP (REM); ret; .p2align 4; CFI_PUSH (REM)
# endif
# define UPDATE_STRNCMP_COUNTER				\
	/* calculate left number to compare */		\
	mov	$16, %esi;				\
	sub	%ecx, %esi;				\
	cmp	%esi, REM;				\
	jbe	L(more8byteseq);			\
	sub	%esi, REM
# define FLAGS		(%esp)
# define REM		%ebp
# define NONASCII	__strncasecmp_nonascii
#else
# ifndef STRCMP
#  define STRCMP	__strcmp_ssse3
# endif
# define STR1		4
# define STR2		STR1+4
# define RETURN		ret; .p2align 4
# define UPDATE_STRNCMP_COUNTER
# define FLAGS		%ebx
#endif

	.section .text.ssse3,"ax",@progbits

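/* strcasecmp/strncasecmp entry points: fetch the thread's current
   locale from TSD and, if its LC_CTYPE data is not plain ASCII
   (_NL_CTYPE_NONASCII_CASE set), defer to the generic _nonascii
   routine; otherwise fall through to the SSSE3 ASCII path below.  */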
#ifdef USE_AS_STRCASECMP_L
ENTRY (__strcasecmp_ssse3)
# ifdef PIC
	PUSH	(%ebx)
	LOAD_PIC_REG(bx)
	movl	__libc_tsd_LOCALE@GOTNTPOFF(%ebx), %eax
#  ifdef NO_TLS_DIRECT_SEG_REFS
	addl	%gs:0, %eax
	movl	(%eax), %eax
#  else
	movl	%gs:(%eax), %eax
#  endif
# else
#  ifdef NO_TLS_DIRECT_SEG_REFS
	movl	%gs:0, %eax
	movl	__libc_tsd_LOCALE@NTPOFF(%eax), %eax
#  else
	movl	%gs:__libc_tsd_LOCALE@NTPOFF, %eax
#  endif
# endif
# if LOCALE_T___LOCALES != 0 || LC_CTYPE != 0
	movl	LOCALE_T___LOCALES+LC_CTYPE*4(%eax), %eax
# else
	movl	(%eax), %eax
# endif
	testl	$1, LOCALE_DATA_VALUES+_NL_CTYPE_NONASCII_CASE*SIZEOF_VALUES(%eax)
# ifdef PIC
	je	L(ascii)
	POP	(%ebx)
	jmp	__strcasecmp_nonascii
# else
	jne	__strcasecmp_nonascii
	jmp	L(ascii)
# endif
END (__strcasecmp_ssse3)
#endif

#ifdef USE_AS_STRNCASECMP_L
ENTRY (__strncasecmp_ssse3)
# ifdef PIC
	PUSH	(%ebx)
	LOAD_PIC_REG(bx)
	movl	__libc_tsd_LOCALE@GOTNTPOFF(%ebx), %eax
#  ifdef NO_TLS_DIRECT_SEG_REFS
	addl	%gs:0, %eax
	movl	(%eax), %eax
#  else
	movl	%gs:(%eax), %eax
#  endif
# else
#  ifdef NO_TLS_DIRECT_SEG_REFS
	movl	%gs:0, %eax
	movl	__libc_tsd_LOCALE@NTPOFF(%eax), %eax
#  else
	movl	%gs:__libc_tsd_LOCALE@NTPOFF, %eax
#  endif
# endif
# if LOCALE_T___LOCALES != 0 || LC_CTYPE != 0
	movl	LOCALE_T___LOCALES+LC_CTYPE*4(%eax), %eax
# else
	movl	(%eax), %eax
# endif
	testl	$1, LOCALE_DATA_VALUES+_NL_CTYPE_NONASCII_CASE*SIZEOF_VALUES(%eax)
# ifdef PIC
	je	L(ascii)
	POP	(%ebx)
	jmp	__strncasecmp_nonascii
# else
	jne	__strncasecmp_nonascii
	jmp	L(ascii)
# endif
END (__strncasecmp_ssse3)
#endif

ENTRY (STRCMP)
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	movl	LOCALE(%esp), %eax
# if LOCALE_T___LOCALES != 0 || LC_CTYPE != 0
	movl	LOCALE_T___LOCALES+LC_CTYPE*4(%eax), %eax
# else
	movl	(%eax), %eax
# endif
	testl	$1, LOCALE_DATA_VALUES+_NL_CTYPE_NONASCII_CASE*SIZEOF_VALUES(%eax)
	jne	NONASCII

# ifdef PIC
	PUSH	(%ebx)
	LOAD_PIC_REG(bx)
# endif
L(ascii):
	.section .rodata.cst16,"aM",@progbits,16
	.align 16
.Lbelowupper:
	.quad	0x4040404040404040
	.quad	0x4040404040404040
.Ltopupper:
	.quad	0x5b5b5b5b5b5b5b5b
	.quad	0x5b5b5b5b5b5b5b5b
.Ltouppermask:
	.quad	0x2020202020202020
	.quad	0x2020202020202020
	.previous
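
/* The three 16-byte constants above drive the TOLOWER macro:
   0x40 is 'A'-1, 0x5b is 'Z'+1 and 0x20 is the ASCII case bit.
   Per byte this is roughly:  if (c > 'A'-1 && c < 'Z'+1) c |= 0x20;
   i.e. uppercase ASCII letters are folded to lowercase and every
   other byte is left untouched.  */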
# ifdef PIC
#  define UCLOW_reg	.Lbelowupper@GOTOFF(%ebx)
#  define UCHIGH_reg	.Ltopupper@GOTOFF(%ebx)
#  define LCQWORD_reg	.Ltouppermask@GOTOFF(%ebx)
# else
#  define UCLOW_reg	.Lbelowupper
#  define UCHIGH_reg	.Ltopupper
#  define LCQWORD_reg	.Ltouppermask
# endif
#endif

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	PUSH	(REM)
#endif

	movl	STR1(%esp), %edx
	movl	STR2(%esp), %eax
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	movl	CNT(%esp), REM
	cmp	$16, REM
	jb	L(less16bytes_sncmp)
#elif !defined USE_AS_STRCASECMP_L
	movzbl	(%eax), %ecx
	cmpb	%cl, (%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	1(%eax), %ecx
	cmpb	%cl, 1(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	2(%eax), %ecx
	cmpb	%cl, 2(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	3(%eax), %ecx
	cmpb	%cl, 3(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	4(%eax), %ecx
	cmpb	%cl, 4(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	5(%eax), %ecx
	cmpb	%cl, 5(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	6(%eax), %ecx
	cmpb	%cl, 6(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	movzbl	7(%eax), %ecx
	cmpb	%cl, 7(%edx)
	jne	L(neq)
	cmpl	$0, %ecx
	je	L(eq)

	add	$8, %edx
	add	$8, %eax
#endif
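
	/* Check whether either pointer is within 16 bytes of the end of
	   a 4 KiB page; if so a 16-byte load could touch the next page
	   and fault past the end of a short string, so take the
	   cross-page path instead.  */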
	movl	%edx, %ecx
	and	$0xfff, %ecx
	cmp	$0xff0, %ecx
	ja	L(crosspage)
	mov	%eax, %ecx
	and	$0xfff, %ecx
	cmp	$0xff0, %ecx
	ja	L(crosspage)
	pxor	%xmm0, %xmm0
	movlpd	(%eax), %xmm1
	movlpd	(%edx), %xmm2
	movhpd	8(%eax), %xmm1
	movhpd	8(%edx), %xmm2
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# define TOLOWER(reg1, reg2) \
	movdqa	reg1, %xmm5;					\
	movdqa	reg2, %xmm7;					\
	movdqa	UCHIGH_reg, %xmm6;				\
	pcmpgtb	UCLOW_reg, %xmm5;				\
	pcmpgtb	UCLOW_reg, %xmm7;				\
	pcmpgtb	reg1, %xmm6;					\
	pand	%xmm6, %xmm5;					\
	movdqa	UCHIGH_reg, %xmm6;				\
	pcmpgtb	reg2, %xmm6;					\
	pand	%xmm6, %xmm7;					\
	pand	LCQWORD_reg, %xmm5;				\
	por	%xmm5, reg1;					\
	pand	LCQWORD_reg, %xmm7;				\
	por	%xmm7, reg2

	TOLOWER (%xmm1, %xmm2)
#else
# define TOLOWER(reg1, reg2)
#endif
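
	/* The pcmpeqb/psubb/pmovmskb sequence below builds a per-byte
	   mask that has a 1 only where the two strings agree on a
	   non-NUL byte: %xmm0 marks NUL bytes, %xmm1 marks equal bytes,
	   and the subtraction clears the sign bit of any lane that is
	   NUL or differs.  The mask is 0xffff exactly when all 16 bytes
	   match and contain no NUL.  */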
	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %ecx
	sub	$0xffff, %ecx
	jnz	L(less16bytes)
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(eq)
#endif
	add	$16, %eax
	add	$16, %edx

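/* Cross-page / unaligned path.  Both pointers are rounded down to a
   16-byte boundary; the difference between their offsets picks one of
   the L(ashr_N) cases below, which rebuild one operand from two
   consecutive aligned 16-byte blocks with palignr so that every load
   in the main loop is aligned and never reads into an unmapped
   page.  */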
L(crosspage):

#if !defined USE_AS_STRCASECMP_L && !defined USE_AS_STRNCASECMP_L
	PUSH	(FLAGS)
#endif
	PUSH	(%edi)
	PUSH	(%esi)
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	pushl	$0
	cfi_adjust_cfa_offset (4)
#endif
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cfi_remember_state
#endif

	movl	%edx, %edi
	movl	%eax, %ecx
	and	$0xf, %ecx
	and	$0xf, %edi
	xor	%ecx, %eax
	xor	%edi, %edx
#if !defined USE_AS_STRCASECMP_L && !defined USE_AS_STRNCASECMP_L
	xor	FLAGS, FLAGS
#endif
	cmp	%edi, %ecx
	je	L(ashr_0)
	ja	L(bigger)
	orl	$0x20, FLAGS
	xchg	%edx, %eax
	xchg	%ecx, %edi
L(bigger):
	lea	15(%edi), %edi
	sub	%ecx, %edi
	cmp	$8, %edi
	jle	L(ashr_less_8)
	cmp	$14, %edi
	je	L(ashr_15)
	cmp	$13, %edi
	je	L(ashr_14)
	cmp	$12, %edi
	je	L(ashr_13)
	cmp	$11, %edi
	je	L(ashr_12)
	cmp	$10, %edi
	je	L(ashr_11)
	cmp	$9, %edi
	je	L(ashr_10)
L(ashr_less_8):
	je	L(ashr_9)
	cmp	$7, %edi
	je	L(ashr_8)
	cmp	$6, %edi
	je	L(ashr_7)
	cmp	$5, %edi
	je	L(ashr_6)
	cmp	$4, %edi
	je	L(ashr_5)
	cmp	$3, %edi
	je	L(ashr_4)
	cmp	$2, %edi
	je	L(ashr_3)
	cmp	$1, %edi
	je	L(ashr_2)
	cmp	$0, %edi
	je	L(ashr_1)

/*
 * The following cases will be handled by ashr_0
 * ecx(offset of esi)  eax(offset of edi)  relative offset	corresponding case
 *	n(0~15)		n(0~15)		15(15+ n-n)	     ashr_0
 */
	.p2align 4
L(ashr_0):
	mov	$0xffff, %esi
	movdqa	(%eax), %xmm1
	pxor	%xmm0, %xmm0
	pcmpeqb	%xmm1, %xmm0
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	movdqa	(%edx), %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb	%xmm2, %xmm1
#else
	pcmpeqb	(%edx), %xmm1
#endif
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %edi
	shr	%cl, %esi
	shr	%cl, %edi
	sub	%edi, %esi
	mov	%ecx, %edi
	jne	L(less32bytes)

	UPDATE_STRNCMP_COUNTER

	movl	$0x10, FLAGS
	mov	$0x10, %ecx
	pxor	%xmm0, %xmm0
	.p2align 4
L(loop_ashr_0):
	movdqa	(%eax, %ecx), %xmm1
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	movdqa	(%edx, %ecx), %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
#else
	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	(%edx, %ecx), %xmm1
#endif
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	jmp	L(loop_ashr_0)

/*
 * The following cases will be handled by ashr_1
 * ecx(offset of esi)  eax(offset of edi)  relative offset	corresponding case
 *	  n(15)		   n -15	    0(15 +(n-15) - n)	    ashr_1
 */
	.p2align 4
L(ashr_1):
	mov	$0xffff, %esi
	pxor	%xmm0, %xmm0
	movdqa	(%edx), %xmm2
	movdqa	(%eax), %xmm1
	pcmpeqb	%xmm1, %xmm0
	pslldq	$15, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb	%xmm1, %xmm2
	psubb	%xmm0, %xmm2
	pmovmskb %xmm2, %edi
	shr	%cl, %esi
	shr	%cl, %edi
	sub	%edi, %esi
	lea	-15(%ecx), %edi
	jnz	L(less32bytes)

	UPDATE_STRNCMP_COUNTER

	movdqa	(%edx), %xmm3
	pxor	%xmm0, %xmm0
	mov	$16, %ecx
	orl	$1, FLAGS
	lea	1(%edx), %edi
	and	$0xfff, %edi
	sub	$0x1000, %edi

	.p2align 4
L(loop_ashr_1):
	add	$16, %edi
	jg	L(nibble_ashr_1)

L(gobble_ashr_1):
	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$1, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif

	add	$16, %ecx
	movdqa	%xmm4, %xmm3

	add	$16, %edi
	jg	L(nibble_ashr_1)

	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$1, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3
	jmp	L(loop_ashr_1)

	.p2align 4
L(nibble_ashr_1):
	pcmpeqb	%xmm3, %xmm0
	pmovmskb %xmm0, %esi
	test	$0xfffe, %esi
	jnz	L(ashr_1_exittail)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$15, REM
	jbe	L(ashr_1_exittail)
#endif
	pxor	%xmm0, %xmm0
	sub	$0x1000, %edi
	jmp	L(gobble_ashr_1)

	.p2align 4
L(ashr_1_exittail):
	movdqa	(%eax, %ecx), %xmm1
	psrldq	$1, %xmm0
	psrldq	$1, %xmm3
	jmp	L(aftertail)

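/* L(ashr_2) through L(ashr_15) below repeat the template used by
   L(ashr_1): only the palignr/pslldq/psrldq shift amount, the value
   or'ed into FLAGS, the lea bias on %edx, and the nibble-test mask
   change with the relative misalignment N.  */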
/*
|
|
|
|
* The following cases will be handled by ashr_2
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(14~15) n -14 1(15 +(n-14) - n) ashr_2
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_2):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $14, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -14(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $2, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 2(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_2):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_2)
|
|
|
|
|
|
|
|
L(gobble_ashr_2):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $2, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_2)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $2, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_2)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_2):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xfffc, %esi
|
|
|
|
jnz L(ashr_2_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $14, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_2_exittail)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_2)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_2_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $2, %xmm0
|
|
|
|
psrldq $2, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_3
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(13~15) n -13 2(15 +(n-13) - n) ashr_3
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_3):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $13, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -13(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $3, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 3(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_3):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_3)
|
|
|
|
|
|
|
|
L(gobble_ashr_3):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $3, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_3)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $3, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_3)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_3):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xfff8, %esi
|
|
|
|
jnz L(ashr_3_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $13, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_3_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_3)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_3_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $3, %xmm0
|
|
|
|
psrldq $3, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_4
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(12~15) n -12 3(15 +(n-12) - n) ashr_4
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_4):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $12, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -12(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $4, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 4(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_4):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_4)
|
|
|
|
|
|
|
|
L(gobble_ashr_4):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $4, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_4)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $4, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_4)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_4):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xfff0, %esi
|
|
|
|
jnz L(ashr_4_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $12, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_4_exittail)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_4)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_4_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $4, %xmm0
|
|
|
|
psrldq $4, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_5
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(11~15) n -11 4(15 +(n-11) - n) ashr_5
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_5):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $11, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -11(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $5, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 5(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_5):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_5)
|
|
|
|
|
|
|
|
L(gobble_ashr_5):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $5, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_5)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $5, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_5)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_5):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xffe0, %esi
|
|
|
|
jnz L(ashr_5_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $11, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_5_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_5)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_5_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $5, %xmm0
|
|
|
|
psrldq $5, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_6
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(10~15) n -10 5(15 +(n-10) - n) ashr_6
|
|
|
|
*/
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_6):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $10, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -10(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $6, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 6(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_6):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_6)
|
|
|
|
|
|
|
|
L(gobble_ashr_6):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $6, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_6)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $6, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_6)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_6):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xffc0, %esi
|
|
|
|
jnz L(ashr_6_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $10, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_6_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_6)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_6_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $6, %xmm0
|
|
|
|
psrldq $6, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_7
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(9~15) n - 9 6(15 +(n-9) - n) ashr_7
|
|
|
|
*/
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_7):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $9, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -9(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $7, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 8(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_7):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_7)
|
|
|
|
|
|
|
|
L(gobble_ashr_7):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $7, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_7)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $7, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_7)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_7):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xff80, %esi
|
|
|
|
jnz L(ashr_7_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $9, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_7_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_7)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_7_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $7, %xmm0
|
|
|
|
psrldq $7, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_8
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(8~15) n - 8 7(15 +(n-8) - n) ashr_8
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_8):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $8, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -8(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $8, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 8(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_8):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_8)
|
|
|
|
|
|
|
|
L(gobble_ashr_8):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $8, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_8)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $8, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_8)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_8):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xff00, %esi
|
|
|
|
jnz L(ashr_8_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $8, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_8_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_8)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_8_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $8, %xmm0
|
|
|
|
psrldq $8, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_9
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(7~15) n - 7 8(15 +(n-7) - n) ashr_9
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_9):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $7, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -7(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $9, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 9(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_9):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_9)
|
|
|
|
|
|
|
|
L(gobble_ashr_9):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $9, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_9)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $9, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_9)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_9):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xfe00, %esi
|
|
|
|
jnz L(ashr_9_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $7, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_9_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_9)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_9_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $9, %xmm0
|
|
|
|
psrldq $9, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_10
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(6~15) n - 6 9(15 +(n-6) - n) ashr_10
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_10):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $6, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -6(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $10, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 10(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_10):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_10)
|
|
|
|
|
|
|
|
L(gobble_ashr_10):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $10, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_10)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $10, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_10)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_10):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xfc00, %esi
|
|
|
|
jnz L(ashr_10_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $6, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_10_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_10)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_10_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $10, %xmm0
|
|
|
|
psrldq $10, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_11
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(5~15) n - 5 10(15 +(n-5) - n) ashr_11
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_11):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $5, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -5(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $11, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 11(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_11):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_11)
|
|
|
|
|
|
|
|
L(gobble_ashr_11):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $11, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_11)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $11, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_11)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_11):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xf800, %esi
|
|
|
|
jnz L(ashr_11_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $5, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_11_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_11)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_11_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $11, %xmm0
|
|
|
|
psrldq $11, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_12
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(4~15) n - 4 11(15 +(n-4) - n) ashr_12
|
|
|
|
*/
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_12):
|
|
|
|
mov $0xffff, %esi
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
movdqa (%edx), %xmm2
|
|
|
|
movdqa (%eax), %xmm1
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pslldq $4, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
pcmpeqb %xmm1, %xmm2
|
|
|
|
psubb %xmm0, %xmm2
|
|
|
|
pmovmskb %xmm2, %edi
|
|
|
|
shr %cl, %esi
|
|
|
|
shr %cl, %edi
|
|
|
|
sub %edi, %esi
|
|
|
|
lea -4(%ecx), %edi
|
|
|
|
jnz L(less32bytes)
|
|
|
|
|
|
|
|
UPDATE_STRNCMP_COUNTER
|
|
|
|
|
|
|
|
movdqa (%edx), %xmm3
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
mov $16, %ecx
|
2011-11-13 14:50:13 +00:00
|
|
|
orl $12, FLAGS
|
2010-02-15 19:17:50 +00:00
|
|
|
lea 12(%edx), %edi
|
|
|
|
and $0xfff, %edi
|
|
|
|
sub $0x1000, %edi
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(loop_ashr_12):
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_12)
|
|
|
|
|
|
|
|
L(gobble_ashr_12):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $12, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-04-15 14:49:30 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
|
2010-02-15 19:17:50 +00:00
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
|
|
|
|
add $16, %edi
|
|
|
|
jg L(nibble_ashr_12)
|
|
|
|
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
movdqa (%edx, %ecx), %xmm2
|
|
|
|
movdqa %xmm2, %xmm4
|
|
|
|
|
|
|
|
palignr $12, %xmm3, %xmm2
|
2011-11-13 14:50:13 +00:00
|
|
|
TOLOWER (%xmm1, %xmm2)
|
2010-02-15 19:17:50 +00:00
|
|
|
|
|
|
|
pcmpeqb %xmm1, %xmm0
|
|
|
|
pcmpeqb %xmm2, %xmm1
|
|
|
|
psubb %xmm0, %xmm1
|
|
|
|
pmovmskb %xmm1, %esi
|
|
|
|
sub $0xffff, %esi
|
|
|
|
jnz L(exit)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $16, REM
|
|
|
|
lea -16(REM), REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(more8byteseq)
|
|
|
|
#endif
|
|
|
|
add $16, %ecx
|
|
|
|
movdqa %xmm4, %xmm3
|
|
|
|
jmp L(loop_ashr_12)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(nibble_ashr_12):
|
|
|
|
pcmpeqb %xmm3, %xmm0
|
|
|
|
pmovmskb %xmm0, %esi
|
|
|
|
test $0xf000, %esi
|
|
|
|
jnz L(ashr_12_exittail)
|
|
|
|
|
2011-11-13 14:50:13 +00:00
|
|
|
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
|
|
|
|
cmp $4, REM
|
2010-02-15 19:17:50 +00:00
|
|
|
jbe L(ashr_12_exittail)
|
|
|
|
#endif
|
|
|
|
pxor %xmm0, %xmm0
|
|
|
|
sub $0x1000, %edi
|
|
|
|
jmp L(gobble_ashr_12)
|
|
|
|
|
|
|
|
.p2align 4
|
|
|
|
L(ashr_12_exittail):
|
|
|
|
movdqa (%eax, %ecx), %xmm1
|
|
|
|
psrldq $12, %xmm0
|
|
|
|
psrldq $12, %xmm3
|
|
|
|
jmp L(aftertail)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The following cases will be handled by ashr_13
|
2010-02-15 21:04:54 +00:00
|
|
|
* ecx(offset of esi) eax(offset of edi) relative offset corresponding case
|
2010-02-15 19:17:50 +00:00
|
|
|
* n(3~15) n - 3 12(15 +(n-3) - n) ashr_13
|
|
|
|
*/
	.p2align 4
L(ashr_13):
	mov	$0xffff, %esi
	pxor	%xmm0, %xmm0
	movdqa	(%edx), %xmm2
	movdqa	(%eax), %xmm1
	pcmpeqb	%xmm1, %xmm0
	pslldq	$3, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb	%xmm1, %xmm2
	psubb	%xmm0, %xmm2
	pmovmskb %xmm2, %edi
	shr	%cl, %esi
	shr	%cl, %edi
	sub	%edi, %esi
	lea	-3(%ecx), %edi
	jnz	L(less32bytes)

	UPDATE_STRNCMP_COUNTER

	movdqa	(%edx), %xmm3
	pxor	%xmm0, %xmm0
	mov	$16, %ecx
	orl	$13, FLAGS
	lea	13(%edx), %edi
	and	$0xfff, %edi
	sub	$0x1000, %edi

	.p2align 4
L(loop_ashr_13):
	add	$16, %edi
	jg	L(nibble_ashr_13)

L(gobble_ashr_13):
	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$13, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3

	add	$16, %edi
	jg	L(nibble_ashr_13)

	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$13, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3
	jmp	L(loop_ashr_13)

	.p2align 4
L(nibble_ashr_13):
	pcmpeqb	%xmm3, %xmm0
	pmovmskb %xmm0, %esi
	test	$0xe000, %esi
	jnz	L(ashr_13_exittail)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$3, REM
	jbe	L(ashr_13_exittail)
#endif

	pxor	%xmm0, %xmm0
	sub	$0x1000, %edi
	jmp	L(gobble_ashr_13)

	.p2align 4
L(ashr_13_exittail):
	movdqa	(%eax, %ecx), %xmm1
	psrldq	$13, %xmm0
	psrldq	$13, %xmm3
	jmp	L(aftertail)
/*
 * The following cases will be handled by ashr_14
 *  ecx(offset of esi)  eax(offset of edi)  relative offset	corresponding case
 *        n(2~15)            n - 2          13(15 +(n-2) - n)        ashr_14
 */
	.p2align 4
L(ashr_14):
	mov	$0xffff, %esi
	pxor	%xmm0, %xmm0
	movdqa	(%edx), %xmm2
	movdqa	(%eax), %xmm1
	pcmpeqb	%xmm1, %xmm0
	pslldq	$2, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb	%xmm1, %xmm2
	psubb	%xmm0, %xmm2
	pmovmskb %xmm2, %edi
	shr	%cl, %esi
	shr	%cl, %edi
	sub	%edi, %esi
	lea	-2(%ecx), %edi
	jnz	L(less32bytes)

	UPDATE_STRNCMP_COUNTER

	movdqa	(%edx), %xmm3
	pxor	%xmm0, %xmm0
	mov	$16, %ecx
	orl	$14, FLAGS
	lea	14(%edx), %edi
	and	$0xfff, %edi
	sub	$0x1000, %edi

	.p2align 4
L(loop_ashr_14):
	add	$16, %edi
	jg	L(nibble_ashr_14)

L(gobble_ashr_14):
	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$14, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3

	add	$16, %edi
	jg	L(nibble_ashr_14)

	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$14, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3
	jmp	L(loop_ashr_14)

	.p2align 4
L(nibble_ashr_14):
	pcmpeqb	%xmm3, %xmm0
	pmovmskb %xmm0, %esi
	test	$0xc000, %esi
	jnz	L(ashr_14_exittail)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$2, REM
	jbe	L(ashr_14_exittail)
#endif

	pxor	%xmm0, %xmm0
	sub	$0x1000, %edi
	jmp	L(gobble_ashr_14)

	.p2align 4
L(ashr_14_exittail):
	movdqa	(%eax, %ecx), %xmm1
	psrldq	$14, %xmm0
	psrldq	$14, %xmm3
	jmp	L(aftertail)
/*
 * The following cases will be handled by ashr_15
 *  ecx(offset of esi)  eax(offset of edi)  relative offset	corresponding case
 *        n(1~15)            n - 1          14(15 +(n-1) - n)        ashr_15
 */

	.p2align 4
L(ashr_15):
	mov	$0xffff, %esi
	pxor	%xmm0, %xmm0
	movdqa	(%edx), %xmm2
	movdqa	(%eax), %xmm1
	pcmpeqb	%xmm1, %xmm0
	pslldq	$1, %xmm2
	TOLOWER (%xmm1, %xmm2)
	pcmpeqb	%xmm1, %xmm2
	psubb	%xmm0, %xmm2
	pmovmskb %xmm2, %edi
	shr	%cl, %esi
	shr	%cl, %edi
	sub	%edi, %esi
	lea	-1(%ecx), %edi
	jnz	L(less32bytes)

	UPDATE_STRNCMP_COUNTER

	movdqa	(%edx), %xmm3
	pxor	%xmm0, %xmm0
	mov	$16, %ecx
	orl	$15, FLAGS
	lea	15(%edx), %edi
	and	$0xfff, %edi
	sub	$0x1000, %edi

	.p2align 4
L(loop_ashr_15):
	add	$16, %edi
	jg	L(nibble_ashr_15)

L(gobble_ashr_15):
	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$15, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3

	add	$16, %edi
	jg	L(nibble_ashr_15)

	movdqa	(%eax, %ecx), %xmm1
	movdqa	(%edx, %ecx), %xmm2
	movdqa	%xmm2, %xmm4

	palignr	$15, %xmm3, %xmm2
	TOLOWER (%xmm1, %xmm2)

	pcmpeqb	%xmm1, %xmm0
	pcmpeqb	%xmm2, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	sub	$0xffff, %esi
	jnz	L(exit)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$16, REM
	lea	-16(REM), REM
	jbe	L(more8byteseq)
#endif
	add	$16, %ecx
	movdqa	%xmm4, %xmm3
	jmp	L(loop_ashr_15)

	.p2align 4
L(nibble_ashr_15):
	pcmpeqb	%xmm3, %xmm0
	pmovmskb %xmm0, %esi
	test	$0x8000, %esi
	jnz	L(ashr_15_exittail)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$1, REM
	jbe	L(ashr_15_exittail)
#endif

	pxor	%xmm0, %xmm0
	sub	$0x1000, %edi
	jmp	L(gobble_ashr_15)

	.p2align 4
L(ashr_15_exittail):
	movdqa	(%eax, %ecx), %xmm1
	psrldq	$15, %xmm0
	psrldq	$15, %xmm3
	jmp	L(aftertail)
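/*
 * Common exit path for all of the ashr cases.  %esi carries a 16-bit mask
 * whose set bits (after the "not" below on the aftertail path) mark the
 * byte positions that differ or contain a NUL.  The low five bits of
 * FLAGS hold the case number that was or'ed in above and are combined
 * with %ecx to recover the byte offset of the mismatching chunk; bit 0x20
 * of FLAGS appears to record that the two string pointers were exchanged
 * earlier, which is why L(less32bytes) swaps %eax and %edx back before
 * the final byte comparison.
 */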
	.p2align 4
L(aftertail):
	TOLOWER (%xmm1, %xmm3)
	pcmpeqb	%xmm3, %xmm1
	psubb	%xmm0, %xmm1
	pmovmskb %xmm1, %esi
	not	%esi
L(exit):
	mov	FLAGS, %edi
	and	$0x1f, %edi
	lea	-16(%edi, %ecx), %edi
L(less32bytes):
	add	%edi, %edx
	add	%ecx, %eax
	testl	$0x20, FLAGS
	jz	L(ret2)
	xchg	%eax, %edx

	.p2align 4
L(ret2):
	mov	%esi, %ecx
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	addl	$4, %esp
	cfi_adjust_cfa_offset (-4)
#endif
	POP	(%esi)
	POP	(%edi)
#if !defined USE_AS_STRCASECMP_L && !defined USE_AS_STRNCASECMP_L
	POP	(FLAGS)
#endif
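/*
 * %ecx now holds the difference mask copied from %esi in L(ret2).  The
 * tests below scan its low byte bit by bit to locate the first differing
 * byte of the 16-byte chunk; L(2next_8_bytes) handles a difference that
 * lies in the upper eight bytes.
 */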
L(less16bytes):
	test	%cl, %cl
	jz	L(2next_8_bytes)

	test	$0x01, %cl
	jnz	L(Byte0)

	test	$0x02, %cl
	jnz	L(Byte1)

	test	$0x04, %cl
	jnz	L(Byte2)

	test	$0x08, %cl
	jnz	L(Byte3)

	test	$0x10, %cl
	jnz	L(Byte4)

	test	$0x20, %cl
	jnz	L(Byte5)

	test	$0x40, %cl
	jnz	L(Byte6)
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$7, REM
	jbe	L(eq)
#endif

	movzx	7(%eax), %ecx
	movzx	7(%edx), %eax
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN
L(Byte0):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$0, REM
	jbe	L(eq)
#endif
	movzx	(%eax), %ecx
	movzx	(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN

L(Byte1):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$1, REM
	jbe	L(eq)
#endif
	movzx	1(%eax), %ecx
	movzx	1(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN

L(Byte2):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$2, REM
	jbe	L(eq)
#endif
	movzx	2(%eax), %ecx
	movzx	2(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN

L(Byte3):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$3, REM
	jbe	L(eq)
#endif
	movzx	3(%eax), %ecx
	movzx	3(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN

L(Byte4):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$4, REM
	jbe	L(eq)
#endif
	movzx	4(%eax), %ecx
	movzx	4(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN

L(Byte5):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$5, REM
	jbe	L(eq)
#endif
	movzx	5(%eax), %ecx
	movzx	5(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN

L(Byte6):
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$6, REM
	jbe	L(eq)
#endif
	movzx	6(%eax), %ecx
	movzx	6(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN
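/*
 * No bit was set in the low byte of the mask, so the first difference is
 * in bytes 8..15: advance both pointers by eight and repeat the same bit
 * tests on %ch, reusing the L(ByteN) paths above.
 */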
L(2next_8_bytes):
	add	$8, %eax
	add	$8, %edx
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$8, REM
	lea	-8(REM), REM
	jbe	L(eq)
#endif

	test	$0x01, %ch
	jnz	L(Byte0)

	test	$0x02, %ch
	jnz	L(Byte1)

	test	$0x04, %ch
	jnz	L(Byte2)

	test	$0x08, %ch
	jnz	L(Byte3)

	test	$0x10, %ch
	jnz	L(Byte4)

	test	$0x20, %ch
	jnz	L(Byte5)

	test	$0x40, %ch
	jnz	L(Byte6)

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	cmp	$7, REM
	jbe	L(eq)
#endif
	movzx	7(%eax), %ecx
	movzx	7(%edx), %eax

#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%eax,4), %eax
# else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%eax,4), %eax
# endif
#endif

	sub	%ecx, %eax
	RETURN
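/*
 * Turn the flags left by the last byte comparison into the return value:
 * start from 1 and negate it unless the unsigned "above" condition holds,
 * yielding +1 or -1.
 */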
#ifdef USE_AS_STRNCMP
L(neq_sncmp):
#endif
L(neq):
	mov	$1, %eax
	ja	L(neq_bigger)
	neg	%eax

L(neq_bigger):
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
	addl	$4, %esp
	cfi_adjust_cfa_offset (-4)
#endif
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	POP	(REM)
#endif
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	POP	(%ebx)
# endif
#endif
	ret
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	.p2align 4
	cfi_restore_state
L(more8byteseq):

# ifdef USE_AS_STRNCASECMP_L
	addl	$4, %esp
	cfi_adjust_cfa_offset (-4)
# endif
	POP	(%esi)
	POP	(%edi)
# ifdef USE_AS_STRNCMP
	POP	(FLAGS)
# endif
#endif
#ifdef USE_AS_STRNCMP
L(eq_sncmp):
#endif
L(eq):

#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	POP	(REM)
#endif
#if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
# ifdef PIC
	POP	(%ebx)
# endif
#endif
	xorl	%eax, %eax
	ret
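/*
 * Byte-by-byte tail for the counted variants (strncmp/strncasecmp_l),
 * reached when fewer than 16 bytes remain to be compared.  The CFI_PUSH
 * annotations appear to re-declare the unwind state for registers that
 * are still saved on the stack along this cold path.
 */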
#if defined USE_AS_STRNCMP || defined USE_AS_STRNCASECMP_L
	.p2align 4
# if defined USE_AS_STRNCASECMP_L && defined PIC
	CFI_PUSH (%ebx)
# endif
	CFI_PUSH (REM)
L(less16bytes_sncmp):
# ifdef USE_AS_STRNCASECMP_L
	PUSH	(%esi)
# endif
	test	REM, REM
	jz	L(eq_sncmp)
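/*
 * Each of the 16 unrolled steps below has the same shape: load byte k of
 * the first string, optionally map both bytes through the
 * _nl_C_LC_CTYPE_tolower table for the case-insensitive variant, compare
 * with byte k of the second string, branch to L(neq_sncmp) on a mismatch,
 * to L(eq_sncmp) on a NUL, and to L(eq_sncmp) once REM is exhausted.
 */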
	movzbl	(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, (%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$1, REM
	je	L(eq_sncmp)

	movzbl	1(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	1(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 1(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$2, REM
	je	L(eq_sncmp)

	movzbl	2(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	2(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 2(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$3, REM
	je	L(eq_sncmp)

	movzbl	3(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	3(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 3(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$4, REM
	je	L(eq_sncmp)

	movzbl	4(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	4(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 4(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$5, REM
	je	L(eq_sncmp)

	movzbl	5(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	5(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 5(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$6, REM
	je	L(eq_sncmp)

	movzbl	6(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	6(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 6(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$7, REM
	je	L(eq_sncmp)
	movzbl	7(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	7(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 7(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$8, REM
	je	L(eq_sncmp)

	movzbl	8(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	8(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 8(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$9, REM
	je	L(eq_sncmp)

	movzbl	9(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	9(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 9(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$10, REM
	je	L(eq_sncmp)

	movzbl	10(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	10(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 10(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$11, REM
	je	L(eq_sncmp)

	movzbl	11(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	11(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 11(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$12, REM
	je	L(eq_sncmp)

	movzbl	12(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	12(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 12(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$13, REM
	je	L(eq_sncmp)

	movzbl	13(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	13(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 13(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$14, REM
	je	L(eq_sncmp)

	movzbl	14(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	14(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 14(%edx)
# endif
	jne	L(neq_sncmp)
	test	%cl, %cl
	je	L(eq_sncmp)

	cmp	$15, REM
	je	L(eq_sncmp)
	movzbl	15(%eax), %ecx
# ifdef USE_AS_STRNCASECMP_L
	movzbl	15(%edx), %esi
#  ifdef PIC
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower@GOTOFF+128*4(%ebx,%esi,4), %esi
#  else
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%ecx,4), %ecx
	movl	_nl_C_LC_CTYPE_tolower+128*4(,%esi,4), %esi
#  endif
	cmpl	%ecx, %esi
# else
	cmpb	%cl, 15(%edx)
# endif
	jne	L(neq_sncmp)

# ifdef USE_AS_STRNCASECMP_L
L(eq_sncmp):
	POP	(%esi)
# endif
	POP	(REM)
# if defined USE_AS_STRNCASECMP_L && defined PIC
	POP	(%ebx)
# endif
	xor	%eax, %eax
	ret
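/*
 * Branchless counterpart of L(neq) for the case-insensitive counted path:
 * load +1 and -1 and let cmovna pick -1 when the "not above" condition
 * from the last cmpl holds, then restore the saved registers and return.
 */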
# ifdef USE_AS_STRNCASECMP_L
	.p2align 4
#  ifdef PIC
	CFI_PUSH (%ebx)
#  endif
	CFI_PUSH (REM)
	CFI_PUSH (%esi)
L(neq_sncmp):
	mov	$1, %eax
	mov	$-1, %edx
	cmovna	%edx, %eax
	POP	(%esi)
	POP	(REM)
#  ifdef PIC
	POP	(%ebx)
#  endif
	ret
# endif
#endif

END (STRCMP)

#endif