/* strchr/strchrnul optimized with AVX2.
   Copyright (C) 2017-2021 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */

#if IS_IN (libc)

# include <sysdep.h>

# ifndef STRCHR
#  define STRCHR	__strchr_avx2
# endif
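
/* For wcschr the element is a 4-byte wchar_t, so the dword forms of the
   broadcast/compare/min instructions are used and the scalar recheck
   compares the full 32-bit CHAR (esi); for strchr only the low byte
   (sil) is compared.  */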
# ifdef USE_AS_WCSCHR
#  define VPBROADCAST	vpbroadcastd
#  define VPCMPEQ	vpcmpeqd
#  define VPMINU	vpminud
#  define CHAR_REG	esi
# else
#  define VPBROADCAST	vpbroadcastb
#  define VPCMPEQ	vpcmpeqb
#  define VPMINU	vpminub
#  define CHAR_REG	sil
# endif

# ifndef VZEROUPPER
#  define VZEROUPPER	vzeroupper
# endif

# ifndef SECTION
#  define SECTION(p)	p##.avx
# endif

# define VEC_SIZE 32
# define PAGE_SIZE 4096
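
/* Overview: CHAR is broadcast to a YMM register and each VEC_SIZE (32)
   byte block of the string is compared against both CHAR and zero; the
   OR of the two compare results is turned into a bitmask with
   vpmovmskb and scanned with tzcnt/bsf.  The first block is loaded
   unaligned unless that load would cross a page, and the main loop
   then walks the string VEC_SIZE * 4 bytes at a time from aligned
   addresses.  */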

	.section SECTION(.text),"ax",@progbits
ENTRY_P2ALIGN (STRCHR, 5)
	/* Broadcast CHAR to YMM0.  */
	vmovd %esi, %xmm0
	movl %edi, %eax
	andl $(PAGE_SIZE - 1), %eax
	VPBROADCAST %xmm0, %ymm0
	vpxor %xmm1, %xmm1, %xmm1
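	/* ymm0 now has CHAR in every lane and ymm1 is all zeros; each
	   block below is compared against both, so a single bitmask
	   covers "is CHAR" and "is the null terminator".  */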

	/* Check if we cross page boundary with one vector load.  */
	cmpl $(PAGE_SIZE - VEC_SIZE), %eax
	ja L(cross_page_boundary)
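	/* eax is the page offset of rdi; above PAGE_SIZE - VEC_SIZE an
	   unaligned VEC_SIZE byte load from rdi would touch the next,
	   possibly unmapped, page, so that case takes the cold path.  */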

	/* Check the first VEC_SIZE bytes.  Search for both CHAR and the
	   null byte.  */
	vmovdqu (%rdi), %ymm2
	VPCMPEQ %ymm2, %ymm0, %ymm3
	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpor %ymm3, %ymm2, %ymm3
	vpmovmskb %ymm3, %eax
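	/* Each bit of eax corresponds to one byte of the vector and is
	   set where that byte is CHAR or the null terminator.  */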
	testl %eax, %eax
	jz L(aligned_more)
	tzcntl %eax, %eax
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	/* NB: Use a branch instead of cmovcc here. The expectation is
	   that with strchr the user will branch based on input being
	   null. Since this branch will be 100% predictive of the user
	   branch a branch miss here should save what otherwise would
	   be branch miss in the user code. Otherwise using a branch 1)
	   saves code size and 2) is faster in highly predictable
	   environments.  */
	jne L(zero)
# endif
	addq %rdi, %rax
L(return_vzeroupper):
	ZERO_UPPER_VEC_REGISTERS_RETURN

# ifndef USE_AS_STRCHRNUL
L(zero):
	xorl %eax, %eax
	VZEROUPPER_RETURN
# endif

	.p2align 4
L(first_vec_x1):
	/* Use bsf to save code size.  */
	bsfl %eax, %eax
	incq %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

	.p2align 4,, 10
L(first_vec_x2):
	/* Use bsf to save code size.  */
	bsfl %eax, %eax
	addq $(VEC_SIZE + 1), %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

	.p2align 4,, 8
L(first_vec_x3):
	/* Use bsf to save code size.  */
	bsfl %eax, %eax
	addq $(VEC_SIZE * 2 + 1), %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

	.p2align 4,, 10
L(first_vec_x4):
	/* Use bsf to save code size.  */
	bsfl %eax, %eax
	addq $(VEC_SIZE * 3 + 1), %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

	.p2align 4
L(aligned_more):
	/* Align data to VEC_SIZE - 1.  This is the same number of
	   instructions as using andq -VEC_SIZE but saves 4 bytes of code
	   on x4 check.  */
	orq $(VEC_SIZE - 1), %rdi
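	/* rdi now points at the last byte of its VEC_SIZE block, so the
	   1(%rdi) loads below are VEC_SIZE aligned.  */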
L(cross_page_continue):
	/* Check the next 4 * VEC_SIZE.  Only one VEC_SIZE at a time
	   since data is only aligned to VEC_SIZE.  */
	vmovdqa 1(%rdi), %ymm2
	VPCMPEQ %ymm2, %ymm0, %ymm3
	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpor %ymm3, %ymm2, %ymm3
	vpmovmskb %ymm3, %eax
	testl %eax, %eax
	jnz L(first_vec_x1)

	vmovdqa (VEC_SIZE + 1)(%rdi), %ymm2
	VPCMPEQ %ymm2, %ymm0, %ymm3
	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpor %ymm3, %ymm2, %ymm3
	vpmovmskb %ymm3, %eax
	testl %eax, %eax
	jnz L(first_vec_x2)

	vmovdqa (VEC_SIZE * 2 + 1)(%rdi), %ymm2
	VPCMPEQ %ymm2, %ymm0, %ymm3
	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpor %ymm3, %ymm2, %ymm3
	vpmovmskb %ymm3, %eax
	testl %eax, %eax
	jnz L(first_vec_x3)

	vmovdqa (VEC_SIZE * 3 + 1)(%rdi), %ymm2
	VPCMPEQ %ymm2, %ymm0, %ymm3
	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpor %ymm3, %ymm2, %ymm3
	vpmovmskb %ymm3, %eax
	testl %eax, %eax
	jnz L(first_vec_x4)
	/* Align data to VEC_SIZE * 4 - 1.  */
	incq %rdi
	orq $(VEC_SIZE * 4 - 1), %rdi
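	/* rdi now points at the last byte of its VEC_SIZE * 4 block, so
	   each 1(%rdi) load in the loop below is VEC_SIZE * 4 aligned;
	   at worst some already-checked bytes are scanned again on the
	   first iteration.  */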

	.p2align 4
L(loop_4x_vec):
	/* Compare 4 * VEC at a time forward.  */
	vmovdqa 1(%rdi), %ymm6
	vmovdqa (VEC_SIZE + 1)(%rdi), %ymm7

	/* Leaves only CHARS matching esi as 0.  */
	vpxor %ymm6, %ymm0, %ymm2
	vpxor %ymm7, %ymm0, %ymm3

	VPMINU %ymm2, %ymm6, %ymm2
	VPMINU %ymm3, %ymm7, %ymm3
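	/* xor with CHAR gives 0 exactly in the bytes equal to CHAR, and
	   the unsigned minimum with the original data also keeps 0
	   where the data itself is 0, so ymm2/ymm3 now have a zero byte
	   exactly where the input has CHAR or the null terminator.  */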

	vmovdqa (VEC_SIZE * 2 + 1)(%rdi), %ymm6
	vmovdqa (VEC_SIZE * 3 + 1)(%rdi), %ymm7

	vpxor %ymm6, %ymm0, %ymm4
	vpxor %ymm7, %ymm0, %ymm5

	VPMINU %ymm4, %ymm6, %ymm4
	VPMINU %ymm5, %ymm7, %ymm5

	VPMINU %ymm2, %ymm3, %ymm6
	VPMINU %ymm4, %ymm5, %ymm7

	VPMINU %ymm6, %ymm7, %ymm7

	VPCMPEQ %ymm7, %ymm1, %ymm7
	vpmovmskb %ymm7, %ecx
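	/* ecx is nonzero iff any byte in the VEC_SIZE * 4 window is CHAR
	   or null.  subq of the negative value adds VEC_SIZE * 4 while
	   keeping the immediate in the sign-extended 8-bit range.  */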
	subq $-(VEC_SIZE * 4), %rdi
	testl %ecx, %ecx
	jz L(loop_4x_vec)

	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpmovmskb %ymm2, %eax
	testl %eax, %eax
	jnz L(last_vec_x0)

	VPCMPEQ %ymm3, %ymm1, %ymm3
	vpmovmskb %ymm3, %eax
	testl %eax, %eax
	jnz L(last_vec_x1)

	VPCMPEQ %ymm4, %ymm1, %ymm4
	vpmovmskb %ymm4, %eax
	/* rcx has combined result from all 4 VEC.  It will only be used
	   if the first 3 other VEC all did not contain a match.  */
	salq $32, %rcx
	orq %rcx, %rax
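	/* rax now has the third vector's mask in its low 32 bits and the
	   combined mask in its high 32 bits.  The high bits only matter
	   when the first three vectors had no match, i.e. they locate a
	   match in the fourth vector, so a single tzcntq yields the byte
	   offset of the first CHAR/null from the third vector's base.  */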
	tzcntq %rax, %rax
	subq $(VEC_SIZE * 2 - 1), %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero_end)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

	.p2align 4,, 10
L(last_vec_x0):
	/* Use bsf to save code size.  */
	bsfl %eax, %eax
	addq $-(VEC_SIZE * 4 - 1), %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero_end)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

	.p2align 4,, 10
L(last_vec_x1):
	tzcntl %eax, %eax
	subq $(VEC_SIZE * 3 - 1), %rdi
# ifndef USE_AS_STRCHRNUL
	/* Found CHAR or the null byte.  */
	cmp (%rdi, %rax), %CHAR_REG
	jne L(zero_end)
# endif
	addq %rdi, %rax
	VZEROUPPER_RETURN

# ifndef USE_AS_STRCHRNUL
L(zero_end):
	xorl %eax, %eax
	VZEROUPPER_RETURN
# endif

	/* Cold case for crossing page with first load.  */
	.p2align 4,, 8
L(cross_page_boundary):
	movq %rdi, %rdx
	/* Align rdi to VEC_SIZE - 1.  */
	orq $(VEC_SIZE - 1), %rdi
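	/* rdi now points at the last byte of the aligned VEC_SIZE block
	   containing the start of the string, so the aligned load below
	   starts at or before the original rdi and cannot cross into
	   the next page.  */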
	vmovdqa -(VEC_SIZE - 1)(%rdi), %ymm2
	VPCMPEQ %ymm2, %ymm0, %ymm3
	VPCMPEQ %ymm2, %ymm1, %ymm2
	vpor %ymm3, %ymm2, %ymm3
	vpmovmskb %ymm3, %eax
	/* Remove the leading bytes.  sarxl only uses bits [4:0] of COUNT
	   (the shift count is taken mod 32), so no need to manually mod
	   edx.  */
	sarxl %edx, %eax, %eax
	testl %eax, %eax
	jz L(cross_page_continue)
	tzcntl %eax, %eax
# ifndef USE_AS_STRCHRNUL
	xorl %ecx, %ecx
	/* Found CHAR or the null byte.  */
	cmp (%rdx, %rax), %CHAR_REG
	jne L(zero_end)
# endif
	addq %rdx, %rax
	VZEROUPPER_RETURN

END (STRCHR)
#endif