/* PowerPC64 __mpn_addmul_1 -- Multiply a limb vector with a limb and add
   the result to a second limb vector.
   Copyright (C) 1999-2018 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#ifdef USE_AS_SUBMUL
|
|
|
|
# define FUNC __mpn_submul_1
|
|
|
|
# define ADDSUBC subfe
|
|
|
|
# define ADDSUB subfc
|
|
|
|
#else
|
|
|
|
# define FUNC __mpn_addmul_1
|
|
|
|
# define ADDSUBC adde
|
|
|
|
# define ADDSUB addc
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#define RP r3
|
|
|
|
#define UP r4
|
|
|
|
#define N r5
|
|
|
|
#define VL r6
|
|
|
|

/* This function neither uses nor sets up r2 (the TOC pointer), so it
   is declared with ENTRY_TOCLESS rather than ENTRY: no global entry
   point establishing r2 is needed.  */
ENTRY_TOCLESS (FUNC, 5)
|
2013-11-22 12:38:03 +00:00
|
|
|
std r31, -8(r1)
|
|
|
|
rldicl. r0, N, 0, 62
|
|
|
|
std r30, -16(r1)
|
|
|
|
cmpdi VL, r0, 2
|
|
|
|
std r29, -24(r1)
|
|
|
|
addi N, N, 3
|
|
|
|
std r28, -32(r1)
|
|
|
|
srdi N, N, 2
|
|
|
|
std r27, -40(r1)
|
|
|
|
mtctr N
|
|
|
|
beq cr0, L(b00)
|
|
|
|
blt cr6, L(b01)
|
|
|
|
beq cr6, L(b10)
|
|
|
|
|
|
|
|
L(b11): ld r9, 0(UP)
|
|
|
|
ld r28, 0(RP)
|
|
|
|
mulld r0, r9, VL
|
|
|
|
mulhdu r12, r9, VL
|
|
|
|
ADDSUB r0, r0, r28
|
|
|
|
std r0, 0(RP)
|
|
|
|
addi RP, RP, 8
|
|
|
|
ld r9, 8(UP)
|
|
|
|
ld r27, 16(UP)
|
|
|
|
addi UP, UP, 24
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
subfe r11, r11, r11
|
|
|
|
#endif
|
|
|
|
b L(bot)
|
|
|
|
|
|
|
|
.align 4
|
|
|
|
L(b00): ld r9, 0(UP)
|
|
|
|
ld r27, 8(UP)
|
|
|
|
ld r28, 0(RP)
|
|
|
|
ld r29, 8(RP)
|
|
|
|
mulld r0, r9, VL
|
|
|
|
mulhdu N, r9, VL
|
|
|
|
mulld r7, r27, VL
|
|
|
|
mulhdu r8, r27, VL
|
|
|
|
addc r7, r7, N
|
|
|
|
addze r12, r8
|
|
|
|
ADDSUB r0, r0, r28
|
|
|
|
std r0, 0(RP)
|
|
|
|
ADDSUBC r7, r7, r29
|
|
|
|
std r7, 8(RP)
|
|
|
|
addi RP, RP, 16
|
|
|
|
ld r9, 16(UP)
|
|
|
|
ld r27, 24(UP)
|
|
|
|
addi UP, UP, 32
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
subfe r11, r11, r11
|
|
|
|
#endif
|
|
|
|
b L(bot)
|
|
|
|
|
|
|
|
.align 4
|
|
|
|
L(b01): bdnz L(gt1)
|
|
|
|
ld r9, 0(UP)
|
|
|
|
ld r11, 0(RP)
|
|
|
|
mulld r0, r9, VL
|
|
|
|
mulhdu r8, r9, VL
|
|
|
|
ADDSUB r0, r0, r11
|
|
|
|
std r0, 0(RP)
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
subfe r11, r11, r11
|
|
|
|
addic r11, r11, 1
|
|
|
|
#endif
|
|
|
|
addze RP, r8
|
|
|
|
blr
|
|
|
|
|
|
|
|
L(gt1): ld r9, 0(UP)
|
|
|
|
ld r27, 8(UP)
|
|
|
|
mulld r0, r9, VL
|
|
|
|
mulhdu N, r9, VL
|
|
|
|
mulld r7, r27, VL
|
|
|
|
mulhdu r8, r27, VL
|
|
|
|
ld r9, 16(UP)
|
|
|
|
ld r28, 0(RP)
|
|
|
|
ld r29, 8(RP)
|
|
|
|
ld r30, 16(RP)
|
|
|
|
mulld r11, r9, VL
|
|
|
|
mulhdu r10, r9, VL
|
|
|
|
addc r7, r7, N
|
|
|
|
adde r11, r11, r8
|
|
|
|
addze r12, r10
|
|
|
|
ADDSUB r0, r0, r28
|
|
|
|
std r0, 0(RP)
|
|
|
|
ADDSUBC r7, r7, r29
|
|
|
|
std r7, 8(RP)
|
|
|
|
ADDSUBC r11, r11, r30
|
|
|
|
std r11, 16(RP)
|
|
|
|
addi RP, RP, 24
|
|
|
|
ld r9, 24(UP)
|
|
|
|
ld r27, 32(UP)
|
|
|
|
addi UP, UP, 40
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
subfe r11, r11, r11
|
|
|
|
#endif
|
|
|
|
b L(bot)
|
|
|
|
|
|
|
|
L(b10): addic r0, r0, r0
|
|
|
|
li r12, 0
|
|
|
|
ld r9, 0(UP)
|
|
|
|
ld r27, 8(UP)
|
|
|
|
bdz L(end)
|
|
|
|
addi UP, UP, 16
|
|
|
|
|
|
|
|
.align 4
|
|
|
|
L(top): mulld r0, r9, VL
|
|
|
|
mulhdu N, r9, VL
|
|
|
|
mulld r7, r27, VL
|
|
|
|
mulhdu r8, r27, VL
|
|
|
|
ld r9, 0(UP)
|
|
|
|
ld r28, 0(RP)
|
|
|
|
ld r27, 8(UP)
|
|
|
|
ld r29, 8(RP)
|
|
|
|
adde r0, r0, r12
|
|
|
|
adde r7, r7, N
|
|
|
|
mulld N, r9, VL
|
|
|
|
mulhdu r10, r9, VL
|
|
|
|
mulld r11, r27, VL
|
|
|
|
mulhdu r12, r27, VL
|
|
|
|
ld r9, 16(UP)
|
|
|
|
ld r30, 16(RP)
|
|
|
|
ld r27, 24(UP)
|
|
|
|
ld r31, 24(RP)
|
|
|
|
adde N, N, r8
|
|
|
|
adde r11, r11, r10
|
|
|
|
addze r12, r12
|
|
|
|
ADDSUB r0, r0, r28
|
|
|
|
std r0, 0(RP)
|
|
|
|
ADDSUBC r7, r7, r29
|
|
|
|
std r7, 8(RP)
|
|
|
|
ADDSUBC N, N, r30
|
|
|
|
std N, 16(RP)
|
|
|
|
ADDSUBC r11, r11, r31
|
|
|
|
std r11, 24(RP)
|
|
|
|
addi UP, UP, 32
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
subfe r11, r11, r11
|
|
|
|
#endif
|
|
|
|
addi RP, RP, 32
|
|
|
|
L(bot):
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
addic r11, r11, 1
|
|
|
|
#endif
|
|
|
|
bdnz L(top)
|
|
|
|
|
|
|
|
L(end): mulld r0, r9, VL
|
|
|
|
mulhdu N, r9, VL
|
|
|
|
mulld r7, r27, VL
|
|
|
|
mulhdu r8, r27, VL
|
|
|
|
ld r28, 0(RP)
|
|
|
|
ld r29, 8(RP)
|
|
|
|
adde r0, r0, r12
|
|
|
|
adde r7, r7, N
|
|
|
|
addze r8, r8
|
|
|
|
ADDSUB r0, r0, r28
|
|
|
|
std r0, 0(RP)
|
|
|
|
ADDSUBC r7, r7, r29
|
|
|
|
std r7, 8(RP)
|
|
|
|
#ifdef USE_AS_SUBMUL
|
|
|
|
subfe r11, r11, r11
|
|
|
|
addic r11, r11, 1
|
|
|
|
#endif
|
|
|
|
addze RP, r8
|
|
|
|
ld r31, -8(r1)
|
|
|
|
ld r30, -16(r1)
|
|
|
|
ld r29, -24(r1)
|
|
|
|
ld r28, -32(r1)
|
|
|
|
ld r27, -40(r1)
|
|
|
|
blr
|
|
|
|
END(FUNC)
|