(__longjmp): Make aligned_restore_vmx a local symbol.

Ulrich Drepper 2009-01-30 20:35:22 +00:00
parent 430902daac
commit 59eade4810
2 changed files with 6 additions and 7 deletions
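
For context: L() is glibc's local-label helper from sysdep.h, so wrapping aligned_restore_vmx in it keeps the label out of the object file's symbol table instead of emitting it as an ordinary assembler symbol. A minimal sketch of the mechanism, assuming the ELF definition used by the sysdep headers:

/* Sketch (assumption): on ELF targets glibc's sysdep.h defines the
   local-label helper roughly like this.  */
#ifdef __ELF__
# define L(name)	.L##name	/* ".L" prefix: assembler-local, omitted from the symbol table */
#else
# define L(name)	name
#endif

/* So "L(aligned_restore_vmx):" in the .S file expands to
   ".Laligned_restore_vmx:", matching the existing L(no_vmx) label.  */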

@@ -1,5 +1,5 @@
 /* longjmp for PowerPC.
-   Copyright (C) 1995-99, 2000, 2003-2005, 2006 Free Software Foundation, Inc.
+   Copyright (C) 1995-99, 2000, 2003-2006, 2009 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
    The GNU C Library is free software; you can redistribute it and/or
@@ -64,7 +64,7 @@ ENTRY (BP_SYM (__longjmp))
 	andi. r6,r5,0xf
 	lwz r0,((JB_VRSAVE)*4)(3)
 	mtspr VRSAVE,r0
-	beq+ aligned_restore_vmx
+	beq+ L(aligned_restore_vmx)
 	addi r6,r5,16
 	lvsl v0,0,r5
 	lvx v1,0,r5
@@ -88,7 +88,7 @@ ENTRY (BP_SYM (__longjmp))
 	lvx v1,0,r5
 	vperm v31,v31,v1,v0
 	b L(no_vmx)
-aligned_restore_vmx:
+L(aligned_restore_vmx):
 	addi r6,r5,16
 	lvx v20,0,r5
 	addi r5,r5,32

@@ -1,6 +1,5 @@
 /* longjmp for PowerPC64.
-   Copyright (C) 1995, 1996,1997,1999,2000,2001,2002,2003,2004,2005,2006
-	Free Software Foundation, Inc.
+   Copyright (C) 1995, 1996,1997,1999-2006,2009 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
    The GNU C Library is free software; you can redistribute it and/or
@@ -58,7 +57,7 @@ ENTRY (BP_SYM (__longjmp))
 	andi. r6,r5,0xf
 	lwz r0,((JB_VRSAVE)*8)(3)
 	mtspr VRSAVE,r0
-	beq+ aligned_restore_vmx
+	beq+ L(aligned_restore_vmx)
 	addi r6,r5,16
 	lvsl v0,0,r5
 	lvx v1,0,r5
@@ -82,7 +81,7 @@ ENTRY (BP_SYM (__longjmp))
 	lvx v1,0,r5
 	vperm v31,v31,v1,v0
 	b L(no_vmx)
-aligned_restore_vmx:
+L(aligned_restore_vmx):
 	addi r6,r5,16
 	lvx v20,0,r5
 	addi r5,r5,32