Mirror of https://sourceware.org/git/glibc.git, synced 2024-11-09 23:00:07 +00:00
i386: Remove CET support
CET is only supported for x86_64, this patch reverts:

  - faaee1f07e x86: Support shadow stack pointer in setjmp/longjmp.
  - be9ccd27c0 i386: Add _CET_ENDBR to indirect jump targets in add_n.S/sub_n.S
  - c02695d776 x86/CET: Update vfork to prevent child return
  - 5d844e1b72 i386: Enable CET support in ucontext functions
  - 124bcde683 x86: Add _CET_ENDBR to functions in crti.S
  - 562837c002 x86: Add _CET_ENDBR to functions in dl-tlsdesc.S
  - f753fa7dea x86: Support IBT and SHSTK in Intel CET [BZ #21598]
  - 825b58f3fb i386-mcount.S: Add _CET_ENDBR to _mcount and __fentry__
  - 7e119cd582 i386: Use _CET_NOTRACK in i686/memcmp.S
  - 177824e232 i386: Use _CET_NOTRACK in memcmp-sse4.S
  - 0a899af097 i386: Use _CET_NOTRACK in memcpy-ssse3-rep.S
  - 7fb613361c i386: Use _CET_NOTRACK in memcpy-ssse3.S
  - 77a8ae0948 i386: Use _CET_NOTRACK in memset-sse2-rep.S
  - 00e7b76a8f i386: Use _CET_NOTRACK in memset-sse2.S
  - 90d15dc577 i386: Use _CET_NOTRACK in strcat-sse2.S
  - f1574581c7 i386: Use _CET_NOTRACK in strcpy-sse2.S
  - 4031d7484a i386/sub_n.S: Add a missing _CET_ENDBR to indirect jump target

Checked on i686-linux-gnu.
parent b7fc4a07f2
commit 25f1e16ef0
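The hunks below strip the CET scaffolding from every i386 assembly file. As a rough sketch of what that scaffolding did (paraphrasing the pre-revert macro definitions that appear in the diff itself; the endbr32 expansion comes from the compiler's -fcf-protection support and is only assumed here for illustration, and the label is hypothetical):

    #ifdef _CET_ENDBR                 /* provided by the compiler when CET is on */
    # define _CET_NOTRACK notrack     /* prefix for untracked jump-table branches */
    #else
    # define _CET_ENDBR               /* CET off: both markers expand to nothing */
    # define _CET_NOTRACK
    #endif

    some_entry:                       /* hypothetical indirect-branch target */
    	_CET_ENDBR                    /* endbr32 landing pad with CET, empty without */
    	_CET_NOTRACK jmp *%ebx        /* "notrack jmp *%ebx" with CET, plain jmp without */

With the i386 definitions gone (and CET_ENABLED defined to 0 on the i386 side, as one hunk below shows), the indirect jumps and entry points in these files keep only the plain instructions.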
@@ -19,55 +19,14 @@
 #include <sysdep.h>
 #include <pointer_guard.h>
 #include <jmpbuf-offsets.h>
-#include <jmp_buf-ssp.h>
 #include <asm-syntax.h>
 #include <stap-probe.h>

-/* Don't restore shadow stack register if
-1. Shadow stack isn't enabled. Or
-2. __longjmp is defined for __longjmp_cancel.
-*/
-#if !SHSTK_ENABLED || defined __longjmp
-# undef SHADOW_STACK_POINTER_OFFSET
-#endif
-
 .text
 ENTRY (__longjmp)
 #ifdef PTR_DEMANGLE
 movl 4(%esp), %eax /* User's jmp_buf in %eax. */
-
-# ifdef SHADOW_STACK_POINTER_OFFSET
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-# else
-xorl %edx, %edx
-# endif
-/* Check and adjust the Shadow-Stack-Pointer. */
-rdsspd %edx
-/* And compare it with the saved ssp value. */
-subl SHADOW_STACK_POINTER_OFFSET(%eax), %edx
-je L(skip_ssp)
-/* Count the number of frames to adjust and adjust it
-with incssp instruction. The instruction can adjust
-the ssp by [0..255] value only thus use a loop if
-the number of frames is bigger than 255. */
-negl %edx
-shrl $2, %edx
-/* NB: We saved Shadow-Stack-Pointer of setjmp. Since we are
-restoring Shadow-Stack-Pointer of setjmp's caller, we
-need to unwind shadow stack by one more frame. */
-addl $1, %edx
-movl $255, %ebx
-L(loop):
-cmpl %ebx, %edx
-cmovb %edx, %ebx
-incsspd %ebx
-subl %ebx, %edx
-ja L(loop)
-L(skip_ssp):
-# endif
 /* Save the return address now. */
 movl (JB_PC*4)(%eax), %edx
 /* Get the stack pointer. */
@@ -98,38 +57,6 @@ L(skip_ssp):
 #else
 movl 4(%esp), %ecx /* User's jmp_buf in %ecx. */
 movl 8(%esp), %eax /* Second argument is return value. */
-# ifdef SHADOW_STACK_POINTER_OFFSET
-# if IS_IN (libc) && defined SHARED
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-# endif
-/* Check and adjust the Shadow-Stack-Pointer. */
-xorl %edx, %edx
-/* Get the current ssp. */
-rdsspd %edx
-/* And compare it with the saved ssp value. */
-subl SHADOW_STACK_POINTER_OFFSET(%ecx), %edx
-je L(skip_ssp)
-/* Count the number of frames to adjust and adjust it
-with incssp instruction. The instruction can adjust
-the ssp by [0..255] value only thus use a loop if
-the number of frames is bigger than 255. */
-negl %edx
-shrl $2, %edx
-/* NB: We saved Shadow-Stack-Pointer of setjmp. Since we are
-restoring Shadow-Stack-Pointer of setjmp's caller, we
-need to unwind shadow stack by one more frame. */
-addl $1, %edx
-movl $255, %ebx
-L(loop):
-cmpl %ebx, %edx
-cmovb %edx, %ebx
-incsspd %ebx
-subl %ebx, %edx
-ja L(loop)
-L(skip_ssp):
-# endif
 /* Save the return address now. */
 movl (JB_PC*4)(%ecx), %edx
 LIBC_PROBE (longjmp, 3, 4@%ecx, -4@%eax, 4@%edx)
@@ -40,13 +40,6 @@ ENTRY (__mpn_add_n)
 cfi_rel_offset (esi, 0)
 movl S2(%esp),%edx
 movl SIZE(%esp),%ecx
-
-#if IBT_ENABLED
-pushl %ebx
-cfi_adjust_cfa_offset (4)
-cfi_rel_offset (ebx, 0)
-#endif
-
 movl %ecx,%eax
 shrl $3,%ecx /* compute count for unrolled loop */
 negl %eax
@@ -58,9 +51,6 @@ ENTRY (__mpn_add_n)
 subl %eax,%esi /* ... by a constant when we ... */
 subl %eax,%edx /* ... enter the loop */
 shrl $2,%eax /* restore previous value */
-#if IBT_ENABLED
-leal -4(,%eax,4),%ebx /* Count for 4-byte endbr32 */
-#endif
 #ifdef PIC
 /* Calculate start address in loop for PIC. Due to limitations in some
 assemblers, Loop-L0-3 cannot be put into the leal */
@@ -74,40 +64,30 @@ L(0): leal (%eax,%eax,8),%eax
 #else
 /* Calculate start address in loop for non-PIC. */
 leal (L(oop) - 3)(%eax,%eax,8),%eax
-#endif
-#if IBT_ENABLED
-addl %ebx,%eax /* Adjust for endbr32 */
 #endif
 jmp *%eax /* jump into loop */
 ALIGN (3)
 L(oop): movl (%esi),%eax
 adcl (%edx),%eax
 movl %eax,(%edi)
-_CET_ENDBR
 movl 4(%esi),%eax
 adcl 4(%edx),%eax
 movl %eax,4(%edi)
-_CET_ENDBR
 movl 8(%esi),%eax
 adcl 8(%edx),%eax
 movl %eax,8(%edi)
-_CET_ENDBR
 movl 12(%esi),%eax
 adcl 12(%edx),%eax
 movl %eax,12(%edi)
-_CET_ENDBR
 movl 16(%esi),%eax
 adcl 16(%edx),%eax
 movl %eax,16(%edi)
-_CET_ENDBR
 movl 20(%esi),%eax
 adcl 20(%edx),%eax
 movl %eax,20(%edi)
-_CET_ENDBR
 movl 24(%esi),%eax
 adcl 24(%edx),%eax
 movl %eax,24(%edi)
-_CET_ENDBR
 movl 28(%esi),%eax
 adcl 28(%edx),%eax
 movl %eax,28(%edi)
@@ -120,11 +100,6 @@ L(oop): movl (%esi),%eax
 sbbl %eax,%eax
 negl %eax
-
-#if IBT_ENABLED
-popl %ebx
-cfi_adjust_cfa_offset (-4)
-cfi_restore (ebx)
-#endif
 popl %esi
 cfi_adjust_cfa_offset (-4)
 cfi_restore (esi)
@@ -23,18 +23,12 @@
 #include <sysdep.h>
 #include <pointer_guard.h>
 #include <jmpbuf-offsets.h>
-#include <jmp_buf-ssp.h>
 #include <stap-probe.h>

 #define PARMS 4 /* no space for saved regs */
 #define JMPBUF PARMS
 #define SIGMSK JMPBUF+4

-/* Don't save shadow stack register if shadow stack isn't enabled. */
-#if !SHSTK_ENABLED
-# undef SHADOW_STACK_POINTER_OFFSET
-#endif
-
 ENTRY (_setjmp)

 xorl %eax, %eax
@@ -58,21 +52,6 @@ ENTRY (_setjmp)
 movl %ebp, (JB_BP*4)(%edx) /* Save caller's frame pointer. */

 movl %eax, JB_SIZE(%edx) /* No signal mask set. */
-#ifdef SHADOW_STACK_POINTER_OFFSET
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-# else
-xorl %ecx, %ecx
-# endif
-/* Get the current Shadow-Stack-Pointer and save it. */
-rdsspd %ecx
-movl %ecx, SHADOW_STACK_POINTER_OFFSET(%edx)
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-L(skip_ssp):
-# endif
-#endif
 ret
 END (_setjmp)
 libc_hidden_def (_setjmp)
@@ -23,18 +23,12 @@
 #include <sysdep.h>
 #include <pointer_guard.h>
 #include <jmpbuf-offsets.h>
-#include <jmp_buf-ssp.h>
 #include <stap-probe.h>

 #define PARMS 4 /* no space for saved regs */
 #define JMPBUF PARMS
 #define SIGMSK JMPBUF+4

-/* Don't save shadow stack register if shadow stack isn't enabled. */
-#if !SHSTK_ENABLED
-# undef SHADOW_STACK_POINTER_OFFSET
-#endif
-
 ENTRY (setjmp)
 /* Note that we have to use a non-exported symbol in the next
 jump since otherwise gas will emit it as a jump through the
@@ -58,21 +52,6 @@ ENTRY (setjmp)
 #endif
 movl %ecx, (JB_PC*4)(%eax)
 movl %ebp, (JB_BP*4)(%eax) /* Save caller's frame pointer. */
-#ifdef SHADOW_STACK_POINTER_OFFSET
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-# else
-xorl %ecx, %ecx
-# endif
-/* Get the current Shadow-Stack-Pointer and save it. */
-rdsspd %ecx
-movl %ecx, SHADOW_STACK_POINTER_OFFSET(%eax)
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-L(skip_ssp):
-# endif
-#endif

 /* Call __sigjmp_save. */
 pushl $1
@@ -61,7 +61,6 @@
 .hidden _init
 .type _init, @function
 _init:
-_CET_ENDBR
 pushl %ebx
 /* Maintain 16-byte stack alignment for called functions. */
 subl $8, %esp
@@ -82,7 +81,6 @@ _init:
 .hidden _fini
 .type _fini, @function
 _fini:
-_CET_ENDBR
 pushl %ebx
 subl $8, %esp
 LOAD_PIC_REG (bx)
@@ -37,7 +37,6 @@
 cfi_startproc
 .align 16
 _dl_tlsdesc_return:
-_CET_ENDBR
 movl 4(%eax), %eax
 ret
 cfi_endproc
@@ -59,7 +58,6 @@ _dl_tlsdesc_return:
 cfi_startproc
 .align 16
 _dl_tlsdesc_undefweak:
-_CET_ENDBR
 movl 4(%eax), %eax
 subl %gs:0, %eax
 ret
@@ -101,7 +99,6 @@ _dl_tlsdesc_dynamic (struct tlsdesc *tdp)
 cfi_startproc
 .align 16
 _dl_tlsdesc_dynamic:
-_CET_ENDBR
 /* Like all TLS resolvers, preserve call-clobbered registers.
 We need two scratch regs anyway. */
 subl $28, %esp
@@ -26,7 +26,6 @@
 .align 16
 _dl_runtime_resolve:
 cfi_adjust_cfa_offset (8)
-_CET_ENDBR
 pushl %eax # Preserve registers otherwise clobbered.
 cfi_adjust_cfa_offset (4)
 pushl %ecx
@@ -53,7 +52,6 @@ _dl_runtime_resolve:
 .align 16
 _dl_runtime_resolve_shstk:
 cfi_adjust_cfa_offset (8)
-_CET_ENDBR
 pushl %eax # Preserve registers otherwise clobbered.
 cfi_adjust_cfa_offset (4)
 pushl %edx
@@ -78,7 +76,6 @@ _dl_runtime_resolve_shstk:
 .align 16
 _dl_runtime_profile_shstk:
 cfi_adjust_cfa_offset (8)
-_CET_ENDBR
 pushl %esp
 cfi_adjust_cfa_offset (4)
 addl $8, (%esp) # Account for the pushed PLT data
@@ -123,7 +120,6 @@ _dl_runtime_profile_shstk:
 .align 16
 _dl_runtime_profile:
 cfi_adjust_cfa_offset (8)
-_CET_ENDBR
 pushl %esp
 cfi_adjust_cfa_offset (4)
 addl $8, (%esp) # Account for the pushed PLT data
@@ -29,7 +29,6 @@
 .type C_SYMBOL_NAME(_mcount), @function
 .align ALIGNARG(4)
 C_LABEL(_mcount)
-_CET_ENDBR
 /* Save the caller-clobbered registers. */
 pushl %eax
 pushl %ecx
@@ -58,7 +57,6 @@ weak_alias (_mcount, mcount)
 .type C_SYMBOL_NAME(__fentry__), @function
 .align ALIGNARG(4)
 C_LABEL(__fentry__)
-_CET_ENDBR
 /* Save the caller-clobbered registers. */
 pushl %eax
 pushl %ecx
@@ -44,13 +44,6 @@ ENTRY (__mpn_add_n)
 cfi_rel_offset (esi, 0)
 movl S2(%esp),%edx
 movl SIZE(%esp),%ecx
-
-#if IBT_ENABLED
-pushl %ebx
-cfi_adjust_cfa_offset (4)
-cfi_rel_offset (ebx, 0)
-#endif
-
 movl %ecx,%eax
 shrl $3,%ecx /* compute count for unrolled loop */
 negl %eax
@@ -62,9 +55,6 @@ ENTRY (__mpn_add_n)
 subl %eax,%esi /* ... by a constant when we ... */
 subl %eax,%edx /* ... enter the loop */
 shrl $2,%eax /* restore previous value */
-#if IBT_ENABLED
-leal -4(,%eax,4),%ebx /* Count for 4-byte endbr32 */
-#endif
 #ifdef PIC
 /* Calculate start address in loop for PIC. */
 leal (L(oop)-L(0)-3)(%eax,%eax,8),%eax
@@ -73,40 +63,30 @@ L(0):
 #else
 /* Calculate start address in loop for non-PIC. */
 leal (L(oop) - 3)(%eax,%eax,8),%eax
-#endif
-#if IBT_ENABLED
-addl %ebx,%eax /* Adjust for endbr32 */
 #endif
 jmp *%eax /* jump into loop */
 ALIGN (3)
 L(oop): movl (%esi),%eax
 adcl (%edx),%eax
 movl %eax,(%edi)
-_CET_ENDBR
 movl 4(%esi),%eax
 adcl 4(%edx),%eax
 movl %eax,4(%edi)
-_CET_ENDBR
 movl 8(%esi),%eax
 adcl 8(%edx),%eax
 movl %eax,8(%edi)
-_CET_ENDBR
 movl 12(%esi),%eax
 adcl 12(%edx),%eax
 movl %eax,12(%edi)
-_CET_ENDBR
 movl 16(%esi),%eax
 adcl 16(%edx),%eax
 movl %eax,16(%edi)
-_CET_ENDBR
 movl 20(%esi),%eax
 adcl 20(%edx),%eax
 movl %eax,20(%edi)
-_CET_ENDBR
 movl 24(%esi),%eax
 adcl 24(%edx),%eax
 movl %eax,24(%edi)
-_CET_ENDBR
 movl 28(%esi),%eax
 adcl 28(%edx),%eax
 movl %eax,28(%edi)
@@ -119,11 +99,6 @@ L(oop): movl (%esi),%eax
 sbbl %eax,%eax
 negl %eax
-
-#if IBT_ENABLED
-popl %ebx
-cfi_adjust_cfa_offset (-4)
-cfi_restore (ebx)
-#endif
 popl %esi
 cfi_adjust_cfa_offset (-4)
 cfi_restore (esi)
@@ -80,7 +80,7 @@ L(not_1):
 LOAD_JUMP_TABLE_ENTRY (L(table_32bytes), %ecx)
 addl %ecx, %edx
 addl %ecx, %esi
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx

 ALIGN (4)
 L(28bytes):
@@ -326,7 +326,7 @@ L(32bytesormore):
 LOAD_JUMP_TABLE_ENTRY (L(table_32bytes), %ecx)
 addl %ecx, %edx
 addl %ecx, %esi
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx

 L(load_ecx_28):
 addl $0x4, %edx
@@ -58,7 +58,7 @@
 absolute address. */ \
 addl (%ebx,INDEX,SCALE), %ebx; \
 /* We loaded the jump table and adjusted EDX/ESI. Go. */ \
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx
 # else
 # define JMPTBL(I, B) I

@@ -66,7 +66,7 @@
 jump table with relative offsets. INDEX is a register contains the
 index into the jump table. SCALE is the scale of INDEX. */
 # define BRANCH_TO_JMPTBL_ENTRY(TABLE, INDEX, SCALE) \
-_CET_NOTRACK jmp *TABLE(,INDEX,SCALE)
+jmp *TABLE(,INDEX,SCALE)
 # endif


@@ -64,7 +64,7 @@
 absolute address. */ \
 addl (%ebx,INDEX,SCALE), %ebx; \
 /* We loaded the jump table. Go. */ \
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx

 # define BRANCH_TO_JMPTBL_ENTRY_VALUE(TABLE) \
 addl $(TABLE - .), %ebx
@@ -72,7 +72,7 @@
 # define BRANCH_TO_JMPTBL_ENTRY_TAIL(TABLE, INDEX, SCALE) \
 addl (%ebx,INDEX,SCALE), %ebx; \
 /* We loaded the jump table. Go. */ \
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx
 #else
 # define PARMS 4
 # define ENTRANCE
@@ -84,12 +84,12 @@
 absolute offsets. INDEX is a register contains the index into the
 jump table. SCALE is the scale of INDEX. */
 # define BRANCH_TO_JMPTBL_ENTRY(TABLE, INDEX, SCALE) \
-_CET_NOTRACK jmp *TABLE(,INDEX,SCALE)
+jmp *TABLE(,INDEX,SCALE)

 # define BRANCH_TO_JMPTBL_ENTRY_VALUE(TABLE)

 # define BRANCH_TO_JMPTBL_ENTRY_TAIL(TABLE, INDEX, SCALE) \
-_CET_NOTRACK jmp *TABLE(,INDEX,SCALE)
+jmp *TABLE(,INDEX,SCALE)
 #endif

 .section .text.ssse3,"ax",@progbits
@@ -64,7 +64,7 @@
 absolute address. */ \
 addl (%ebx, INDEX, SCALE), %ebx; \
 /* We loaded the jump table. Go. */ \
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx
 # else

 # define PARMS 4
@@ -78,7 +78,7 @@
 jump table. SCALE is the scale of INDEX. */

 # define BRANCH_TO_JMPTBL_ENTRY(TABLE, INDEX, SCALE) \
-_CET_NOTRACK jmp *TABLE(, INDEX, SCALE)
+jmp *TABLE(, INDEX, SCALE)
 # endif

 .section .text.ssse3,"ax",@progbits
@@ -56,7 +56,7 @@
 add (%ebx,%ecx,4), %ebx; \
 add %ecx, %edx; \
 /* We loaded the jump table and adjusted EDX. Go. */ \
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx
 #else
 # define ENTRANCE
 # define RETURN_END ret
@@ -68,7 +68,7 @@
 absolute offsets. */
 # define BRANCH_TO_JMPTBL_ENTRY(TABLE) \
 add %ecx, %edx; \
-_CET_NOTRACK jmp *TABLE(,%ecx,4)
+jmp *TABLE(,%ecx,4)
 #endif

 .section .text.sse2,"ax",@progbits
@@ -56,7 +56,7 @@
 add (%ebx,%ecx,4), %ebx; \
 add %ecx, %edx; \
 /* We loaded the jump table and adjusted EDX. Go. */ \
-_CET_NOTRACK jmp *%ebx
+jmp *%ebx
 #else
 # define ENTRANCE
 # define RETURN_END ret
@@ -68,7 +68,7 @@
 absolute offsets. */
 # define BRANCH_TO_JMPTBL_ENTRY(TABLE) \
 add %ecx, %edx; \
-_CET_NOTRACK jmp *TABLE(,%ecx,4)
+jmp *TABLE(,%ecx,4)
 #endif

 .section .text.sse2,"ax",@progbits
@@ -49,7 +49,7 @@
 absolute address. */ \
 addl (%ecx,INDEX,SCALE), %ecx; \
 /* We loaded the jump table and adjusted ECX. Go. */ \
-_CET_NOTRACK jmp *%ecx
+jmp *%ecx
 # else
 # define JMPTBL(I, B) I

@@ -58,7 +58,7 @@
 jump table. SCALE is the scale of INDEX. */

 # define BRANCH_TO_JMPTBL_ENTRY(TABLE, INDEX, SCALE) \
-_CET_NOTRACK jmp *TABLE(,INDEX,SCALE)
+jmp *TABLE(,INDEX,SCALE)
 # endif

 # ifndef STRCAT
@@ -64,7 +64,7 @@
 absolute address. */ \
 addl (%ecx,INDEX,SCALE), %ecx; \
 /* We loaded the jump table and adjusted ECX. Go. */ \
-_CET_NOTRACK jmp *%ecx
+jmp *%ecx
 # else
 # define JMPTBL(I, B) I

@@ -73,7 +73,7 @@
 jump table. SCALE is the scale of INDEX. */

 # define BRANCH_TO_JMPTBL_ENTRY(TABLE, INDEX, SCALE) \
-_CET_NOTRACK jmp *TABLE(,INDEX,SCALE)
+jmp *TABLE(,INDEX,SCALE)
 # endif

 .text
@@ -19,7 +19,6 @@
 #include <sysdep.h>
 #include <pointer_guard.h>
 #include <jmpbuf-offsets.h>
-#include <jmp_buf-ssp.h>
 #include <asm-syntax.h>
 #include <stap-probe.h>

@@ -27,11 +26,6 @@
 #define JMPBUF PARMS
 #define SIGMSK JMPBUF+4

-/* Don't save shadow stack register if shadow stack isn't enabled. */
-#if !SHSTK_ENABLED
-# undef SHADOW_STACK_POINTER_OFFSET
-#endif
-
 ENTRY (__sigsetjmp)

 movl JMPBUF(%esp), %eax
@@ -53,21 +47,6 @@ ENTRY (__sigsetjmp)
 movl %ecx, (JB_PC*4)(%eax)
 movl %ebp, (JB_BP*4)(%eax) /* Save caller's frame pointer. */

-#ifdef SHADOW_STACK_POINTER_OFFSET
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-# else
-xorl %ecx, %ecx
-# endif
-/* Get the current Shadow-Stack-Pointer and save it. */
-rdsspd %ecx
-movl %ecx, SHADOW_STACK_POINTER_OFFSET(%eax)
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-L(skip_ssp):
-# endif
-#endif
 #if IS_IN (rtld)
 /* In ld.so we never save the signal mask. */
 xorl %eax, %eax
@@ -132,7 +132,6 @@ ENTRY (_start)

 #if defined PIC && !defined SHARED
 __wrap_main:
-_CET_ENDBR
 jmp main@PLT
 #endif
 END (_start)
@@ -40,13 +40,6 @@ ENTRY (__mpn_sub_n)
 cfi_rel_offset (esi, 0)
 movl S2(%esp),%edx
 movl SIZE(%esp),%ecx
-
-#if IBT_ENABLED
-pushl %ebx
-cfi_adjust_cfa_offset (4)
-cfi_rel_offset (ebx, 0)
-#endif
-
 movl %ecx,%eax
 shrl $3,%ecx /* compute count for unrolled loop */
 negl %eax
@@ -58,9 +51,6 @@ ENTRY (__mpn_sub_n)
 subl %eax,%esi /* ... by a constant when we ... */
 subl %eax,%edx /* ... enter the loop */
 shrl $2,%eax /* restore previous value */
-#if defined __CET__ && (__CET__ & 1) != 0
-leal -4(,%eax,4),%ebx /* Count for 4-byte endbr32 */
-#endif
 #ifdef PIC
 /* Calculate start address in loop for PIC. Due to limitations in some
 assemblers, Loop-L0-3 cannot be put into the leal */
@@ -74,40 +64,30 @@ L(0): leal (%eax,%eax,8),%eax
 #else
 /* Calculate start address in loop for non-PIC. */
 leal (L(oop) - 3)(%eax,%eax,8),%eax
-#endif
-#if defined __CET__ && (__CET__ & 1) != 0
-addl %ebx,%eax /* Adjust for endbr32 */
 #endif
 jmp *%eax /* jump into loop */
 ALIGN (3)
 L(oop): movl (%esi),%eax
 sbbl (%edx),%eax
 movl %eax,(%edi)
-_CET_ENDBR
 movl 4(%esi),%eax
 sbbl 4(%edx),%eax
 movl %eax,4(%edi)
-_CET_ENDBR
 movl 8(%esi),%eax
 sbbl 8(%edx),%eax
 movl %eax,8(%edi)
-_CET_ENDBR
 movl 12(%esi),%eax
 sbbl 12(%edx),%eax
 movl %eax,12(%edi)
-_CET_ENDBR
 movl 16(%esi),%eax
 sbbl 16(%edx),%eax
 movl %eax,16(%edi)
-_CET_ENDBR
 movl 20(%esi),%eax
 sbbl 20(%edx),%eax
 movl %eax,20(%edi)
-_CET_ENDBR
 movl 24(%esi),%eax
 sbbl 24(%edx),%eax
 movl %eax,24(%edi)
-_CET_ENDBR
 movl 28(%esi),%eax
 sbbl 28(%edx),%eax
 movl %eax,28(%edi)
@@ -120,11 +100,6 @@ L(oop): movl (%esi),%eax
 sbbl %eax,%eax
 negl %eax
-
-#if defined __CET__ && (__CET__ & 1) != 0
-popl %ebx
-cfi_adjust_cfa_offset (-4)
-cfi_restore (ebx)
-#endif
 popl %esi
 cfi_adjust_cfa_offset (-4)
 cfi_restore (esi)
@@ -18,6 +18,8 @@

 #include <sysdeps/x86/sysdep.h>

+#define CET_ENABLED 0
+
 /* It is desirable that the names of PIC thunks match those used by
 GCC so that multiple copies are eliminated by the linker. Because
 GCC 4.6 and earlier use __i686 in the names, it is necessary to
@@ -37,6 +39,15 @@

 /* Syntactic details of assembler. */

+/* Define an entry point visible from C. */
+#define ENTRY_P2ALIGN(name, alignment) \
+.globl C_SYMBOL_NAME(name); \
+.type C_SYMBOL_NAME(name),@function; \
+.align ALIGNARG(alignment); \
+C_LABEL(name) \
+cfi_startproc; \
+CALL_MCOUNT
+
 /* If compiled for profiling, call `mcount' at the start of each function. */
 #ifdef PROF
 /* The mcount code relies on a normal frame pointer being on the stack
@@ -18,14 +18,9 @@
 #include <sysdep.h>
 #include <pointer_guard.h>
 #include <jmpbuf-offsets.h>
-#include <jmp_buf-ssp.h>
 #include <asm-syntax.h>
 #include <stap-probe.h>

-/* Don't restore shadow stack register if shadow stack isn't enabled. */
-#if !SHSTK_ENABLED
-# undef SHADOW_STACK_POINTER_OFFSET
-#endif
-
 .section .rodata.str1.1,"aMS",@progbits,1
 .type longjmp_msg,@object
@@ -52,38 +47,6 @@ longjmp_msg:
 ENTRY (____longjmp_chk)
 movl 4(%esp), %ecx /* User's jmp_buf in %ecx. */

-#ifdef SHADOW_STACK_POINTER_OFFSET
-# if IS_IN (libc) && defined SHARED && defined FEATURE_1_OFFSET
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-# else
-xorl %edx, %edx
-# endif
-/* Check and adjust the Shadow-Stack-Pointer. */
-rdsspd %edx
-/* And compare it with the saved ssp value. */
-subl SHADOW_STACK_POINTER_OFFSET(%ecx), %edx
-je L(skip_ssp)
-/* Count the number of frames to adjust and adjust it
-with incssp instruction. The instruction can adjust
-the ssp by [0..255] value only thus use a loop if
-the number of frames is bigger than 255. */
-negl %edx
-shrl $2, %edx
-/* NB: We saved Shadow-Stack-Pointer of setjmp. Since we are
-restoring Shadow-Stack-Pointer of setjmp's caller, we
-need to unwind shadow stack by one more frame. */
-addl $1, %edx
-movl $255, %ebx
-L(loop):
-cmpl %ebx, %edx
-cmovb %edx, %ebx
-incsspd %ebx
-subl %ebx, %edx
-ja L(loop)
-L(skip_ssp):
-#endif
 /* Save the return address now. */
 movl (JB_PC*4)(%ecx), %edx
 /* Get the stack pointer. */
@@ -17,7 +17,6 @@
 <https://www.gnu.org/licenses/>. */

 #include <sysdep.h>
-#include <asm/prctl.h>

 #include "ucontext_i.h"

@@ -42,61 +41,6 @@ ENTRY(__getcontext)
 movw %fs, %dx
 movl %edx, oFS(%eax)

-#if SHSTK_ENABLED
-/* Check if shadow stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(no_shstk)
-
-/* Save EAX in EDX. */
-movl %eax, %edx
-
-xorl %eax, %eax
-cmpl %gs:SSP_BASE_OFFSET, %eax
-jnz L(shadow_stack_bound_recorded)
-
-/* Save EBX in the first scratch register slot. */
-movl %ebx, oSCRATCH1(%edx)
-
-/* Get the base address and size of the default shadow stack
-which must be the current shadow stack since nothing has
-been recorded yet. */
-sub $24, %esp
-mov %esp, %ecx
-movl $ARCH_CET_STATUS, %ebx
-movl $__NR_arch_prctl, %eax
-ENTER_KERNEL
-testl %eax, %eax
-jz L(continue_no_err)
-
-/* This should never happen. */
-hlt
-
-L(continue_no_err):
-/* Restore EBX from the first scratch register slot. */
-movl oSCRATCH1(%edx), %ebx
-
-/* Record the base of the current shadow stack. */
-movl 8(%esp), %eax
-movl %eax, %gs:SSP_BASE_OFFSET
-add $24, %esp
-
-L(shadow_stack_bound_recorded):
-/* Load address of the context data structure. */
-movl 4(%esp), %eax
-
-/* Get the current shadow stack pointer. */
-rdsspd %edx
-/* NB: Save the caller's shadow stack so that we can jump back
-to the caller directly. */
-addl $4, %edx
-movl %edx, oSSP(%eax)
-
-/* Save the current shadow stack base in ucontext. */
-movl %gs:SSP_BASE_OFFSET, %edx
-movl %edx, (oSSP + 4)(%eax)
-
-L(no_shstk):
-#endif
 /* We have separate floating-point register content memory on the
 stack. We use the __fpregs_mem block in the context. Set the
 links up correctly. */
@@ -17,7 +17,6 @@
 <https://www.gnu.org/licenses/>. */

 #include <sysdep.h>
-#include <asm/prctl.h>

 #include "ucontext_i.h"

@@ -68,127 +67,6 @@ ENTRY(__makecontext)
 jnz 1b
 2:

-#if SHSTK_ENABLED
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(skip_ssp)
-
-/* Reload the pointer to ucontext. */
-movl 4(%esp), %eax
-
-/* Shadow stack is enabled. We need to allocate a new shadow
-stack. */
-subl oSS_SP(%eax), %edx
-shrl $STACK_SIZE_TO_SHADOW_STACK_SIZE_SHIFT, %edx
-
-/* Align shadow stack size to 8 bytes. */
-addl $7, %edx
-andl $-8, %edx
-
-/* Store shadow stack size in __ssp[2]. */
-movl %edx, (oSSP + 8)(%eax)
-
-/* Save ESI in the second scratch register slot. */
-movl %esi, oSCRATCH2(%eax)
-/* Save EDI in the third scratch register slot. */
-movl %edi, oSCRATCH3(%eax)
-
-/* Save the pointer to ucontext. */
-movl %eax, %edi
-
-/* Get the original shadow stack pointer. */
-rdsspd %esi
-
-/* Align the saved original shadow stack pointer to the next
-8 byte aligned boundary. */
-andl $-8, %esi
-
-/* Load the top of the new stack into EDX. */
-movl oESP(%eax), %edx
-
-/* We need to terminate the FDE here because the unwinder looks
-at ra-1 for unwind information. */
-cfi_endproc
-
-/* Swap the original stack pointer with the top of the new
-stack. */
-xchgl %esp, %edx
-
-/* Add 4 bytes since CALL will push the 4-byte return address
-onto stack. */
-addl $4, %esp
-
-/* Allocate the new shadow stack. Save EBX in the first scratch
-register slot. */
-movl %ebx, oSCRATCH1(%eax)
-
-/* CET syscall takes 64-bit sizes. */
-subl $16, %esp
-movl (oSSP + 8)(%eax), %ecx
-movl %ecx, (%esp)
-movl $0, 4(%esp)
-movl %ecx, 8(%esp)
-movl $0, 12(%esp)
-movl %esp, %ecx
-
-movl $ARCH_CET_ALLOC_SHSTK, %ebx
-movl $__NR_arch_prctl, %eax
-ENTER_KERNEL
-testl %eax, %eax
-jne L(hlt) /* This should never happen. */
-
-/* Copy the base address of the new shadow stack to __ssp[1]. */
-movl (%esp), %eax
-movl %eax, (oSSP + 4)(%edi)
-
-addl $16, %esp
-
-/* Restore EBX from the first scratch register slot. */
-movl oSCRATCH1(%edi), %ebx
-
-/* Get the size of the new shadow stack. */
-movl (oSSP + 8)(%edi), %ecx
-
-/* Use the restore stoken to restore the new shadow stack. */
-rstorssp -8(%eax, %ecx)
-
-/* Save the restore token at the next 8 byte aligned boundary
-on the original shadow stack. */
-saveprevssp
-
-/* Push the address of "jmp exitcode" onto the new stack as
-well as the new shadow stack. */
-call 1f
-jmp L(exitcode)
-1:
-
-/* Get the new shadow stack pointer. */
-rdsspd %eax
-
-/* Use the restore stoken to restore the original shadow stack. */
-rstorssp -8(%esi)
-
-/* Save the restore token on the new shadow stack. */
-saveprevssp
-
-/* Store the new shadow stack pointer in __ssp[0]. */
-movl %eax, oSSP(%edi)
-
-/* Restore the original stack. */
-mov %edx, %esp
-
-cfi_startproc
-
-/* Restore ESI from the second scratch register slot. */
-movl oSCRATCH2(%edi), %esi
-/* Restore EDI from the third scratch register slot. */
-movl oSCRATCH3(%edi), %edi
-
-ret
-
-L(skip_ssp):
-#endif
-
 /* If the function we call returns we must continue with the
 context which is given in the uc_link element. To do this
 set the return address for the function the user provides
@@ -244,7 +122,6 @@ L(call_exit):
 call HIDDEN_JUMPTARGET(exit)
 /* The 'exit' call should never return. In case it does cause
 the process to terminate. */
-L(hlt):
 hlt
 cfi_startproc
 END(__makecontext)
@@ -17,7 +17,6 @@
 <https://www.gnu.org/licenses/>. */

 #include <sysdep.h>
-#include <asm/prctl.h>

 #include "ucontext_i.h"

@@ -56,6 +55,9 @@ ENTRY(__setcontext)
 movl oFS(%eax), %ecx
 movw %cx, %fs

+/* Fetch the address to return to. */
+movl oEIP(%eax), %ecx
+
 /* Load the new stack pointer. */
 cfi_def_cfa (eax, 0)
 cfi_offset (edi, oEDI)
@@ -64,103 +66,6 @@ ENTRY(__setcontext)
 cfi_offset (ebx, oEBX)
 movl oESP(%eax), %esp

-#if SHSTK_ENABLED
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(no_shstk)
-
-/* If the base of the target shadow stack is the same as the
-base of the current shadow stack, we unwind the shadow
-stack. Otherwise it is a stack switch and we look for a
-restore token. */
-movl oSSP(%eax), %esi
-movl %esi, %edi
-
-/* Get the base of the target shadow stack. */
-movl (oSSP + 4)(%eax), %ecx
-cmpl %gs:SSP_BASE_OFFSET, %ecx
-je L(unwind_shadow_stack)
-
-/* Align the saved original shadow stack pointer to the next
-8 byte aligned boundary. */
-andl $-8, %esi
-
-L(find_restore_token_loop):
-/* Look for a restore token. */
-movl -8(%esi), %ebx
-andl $-8, %ebx
-cmpl %esi, %ebx
-je L(restore_shadow_stack)
-
-/* Try the next slot. */
-subl $8, %esi
-jmp L(find_restore_token_loop)
-
-L(restore_shadow_stack):
-/* Pop return address from the shadow stack since setcontext
-will not return. */
-movl $1, %ebx
-incsspd %ebx
-
-/* Use the restore stoken to restore the target shadow stack. */
-rstorssp -8(%esi)
-
-/* Save the restore token on the old shadow stack. NB: This
-restore token may be checked by setcontext or swapcontext
-later. */
-saveprevssp
-
-/* Record the new shadow stack base that was switched to. */
-movl (oSSP + 4)(%eax), %ebx
-movl %ebx, %gs:SSP_BASE_OFFSET
-
-L(unwind_shadow_stack):
-rdsspd %ebx
-subl %edi, %ebx
-je L(skip_unwind_shadow_stack)
-negl %ebx
-shrl $2, %ebx
-movl $255, %esi
-L(loop):
-cmpl %esi, %ebx
-cmovb %ebx, %esi
-incsspd %esi
-subl %esi, %ebx
-ja L(loop)
-
-L(skip_unwind_shadow_stack):
-
-/* Load the values of all the preserved registers (except ESP). */
-movl oEDI(%eax), %edi
-movl oESI(%eax), %esi
-movl oEBP(%eax), %ebp
-movl oEBX(%eax), %ebx
-
-/* Get the return address set with getcontext. */
-movl oEIP(%eax), %ecx
-
-/* Check if return address is valid for the case when setcontext
-is invoked from L(exitcode) with linked context. */
-rdsspd %eax
-cmpl (%eax), %ecx
-/* Clear EAX to indicate success. NB: Don't use xorl to keep
-EFLAGS for jne. */
-movl $0, %eax
-jne L(jmp)
-/* Return to the new context if return address valid. */
-pushl %ecx
-ret
-
-L(jmp):
-/* Jump to the new context directly. */
-jmp *%ecx
-
-L(no_shstk):
-#endif
-
-/* Fetch the address to return to. */
-movl oEIP(%eax), %ecx
-
 /* Push the return address on the new stack so we can return there. */
 pushl %ecx

@@ -17,7 +17,6 @@
 <https://www.gnu.org/licenses/>. */

 #include <sysdep.h>
-#include <asm/prctl.h>

 #include "ucontext_i.h"

@@ -76,144 +75,6 @@ ENTRY(__swapcontext)
 movl oFS(%eax), %edx
 movw %dx, %fs

-#if SHSTK_ENABLED
-/* Check if Shadow Stack is enabled. */
-testl $X86_FEATURE_1_SHSTK, %gs:FEATURE_1_OFFSET
-jz L(no_shstk)
-
-xorl %eax, %eax
-cmpl %gs:SSP_BASE_OFFSET, %eax
-jnz L(shadow_stack_bound_recorded)
-
-/* Get the base address and size of the default shadow stack
-which must be the current shadow stack since nothing has
-been recorded yet. */
-sub $24, %esp
-mov %esp, %ecx
-movl $ARCH_CET_STATUS, %ebx
-movl $__NR_arch_prctl, %eax
-ENTER_KERNEL
-testl %eax, %eax
-jz L(continue_no_err)
-
-/* This should never happen. */
-hlt
-
-L(continue_no_err):
-/* Record the base of the current shadow stack. */
-movl 8(%esp), %eax
-movl %eax, %gs:SSP_BASE_OFFSET
-add $24, %esp
-
-L(shadow_stack_bound_recorded):
-/* Load address of the context data structure we save in. */
-movl 4(%esp), %eax
-
-/* Load address of the context data structure we swap in */
-movl 8(%esp), %edx
-
-/* If we unwind the stack, we can't undo stack unwinding. Just
-save the target shadow stack pointer as the current shadow
-stack pointer. */
-movl oSSP(%edx), %ecx
-movl %ecx, oSSP(%eax)
-
-/* Save the current shadow stack base in ucontext. */
-movl %gs:SSP_BASE_OFFSET, %ecx
-movl %ecx, (oSSP + 4)(%eax)
-
-/* If the base of the target shadow stack is the same as the
-base of the current shadow stack, we unwind the shadow
-stack. Otherwise it is a stack switch and we look for a
-restore token. */
-movl oSSP(%edx), %esi
-movl %esi, %edi
-
-/* Get the base of the target shadow stack. */
-movl (oSSP + 4)(%edx), %ecx
-cmpl %gs:SSP_BASE_OFFSET, %ecx
-je L(unwind_shadow_stack)
-
-/* Align the saved original shadow stack pointer to the next
-8 byte aligned boundary. */
-andl $-8, %esi
-
-L(find_restore_token_loop):
-/* Look for a restore token. */
-movl -8(%esi), %ebx
-andl $-8, %ebx
-cmpl %esi, %ebx
-je L(restore_shadow_stack)
-
-/* Try the next slot. */
-subl $8, %esi
-jmp L(find_restore_token_loop)
-
-L(restore_shadow_stack):
-/* The target shadow stack will be restored. Save the current
-shadow stack pointer. */
-rdsspd %ecx
-movl %ecx, oSSP(%eax)
-
-/* Use the restore stoken to restore the target shadow stack. */
-rstorssp -8(%esi)
-
-/* Save the restore token on the old shadow stack. NB: This
-restore token may be checked by setcontext or swapcontext
-later. */
-saveprevssp
-
-/* Record the new shadow stack base that was switched to. */
-movl (oSSP + 4)(%edx), %ebx
-movl %ebx, %gs:SSP_BASE_OFFSET
-
-L(unwind_shadow_stack):
-rdsspd %ebx
-subl %edi, %ebx
-je L(skip_unwind_shadow_stack)
-negl %ebx
-shrl $2, %ebx
-movl $255, %esi
-L(loop):
-cmpl %esi, %ebx
-cmovb %ebx, %esi
-incsspd %esi
-subl %esi, %ebx
-ja L(loop)
-
-L(skip_unwind_shadow_stack):
-
-/* Load the new stack pointer. */
-movl oESP(%edx), %esp
-
-/* Load the values of all the preserved registers (except ESP). */
-movl oEDI(%edx), %edi
-movl oESI(%edx), %esi
-movl oEBP(%edx), %ebp
-movl oEBX(%edx), %ebx
-
-/* Get the return address set with getcontext. */
-movl oEIP(%edx), %ecx
-
-/* Check if return address is valid for the case when setcontext
-is invoked from L(exitcode) with linked context. */
-rdsspd %eax
-cmpl (%eax), %ecx
-/* Clear EAX to indicate success. NB: Don't use xorl to keep
-EFLAGS for jne. */
-movl $0, %eax
-jne L(jmp)
-/* Return to the new context if return address valid. */
-pushl %ecx
-ret
-
-L(jmp):
-/* Jump to the new context directly. */
-jmp *%ecx
-
-L(no_shstk):
-#endif

 /* Fetch the address to return to. */
 movl oEIP(%eax), %ecx

@@ -446,9 +446,4 @@ struct libc_do_syscall_args

 #endif /* __ASSEMBLER__ */

-/* Each shadow stack slot takes 4 bytes. Assuming that each stack
-frame takes 128 bytes, this is used to compute shadow stack size
-from stack size. */
-#define STACK_SIZE_TO_SHADOW_STACK_SIZE_SHIFT 5
-
 #endif /* linux/i386/sysdep.h */
@@ -22,10 +22,6 @@ oEBP mreg (EBP)
 oESP mreg (ESP)
 oEBX mreg (EBX)
 oEIP mreg (EIP)
-oSCRATCH1 mreg (EAX)
-oSCRATCH2 mreg (ECX)
-oSCRATCH3 mreg (EDX)
 oFPREGS mcontext (fpregs)
 oSIGMASK ucontext (uc_sigmask)
 oFPREGSMEM ucontext (__fpregs_mem)
-oSSP ucontext (__ssp)
@@ -20,6 +20,7 @@
 #include <bits/errno.h>
 #include <tcb-offsets.h>

+
 /* Clone the calling process, but without copying the whole address space.
 The calling process is suspended until the new process exits or is
 replaced by a call to `execve'. Return -1 for errors, 0 to the new process,
@@ -46,29 +47,6 @@ ENTRY (__vfork)
 /* Branch forward if it failed. */
 jae SYSCALL_ERROR_LABEL

-#if SHSTK_ENABLED
-/* Check if shadow stack is in use. */
-xorl %edx, %edx
-rdsspd %edx
-testl %edx, %edx
-/* Normal return if shadow stack isn't in use. */
-je L(no_shstk)
-
-testl %eax, %eax
-/* In parent, normal return. */
-jnz L(no_shstk)
-
-/* NB: In child, jump back to caller via indirect branch without
-popping shadow stack which is shared with parent. Keep shadow
-stack mismatched so that child returns in the vfork-calling
-function will trigger SIGSEGV. */
-popl %ecx
-cfi_adjust_cfa_offset (-4)
-jmp *%ecx
-
-L(no_shstk):
-#endif
-
 ret

 PSEUDO_END (__vfork)
@@ -21,33 +21,6 @@

 #include <sysdeps/generic/sysdep.h>

-/* __CET__ is defined by GCC with Control-Flow Protection values:
-
-enum cf_protection_level
-{
-CF_NONE = 0,
-CF_BRANCH = 1 << 0,
-CF_RETURN = 1 << 1,
-CF_FULL = CF_BRANCH | CF_RETURN,
-CF_SET = 1 << 2
-};
-*/
-
-/* Set if CF_BRANCH (IBT) is enabled. */
-#define X86_FEATURE_1_IBT (1U << 0)
-/* Set if CF_RETURN (SHSTK) is enabled. */
-#define X86_FEATURE_1_SHSTK (1U << 1)
-
-#ifdef __CET__
-# define CET_ENABLED 1
-# define IBT_ENABLED (__CET__ & X86_FEATURE_1_IBT)
-# define SHSTK_ENABLED (__CET__ & X86_FEATURE_1_SHSTK)
-#else
-# define CET_ENABLED 0
-# define IBT_ENABLED 0
-# define SHSTK_ENABLED 0
-#endif
-
 /* Offset for fxsave/xsave area used by _dl_runtime_resolve. Also need
 space to preserve RCX, RDX, RSI, RDI, R8, R9 and RAX. It must be
 aligned to 16 bytes for fxsave and 64 bytes for xsave. */
@@ -66,27 +39,10 @@ enum cf_protection_level

 /* Syntactic details of assembler. */

-#ifdef _CET_ENDBR
-# define _CET_NOTRACK notrack
-#else
-# define _CET_ENDBR
-# define _CET_NOTRACK
-#endif
-
 /* ELF uses byte-counts for .align, most others use log2 of count of bytes. */
 #define ALIGNARG(log2) 1<<log2
 #define ASM_SIZE_DIRECTIVE(name) .size name,.-name;

-/* Define an entry point visible from C. */
-#define ENTRY_P2ALIGN(name, alignment) \
-.globl C_SYMBOL_NAME(name); \
-.type C_SYMBOL_NAME(name),@function; \
-.align ALIGNARG(alignment); \
-C_LABEL(name) \
-cfi_startproc; \
-_CET_ENDBR; \
-CALL_MCOUNT
-
 /* Common entry 16 byte aligns. */
 #define ENTRY(name) ENTRY_P2ALIGN (name, 4)

@@ -22,10 +22,52 @@
 #include <sysdeps/x86/sysdep.h>
 #include <x86-lp_size.h>

+/* __CET__ is defined by GCC with Control-Flow Protection values:
+
+enum cf_protection_level
+{
+CF_NONE = 0,
+CF_BRANCH = 1 << 0,
+CF_RETURN = 1 << 1,
+CF_FULL = CF_BRANCH | CF_RETURN,
+CF_SET = 1 << 2
+};
+*/
+
+/* Set if CF_BRANCH (IBT) is enabled. */
+#define X86_FEATURE_1_IBT (1U << 0)
+/* Set if CF_RETURN (SHSTK) is enabled. */
+#define X86_FEATURE_1_SHSTK (1U << 1)
+
+#ifdef __CET__
+# define CET_ENABLED 1
+# define SHSTK_ENABLED (__CET__ & X86_FEATURE_1_SHSTK)
+#else
+# define CET_ENABLED 0
+# define SHSTK_ENABLED 0
+#endif
+
 #ifdef __ASSEMBLER__

 /* Syntactic details of assembler. */

+#ifdef _CET_ENDBR
+# define _CET_NOTRACK notrack
+#else
+# define _CET_ENDBR
+# define _CET_NOTRACK
+#endif
+
+/* Define an entry point visible from C. */
+#define ENTRY_P2ALIGN(name, alignment) \
+.globl C_SYMBOL_NAME(name); \
+.type C_SYMBOL_NAME(name),@function; \
+.align ALIGNARG(alignment); \
+C_LABEL(name) \
+cfi_startproc; \
+_CET_ENDBR; \
+CALL_MCOUNT
+
 /* This macro is for setting proper CFI with DW_CFA_expression describing
 the register as saved relative to %rsp instead of relative to the CFA.
 Expression is DW_OP_drop, DW_OP_breg7 (%rsp is register 7), sleb128 offset