Commit b6aca38d authored by Dmitry Safonov, committed by Andrei Vagin

x86/restorer: add sigreturn to compat mode

Do a pure 32-bit sigreturn: change the code selector to the 32-bit one
and invoke rt_sigreturn via int $0x80. At this point the 32-bit vDSO
should already be remapped, so everything should be fine.

Cc: Cyrill Gorcunov <gorcunov@openvz.org>
Signed-off-by: Dmitry Safonov <dsafonov@virtuozzo.com>
Signed-off-by: Pavel Emelyanov <xemul@virtuozzo.com>
Signed-off-by: Andrei Vagin <avagin@virtuozzo.com>
parent 5aba43bd
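
For context on the trick the x86 hunks below rely on: a 64-bit task can drop into 32-bit compat mode simply by reloading %cs with the kernel's 32-bit user code selector via a far return. Below is a minimal standalone sketch of the same mechanism. It is not CRIU code; it assumes stock Linux selector values (USER32_CS = 0x23, USER_CS = 0x33), a kernel with CONFIG_IA32_EMULATION, and a non-PIE build so code addresses fit in 32 bits.

/*
 * Minimal sketch of the mode-switch trick, NOT CRIU code.
 * Assumptions: stock x86-64 Linux GDT (USER32_CS = 0x23,
 * USER_CS = 0x33), kernel built with CONFIG_IA32_EMULATION.
 * Build non-PIE so code addresses fit in 32 bits:
 *   gcc -no-pie -static -mno-red-zone demo.c -o demo
 */
#include <stdio.h>
#include <unistd.h>

#define __stringify_1(x)	#x
#define __stringify(x)		__stringify_1(x)

#define USER32_CS	0x23	/* 32-bit user code selector (assumed) */
#define USER_CS		0x33	/* 64-bit user code selector (assumed) */
#define __NR32_getpid	20	/* getpid in the i386 syscall table */

static long compat_getpid(void)
{
	long ret;

	asm volatile(
		"pushq $"__stringify(USER32_CS)"	\n"	/* CS to return to */
		"pushq $1f				\n"	/* RIP to return to */
		"lretq					\n"	/* far return: loads 32-bit CS */
		"1:					\n"
		".code32				\n"	/* now executing in compat mode */
		"movl $"__stringify(__NR32_getpid)", %%eax \n"
		"int $0x80				\n"	/* 32-bit syscall entry */
		"ljmpl $"__stringify(USER_CS)", $2f	\n"	/* far jump back to 64-bit CS */
		".code64				\n"
		"2:					\n"
		: "=a"(ret)
		:
		: "memory");
	return ret;
}

int main(void)
{
	printf("compat getpid() = %ld, native getpid() = %d\n",
	       compat_getpid(), getpid());
	return 0;
}

The patch itself omits the jump back to 64-bit mode: rt_sigreturn never returns, so its trailing .code64 only keeps the assembler in a sane state for whatever gcc emits next.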
@@ -32,7 +32,7 @@ struct rt_sigframe {
 	uint64_t lr;
 };
 
-#define ARCH_RT_SIGRETURN(new_sp)					\
+#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)				\
 	asm volatile(							\
 		"mov sp, %0					\n"	\
 		"mov x8, #"__stringify(__NR_rt_sigreturn)"	\n"	\
@@ -64,7 +64,7 @@ struct rt_sigframe {
 };
 
-#define ARCH_RT_SIGRETURN(new_sp)					\
+#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)				\
 	asm volatile(							\
 		"mov sp, %0					\n"	\
 		"mov r7, #"__stringify(__NR_rt_sigreturn)"	\n"	\
@@ -43,7 +43,7 @@ struct rt_sigframe {
 	char abigap[USER_REDZONE_SIZE];
 } __attribute__((aligned(16)));
 
-#define ARCH_RT_SIGRETURN(new_sp)					\
+#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)				\
 	asm volatile(							\
 		"mr 1, %0					\n"	\
 		"li 0, "__stringify(__NR_rt_sigreturn)"		\n"	\
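
Note that the three hunks above (aarch64, arm, ppc64) only widen the macro's signature: those architectures have no compat restore path, so the new rt_sigframe argument is intentionally unused there. It exists so the generic call sites in the PIE blobs stay identical across architectures. Schematically (helper names are hypothetical, not CRIU's):

/* Sketch of the per-arch dispatch pattern this patch introduces. */
#ifdef CONFIG_X86_64
#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)			\
	((rt_sigframe)->is_native ?				\
		arch_sigreturn_native(new_sp) :			\
		arch_sigreturn_compat(new_sp))
#else
/* argument accepted but ignored: keeps callers arch-independent */
#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)			\
	arch_sigreturn_native(new_sp)
#endif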
@@ -174,7 +174,9 @@ struct rt_sigframe {
  */
 #define RT_SIGFRAME_OFFSET(rt_sigframe)	((rt_sigframe->is_native) ? 8 : 4)
 
-#define ARCH_RT_SIGRETURN(new_sp)					\
+#define USER32_CS	0x23
+
+#define ARCH_RT_SIGRETURN_NATIVE(new_sp)				\
 	asm volatile(							\
 		"movq %0, %%rax				\n"		\
 		"movq %%rax, %%rsp			\n"		\
@@ -183,6 +185,28 @@ struct rt_sigframe {
 		:							\
 		: "r"(new_sp)						\
 		: "rax","rsp","memory")
 
+#define ARCH_RT_SIGRETURN_COMPAT(new_sp)				\
+	asm volatile(							\
+		"pushq $"__stringify(USER32_CS)"		\n"	\
+		"pushq $1f					\n"	\
+		"lretq						\n"	\
+		"1:						\n"	\
+		".code32					\n"	\
+		"movl %%edi, %%esp				\n"	\
+		"movl $"__stringify(__NR32_rt_sigreturn)",%%eax	\n"	\
+		"int $0x80					\n"	\
+		".code64					\n"	\
+		:							\
+		: "D"(new_sp)						\
+		: "eax","esp","memory")
+
+#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)				\
+do {									\
+	if ((rt_sigframe)->is_native)					\
+		ARCH_RT_SIGRETURN_NATIVE(new_sp);			\
+	else								\
+		ARCH_RT_SIGRETURN_COMPAT(new_sp);			\
+} while (0)
+
 #else /* CONFIG_X86_64 */
 
 #define RT_SIGFRAME_UC(rt_sigframe)	(&rt_sigframe->uc)
 #define RT_SIGFRAME_OFFSET(rt_sigframe)	4
@@ -191,7 +215,7 @@ struct rt_sigframe {
 #define RT_SIGFRAME_FPU(rt_sigframe)	(&(rt_sigframe)->fpu_state)
 #define RT_SIGFRAME_HAS_FPU(rt_sigframe) (RT_SIGFRAME_FPU(rt_sigframe)->has_fpu)
 
-#define ARCH_RT_SIGRETURN(new_sp)					\
+#define ARCH_RT_SIGRETURN(new_sp, rt_sigframe)				\
 	asm volatile(							\
 		"movl %0, %%eax				\n"		\
 		"movl %%eax, %%esp			\n"		\
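
One detail worth spelling out from the hunks above: USER32_CS is the kernel's 32-bit user code segment selector. On a stock x86-64 Linux GDT the descriptor index is 4 and a user-mode selector carries RPL 3, so (4 << 3) | 3 = 0x23. A compile-time sanity check of that arithmetic (the GDT slot number is an assumption about the kernel's segment layout, per arch/x86/include/asm/segment.h):

/* Selector = (GDT index << 3) | TI (0 = GDT) | RPL.
 * The kernel's GDT_ENTRY_DEFAULT_USER32_CS is 4 (assumed);
 * user code runs at ring 3. */
#define GDT_ENTRY_DEFAULT_USER32_CS	4
#define RPL_USER			3

_Static_assert(((GDT_ENTRY_DEFAULT_USER32_CS << 3) | RPL_USER) == 0x23,
	       "USER32_CS must match the kernel's 32-bit user CS");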
@@ -659,7 +659,7 @@ static int __parasite_daemon_wait_msg(struct ctl_msg *m)
 
 static noinline void fini_sigreturn(unsigned long new_sp)
 {
-	ARCH_RT_SIGRETURN(new_sp);
+	ARCH_RT_SIGRETURN(new_sp, sigframe);
 }
 
 static void parasite_cleanup(void)
@@ -424,9 +424,10 @@ static int restore_thread_common(struct thread_restore_args *args)
 	return 0;
 }
 
-static void noinline rst_sigreturn(unsigned long new_sp)
+static void noinline rst_sigreturn(unsigned long new_sp,
+				   struct rt_sigframe *sigframe)
 {
-	ARCH_RT_SIGRETURN(new_sp);
+	ARCH_RT_SIGRETURN(new_sp, sigframe);
 }
 
 /*
@@ -485,7 +486,7 @@ long __export_restore_thread(struct thread_restore_args *args)
 	futex_dec_and_wake(&thread_inprogress);
 
 	new_sp = (long)rt_sigframe + RT_SIGFRAME_OFFSET(rt_sigframe);
-	rst_sigreturn(new_sp);
+	rst_sigreturn(new_sp, rt_sigframe);
 
 core_restore_end:
 	pr_err("Restorer abnormal termination for %ld\n", sys_getpid());
@@ -1449,7 +1450,7 @@ long __export_restore_task(struct task_restore_args *args)
 	 * pure assembly since we don't need any additional
 	 * code insns from gcc.
	 */
-	rst_sigreturn(new_sp);
+	rst_sigreturn(new_sp, rt_sigframe);
 
 core_restore_end:
 	futex_abort_and_wake(&task_entries_local->nr_in_progress);
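
Why int $0x80 rather than the 64-bit syscall instruction? The sigframe CRIU built for a compat task is a 32-bit frame, so it must be consumed by the kernel's compat rt_sigreturn, which lives in the i386 syscall table; the selector switch plus int $0x80 guarantees the compat entry path is taken. The two tables use different numbers for the same call, which is why the macro references __NR32_rt_sigreturn:

/* rt_sigreturn numbers in the two x86 syscall tables. Using the
 * native entry on a compat frame would make the kernel misparse it. */
#define __NR_rt_sigreturn	15	/* x86-64 table */
#define __NR32_rt_sigreturn	173	/* i386 table */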