3 * Licensed under the GPL
6 #ifndef __SYSDEP_STUB_H
7 #define __SYSDEP_STUB_H
10 #include <asm/ptrace.h>
11 #include <generated/asm-offsets.h>
/* On 32-bit x86 the stub uses mmap2, whose file offset is given in
 * page-sized units rather than bytes. */
13 #define STUB_MMAP_NR __NR_mmap2
/* Convert a byte offset to the page-granular offset mmap2 expects. */
14 #define MMAP_OFFSET(o) ((o) >> UM_KERN_PAGE_SHIFT)
/*
 * Issue a zero-argument syscall through the legacy i386 "int $0x80"
 * gate.  The syscall number is placed in eax (the "0" input is tied to
 * the "=a" output) and the kernel's return value comes back in eax.
 * NOTE(review): interior lines of this body (locals, clobber list,
 * return) are not visible in this view — confirm against full source.
 */
16 static __always_inline long stub_syscall0(long syscall)
20 __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall)
/*
 * One-argument syscall via "int $0x80": number in eax, arg1 in ebx,
 * result returned in eax.
 * NOTE(review): interior lines of this body are not visible here —
 * confirm against full source.
 */
26 static __always_inline long stub_syscall1(long syscall, long arg1)
30 __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1)
/*
 * Two-argument syscall via "int $0x80": number in eax, arg1 in ebx.
 * (arg2 presumably goes in ecx on a line not shown here, matching the
 * stub_syscall3 constraint list — confirm against full source.)
 */
36 static __always_inline long stub_syscall2(long syscall, long arg1, long arg2)
40 __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
/*
 * Three-argument syscall via "int $0x80": eax = number,
 * ebx/ecx/edx = arg1..arg3, result returned in eax.
 */
47 static __always_inline long stub_syscall3(long syscall, long arg1, long arg2,
52 __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
53 "c" (arg2), "d" (arg3)
/*
 * Four-argument syscall via "int $0x80": eax = number,
 * ebx/ecx/edx/esi = arg1..arg4, result returned in eax.
 */
59 static __always_inline long stub_syscall4(long syscall, long arg1, long arg2,
64 __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
65 "c" (arg2), "d" (arg3), "S" (arg4)
/*
 * Five-argument syscall via "int $0x80": eax = number,
 * ebx/ecx/edx/esi/edi = arg1..arg5, result returned in eax.
 */
71 static __always_inline long stub_syscall5(long syscall, long arg1, long arg2,
72 long arg3, long arg4, long arg5)
76 __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
77 "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
/*
 * Six-argument syscall.  The i386 "int $0x80" convention takes arg6 in
 * ebp, and ebx is needed both for arg1 and as a scratch pointer here,
 * so arg1 and arg6 are passed indirectly through a small struct whose
 * address sits in ebx: the asm saves ebp, then loads ebp from the
 * second struct slot (0x4(%ebx)).  NOTE(review): the lines that load
 * ebx from the first slot, execute the trap, and restore ebp are not
 * visible in this view — confirm against full source.
 */
83 static __always_inline long stub_syscall6(long syscall, long arg1, long arg2,
84 long arg3, long arg4, long arg5,
89 } args = { arg1, arg6 };
92 __asm__ volatile ("pushl %%ebp;"
93 "movl 0x4(%%ebx),%%ebp;"
98 : "0" (syscall), "b" (&args),
99 "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
/*
 * NOTE(review): body not visible in this view; by name, this raises a
 * trap in the stub so the tracing (host) side regains control —
 * confirm against full source.
 */
105 static __always_inline void trap_myself(void)
/*
 * Return a pointer to the stub's data area.  Only the asm input
 * operands are visible here: a mask that rounds an address down to a
 * page boundary (~(UM_KERN_PAGE_SIZE - 1)) and a one-page increment —
 * consistent with taking the current instruction address, rounding it
 * down to the stub text page, and stepping to the following page; the
 * asm body itself is on lines not shown, so confirm against full
 * source.
 */
110 static __always_inline void *get_stub_data(void)
121 : "g" (~(UM_KERN_PAGE_SIZE - 1)),
122 "g" (UM_KERN_PAGE_SIZE));
/*
 * stub_start(fn): asm entry into stub function fn.  Immediate operand
 * %0 is the total size of the stub mapping, (1 + STUB_DATA_PAGES)
 * pages; operand %1 is loaded into eax.  NOTE(review): the asm lines
 * between these continuations (stack adjustment and the call through
 * eax, presumably) are not visible in this view — confirm against
 * full source.  (No comments may be placed between the backslash-
 * continued #define lines below.)
 */
127 #define stub_start(fn) \
130 "movl %1, %%eax ; " \
132 :: "i" ((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE), \