#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/alternative.h>
#include <asm/cpufeature.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);
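
/*
 * copy_user_generic() picks between the two implementations above: the
 * alternatives mechanism patches the call at boot to use the "rep movs"
 * string variant on CPUs advertising X86_FEATURE_REP_GOOD, and the
 * unrolled variant otherwise.
 */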
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	alternative_call(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
			 : "memory", "rcx", "r8", "r9", "r10", "r11");
	return ret;
}

__must_check unsigned long
_copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
_copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
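
/*
 * copy_from_user() layers a best-effort overflow check on top of
 * _copy_from_user(): if the size of the destination object is known at
 * compile time and is smaller than the requested length, the copy is
 * refused (and reported via WARN when CONFIG_DEBUG_VM is set).
 */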
static inline unsigned long __must_check copy_from_user(void *to,
					  const void __user *from,
					  unsigned long n)
{
	int sz = __compiletime_object_size(to);
	int ret = -EFAULT;

	might_fault();
	if (likely(sz == -1 || sz >= n))
		ret = _copy_from_user(to, from, n);
#ifdef CONFIG_DEBUG_VM
	else
		WARN(1, "Buffer overflow detected!\n");
#endif
	return ret;
}

static __always_inline __must_check
int copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();

	return _copy_to_user(dst, src, size);
}
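
/*
 * __copy_from_user() assumes access_ok() has already been done by the
 * caller.  Constant sizes up to 16 bytes are open-coded as one or two
 * __get_user_asm() moves; everything else goes through copy_user_generic().
 */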
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
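
/*
 * __copy_to_user(): same structure as __copy_from_user(), open-coding
 * small constant sizes with __put_user_asm().  A compiler barrier
 * separates the two stores in the 10- and 16-byte cases.
 */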
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
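
/*
 * __copy_in_user() copies between two user-space buffers.  Small constant
 * sizes bounce through a kernel temporary with a __get_user_asm() /
 * __put_user_asm() pair; the store is skipped if the load faulted.
 */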
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}

__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
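
/*
 * The *_inatomic() variants omit the might_fault() annotation and call
 * copy_user_generic() directly; they are meant for callers that cannot
 * sleep, e.g. code running with pagefaults disabled.
 */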
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	return copy_user_generic(dst, (__force const void *)src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
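
/*
 * The _nocache() variants go through __copy_user_nocache(), which uses
 * non-temporal stores so large copies do not pollute the CPU caches.
 * The zerorest argument selects whether the remainder of the destination
 * is zeroed after a fault.
 */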
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
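
/*
 * copy_user_handle_tail() is the fault fixup helper shared by the
 * assembly copy routines: it copies the remaining bytes one at a time
 * and, depending on zerorest, clears whatever is left of the destination.
 */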
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */