#include <linux/jump_label.h>
#include <linux/uaccess.h>
#include <linux/export.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <asm/mmu_context.h>
#include <asm/facility.h>

#ifdef CONFIG_DEBUG_ENTRY
void debug_user_asce(int exit)
{
	unsigned long cr1, cr7;

	__ctl_store(cr1, 1, 1);
	__ctl_store(cr7, 7, 7);
	if (cr1 == S390_lowcore.kernel_asce && cr7 == S390_lowcore.user_asce)
		return;
	panic("incorrect ASCE on kernel %s\n"
	      "cr1: %016lx cr7: %016lx\n"
	      "kernel: %016llx user: %016llx\n",
	      exit ? "exit" : "entry", cr1, cr7,
	      S390_lowcore.kernel_asce, S390_lowcore.user_asce);
}
#endif /* CONFIG_DEBUG_ENTRY */

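/*
 * MVCOS (MOVE WITH OPTIONAL SPECIFICATIONS) is available if facility
 * bit 27 is installed.  Machines built with the z10 feature set always
 * have it, so the static key and the runtime facility test are only
 * needed for older build targets.
 */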
#ifndef CONFIG_HAVE_MARCH_Z10_FEATURES
static DEFINE_STATIC_KEY_FALSE(have_mvcos);

static int __init uaccess_init(void)
{
	if (test_facility(27))
		static_branch_enable(&have_mvcos);
	return 0;
}
early_initcall(uaccess_init);

static inline int copy_with_mvcos(void)
{
	if (static_branch_likely(&have_mvcos))
		return 1;
	return 0;
}
#else
static inline int copy_with_mvcos(void)
{
	return 1;
}
#endif

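/*
 * MVCOS takes its operand-access controls from general register 0:
 * bits 32-47 (OAC1) describe the first (destination) operand, bits
 * 48-63 (OAC2) the second (source) operand.  A value of 0x81 selects
 * the secondary (user) address space for that operand, so 0x81UL below
 * reads from user space and stores to the kernel address space.  If an
 * access faults, the fixup code copies what fits up to the next 4K
 * boundary of the user address and returns the number of bytes that
 * could not be copied.
 */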
static inline unsigned long copy_from_user_mvcos(void *x, const void __user *ptr,
						 unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x81UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%2),0(%1),0\n"
		"6: jz    4f\n"
		"1: algr  %0,%3\n"
		"   slgr  %1,%3\n"
		"   slgr  %2,%3\n"
		"   j     0b\n"
		"2: la    %4,4095(%1)\n"	/* %4 = ptr + 4095 */
		"   nr    %4,%3\n"		/* %4 = (ptr + 4095) & -4096 */
		"   slgr  %4,%1\n"
		"   clgr  %0,%4\n"		/* copy crosses next page boundary? */
		"   jnh   5f\n"
		"3: .insn ss,0xc80000000000,0(%4,%2),0(%1),0\n"
		"7: slgr  %0,%4\n"
		"   j     5f\n"
		"4: slgr  %0,%0\n"
		"5:\n"
		EX_TABLE(0b,2b) EX_TABLE(3b,5b) EX_TABLE(6b,2b) EX_TABLE(7b,5b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

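/*
 * Without MVCOS, MVCP (MOVE TO PRIMARY) moves data from the secondary
 * (user) address space into the primary (kernel) address space, up to
 * 256 bytes per instruction.  MVCP cannot be executed in home-space
 * mode, so sacf 0 switches to primary-space mode for the copy and
 * sacf 768 switches back to home-space mode afterwards.  The fault
 * fixup again stops the copy at the user page boundary.
 */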
static inline unsigned long copy_from_user_mvcp(void *x, const void __user *ptr,
						unsigned long size)
{
	unsigned long tmp1, tmp2;

	tmp1 = -256UL;
	asm volatile(
		"   sacf  0\n"
		"0: mvcp  0(%0,%2),0(%1),%3\n"
		"7: jz    5f\n"
		"1: algr  %0,%3\n"
		"   la    %1,256(%1)\n"
		"   la    %2,256(%2)\n"
		"2: mvcp  0(%0,%2),0(%1),%3\n"
		"8: jnz   1b\n"
		"   j     5f\n"
		"3: la    %4,255(%1)\n"	/* %4 = ptr + 255 */
		"   lghi  %3,-4096\n"
		"   nr    %4,%3\n"	/* %4 = (ptr + 255) & -4096 */
		"   slgr  %4,%1\n"
		"   clgr  %0,%4\n"	/* copy crosses next page boundary? */
		"   jnh   6f\n"
		"4: mvcp  0(%4,%2),0(%1),%3\n"
		"9: slgr  %0,%4\n"
		"   j     6f\n"
		"5: slgr  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,6b)
		EX_TABLE(7b,3b) EX_TABLE(8b,3b) EX_TABLE(9b,6b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: : "cc", "memory");
	return size;
}

unsigned long raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (copy_with_mvcos())
		return copy_from_user_mvcos(to, from, n);
	return copy_from_user_mvcp(to, from, n);
}
EXPORT_SYMBOL(raw_copy_from_user);

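/*
 * The store side mirrors the load side: 0x810000UL places OAC1, i.e.
 * the destination operand, in the secondary (user) space while the
 * source is read from the kernel address space.  The MVCS (MOVE TO
 * SECONDARY) fallback moves from the primary (kernel) space to the
 * secondary space.
 */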
static inline unsigned long copy_to_user_mvcos(void __user *ptr, const void *x,
					       unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x810000UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%1),0(%2),0\n"
		"6: jz    4f\n"
		"1: algr  %0,%3\n"
		"   slgr  %1,%3\n"
		"   slgr  %2,%3\n"
		"   j     0b\n"
		"2: la    %4,4095(%1)\n"	/* %4 = ptr + 4095 */
		"   nr    %4,%3\n"		/* %4 = (ptr + 4095) & -4096 */
		"   slgr  %4,%1\n"
		"   clgr  %0,%4\n"		/* copy crosses next page boundary? */
		"   jnh   5f\n"
		"3: .insn ss,0xc80000000000,0(%4,%1),0(%2),0\n"
		"7: slgr  %0,%4\n"
		"   j     5f\n"
		"4: slgr  %0,%0\n"
		"5:\n"
		EX_TABLE(0b,2b) EX_TABLE(3b,5b) EX_TABLE(6b,2b) EX_TABLE(7b,5b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

static inline unsigned long copy_to_user_mvcs(void __user *ptr, const void *x,
					      unsigned long size)
{
	unsigned long tmp1, tmp2;

	tmp1 = -256UL;
	asm volatile(
		"   sacf  0\n"
		"0: mvcs  0(%0,%1),0(%2),%3\n"
		"7: jz    5f\n"
		"1: algr  %0,%3\n"
		"   la    %1,256(%1)\n"
		"   la    %2,256(%2)\n"
		"2: mvcs  0(%0,%1),0(%2),%3\n"
		"8: jnz   1b\n"
		"   j     5f\n"
		"3: la    %4,255(%1)\n"	/* %4 = ptr + 255 */
		"   lghi  %3,-4096\n"
		"   nr    %4,%3\n"	/* %4 = (ptr + 255) & -4096 */
		"   slgr  %4,%1\n"
		"   clgr  %0,%4\n"	/* copy crosses next page boundary? */
		"   jnh   6f\n"
		"4: mvcs  0(%4,%1),0(%2),%3\n"
		"9: slgr  %0,%4\n"
		"   j     6f\n"
		"5: slgr  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(0b,3b) EX_TABLE(2b,3b) EX_TABLE(4b,6b)
		EX_TABLE(7b,3b) EX_TABLE(8b,3b) EX_TABLE(9b,6b)
		: "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)
		: : "cc", "memory");
	return size;
}

unsigned long raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (copy_with_mvcos())
		return copy_to_user_mvcos(to, from, n);
	return copy_to_user_mvcs(to, from, n);
}
EXPORT_SYMBOL(raw_copy_to_user);

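/*
 * raw_copy_in_user() copies within the user address space.  For MVCOS,
 * 0x810081UL selects the secondary space for both operands.  The
 * fallback switches to secondary-space mode with sacf 256 and runs a
 * plain MVC loop on user memory, 256 bytes at a time with EX for the
 * remainder.
 */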
static inline unsigned long copy_in_user_mvcos(void __user *to, const void __user *from,
					       unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x810081UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%1),0(%2),0\n"
		"   jz    2f\n"
		"1: algr  %0,%3\n"
		"   slgr  %1,%3\n"
		"   slgr  %2,%3\n"
		"   j     0b\n"
		"2: slgr  %0,%0\n"
		"3:\n"
		EX_TABLE(0b,3b)
		: "+a" (size), "+a" (to), "+a" (from), "+a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

static inline unsigned long copy_in_user_mvc(void __user *to, const void __user *from,
					     unsigned long size)
{
	unsigned long tmp1;

	asm volatile(
		"   sacf  256\n"
		"   aghi  %0,-1\n"
		"   jo    5f\n"
		"   bras  %3,3f\n"
		"0: aghi  %0,257\n"
		"1: mvc   0(1,%1),0(%2)\n"
		"   la    %1,1(%1)\n"
		"   la    %2,1(%2)\n"
		"   aghi  %0,-1\n"
		"   jnz   1b\n"
		"   j     5f\n"
		"2: mvc   0(256,%1),0(%2)\n"
		"   la    %1,256(%1)\n"
		"   la    %2,256(%2)\n"
		"3: aghi  %0,-256\n"
		"   jnm   2b\n"
		"4: ex    %0,1b-0b(%3)\n"
		"5: slgr  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
		: "+a" (size), "+a" (to), "+a" (from), "=a" (tmp1)
		: : "cc", "memory");
	return size;
}

unsigned long raw_copy_in_user(void __user *to, const void __user *from, unsigned long n)
{
	if (copy_with_mvcos())
		return copy_in_user_mvcos(to, from, n);
	return copy_in_user_mvc(to, from, n);
}
EXPORT_SYMBOL(raw_copy_in_user);

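/*
 * __clear_user() zeroes user memory.  The MVCOS variant copies from
 * empty_zero_page with the destination operand in the secondary (user)
 * space; the fallback switches to secondary-space mode and clears the
 * buffer with XC, 256 bytes at a time with EX for the remainder.
 */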
static inline unsigned long clear_user_mvcos(void __user *to, unsigned long size)
{
	register unsigned long reg0 asm("0") = 0x810000UL;
	unsigned long tmp1, tmp2;

	tmp1 = -4096UL;
	asm volatile(
		"0: .insn ss,0xc80000000000,0(%0,%1),0(%4),0\n"
		"   jz    4f\n"
		"1: algr  %0,%2\n"
		"   slgr  %1,%2\n"
		"   j     0b\n"
		"2: la    %3,4095(%1)\n"	/* %3 = to + 4095 */
		"   nr    %3,%2\n"		/* %3 = (to + 4095) & -4096 */
		"   slgr  %3,%1\n"
		"   clgr  %0,%3\n"		/* clear crosses next page boundary? */
		"   jnh   5f\n"
		"3: .insn ss,0xc80000000000,0(%3,%1),0(%4),0\n"
		"   slgr  %0,%3\n"
		"   j     5f\n"
		"4: slgr  %0,%0\n"
		"5:\n"
		EX_TABLE(0b,2b) EX_TABLE(3b,5b)
		: "+a" (size), "+a" (to), "+a" (tmp1), "=a" (tmp2)
		: "a" (empty_zero_page), "d" (reg0) : "cc", "memory");
	return size;
}

static inline unsigned long clear_user_xc(void __user *to, unsigned long size)
{
	unsigned long tmp1, tmp2;

	asm volatile(
		"   sacf  256\n"
		"   aghi  %0,-1\n"
		"   jo    5f\n"
		"   bras  %3,3f\n"
		"   xc    0(1,%1),0(%1)\n"
		"0: aghi  %0,257\n"
		"   la    %2,255(%1)\n"	/* %2 = to + 255 */
		"   srl   %2,12\n"
		"   sll   %2,12\n"	/* %2 = (to + 255) & -4096 */
		"   slgr  %2,%1\n"
		"   clgr  %0,%2\n"	/* clear crosses next page boundary? */
		"   jnh   5f\n"
		"   aghi  %2,-1\n"
		"1: ex    %2,0(%3)\n"
		"   aghi  %2,1\n"
		"   slgr  %0,%2\n"
		"   j     5f\n"
		"2: xc    0(256,%1),0(%1)\n"
		"   la    %1,256(%1)\n"
		"3: aghi  %0,-256\n"
		"   jnm   2b\n"
		"4: ex    %0,0(%3)\n"
		"5: slgr  %0,%0\n"
		"6: sacf  768\n"
		EX_TABLE(1b,6b) EX_TABLE(2b,0b) EX_TABLE(4b,0b)
		: "+a" (size), "+a" (to), "=a" (tmp1), "=a" (tmp2)
		: : "cc", "memory");
	return size;
}

unsigned long __clear_user(void __user *to, unsigned long size)
{
	if (copy_with_mvcos())
		return clear_user_mvcos(to, size);
	return clear_user_xc(to, size);
}
EXPORT_SYMBOL(__clear_user);

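/*
 * strnlen_user_srst() scans for the terminating zero byte with SRST
 * (SEARCH STRING) in secondary-space mode; general register 0 holds
 * the byte to search for.  The result includes the terminator, is
 * larger than size if no zero byte was found within size bytes, and is
 * 0 if the user access faults.
 */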
static inline unsigned long strnlen_user_srst(const char __user *src,
					      unsigned long size)
{
	register unsigned long reg0 asm("0") = 0;
	unsigned long tmp1, tmp2;

	asm volatile(
		"   la    %2,0(%1)\n"
		"   la    %3,0(%0,%1)\n"
		"   slgr  %0,%0\n"
		"   sacf  256\n"
		"0: srst  %3,%2\n"
		"   jo    0b\n"
		"   la    %0,1(%3)\n"
		"   slgr  %0,%1\n"
		"1: sacf  768\n"
		EX_TABLE(0b,1b)
		: "+a" (size), "+a" (src), "=a" (tmp1), "=a" (tmp2)
		: "d" (reg0) : "cc", "memory");
	return size;
}

unsigned long __strnlen_user(const char __user *src, unsigned long size)
{
	if (unlikely(!size))
		return 0;
	return strnlen_user_srst(src, size);
}
EXPORT_SYMBOL(__strnlen_user);

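/*
 * __strncpy_from_user() is built on copy_from_user(): the string is
 * copied in L1 cache line sized chunks and strnlen() on the kernel
 * copy detects the terminating zero byte.  The return value is the
 * length of the copied string without the terminator (at most size),
 * or -EFAULT if the user access faults.
 */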
long __strncpy_from_user(char *dst, const char __user *src, long size)
{
	size_t done, len, offset, len_str;

	if (unlikely(size <= 0))
		return 0;
	done = 0;
	do {
		offset = (size_t)src & (L1_CACHE_BYTES - 1);
		len = min(size - done, L1_CACHE_BYTES - offset);
		if (copy_from_user(dst, src, len))
			return -EFAULT;
		len_str = strnlen(dst, len);
		done += len_str;
		src += len_str;
		dst += len_str;
	} while ((len_str == len) && (done < size));
	return done;
}
EXPORT_SYMBOL(__strncpy_from_user);