linux-old/include/asm-i386/uaccess.h
#ifndef __i386_UACCESS_H
#define __i386_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/config.h>
#include <linux/sched.h>
#include <linux/prefetch.h>
#include <asm/page.h>

#define VERIFY_READ 0
#define VERIFY_WRITE 1

/*
 * The fs value determines whether argument validity checking should be
 * performed or not.  If get_fs() == USER_DS, checking is performed; with
 * get_fs() == KERNEL_DS, checking is bypassed.
 *
 * For historical reasons, these macros are grossly misnamed.
 */

#define MAKE_MM_SEG(s)  ((mm_segment_t) { (s) })


#define KERNEL_DS       MAKE_MM_SEG(0xFFFFFFFF)
#define USER_DS         MAKE_MM_SEG(PAGE_OFFSET)

#define get_ds()        (KERNEL_DS)
#define get_fs()        (current->addr_limit)
#define set_fs(x)       (current->addr_limit = (x))

#define segment_eq(a,b) ((a).seg == (b).seg)

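/*
 * Usage sketch (the callers are hypothetical, not defined here): the
 * classic pattern for temporarily widening the address limit so that
 * fs-checked helpers can operate on kernel buffers.  The old limit must
 * be restored on every exit path.
 *
 *      mm_segment_t old_fs = get_fs();
 *
 *      set_fs(KERNEL_DS);
 *      ... call something that does access_ok()/copy_*_user() ...
 *      set_fs(old_fs);
 */
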
extern int __verify_write(const void *, unsigned long);

#define __addr_ok(addr) ((unsigned long)(addr) < (current->addr_limit.seg))

/*
 * Uhhuh, this needs 33-bit arithmetic. We have a carry..
 */
#define __range_ok(addr,size) ({ \
        unsigned long flag,sum; \
        asm("addl %3,%1 ; sbbl %0,%0; cmpl %1,%4; sbbl $0,%0" \
                :"=&r" (flag), "=r" (sum) \
                :"1" (addr),"g" ((int)(size)),"g" (current->addr_limit.seg)); \
        flag; })
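
/*
 * Equivalently, in plain C (a sketch of what the asm above computes; the
 * asm avoids a branch and gets the 33rd bit for free via the carry flag):
 *
 *      unsigned long sum = (unsigned long)(addr) + (size);
 *      int bad = (sum < (unsigned long)(addr))      <- the addition wrapped
 *             || (sum > current->addr_limit.seg);   <- past the limit
 *
 * __range_ok() evaluates to 0 exactly when "bad" here is false.
 */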

#ifdef CONFIG_X86_WP_WORKS_OK

#define access_ok(type,addr,size) (__range_ok(addr,size) == 0)

#else

#define access_ok(type,addr,size) ( (__range_ok(addr,size) == 0) && \
                         ((type) == VERIFY_READ || boot_cpu_data.wp_works_ok || \
                         segment_eq(get_fs(),KERNEL_DS) || \
                          __verify_write((void *)(addr),(size))))

#endif

static inline int verify_area(int type, const void * addr, unsigned long size)
{
        return access_ok(type,addr,size) ? 0 : -EFAULT;
}
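
/*
 * A sketch of the intended call pattern ("uptr" and "val" are
 * hypothetical caller-side names): validate the whole range once, then
 * use the unchecked __get_user()/__put_user() variants defined below.
 *
 *      if (verify_area(VERIFY_WRITE, uptr, sizeof(*uptr)))
 *              return -EFAULT;
 *      __put_user(val, uptr);
 */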


/*
 * The exception table consists of pairs of addresses: the first is the
 * address of an instruction that is allowed to fault, and the second is
 * the address at which the program should continue.  No registers are
 * modified, so it is entirely up to the continuation code to figure out
 * what to do.
 *
 * All the routines below use bits of fixup code that are out of line
 * with the main instruction path.  This means that when everything goes
 * well, we don't even have to jump over them.  Further, they do not
 * intrude on our cache or TLB entries.
 */

struct exception_table_entry
{
        unsigned long insn, fixup;
};

/* Returns 0 if the exception is not found, and the fixup address otherwise.  */
extern unsigned long search_exception_table(unsigned long);
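
/*
 * A sketch of the consumer side (the page fault handler): on a fault in
 * kernel mode, the faulting EIP is looked up in the table and, on a hit,
 * execution resumes at the fixup address instead of oopsing.
 *
 *      unsigned long fixup = search_exception_table(regs->eip);
 *
 *      if (fixup) {
 *              regs->eip = fixup;
 *              return;
 *      }
 */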


/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 *
 * This gets kind of ugly. We want to return _two_ values in "get_user()"
 * and yet we don't want to do any pointers, because that is too much
 * of a performance impact. Thus we have a few rather ugly macros here,
 * and hide all the ugliness from the user.
 *
 * The "__xxx" versions of the user access functions are versions that
 * do not verify the address space; that must have been done previously
 * with a separate "access_ok()" call (this is used when we do multiple
 * accesses to the same area of user memory).
 */

extern void __get_user_1(void);
extern void __get_user_2(void);
extern void __get_user_4(void);

#define __get_user_x(size,ret,x,ptr) \
        __asm__ __volatile__("call __get_user_" #size \
                :"=a" (ret),"=d" (x) \
                :"0" (ptr))

/* Careful: we have to cast the result to the type of the pointer for sign reasons */
#define get_user(x,ptr)                                                 \
({      int __ret_gu,__val_gu;                                          \
        switch(sizeof (*(ptr))) {                                       \
        case 1:  __get_user_x(1,__ret_gu,__val_gu,ptr); break;          \
        case 2:  __get_user_x(2,__ret_gu,__val_gu,ptr); break;          \
        case 4:  __get_user_x(4,__ret_gu,__val_gu,ptr); break;          \
        default: __get_user_x(X,__ret_gu,__val_gu,ptr); break;          \
        }                                                               \
        (x) = (__typeof__(*(ptr)))__val_gu;                             \
        __ret_gu;                                                       \
})
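
/*
 * Usage sketch: get_user() both fetches the value and reports the fault,
 * so a caller (say, a hypothetical ioctl handler with a user pointer in
 * "arg") does:
 *
 *      int val;
 *
 *      if (get_user(val, (int *)arg))
 *              return -EFAULT;
 */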

extern void __put_user_1(void);
extern void __put_user_2(void);
extern void __put_user_4(void);
extern void __put_user_8(void);

extern void __put_user_bad(void);

#define put_user(x,ptr)                                                 \
  __put_user_check((__typeof__(*(ptr)))(x),(ptr),sizeof(*(ptr)))

#define __get_user(x,ptr) \
  __get_user_nocheck((x),(ptr),sizeof(*(ptr)))
#define __put_user(x,ptr) \
  __put_user_nocheck((__typeof__(*(ptr)))(x),(ptr),sizeof(*(ptr)))
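
/*
 * The unchecked variants amortize one explicit access_ok() over several
 * accesses to the same user structure (a sketch; "uinfo" and its fields
 * are hypothetical):
 *
 *      if (!access_ok(VERIFY_WRITE, uinfo, sizeof(*uinfo)))
 *              return -EFAULT;
 *      __put_user(a, &uinfo->a);
 *      __put_user(b, &uinfo->b);
 */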

#define __put_user_nocheck(x,ptr,size)                  \
({                                                      \
        long __pu_err;                                  \
        __put_user_size((x),(ptr),(size),__pu_err);     \
        __pu_err;                                       \
})


#define __put_user_check(x,ptr,size)                    \
({                                                      \
        long __pu_err = -EFAULT;                        \
        __typeof__(*(ptr)) *__pu_addr = (ptr);          \
        if (access_ok(VERIFY_WRITE,__pu_addr,size))     \
                __put_user_size((x),__pu_addr,(size),__pu_err); \
        __pu_err;                                       \
})

#define __put_user_u64(x, addr, err)                            \
        __asm__ __volatile__(                                   \
                "1:     movl %%eax,0(%2)\n"                     \
                "2:     movl %%edx,4(%2)\n"                     \
                "3:\n"                                          \
                ".section .fixup,\"ax\"\n"                      \
                "4:     movl %3,%0\n"                           \
                "       jmp 3b\n"                               \
                ".previous\n"                                   \
                ".section __ex_table,\"a\"\n"                   \
                "       .align 4\n"                             \
                "       .long 1b,4b\n"                          \
                "       .long 2b,4b\n"                          \
                ".previous"                                     \
                : "=r"(err)                                     \
                : "A" (x), "r" (addr), "i"(-EFAULT), "0"(err))

#define __put_user_size(x,ptr,size,retval)                              \
do {                                                                    \
        retval = 0;                                                     \
        switch (size) {                                                 \
          case 1: __put_user_asm(x,ptr,retval,"b","b","iq"); break;     \
          case 2: __put_user_asm(x,ptr,retval,"w","w","ir"); break;     \
          case 4: __put_user_asm(x,ptr,retval,"l","","ir"); break;      \
          case 8: __put_user_u64(x,ptr,retval); break;                  \
          default: __put_user_bad();                                    \
        }                                                               \
} while (0)

struct __large_struct { unsigned long buf[100]; };
#define __m(x) (*(struct __large_struct *)(x))

/*
 * Tell gcc we read from memory instead of writing: this is because
 * we do not write to any memory gcc knows about, so there are no
 * aliasing issues.
 */
#define __put_user_asm(x, addr, err, itype, rtype, ltype)       \
        __asm__ __volatile__(                                   \
                "1:     mov"itype" %"rtype"1,%2\n"              \
                "2:\n"                                          \
                ".section .fixup,\"ax\"\n"                      \
                "3:     movl %3,%0\n"                           \
                "       jmp 2b\n"                               \
                ".previous\n"                                   \
                ".section __ex_table,\"a\"\n"                   \
                "       .align 4\n"                             \
                "       .long 1b,3b\n"                          \
                ".previous"                                     \
                : "=r"(err)                                     \
                : ltype (x), "m"(__m(addr)), "i"(-EFAULT), "0"(err))


#define __get_user_nocheck(x,ptr,size)                          \
({                                                              \
        long __gu_err, __gu_val;                                \
        __get_user_size(__gu_val,(ptr),(size),__gu_err);        \
        (x) = (__typeof__(*(ptr)))__gu_val;                     \
        __gu_err;                                               \
})

extern long __get_user_bad(void);

#define __get_user_size(x,ptr,size,retval)                              \
do {                                                                    \
        retval = 0;                                                     \
        switch (size) {                                                 \
          case 1: __get_user_asm(x,ptr,retval,"b","b","=q"); break;     \
          case 2: __get_user_asm(x,ptr,retval,"w","w","=r"); break;     \
          case 4: __get_user_asm(x,ptr,retval,"l","","=r"); break;      \
          default: (x) = __get_user_bad();                              \
        }                                                               \
} while (0)

#define __get_user_asm(x, addr, err, itype, rtype, ltype)       \
        __asm__ __volatile__(                                   \
                "1:     mov"itype" %2,%"rtype"1\n"              \
                "2:\n"                                          \
                ".section .fixup,\"ax\"\n"                      \
                "3:     movl %3,%0\n"                           \
                "       xor"itype" %"rtype"1,%"rtype"1\n"       \
                "       jmp 2b\n"                               \
                ".previous\n"                                   \
                ".section __ex_table,\"a\"\n"                   \
                "       .align 4\n"                             \
                "       .long 1b,3b\n"                          \
                ".previous"                                     \
                : "=r"(err), ltype (x)                          \
                : "m"(__m(addr)), "i"(-EFAULT), "0"(err))


/*
 * Copy To/From Userspace
 */

/* Generic arbitrary sized copy.  */
#define __copy_user(to,from,size)                                       \
do {                                                                    \
        int __d0, __d1;                                                 \
        __asm__ __volatile__(                                           \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1)               \
                : "r"(size & 3), "0"(size / 4), "1"(to), "2"(from)      \
                : "memory");                                            \
} while (0)

#define __copy_user_zeroing(to,from,size)                               \
do {                                                                    \
        int __d0, __d1;                                                 \
        __asm__ __volatile__(                                           \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "4:     pushl %0\n"                                     \
                "       pushl %%eax\n"                                  \
                "       xorl %%eax,%%eax\n"                             \
                "       rep; stosb\n"                                   \
                "       popl %%eax\n"                                   \
                "       popl %0\n"                                      \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,4b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1)               \
                : "r"(size & 3), "0"(size / 4), "1"(to), "2"(from)      \
                : "memory");                                            \
} while (0)

/* We make the __ versions of copy_from/to_user inline, because they are
 * often used in fast paths and have only a small space overhead.
 */
static inline unsigned long
__generic_copy_from_user_nocheck(void *to, const void *from, unsigned long n)
{
        __copy_user_zeroing(to,from,n);
        return n;
}

static inline unsigned long
__generic_copy_to_user_nocheck(void *to, const void *from, unsigned long n)
{
        __copy_user(to,from,n);
        return n;
}
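
/*
 * A sketch of the intended fast path ("ubuf", "kbuf" and "len" are
 * hypothetical): one access_ok() covering the whole range, then the
 * inline unchecked copy.  A nonzero return means the copy faulted
 * part-way through.
 *
 *      if (!access_ok(VERIFY_READ, ubuf, len))
 *              return -EFAULT;
 *      if (__generic_copy_from_user_nocheck(kbuf, ubuf, len))
 *              return -EFAULT;
 */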


/* Optimize just a little bit when we know the size of the move. */
#define __constant_copy_user(to, from, size)                    \
do {                                                            \
        int __d0, __d1;                                         \
        switch (size & 3) {                                     \
        default:                                                \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "2:     shl $2,%0\n"                    \
                        "       jmp 1b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,2b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        case 1:                                                 \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:     movsb\n"                        \
                        "2:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "3:     shl $2,%0\n"                    \
                        "4:     incl %0\n"                      \
                        "       jmp 2b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,3b\n"                  \
                        "       .long 1b,4b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        case 2:                                                 \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:     movsw\n"                        \
                        "2:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "3:     shl $2,%0\n"                    \
                        "4:     addl $2,%0\n"                   \
                        "       jmp 2b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,3b\n"                  \
                        "       .long 1b,4b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        case 3:                                                 \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:     movsw\n"                        \
                        "2:     movsb\n"                        \
                        "3:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "4:     shl $2,%0\n"                    \
                        "5:     addl $2,%0\n"                   \
                        "6:     incl %0\n"                      \
                        "       jmp 3b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,4b\n"                  \
                        "       .long 1b,5b\n"                  \
                        "       .long 2b,6b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        }                                                       \
} while (0)

/* Optimize just a little bit when we know the size of the move. */
#define __constant_copy_user_zeroing(to, from, size)            \
do {                                                            \
        int __d0, __d1;                                         \
        switch (size & 3) {                                     \
        default:                                                \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "2:     pushl %0\n"                     \
                        "       pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       rep; stosl\n"                   \
                        "       popl %%eax\n"                   \
                        "       popl %0\n"                      \
                        "       shl $2,%0\n"                    \
                        "       jmp 1b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,2b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        case 1:                                                 \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:     movsb\n"                        \
                        "2:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "3:     pushl %0\n"                     \
                        "       pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       rep; stosl\n"                   \
                        "       stosb\n"                        \
                        "       popl %%eax\n"                   \
                        "       popl %0\n"                      \
                        "       shl $2,%0\n"                    \
                        "       incl %0\n"                      \
                        "       jmp 2b\n"                       \
                        "4:     pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       stosb\n"                        \
                        "       popl %%eax\n"                   \
                        "       incl %0\n"                      \
                        "       jmp 2b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,3b\n"                  \
                        "       .long 1b,4b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        case 2:                                                 \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:     movsw\n"                        \
                        "2:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "3:     pushl %0\n"                     \
                        "       pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       rep; stosl\n"                   \
                        "       stosw\n"                        \
                        "       popl %%eax\n"                   \
                        "       popl %0\n"                      \
                        "       shl $2,%0\n"                    \
                        "       addl $2,%0\n"                   \
                        "       jmp 2b\n"                       \
                        "4:     pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       stosw\n"                        \
                        "       popl %%eax\n"                   \
                        "       addl $2,%0\n"                   \
                        "       jmp 2b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,3b\n"                  \
                        "       .long 1b,4b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        case 3:                                                 \
                __asm__ __volatile__(                           \
                        "0:     rep; movsl\n"                   \
                        "1:     movsw\n"                        \
                        "2:     movsb\n"                        \
                        "3:\n"                                  \
                        ".section .fixup,\"ax\"\n"              \
                        "4:     pushl %0\n"                     \
                        "       pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       rep; stosl\n"                   \
                        "       stosw\n"                        \
                        "       stosb\n"                        \
                        "       popl %%eax\n"                   \
                        "       popl %0\n"                      \
                        "       shl $2,%0\n"                    \
                        "       addl $3,%0\n"                   \
                        "       jmp 2b\n"                       \
                        "5:     pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       stosw\n"                        \
                        "       stosb\n"                        \
                        "       popl %%eax\n"                   \
                        "       addl $3,%0\n"                   \
                        "       jmp 2b\n"                       \
                        "6:     pushl %%eax\n"                  \
                        "       xorl %%eax,%%eax\n"             \
                        "       stosb\n"                        \
                        "       popl %%eax\n"                   \
                        "       incl %0\n"                      \
                        "       jmp 3b\n"                       \
                        ".previous\n"                           \
                        ".section __ex_table,\"a\"\n"           \
                        "       .align 4\n"                     \
                        "       .long 0b,4b\n"                  \
                        "       .long 1b,5b\n"                  \
                        "       .long 2b,6b\n"                  \
                        ".previous"                             \
                        : "=c"(size), "=&S" (__d0), "=&D" (__d1)\
                        : "1"(from), "2"(to), "0"(size/4)       \
                        : "memory");                            \
                break;                                          \
        }                                                       \
} while (0)

unsigned long __generic_copy_to_user(void *, const void *, unsigned long);
unsigned long __generic_copy_from_user(void *, const void *, unsigned long);

static inline unsigned long
__constant_copy_to_user(void *to, const void *from, unsigned long n)
{
        prefetch(from);
        if (access_ok(VERIFY_WRITE, to, n))
                __constant_copy_user(to,from,n);
        return n;
}

static inline unsigned long
__constant_copy_from_user(void *to, const void *from, unsigned long n)
{
        if (access_ok(VERIFY_READ, from, n))
                __constant_copy_user_zeroing(to,from,n);
        else
                memset(to, 0, n);
        return n;
}

static inline unsigned long
__constant_copy_to_user_nocheck(void *to, const void *from, unsigned long n)
{
        __constant_copy_user(to,from,n);
        return n;
}

static inline unsigned long
__constant_copy_from_user_nocheck(void *to, const void *from, unsigned long n)
{
        __constant_copy_user_zeroing(to,from,n);
        return n;
}

#define copy_to_user(to,from,n)                         \
        (__builtin_constant_p(n) ?                      \
         __constant_copy_to_user((to),(from),(n)) :     \
         __generic_copy_to_user((to),(from),(n)))

#define copy_from_user(to,from,n)                       \
        (__builtin_constant_p(n) ?                      \
         __constant_copy_from_user((to),(from),(n)) :   \
         __generic_copy_from_user((to),(from),(n)))

#define __copy_to_user(to,from,n)                       \
        (__builtin_constant_p(n) ?                      \
         __constant_copy_to_user_nocheck((to),(from),(n)) :     \
         __generic_copy_to_user_nocheck((to),(from),(n)))

#define __copy_from_user(to,from,n)                     \
        (__builtin_constant_p(n) ?                      \
         __constant_copy_from_user_nocheck((to),(from),(n)) :   \
         __generic_copy_from_user_nocheck((to),(from),(n)))
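
/*
 * Usage sketch: the return value is the number of bytes NOT copied, not
 * an errno, so callers map any nonzero result to -EFAULT ("some_req" and
 * "arg" are hypothetical caller-side names):
 *
 *      struct some_req req;
 *
 *      if (copy_from_user(&req, (void *)arg, sizeof(req)))
 *              return -EFAULT;
 *      ...
 *      if (copy_to_user((void *)arg, &req, sizeof(req)))
 *              return -EFAULT;
 */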

long strncpy_from_user(char *dst, const char *src, long count);
long __strncpy_from_user(char *dst, const char *src, long count);
#define strlen_user(str) strnlen_user(str, ~0UL >> 1)
long strnlen_user(const char *str, long n);
unsigned long clear_user(void *mem, unsigned long len);
unsigned long __clear_user(void *mem, unsigned long len);
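
/*
 * A sketch of the string helpers ("uname" is a hypothetical user
 * pointer): strncpy_from_user() returns the length copied, excluding
 * any trailing NUL, or -EFAULT; the result is not NUL-terminated when
 * the source string was too long, so terminate it explicitly.
 *
 *      char name[32];
 *      long len = strncpy_from_user(name, uname, sizeof(name) - 1);
 *
 *      if (len < 0)
 *              return -EFAULT;
 *      name[len] = '\0';
 */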

#endif /* __i386_UACCESS_H */
