linux/crypto/cipher.c
/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <linux/compiler.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/scatterlist.h>
#include "internal.h"
#include "scatterwalk.h"

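/*
 * Local view of the cipher algorithm hooks.  The first five members mirror
 * struct cipher_alg; the additional cia_{en,de}crypt_{ecb,cbc} members let
 * an algorithm supply optimised multi-block ECB/CBC routines, which the
 * mode wrappers below prefer over the generic software fallbacks.
 */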
struct cipher_alg_compat {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

        unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
};

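/* XOR one block into another; used for CBC chaining on 64- and 128-bit blocks. */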
static inline void xor_64(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
}

static inline void xor_128(u8 *a, const u8 *b)
{
        ((u32 *)a)[0] ^= ((u32 *)b)[0];
        ((u32 *)a)[1] ^= ((u32 *)b)[1];
        ((u32 *)a)[2] ^= ((u32 *)b)[2];
        ((u32 *)a)[3] ^= ((u32 *)b)[3];
}

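/*
 * Slow path: the current scatterlist segment holds less than one full block,
 * so the block straddles a segment (page) boundary.  Gather it into an
 * aligned stack buffer, process it, and scatter the result back out.
 */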
static unsigned int crypt_slow(const struct cipher_desc *desc,
                               struct scatter_walk *in,
                               struct scatter_walk *out, unsigned int bsize)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
        u8 buffer[bsize * 2 + alignmask];
        u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        u8 *dst = src + bsize;

        scatterwalk_copychunks(src, in, bsize, 0);
        desc->prfn(desc, dst, src, bsize);
        scatterwalk_copychunks(dst, out, bsize, 1);

        return bsize;
}

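/*
 * Fast path: process as many whole blocks as fit in the current input and
 * output segments, working directly on the mapped pages.  When either walk
 * is not suitably aligned, a bounce buffer (tmp) is used instead.
 */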
static inline unsigned int crypt_fast(const struct cipher_desc *desc,
                                      struct scatter_walk *in,
                                      struct scatter_walk *out,
                                      unsigned int nbytes, u8 *tmp)
{
        u8 *src, *dst;
        u8 *real_src, *real_dst;

        real_src = scatterwalk_map(in, 0);
        real_dst = scatterwalk_map(out, 1);

        src = real_src;
        dst = scatterwalk_samebuf(in, out) ? src : real_dst;

        if (tmp) {
                memcpy(tmp, src, nbytes);
                src = tmp;
                dst = tmp;
        }

        nbytes = desc->prfn(desc, dst, src, nbytes);

        if (tmp)
                memcpy(real_dst, tmp, nbytes);

        scatterwalk_unmap(real_src, 0);
        scatterwalk_unmap(real_dst, 1);

        scatterwalk_advance(in, nbytes);
        scatterwalk_advance(out, nbytes);

        return nbytes;
}

/*
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks.  In user context,
 * the kernel is given a chance to schedule us once per page.
 */
static int crypt(const struct cipher_desc *desc,
                 struct scatterlist *dst,
                 struct scatterlist *src,
                 unsigned int nbytes)
{
        struct scatter_walk walk_in, walk_out;
        struct crypto_tfm *tfm = desc->tfm;
        const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
        unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
        unsigned long buffer = 0;

        if (!nbytes)
                return 0;

        if (nbytes % bsize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        scatterwalk_start(&walk_in, src);
        scatterwalk_start(&walk_out, dst);

        for(;;) {
                unsigned int n = nbytes;
                u8 *tmp = NULL;

                if (!scatterwalk_aligned(&walk_in, alignmask) ||
                    !scatterwalk_aligned(&walk_out, alignmask)) {
                        if (!buffer) {
                                buffer = __get_free_page(GFP_ATOMIC);
                                if (!buffer)
                                        n = 0;
                        }
                        tmp = (u8 *)buffer;
                }

                n = scatterwalk_clamp(&walk_in, n);
                n = scatterwalk_clamp(&walk_out, n);

                if (likely(n >= bsize))
                        n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
                else
                        n = crypt_slow(desc, &walk_in, &walk_out, bsize);

                nbytes -= n;

                scatterwalk_done(&walk_in, 0, nbytes);
                scatterwalk_done(&walk_out, 1, nbytes);

                if (!nbytes)
                        break;

                crypto_yield(tfm->crt_flags);
        }

        if (buffer)
                free_page(buffer);

        return 0;
}

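/*
 * If the caller-supplied IV violates the algorithm's alignment mask, bounce
 * it through an aligned stack copy for the duration of the operation and
 * copy the updated IV back afterwards.
 */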
static int crypt_iv_unaligned(struct cipher_desc *desc,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        u8 *iv = desc->info;

        if (unlikely(((unsigned long)iv & alignmask))) {
                unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
                u8 buffer[ivsize + alignmask];
                u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
                int err;

                desc->info = memcpy(tmp, iv, ivsize);
                err = crypt(desc, dst, src, nbytes);
                memcpy(iv, tmp, ivsize);

                return err;
        }

        return crypt(desc, dst, src, nbytes);
}

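/* Generic software CBC encryption: C[i] = E(P[i] ^ C[i-1]), IV in desc->info. */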
static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);

        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        nbytes -= bsize;

        do {
                xor(iv, src);
                fn(tfm, dst, iv);
                memcpy(iv, dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes);

        return done;
}

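/*
 * Generic software CBC decryption: P[i] = D(C[i]) ^ C[i-1].  For in-place
 * operation the plaintext is produced in a stack block first, so that the
 * ciphertext block is still available to serve as the next IV.
 */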
static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
        int bsize = crypto_tfm_alg_blocksize(tfm);
        unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);

        u8 stack[src == dst ? bsize + alignmask : 0];
        u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
        u8 **dst_p = src == dst ? &buf : &dst;

        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
        u8 *iv = desc->info;
        unsigned int done = 0;

        nbytes -= bsize;

        do {
                u8 *tmp_dst = *dst_p;

                fn(tfm, tmp_dst, src);
                xor(tmp_dst, iv);
                memcpy(iv, src, bsize);
                if (tmp_dst != dst)
                        memcpy(dst, tmp_dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes);

        return done;
}

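/* Generic software ECB: each block is transformed independently. */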
static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
                                const u8 *src, unsigned int nbytes)
{
        struct crypto_tfm *tfm = desc->tfm;
        int bsize = crypto_tfm_alg_blocksize(tfm);
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
        unsigned int done = 0;

        nbytes -= bsize;

        do {
                fn(tfm, dst, src);

                src += bsize;
                dst += bsize;
        } while ((done += bsize) <= nbytes);

        return done;
}

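/* Validate the key length against the algorithm's limits before handing off. */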
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
        struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        } else
                return cia->cia_setkey(tfm, key, keylen);
}

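/*
 * Mode entry points installed by crypto_init_cipher_ops().  Each builds a
 * cipher_desc and hands it to crypt()/crypt_iv_unaligned(), preferring an
 * algorithm-provided multi-block routine over the generic fallback above.
 */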
static int ecb_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int ecb_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_encrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_encrypt;
        desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
        desc.info = iv;

        return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

static int cbc_decrypt(struct crypto_tfm *tfm,
                       struct scatterlist *dst,
                       struct scatterlist *src,
                       unsigned int nbytes)
{
        struct cipher_desc desc;
        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = tfm->crt_cipher.cit_iv;

        return crypt(&desc, dst, src, nbytes);
}

static int cbc_decrypt_iv(struct crypto_tfm *tfm,
                          struct scatterlist *dst,
                          struct scatterlist *src,
                          unsigned int nbytes, u8 *iv)
{
        struct cipher_desc desc;
        struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;

        desc.tfm = tfm;
        desc.crfn = cipher->cia_decrypt;
        desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
        desc.info = iv;

        return crypt_iv_unaligned(&desc, dst, src, nbytes);
}

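/* Placeholders for modes (CFB, CTR) that this layer does not implement. */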
static int nocrypt(struct crypto_tfm *tfm,
                   struct scatterlist *dst,
                   struct scatterlist *src,
                   unsigned int nbytes)
{
        return -ENOSYS;
}

static int nocrypt_iv(struct crypto_tfm *tfm,
                      struct scatterlist *dst,
                      struct scatterlist *src,
                      unsigned int nbytes, u8 *iv)
{
        return -ENOSYS;
}

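/* Record the requested mode, defaulting to ECB when none is given. */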
int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
{
        u32 mode = flags & CRYPTO_TFM_MODE_MASK;
        tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
        return 0;
}

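/*
 * Single-block helpers for cit_encrypt_one/cit_decrypt_one: if either buffer
 * violates the algorithm's alignment mask, bounce the block through an
 * aligned stack buffer.
 */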
static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
                                              const u8 *),
                                   struct crypto_tfm *tfm,
                                   u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        unsigned int size = crypto_tfm_alg_blocksize(tfm);
        u8 buffer[size + alignmask];
        u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

        memcpy(tmp, src, size);
        fn(tfm, tmp, tmp);
        memcpy(dst, tmp, size);
}

static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
                                     u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
                cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
                return;
        }

        cipher->cia_encrypt(tfm, dst, src);
}

static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
                                     u8 *dst, const u8 *src)
{
        unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
                cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
                return;
        }

        cipher->cia_decrypt(tfm, dst, src);
}

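/*
 * Install the mode-specific operations on the transform.  For CBC this also
 * selects the block-size-specific XOR helper (only 8- and 16-byte blocks are
 * supported) and places the IV in an aligned area directly after the
 * transform context.
 */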
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
{
        int ret = 0;
        struct cipher_tfm *ops = &tfm->crt_cipher;
        struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;

        ops->cit_setkey = setkey;
        ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
                cipher_encrypt_unaligned : cipher->cia_encrypt;
        ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
                cipher_decrypt_unaligned : cipher->cia_decrypt;

        switch (tfm->crt_cipher.cit_mode) {
        case CRYPTO_TFM_MODE_ECB:
                ops->cit_encrypt = ecb_encrypt;
                ops->cit_decrypt = ecb_decrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CBC:
                ops->cit_encrypt = cbc_encrypt;
                ops->cit_decrypt = cbc_decrypt;
                ops->cit_encrypt_iv = cbc_encrypt_iv;
                ops->cit_decrypt_iv = cbc_decrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CFB:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        case CRYPTO_TFM_MODE_CTR:
                ops->cit_encrypt = nocrypt;
                ops->cit_decrypt = nocrypt;
                ops->cit_encrypt_iv = nocrypt_iv;
                ops->cit_decrypt_iv = nocrypt_iv;
                break;

        default:
                BUG();
        }

        if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
                unsigned long align;
                unsigned long addr;

                switch (crypto_tfm_alg_blocksize(tfm)) {
                case 8:
                        ops->cit_xor_block = xor_64;
                        break;

                case 16:
                        ops->cit_xor_block = xor_128;
                        break;

                default:
                        printk(KERN_WARNING "%s: block size %u not supported\n",
                               crypto_tfm_alg_name(tfm),
                               crypto_tfm_alg_blocksize(tfm));
                        ret = -EINVAL;
                        goto out;
                }

                ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
                align = crypto_tfm_alg_alignmask(tfm) + 1;
                addr = (unsigned long)crypto_tfm_ctx(tfm);
                addr = ALIGN(addr, align);
                addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
                ops->cit_iv = (void *)addr;
        }

out:
        return ret;
}

void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}

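/*
 * Illustrative sketch only (not part of this file): roughly how a caller of
 * this legacy cipher interface drives the operations set up above, assuming
 * the crypto_alloc_tfm()/crypto_cipher_* API of this kernel era.  Error
 * handling is omitted and the key/IV/buffer values are placeholders.
 *
 *	struct crypto_tfm *tfm;
 *	struct scatterlist sg[1];
 *	u8 key[16], iv[16], buf[32];
 *
 *	tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	crypto_cipher_setkey(tfm, key, sizeof(key));             -> setkey()
 *	sg_init_one(sg, buf, sizeof(buf));
 *	crypto_cipher_encrypt_iv(tfm, sg, sg, sizeof(buf), iv);  -> cbc_encrypt_iv()
 *	crypto_free_tfm(tfm);
 */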