/* linux/crypto/cipher.c */
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */
  10
  11#include <crypto/algapi.h>
  12#include <crypto/internal/cipher.h>
  13#include <linux/kernel.h>
  14#include <linux/crypto.h>
  15#include <linux/errno.h>
  16#include <linux/slab.h>
  17#include <linux/string.h>
  18#include "internal.h"
  19
  20static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
  21                            unsigned int keylen)
  22{
  23        struct cipher_alg *cia = crypto_cipher_alg(tfm);
  24        unsigned long alignmask = crypto_cipher_alignmask(tfm);
  25        int ret;
  26        u8 *buffer, *alignbuffer;
  27        unsigned long absize;
  28
  29        absize = keylen + alignmask;
  30        buffer = kmalloc(absize, GFP_ATOMIC);
  31        if (!buffer)
  32                return -ENOMEM;
  33
  34        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
  35        memcpy(alignbuffer, key, keylen);
  36        ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
  37        memset(alignbuffer, 0, keylen);
  38        kfree(buffer);
  39        return ret;
  40
  41}
  42
  43int crypto_cipher_setkey(struct crypto_cipher *tfm,
  44                         const u8 *key, unsigned int keylen)
  45{
  46        struct cipher_alg *cia = crypto_cipher_alg(tfm);
  47        unsigned long alignmask = crypto_cipher_alignmask(tfm);
  48
  49        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
  50                return -EINVAL;
  51
  52        if ((unsigned long)key & alignmask)
  53                return setkey_unaligned(tfm, key, keylen);
  54
  55        return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
  56}
  57EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);
  58
  59static inline void cipher_crypt_one(struct crypto_cipher *tfm,
  60                                    u8 *dst, const u8 *src, bool enc)
  61{
  62        unsigned long alignmask = crypto_cipher_alignmask(tfm);
  63        struct cipher_alg *cia = crypto_cipher_alg(tfm);
  64        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
  65                enc ? cia->cia_encrypt : cia->cia_decrypt;
  66
  67        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
  68                unsigned int bs = crypto_cipher_blocksize(tfm);
  69                u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
  70                u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
  71
  72                memcpy(tmp, src, bs);
  73                fn(crypto_cipher_tfm(tfm), tmp, tmp);
  74                memcpy(dst, tmp, bs);
  75        } else {
  76                fn(crypto_cipher_tfm(tfm), dst, src);
  77        }
  78}
  79
  80void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
  81                               u8 *dst, const u8 *src)
  82{
  83        cipher_crypt_one(tfm, dst, src, true);
  84}
  85EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);
  86
  87void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
  88                               u8 *dst, const u8 *src)
  89{
  90        cipher_crypt_one(tfm, dst, src, false);
  91}
  92EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);
  93