linux/crypto/ahash.c
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

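/*
 * Illustrative usage sketch (not part of this file): a caller allocates an
 * ahash transform, attaches a completion callback and submits a digest
 * request; -EINPROGRESS or -EBUSY mean the callback will deliver the final
 * status.  The algorithm name "sha1" and the example_* helpers below are
 * assumptions made purely for illustration.
 *
 *	static void example_done(struct crypto_async_request *areq, int err)
 *	{
 *		if (err != -EINPROGRESS)
 *			complete(areq->data);
 *	}
 *
 *	static int example_sha1_digest(struct scatterlist *sg,
 *				       unsigned int len, u8 *out)
 *	{
 *		DECLARE_COMPLETION_ONSTACK(done);
 *		struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
 *		struct ahash_request *req;
 *		int err;
 *
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		req = ahash_request_alloc(tfm, GFP_KERNEL);
 *		if (!req) {
 *			crypto_free_ahash(tfm);
 *			return -ENOMEM;
 *		}
 *
 *		ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					   example_done, &done);
 *		ahash_request_set_crypt(req, sg, out, len);
 *
 *		err = crypto_ahash_digest(req);
 *		if (err == -EINPROGRESS || err == -EBUSY) {
 *			wait_for_completion(&done);
 *			err = 0;
 *		}
 *		(A real caller would recheck the request's final status after
 *		 the completion fires; this sketch omits that for brevity.)
 *
 *		ahash_request_free(req);
 *		crypto_free_ahash(tfm);
 *		return err;
 *	}
 */
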
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

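/*
 * Map the page the walk currently points at and return the number of
 * bytes that may be hashed from it.  If the current offset is not
 * aligned, only enough bytes to reach the next alignment boundary are
 * handed out so that subsequent chunks start aligned.
 */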
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = crypto_kmap(walk->pg, 0);
	walk->data += offset;

	if (offset & alignmask)
		nbytes = alignmask + 1 - (offset & alignmask);

	walk->entrylen -= nbytes;
	return nbytes;
}

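/*
 * Start walking a new scatterlist entry: record its page, offset and
 * length (clipped to the number of bytes still to be hashed) and map
 * the first chunk.
 */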
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->pg = sg_page(sg);
	walk->offset = sg->offset;
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

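/*
 * Finish the current chunk of a hash walk.  @err is the result of the
 * hash step that consumed the previous chunk.  Returns the size of the
 * next chunk to hash, 0 when the walk is complete, or a negative error
 * (including @err itself) on failure.
 */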
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int nbytes = walk->entrylen;

	walk->data -= walk->offset;

	if (nbytes && walk->offset & alignmask && !err) {
		walk->offset += alignmask - 1;
		walk->offset = ALIGN(walk->offset, alignmask + 1);
		walk->data += walk->offset;

		nbytes = min(nbytes,
			     ((unsigned int)(PAGE_SIZE)) - walk->offset);
		walk->entrylen -= nbytes;

		return nbytes;
	}

	crypto_kunmap(walk->data, 0);
	crypto_yield(walk->flags);

	if (err)
		return err;

	walk->offset = 0;

	if (nbytes)
		return hash_walk_next(walk);

	if (!walk->total)
		return 0;

	walk->sg = scatterwalk_sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

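/*
 * Begin a hash walk over the data attached to an ahash request.
 * Returns the size of the first chunk, or 0 if the request carries no
 * data at all.
 */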
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

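/*
 * Compatibility variant of crypto_hash_walk_first() that starts the
 * walk from a legacy struct hash_desc and an explicit scatterlist and
 * length instead of an ahash request.
 */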
int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
				  struct crypto_hash_walk *walk,
				  struct scatterlist *sg, unsigned int len)
{
	walk->total = len;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
	walk->sg = sg;
	walk->flags = hdesc->flags;

	return hash_walk_new_entry(walk);
}

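/*
 * Handle a key that does not satisfy the transform's alignment mask:
 * copy it into a properly aligned buffer, run the algorithm's setkey on
 * the copy, then wipe and free the copy.
 */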
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = ahash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}

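/*
 * Set the key for a keyed hash, bouncing through an aligned copy when
 * the caller's buffer does not meet the transform's alignment mask.
 */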
static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return ahash_setkey_unaligned(tfm, key, keylen);

	return ahash->setkey(tfm, key, keylen);
}

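/* setkey stub for algorithms that do not take a key. */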
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

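/*
 * Import a previously saved partial hash state into the request context
 * and let the algorithm's reinit hook, if any, fix up its internal
 * state.
 */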
int crypto_ahash_import(struct ahash_request *req, const u8 *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ahash_alg *alg = crypto_ahash_alg(tfm);

	memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));

	if (alg->reinit)
		alg->reinit(req);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	return alg->cra_ctxsize;
}

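/*
 * Fill in the per-transform ahash operations from the algorithm when a
 * transform is instantiated; digest sizes larger than PAGE_SIZE / 8 are
 * rejected.
 */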
static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
	struct ahash_tfm *crt = &tfm->crt_ahash;

	if (alg->digestsize > PAGE_SIZE / 8)
		return -EINVAL;

	crt->init = alg->init;
	crt->update = alg->update;
	crt->final  = alg->final;
	crt->digest = alg->digest;
	crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
	crt->digestsize = alg->digestsize;

	return 0;
}

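/* Report the ahash-specific fields of an algorithm in /proc/crypto. */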
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", alg->cra_ahash.digestsize);
}

const struct crypto_type crypto_ahash_type = {
	.ctxsize = crypto_ahash_ctxsize,
	.init = crypto_init_ahash_ops,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_ahash_type);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");