linux/crypto/shash.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                           unsigned int keylen)
{
        return -ENOSYS;
}

/*
 * Check whether an shash algorithm has a setkey function.
 *
 * For CFI compatibility, this must not be an inline function.  This is because
 * when CFI is enabled, modules won't get the same address for shash_no_setkey
 * (if it were exported, which inlining would require) as the core kernel will.
 */
bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
        return alg->setkey != shash_no_setkey;
}
EXPORT_SYMBOL_GPL(crypto_shash_alg_has_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

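        /*
         * kmalloc() already returns memory aligned to
         * crypto_tfm_ctx_alignment(), so only the worst-case padding
         * from there up to the algorithm's alignmask + 1 boundary needs
         * to be added on top of keylen.
         */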
        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kfree_sensitive(buffer);
        return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
        if (crypto_shash_alg_needs_key(alg))
                crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        int err;

        if ((unsigned long)key & alignmask)
                err = shash_setkey_unaligned(tfm, key, keylen);
        else
                err = shash->setkey(tfm, key, keylen);

        if (unlikely(err)) {
                shash_set_needkey(tfm, shash);
                return err;
        }

        crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
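
/*
 * Illustrative sketch (not part of the original file): how a caller
 * typically keys a keyed shash before use.  The algorithm name
 * "hmac(sha256)" and the function name are assumptions for the example.
 * crypto_shash_setkey() clears CRYPTO_TFM_NEED_KEY on success, after
 * which the transform may be used for hashing.
 */
static int example_keyed_shash_alloc(const u8 *key, unsigned int keylen,
                                     struct crypto_shash **tfm_ret)
{
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_shash_setkey(tfm, key, keylen);
        if (err) {
                crypto_free_shash(tfm);
                return err;
        }

        *tfm_ret = tfm;
        return 0;
}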

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        /*
         * We cannot count on __aligned() working for large values:
         * https://patchwork.kernel.org/patch/9507697/
         */
        u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
                return -EINVAL;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        /*
         * We cannot count on __aligned() working for large values:
         * https://patchwork.kernel.org/patch/9507697/
         */
        u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
                return -EINVAL;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
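
/*
 * Illustrative sketch (not part of the original file): the streaming
 * init/update/final pattern using a stack descriptor.  The function and
 * parameter names are assumptions; tfm is a previously allocated shash
 * transform and out must hold crypto_shash_digestsize(tfm) bytes.
 */
static int example_stream_hash(struct crypto_shash *tfm,
                               const u8 *part1, unsigned int len1,
                               const u8 *part2, unsigned int len2, u8 *out)
{
        SHASH_DESC_ON_STACK(desc, tfm);
        int err;

        desc->tfm = tfm;

        err = crypto_shash_init(desc) ?:
              crypto_shash_update(desc, part1, len1) ?:
              crypto_shash_update(desc, part2, len2) ?:
              crypto_shash_final(desc, out);

        shash_desc_zero(desc);
        return err;
}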

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                return -ENOKEY;

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
                            unsigned int len, u8 *out)
{
        SHASH_DESC_ON_STACK(desc, tfm);
        int err;

        desc->tfm = tfm;

        err = crypto_shash_digest(desc, data, len, out);

        shash_desc_zero(desc);

        return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
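
/*
 * Illustrative sketch (not part of the original file): one-shot hashing
 * through crypto_shash_tfm_digest(), which hides the descriptor handling
 * shown above.  "sha256" and the function name are assumptions; out must
 * be large enough for the digest.
 */
static int example_oneshot_sha256(const u8 *data, unsigned int len, u8 *out)
{
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("sha256", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_shash_tfm_digest(tfm, data, len, out);
        crypto_free_shash(tfm);
        return err;
}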

static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}
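
/*
 * Illustrative sketch (not part of the original file): cloning a partial
 * hash state from one descriptor to another via export/import.  The
 * function name is an assumption; both descriptors must use the same
 * algorithm, and the state buffer must hold crypto_shash_statesize()
 * bytes.
 */
static int example_clone_state(struct shash_desc *src, struct shash_desc *dst)
{
        void *state;
        int err;

        state = kmalloc(crypto_shash_statesize(src->tfm), GFP_KERNEL);
        if (!state)
                return -ENOMEM;

        err = crypto_shash_export(src, state) ?:
              crypto_shash_import(dst, state);

        kfree_sensitive(state);
        return err;
}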

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;

        return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

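        /*
         * Each pass hashes one mapped chunk; crypto_hash_walk_done()
         * consumes the update's return value (0 or a negative error) and
         * yields the size of the next chunk, 0 once the walk is
         * complete, or a negative error, which becomes the final result.
         */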
        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;

        return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        unsigned int nbytes = req->nbytes;
        struct scatterlist *sg;
        unsigned int offset;
        int err;

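        /*
         * If the whole request lives in a single sg entry and does not
         * cross a page boundary, hash it directly from an atomic kmap;
         * otherwise fall back to the generic init + walk-based finup
         * path.
         */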
        if (nbytes &&
            (sg = req->src, offset = sg->offset,
             nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;

        return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

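/*
 * Expose an shash algorithm through the asynchronous ahash API: allocate
 * the underlying shash transform and wire the ahash entry points to the
 * shash_async_*() bridges above.
 */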
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;
        if (crypto_shash_alg_has_setkey(alg))
                crt->setkey = shash_async_setkey;

        crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
                                    CRYPTO_TFM_NEED_KEY);

        crt->export = shash_async_export;
        crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        struct shash_alg *alg = crypto_shash_alg(hash);

        alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        struct shash_alg *alg = crypto_shash_alg(hash);
        int err;

        hash->descsize = alg->descsize;

        shash_set_needkey(hash, alg);

        if (alg->exit_tfm)
                tfm->exit = crypto_shash_exit_tfm;

        if (!alg->init_tfm)
                return 0;

        err = alg->init_tfm(hash);
        if (err)
                return err;

        /* ->init_tfm() may have increased the descsize. */
        if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
                if (alg->exit_tfm)
                        alg->exit_tfm(hash);
                return -EINVAL;
        }

        return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
        struct shash_instance *shash = shash_instance(inst);

        shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        memset(&rhash, 0, sizeof(rhash));

        strscpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_shash_init_tfm,
        .free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
                      struct crypto_instance *inst,
                      const char *name, u32 type, u32 mask)
{
        spawn->base.frontend = &crypto_shash_type;
        return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
            alg->descsize > HASH_MAX_DESCSIZE ||
            alg->statesize > HASH_MAX_STATESIZE)
                return -EINVAL;

        if ((alg->export && !alg->import) || (alg->import && !alg->export))
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
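
/*
 * Illustrative sketch (not part of the original file): the minimum a
 * driver fills in before calling crypto_register_shash().  All
 * example_*() callbacks, struct example_desc_ctx and the names are
 * hypothetical; finup, digest, export, import and setkey may be left
 * NULL and are defaulted by shash_prepare_alg() above.
 */
static struct shash_alg example_alg = {
        .digestsize     = 32,
        .init           = example_init,         /* hypothetical callbacks */
        .update         = example_update,
        .final          = example_final,
        .descsize       = sizeof(struct example_desc_ctx),
        .base           = {
                .cra_name        = "example",
                .cra_driver_name = "example-generic",
                .cra_blocksize   = 64,
                .cra_module      = THIS_MODULE,
        },
};
/* Registered from module init with crypto_register_shash(&example_alg). */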

void crypto_unregister_shash(struct shash_alg *alg)
{
        crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_shash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
        int i;

        for (i = count - 1; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        if (WARN_ON(!inst->free))
                return -EINVAL;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
        crypto_drop_spawn(shash_instance_ctx(inst));
        kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");