/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

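/*
 * Illustrative sketch (not in the original source): crypto_mod_get()
 * takes a reference on both the algorithm and its owning module, so
 * every successful call must be balanced by crypto_mod_put(), which
 * drops them in the same order as above.  A hypothetical caller:
 *
 *      struct crypto_alg *alg = crypto_mod_get(candidate);
 *      if (alg) {
 *              use_alg(alg);           // hypothetical helper
 *              crypto_mod_put(alg);    // alg ref, then module ref
 *      }
 */
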
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

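/*
 * Illustrative example (not in the original source) of the matching
 * policy above: looking up "aes" is a fuzzy match against cra_name,
 * so with both an aes-generic (cra_priority 100) and an aes-asm
 * (cra_priority 200) implementation registered, the walk keeps the
 * higher-priority aes-asm.  Looking up "aes-generic" is an exact
 * match against cra_driver_name and terminates the walk immediately,
 * regardless of priority.  (Driver names and priority values here
 * are illustrative.)
 */
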
static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_interruptible_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg) {
                request_module("%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!((type | mask) & CRYPTO_ALG_TESTED)) {
                type |= CRYPTO_ALG_TESTED;
                mask |= CRYPTO_ALG_TESTED;
        }

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

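/*
 * Illustrative walk-through (not in the original source) of the lookup
 * path above, assuming a request for "cbc(aes)" when only "aes" is
 * registered: crypto_larval_lookup() finds no match, so a larval is
 * added as a placeholder; crypto_probing_notify() then asks a loaded
 * crypto manager (cryptomgr) to instantiate the cbc template around
 * aes.  On NOTIFY_STOP, crypto_larval_wait() blocks for up to 60
 * seconds until the larval's completion fires, and larval->adult
 * carries the finished algorithm back to the caller.
 */
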
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (tfm->exit)
                        tfm->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

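/*
 * Worked example (not in the original source) of the size computation
 * above, assuming cra_alignmask == 63 and crypto_tfm_ctx_alignment()
 * == 8 on the target machine: len starts as 63 & ~7 == 56, i.e. enough
 * slack to realign the context area from the allocator's 8-byte
 * alignment up to the algorithm's 64-byte alignment in the worst case;
 * the type-specific context size is then added on top of that padding.
 */
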
void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *      crypto_alloc_base - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      This function should not be used by new algorithm types.
 *      Please use crypto_alloc_tfm instead.
 *
 *      crypto_alloc_base() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of a non-determinate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

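/*
 * Usage sketch (not in the original source), assuming the legacy
 * single-block cipher interface: allocate a transform by name, use it,
 * then release it with crypto_free_tfm().  Errors follow the ERR_PTR()
 * convention documented above.
 *
 *      struct crypto_tfm *tfm;
 *
 *      tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *                              CRYPTO_ALG_TYPE_MASK);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_tfm(tfm);
 */
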
void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

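/*
 * Layout sketch (not in the original source) of the single allocation
 * made by crypto_create_tfm():
 *
 *      mem -> +-----------------------------+
 *             | frontend fields (tfmsize)   |
 *             +-----------------------------+
 *      tfm -> | struct crypto_tfm           |
 *             +-----------------------------+
 *             | frontend->extsize(alg)      |
 *             | bytes of algorithm context  |
 *             +-----------------------------+
 *
 * The crypto_tfm is typically the base member embedded at the end of
 * the frontend's handle type, so returning mem hands the caller the
 * outer handle while tfm points at the embedded crypto_tfm.
 */
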
struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
                crypto_alg_mod_lookup;

        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;

                if (frontend->lookup)
                        lookup = frontend->lookup;
        }

        return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *      crypto_alloc_tfm - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @frontend: Frontend algorithm type
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      crypto_alloc_tfm() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of a non-determinate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

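/*
 * Sketch (not in the original source) of how a type-specific wrapper
 * is typically built on top of crypto_alloc_tfm(); compare the real
 * crypto_alloc_shash() in crypto/shash.c, which works this way with
 * its own crypto_type instance:
 *
 *      struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *                                              u32 type, u32 mask)
 *      {
 *              return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *                                      type, mask);
 *      }
 */
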
/*
 *      crypto_destroy_tfm - Free crypto transform
 *      @mem: Start of tfm slab
 *      @tfm: Transform to free
 *
 *      This function frees up the transform and any associated resources,
 *      then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

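/*
 * Usage sketch (not in the original source): crypto_has_alg() probes
 * for availability and drops its reference immediately, so it suits
 * one-off capability checks.  A hypothetical caller gating a feature
 * on SHA-256 support:
 *
 *      if (!crypto_has_alg("sha256", 0, 0))
 *              return -ENOPKG;         // hypothetical policy choice
 */
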
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");