linux/crypto/api.c
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
	atomic_inc(&alg->cra_refcnt);
	return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

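/*
 * Core lookup on crypto_alg_list.  An algorithm matches when its flags
 * agree with @type in every bit selected by @mask, i.e. when
 * ((q->cra_flags ^ type) & mask) == 0; for instance, type ==
 * CRYPTO_ALG_TYPE_CIPHER with mask == CRYPTO_ALG_TYPE_MASK restricts
 * the search to plain ciphers.  An exact cra_driver_name match wins
 * outright; otherwise the highest-priority cra_name match is chosen.
 * Must be called with crypto_alg_sem held.
 */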
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

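/*
 * A larval is a temporary stand-in for an algorithm that is still being
 * loaded or constructed.  It is registered on crypto_alg_list under the
 * requested name so that concurrent lookups find it and sleep on its
 * completion rather than triggering duplicate work; once the real
 * ("adult") algorithm appears, waiters are woken and handed a reference
 * to it via crypto_larval_wait().
 */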
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

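/*
 * Wait for a larval to mature into a full algorithm, with a 60 second
 * timeout.  On success a reference to the adult algorithm is returned;
 * otherwise an ERR_PTR is returned, including -EAGAIN for algorithms
 * that appeared but have not (yet) passed their self-tests.  The
 * caller's reference to the larval is dropped in all cases.
 */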
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_interruptible_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		char tmp[CRYPTO_MAX_ALG_NAME];

		request_module(name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask) &&
		    snprintf(tmp, sizeof(tmp), "%s-all", name) < sizeof(tmp))
			request_module(tmp);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

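/*
 * Deliver a probing notification on crypto_chain.  If no listener
 * handles it, load the "cryptomgr" module (which registers a listener
 * able to construct algorithm instances on the fly) and try once more.
 */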
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!(mask & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

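/*
 * Set up the operations for a new transform.  New-style algorithm
 * types provide a cra_type object whose init hook does the work;
 * otherwise fall back to the legacy per-type initialisers for
 * ciphers, digests and compressors.
 */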
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_DIGEST:
		if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
		    CRYPTO_ALG_TYPE_HASH_MASK)
			return crypto_init_digest_ops_async(tfm);
		else
			return crypto_init_digest_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		crypto_exit_digest_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		len += crypto_digest_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

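/*
 * Mark an algorithm as dying so that subsequent lookups skip it (see
 * crypto_is_moribund() in __crypto_alg_lookup()).  Called when
 * transform initialisation fails with -EAGAIN, which signals that this
 * instance is unusable and the allocation loop should retry with a
 * fresh lookup.
 */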
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
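
/*
 * Illustrative usage sketch (not part of the original file); the
 * algorithm name "aes" is only an example, and error handling is
 * minimal:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	... use the transform via the cipher operations ...
 *
 *	crypto_free_tfm(tfm);
 */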

struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
				     const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm, frontend);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
	struct crypto_tfm *tfm;
	int err;

	type &= frontend->maskclear;
	mask &= frontend->maskclear;
	type |= frontend->type;
	mask |= frontend->maskset;

	lookup = frontend->lookup ?: crypto_alg_mod_lookup;

	for (;;) {
		struct crypto_alg *alg;

		alg = lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
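
/*
 * Illustrative sketch (an assumption about typical usage, not part of
 * this file): typed frontends are expected to wrap crypto_alloc_tfm()
 * with their own allocators, roughly along these lines for the shash
 * frontend:
 *
 *	struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *						u32 type, u32 mask)
 *	{
 *		return __crypto_shash_cast(crypto_alloc_tfm(alg_name,
 *				&crypto_shash_type, type, mask));
 *	}
 */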

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;
	size = ksize(mem);

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	memset(mem, 0, size);
	kfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
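
/*
 * Illustrative sketch (an assumption about the header helpers, not
 * part of this file): the typed crypto_free_*() wrappers funnel into
 * crypto_destroy_tfm().  For the base case, @mem and @tfm coincide
 * because no frontend data precedes the transform in the allocation:
 *
 *	static inline void crypto_free_tfm(struct crypto_tfm *tfm)
 *	{
 *		return crypto_destroy_tfm(tfm, tfm);
 *	}
 */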

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
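
/*
 * Illustrative usage sketch (not part of the original file): probing
 * for an algorithm before committing to a code path.  Note that this
 * may trigger module auto-loading via crypto_alg_mod_lookup():
 *
 *	if (crypto_has_alg("aes", CRYPTO_ALG_TYPE_CIPHER,
 *			   CRYPTO_ALG_TYPE_MASK))
 *		pr_debug("aes cipher is available\n");
 */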

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");