linux/crypto/pcrypt.c
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <crypto/pcrypt.h>

static struct padata_instance *pcrypt_enc_padata;
static struct padata_instance *pcrypt_dec_padata;
static struct workqueue_struct *encwq;
static struct workqueue_struct *decwq;

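/*
 * Per-instance context: the wrapped (child) algorithm plus a counter used
 * to spread callback CPUs round-robin over the tfms created from this
 * instance.  Each tfm remembers its own callback CPU in pcrypt_aead_ctx.
 */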
struct pcrypt_instance_ctx {
        struct crypto_spawn spawn;
        unsigned int tfm_count;
};

struct pcrypt_aead_ctx {
        struct crypto_aead *child;
        unsigned int cb_cpu;
};

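/*
 * Dispatch a request to padata for parallel processing.  If the callback
 * CPU chosen at tfm init time is no longer active, remap it to an active
 * CPU before queueing.
 */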
static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
                              struct padata_instance *pinst)
{
        unsigned int cpu_index, cpu, i;

        cpu = *cb_cpu;

        if (cpumask_test_cpu(cpu, cpu_active_mask))
                goto out;

        cpu_index = cpu % cpumask_weight(cpu_active_mask);

        cpu = cpumask_first(cpu_active_mask);
        for (i = 0; i < cpu_index; i++)
                cpu = cpumask_next(cpu, cpu_active_mask);

        *cb_cpu = cpu;

out:
        return padata_do_parallel(pinst, padata, cpu);
}

static int pcrypt_aead_setkey(struct crypto_aead *parent,
                              const u8 *key, unsigned int keylen)
{
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

        return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
                                   unsigned int authsize)
{
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

        return crypto_aead_setauthsize(ctx->child, authsize);
}

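/*
 * Serialization callbacks: run by padata once a request is allowed to
 * complete in the original submission order; they hand the stored result
 * back to the caller of the pcrypt transform.
 */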
static void pcrypt_aead_serial(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_request *req = pcrypt_request_ctx(preq);

        aead_request_complete(req->base.data, padata->info);
}

static void pcrypt_aead_giv_serial(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

        aead_request_complete(req->areq.base.data, padata->info);
}

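/*
 * Completion callback of the underlying (child) AEAD transform.  Record
 * the result and queue the request for in-order serialization.
 */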
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;
        struct pcrypt_request *preq = aead_request_ctx(req);
        struct padata_priv *padata = pcrypt_request_padata(preq);

        padata->info = err;
        req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        padata_do_serial(padata);
}

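/*
 * Parallel work function for encryption; decryption and IV-generating
 * encryption below follow the same pattern.  Run the child transform on
 * the CPU padata selected.  If the child completes synchronously,
 * serialize immediately; otherwise pcrypt_aead_done() does it from the
 * child's completion callback.
 */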
static void pcrypt_aead_enc(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_request *req = pcrypt_request_ctx(preq);

        padata->info = crypto_aead_encrypt(req);

        if (padata->info == -EINPROGRESS)
                return;

        padata_do_serial(padata);
}

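/*
 * Entry point for encryption.  Build a request for the child transform
 * inside the pcrypt request context and hand it to padata via
 * pcrypt_do_parallel(); if padata does not take the request (returns 0),
 * fall back to calling the child synchronously.
 */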
static int pcrypt_aead_encrypt(struct aead_request *req)
{
        int err;
        struct pcrypt_request *preq = aead_request_ctx(req);
        struct aead_request *creq = pcrypt_request_ctx(preq);
        struct padata_priv *padata = pcrypt_request_padata(preq);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
        u32 flags = aead_request_flags(req);

        memset(padata, 0, sizeof(struct padata_priv));

        padata->parallel = pcrypt_aead_enc;
        padata->serial = pcrypt_aead_serial;

        aead_request_set_tfm(creq, ctx->child);
        aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
                                  pcrypt_aead_done, req);
        aead_request_set_crypt(creq, req->src, req->dst,
                               req->cryptlen, req->iv);
        aead_request_set_assoc(creq, req->assoc, req->assoclen);

        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_enc_padata);
        if (err)
                return err;
        else
                err = crypto_aead_encrypt(creq);

        return err;
}

static void pcrypt_aead_dec(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_request *req = pcrypt_request_ctx(preq);

        padata->info = crypto_aead_decrypt(req);

        if (padata->info == -EINPROGRESS)
                return;

        padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
        int err;
        struct pcrypt_request *preq = aead_request_ctx(req);
        struct aead_request *creq = pcrypt_request_ctx(preq);
        struct padata_priv *padata = pcrypt_request_padata(preq);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
        u32 flags = aead_request_flags(req);

        memset(padata, 0, sizeof(struct padata_priv));

        padata->parallel = pcrypt_aead_dec;
        padata->serial = pcrypt_aead_serial;

        aead_request_set_tfm(creq, ctx->child);
        aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
                                  pcrypt_aead_done, req);
        aead_request_set_crypt(creq, req->src, req->dst,
                               req->cryptlen, req->iv);
        aead_request_set_assoc(creq, req->assoc, req->assoclen);

        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_dec_padata);
        if (err)
                return err;
        else
                err = crypto_aead_decrypt(creq);

        return err;
}

static void pcrypt_aead_givenc(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

        padata->info = crypto_aead_givencrypt(req);

        if (padata->info == -EINPROGRESS)
                return;

        padata_do_serial(padata);
}

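/*
 * IV-generating encryption follows the same pattern as
 * pcrypt_aead_encrypt(), but wraps an aead_givcrypt_request and completes
 * through pcrypt_aead_giv_serial().
 */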
static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req)
{
        int err;
        struct aead_request *areq = &req->areq;
        struct pcrypt_request *preq = aead_request_ctx(areq);
        struct aead_givcrypt_request *creq = pcrypt_request_ctx(preq);
        struct padata_priv *padata = pcrypt_request_padata(preq);
        struct crypto_aead *aead = aead_givcrypt_reqtfm(req);
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
        u32 flags = aead_request_flags(areq);

        memset(padata, 0, sizeof(struct padata_priv));

        padata->parallel = pcrypt_aead_givenc;
        padata->serial = pcrypt_aead_giv_serial;

        aead_givcrypt_set_tfm(creq, ctx->child);
        aead_givcrypt_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
                                   pcrypt_aead_done, areq);
        aead_givcrypt_set_crypt(creq, areq->src, areq->dst,
                                areq->cryptlen, areq->iv);
        aead_givcrypt_set_assoc(creq, areq->assoc, areq->assoclen);
        aead_givcrypt_set_giv(creq, req->giv, req->seq);

        err = pcrypt_do_parallel(padata, &ctx->cb_cpu, pcrypt_enc_padata);
        if (err)
                return err;
        else
                err = crypto_aead_givencrypt(creq);

        return err;
}

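/*
 * Per-tfm initialization: assign callback CPUs round-robin across the
 * active CPUs and size the request context to hold the pcrypt
 * bookkeeping, an aead_givcrypt_request (which also covers plain AEAD
 * requests) and the child's request context.
 */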
static int pcrypt_aead_init_tfm(struct crypto_tfm *tfm)
{
        int cpu, cpu_index;
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct pcrypt_instance_ctx *ictx = crypto_instance_ctx(inst);
        struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_aead *cipher;

        ictx->tfm_count++;

        cpu_index = ictx->tfm_count % cpumask_weight(cpu_active_mask);

        ctx->cb_cpu = cpumask_first(cpu_active_mask);
        for (cpu = 0; cpu < cpu_index; cpu++)
                ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_active_mask);

        cipher = crypto_spawn_aead(crypto_instance_ctx(inst));

        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        tfm->crt_aead.reqsize = sizeof(struct pcrypt_request)
                + sizeof(struct aead_givcrypt_request)
                + crypto_aead_reqsize(cipher);

        return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_tfm *tfm)
{
        struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_aead(ctx->child);
}

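/*
 * Allocate a "pcrypt(...)" instance around the underlying algorithm and
 * inherit its basic properties; the priority is raised by 100 so the
 * parallel wrapper is preferred over the wrapped algorithm.
 */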
static struct crypto_instance *pcrypt_alloc_instance(struct crypto_alg *alg)
{
        struct crypto_instance *inst;
        struct pcrypt_instance_ctx *ctx;
        int err;

        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        if (!inst) {
                inst = ERR_PTR(-ENOMEM);
                goto out;
        }

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto out_free_inst;

        memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

        ctx = crypto_instance_ctx(inst);
        err = crypto_init_spawn(&ctx->spawn, alg, inst,
                                CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto out_free_inst;

        inst->alg.cra_priority = alg->cra_priority + 100;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;

out:
        return inst;

out_free_inst:
        kfree(inst);
        inst = ERR_PTR(err);
        goto out;
}

static struct crypto_instance *pcrypt_alloc_aead(struct rtattr **tb,
                                                 u32 type, u32 mask)
{
        struct crypto_instance *inst;
        struct crypto_alg *alg;

        alg = crypto_get_attr_alg(tb, type, (mask & CRYPTO_ALG_TYPE_MASK));
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        inst = pcrypt_alloc_instance(alg);
        if (IS_ERR(inst))
                goto out_put_alg;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
        inst->alg.cra_type = &crypto_aead_type;

        inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
        inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
        inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;

        inst->alg.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

        inst->alg.cra_init = pcrypt_aead_init_tfm;
        inst->alg.cra_exit = pcrypt_aead_exit_tfm;

        inst->alg.cra_aead.setkey = pcrypt_aead_setkey;
        inst->alg.cra_aead.setauthsize = pcrypt_aead_setauthsize;
        inst->alg.cra_aead.encrypt = pcrypt_aead_encrypt;
        inst->alg.cra_aead.decrypt = pcrypt_aead_decrypt;
        inst->alg.cra_aead.givencrypt = pcrypt_aead_givencrypt;

out_put_alg:
        crypto_mod_put(alg);
        return inst;
}

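/* Template entry point: only AEAD algorithms can be wrapped by pcrypt. */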
static struct crypto_instance *pcrypt_alloc(struct rtattr **tb)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return ERR_CAST(algt);

        switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AEAD:
                return pcrypt_alloc_aead(tb, algt->type, algt->mask);
        }

        return ERR_PTR(-EINVAL);
}

static void pcrypt_free(struct crypto_instance *inst)
{
        struct pcrypt_instance_ctx *ctx = crypto_instance_ctx(inst);

        crypto_drop_spawn(&ctx->spawn);
        kfree(inst);
}

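/*
 * Illustrative only: once this template is registered, a parallelized
 * instance of an existing AEAD can be requested by name, e.g.
 *
 *      struct crypto_aead *tfm;
 *
 *      tfm = crypto_alloc_aead("pcrypt(authenc(hmac(sha1),cbc(aes)))", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *
 * The resulting tfm behaves like the wrapped algorithm, with requests
 * processed in parallel by padata and completed in submission order.
 */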
static struct crypto_template pcrypt_tmpl = {
        .name = "pcrypt",
        .alloc = pcrypt_alloc,
        .free = pcrypt_free,
        .module = THIS_MODULE,
};

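/*
 * Module init: create a dedicated workqueue and padata instance for each
 * direction (encryption and decryption), start both padata instances and
 * register the "pcrypt" template.
 */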
static int __init pcrypt_init(void)
{
        encwq = create_workqueue("pencrypt");
        if (!encwq)
                goto err;

        decwq = create_workqueue("pdecrypt");
        if (!decwq)
                goto err_destroy_encwq;

        pcrypt_enc_padata = padata_alloc(cpu_possible_mask, encwq);
        if (!pcrypt_enc_padata)
                goto err_destroy_decwq;

        pcrypt_dec_padata = padata_alloc(cpu_possible_mask, decwq);
        if (!pcrypt_dec_padata)
                goto err_free_padata;

        padata_start(pcrypt_enc_padata);
        padata_start(pcrypt_dec_padata);

        return crypto_register_template(&pcrypt_tmpl);

err_free_padata:
        padata_free(pcrypt_enc_padata);

err_destroy_decwq:
        destroy_workqueue(decwq);

err_destroy_encwq:
        destroy_workqueue(encwq);

err:
        return -ENOMEM;
}

static void __exit pcrypt_exit(void)
{
        padata_stop(pcrypt_enc_padata);
        padata_stop(pcrypt_dec_padata);

        destroy_workqueue(encwq);
        destroy_workqueue(decwq);

        padata_free(pcrypt_enc_padata);
        padata_free(pcrypt_dec_padata);

        crypto_unregister_template(&pcrypt_tmpl);
}

module_init(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");