1
2
3
4
5
6
7
8
9
10
11
12
13#include <crypto/aead.h>
14#include <crypto/internal/skcipher.h>
15#include <crypto/authenc.h>
16#include <crypto/scatterwalk.h>
17#include <linux/err.h>
18#include <linux/init.h>
19#include <linux/kernel.h>
20#include <linux/module.h>
21#include <linux/rtnetlink.h>
22#include <linux/slab.h>
23#include <linux/spinlock.h>
24
/*
 * Per-template-instance context: references (spawns) to the two child
 * algorithms that make up an authenc() instance.
 */
struct authenc_instance_ctx {
	struct crypto_spawn auth;		/* authentication hash algorithm */
	struct crypto_skcipher_spawn enc;	/* encryption skcipher algorithm */
};
29
/*
 * Per-transform context: the instantiated child transforms plus a lock
 * serialising use of the shared hash transform (crypto_authenc_hash()
 * runs all hash operations under auth_lock).
 */
struct crypto_authenc_ctx {
	spinlock_t auth_lock;		/* protects use of *auth */
	struct crypto_hash *auth;	/* authentication hash tfm */
	struct crypto_ablkcipher *enc;	/* encryption cipher tfm */
};
35
36static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
37 unsigned int keylen)
38{
39 unsigned int authkeylen;
40 unsigned int enckeylen;
41 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
42 struct crypto_hash *auth = ctx->auth;
43 struct crypto_ablkcipher *enc = ctx->enc;
44 struct rtattr *rta = (void *)key;
45 struct crypto_authenc_key_param *param;
46 int err = -EINVAL;
47
48 if (!RTA_OK(rta, keylen))
49 goto badkey;
50 if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
51 goto badkey;
52 if (RTA_PAYLOAD(rta) < sizeof(*param))
53 goto badkey;
54
55 param = RTA_DATA(rta);
56 enckeylen = be32_to_cpu(param->enckeylen);
57
58 key += RTA_ALIGN(rta->rta_len);
59 keylen -= RTA_ALIGN(rta->rta_len);
60
61 if (keylen < enckeylen)
62 goto badkey;
63
64 authkeylen = keylen - enckeylen;
65
66 crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
67 crypto_hash_set_flags(auth, crypto_aead_get_flags(authenc) &
68 CRYPTO_TFM_REQ_MASK);
69 err = crypto_hash_setkey(auth, key, authkeylen);
70 crypto_aead_set_flags(authenc, crypto_hash_get_flags(auth) &
71 CRYPTO_TFM_RES_MASK);
72
73 if (err)
74 goto out;
75
76 crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
77 crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc) &
78 CRYPTO_TFM_REQ_MASK);
79 err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen);
80 crypto_aead_set_flags(authenc, crypto_ablkcipher_get_flags(enc) &
81 CRYPTO_TFM_RES_MASK);
82
83out:
84 return err;
85
86badkey:
87 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
88 goto out;
89}
90
91static void authenc_chain(struct scatterlist *head, struct scatterlist *sg,
92 int chain)
93{
94 if (chain) {
95 head->length += sg->length;
96 sg = scatterwalk_sg_next(sg);
97 }
98
99 if (sg)
100 scatterwalk_sg_chain(head, 2, sg);
101 else
102 sg_mark_end(head);
103}
104
105static u8 *crypto_authenc_hash(struct aead_request *req, unsigned int flags,
106 struct scatterlist *cipher,
107 unsigned int cryptlen)
108{
109 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
110 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
111 struct crypto_hash *auth = ctx->auth;
112 struct hash_desc desc = {
113 .tfm = auth,
114 .flags = aead_request_flags(req) & flags,
115 };
116 u8 *hash = aead_request_ctx(req);
117 int err;
118
119 hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth),
120 crypto_hash_alignmask(auth) + 1);
121
122 spin_lock_bh(&ctx->auth_lock);
123 err = crypto_hash_init(&desc);
124 if (err)
125 goto auth_unlock;
126
127 err = crypto_hash_update(&desc, req->assoc, req->assoclen);
128 if (err)
129 goto auth_unlock;
130
131 err = crypto_hash_update(&desc, cipher, cryptlen);
132 if (err)
133 goto auth_unlock;
134
135 err = crypto_hash_final(&desc, hash);
136auth_unlock:
137 spin_unlock_bh(&ctx->auth_lock);
138
139 if (err)
140 return ERR_PTR(err);
141
142 return hash;
143}
144
145static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
146 unsigned int flags)
147{
148 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
149 struct scatterlist *dst = req->dst;
150 struct scatterlist cipher[2];
151 struct page *dstp;
152 unsigned int ivsize = crypto_aead_ivsize(authenc);
153 unsigned int cryptlen;
154 u8 *vdst;
155 u8 *hash;
156
157 dstp = sg_page(dst);
158 vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset;
159
160 if (ivsize) {
161 sg_init_table(cipher, 2);
162 sg_set_buf(cipher, iv, ivsize);
163 authenc_chain(cipher, dst, vdst == iv + ivsize);
164 dst = cipher;
165 }
166
167 cryptlen = req->cryptlen + ivsize;
168 hash = crypto_authenc_hash(req, flags, dst, cryptlen);
169 if (IS_ERR(hash))
170 return PTR_ERR(hash);
171
172 scatterwalk_map_and_copy(hash, dst, cryptlen,
173 crypto_aead_authsize(authenc), 1);
174 return 0;
175}
176
177static void crypto_authenc_encrypt_done(struct crypto_async_request *req,
178 int err)
179{
180 struct aead_request *areq = req->data;
181
182 if (!err) {
183 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
184 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
185 struct ablkcipher_request *abreq = aead_request_ctx(areq);
186 u8 *iv = (u8 *)(abreq + 1) +
187 crypto_ablkcipher_reqsize(ctx->enc);
188
189 err = crypto_authenc_genicv(areq, iv, 0);
190 }
191
192 aead_request_complete(areq, err);
193}
194
195static int crypto_authenc_encrypt(struct aead_request *req)
196{
197 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
198 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
199 struct ablkcipher_request *abreq = aead_request_ctx(req);
200 struct crypto_ablkcipher *enc = ctx->enc;
201 struct scatterlist *dst = req->dst;
202 unsigned int cryptlen = req->cryptlen;
203 u8 *iv = (u8 *)(abreq + 1) + crypto_ablkcipher_reqsize(enc);
204 int err;
205
206 ablkcipher_request_set_tfm(abreq, enc);
207 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
208 crypto_authenc_encrypt_done, req);
209 ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv);
210
211 memcpy(iv, req->iv, crypto_aead_ivsize(authenc));
212
213 err = crypto_ablkcipher_encrypt(abreq);
214 if (err)
215 return err;
216
217 return crypto_authenc_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
218}
219
220static void crypto_authenc_givencrypt_done(struct crypto_async_request *req,
221 int err)
222{
223 struct aead_request *areq = req->data;
224
225 if (!err) {
226 struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
227
228 err = crypto_authenc_genicv(areq, greq->giv, 0);
229 }
230
231 aead_request_complete(areq, err);
232}
233
234static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
235{
236 struct crypto_aead *authenc = aead_givcrypt_reqtfm(req);
237 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
238 struct aead_request *areq = &req->areq;
239 struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
240 u8 *iv = req->giv;
241 int err;
242
243 skcipher_givcrypt_set_tfm(greq, ctx->enc);
244 skcipher_givcrypt_set_callback(greq, aead_request_flags(areq),
245 crypto_authenc_givencrypt_done, areq);
246 skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen,
247 areq->iv);
248 skcipher_givcrypt_set_giv(greq, iv, req->seq);
249
250 err = crypto_skcipher_givencrypt(greq);
251 if (err)
252 return err;
253
254 return crypto_authenc_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
255}
256
257static int crypto_authenc_verify(struct aead_request *req,
258 struct scatterlist *cipher,
259 unsigned int cryptlen)
260{
261 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
262 u8 *ohash;
263 u8 *ihash;
264 unsigned int authsize;
265
266 ohash = crypto_authenc_hash(req, CRYPTO_TFM_REQ_MAY_SLEEP, cipher,
267 cryptlen);
268 if (IS_ERR(ohash))
269 return PTR_ERR(ohash);
270
271 authsize = crypto_aead_authsize(authenc);
272 ihash = ohash + authsize;
273 scatterwalk_map_and_copy(ihash, cipher, cryptlen, authsize, 0);
274 return memcmp(ihash, ohash, authsize) ? -EBADMSG: 0;
275}
276
277static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
278 unsigned int cryptlen)
279{
280 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
281 struct scatterlist *src = req->src;
282 struct scatterlist cipher[2];
283 struct page *srcp;
284 unsigned int ivsize = crypto_aead_ivsize(authenc);
285 u8 *vsrc;
286
287 srcp = sg_page(src);
288 vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset;
289
290 if (ivsize) {
291 sg_init_table(cipher, 2);
292 sg_set_buf(cipher, iv, ivsize);
293 authenc_chain(cipher, src, vsrc == iv + ivsize);
294 src = cipher;
295 }
296
297 return crypto_authenc_verify(req, src, cryptlen + ivsize);
298}
299
300static int crypto_authenc_decrypt(struct aead_request *req)
301{
302 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
303 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
304 struct ablkcipher_request *abreq = aead_request_ctx(req);
305 unsigned int cryptlen = req->cryptlen;
306 unsigned int authsize = crypto_aead_authsize(authenc);
307 u8 *iv = req->iv;
308 int err;
309
310 if (cryptlen < authsize)
311 return -EINVAL;
312 cryptlen -= authsize;
313
314 err = crypto_authenc_iverify(req, iv, cryptlen);
315 if (err)
316 return err;
317
318 ablkcipher_request_set_tfm(abreq, ctx->enc);
319 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
320 req->base.complete, req->base.data);
321 ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv);
322
323 return crypto_ablkcipher_decrypt(abreq);
324}
325
326static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
327{
328 struct crypto_instance *inst = (void *)tfm->__crt_alg;
329 struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst);
330 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
331 struct crypto_hash *auth;
332 struct crypto_ablkcipher *enc;
333 int err;
334
335 auth = crypto_spawn_hash(&ictx->auth);
336 if (IS_ERR(auth))
337 return PTR_ERR(auth);
338
339 enc = crypto_spawn_skcipher(&ictx->enc);
340 err = PTR_ERR(enc);
341 if (IS_ERR(enc))
342 goto err_free_hash;
343
344 ctx->auth = auth;
345 ctx->enc = enc;
346 tfm->crt_aead.reqsize = max_t(unsigned int,
347 (crypto_hash_alignmask(auth) &
348 ~(crypto_tfm_ctx_alignment() - 1)) +
349 crypto_hash_digestsize(auth) * 2,
350 sizeof(struct skcipher_givcrypt_request) +
351 crypto_ablkcipher_reqsize(enc) +
352 crypto_ablkcipher_ivsize(enc));
353
354 spin_lock_init(&ctx->auth_lock);
355
356 return 0;
357
358err_free_hash:
359 crypto_free_hash(auth);
360 return err;
361}
362
/* Transform destructor: release the child tfms acquired in init_tfm. */
static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_hash(ctx->auth);
	crypto_free_ablkcipher(ctx->enc);
}
370
/*
 * Template constructor: build an "authenc(auth,enc)" instance from the
 * hash algorithm in tb[1] and the skcipher name in tb[2].  Returns the
 * new instance or ERR_PTR() on failure.
 */
static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *auth;
	struct crypto_alg *enc;
	struct authenc_instance_ctx *ctx;
	const char *enc_name;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	/* The request must be compatible with an AEAD algorithm type. */
	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	/* Look up the hash algorithm; this takes a module reference. */
	auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
			       CRYPTO_ALG_TYPE_HASH_MASK);
	if (IS_ERR(auth))
		return ERR_PTR(PTR_ERR(auth));

	enc_name = crypto_attr_alg_name(tb[2]);
	err = PTR_ERR(enc_name);
	if (IS_ERR(enc_name))
		goto out_put_auth;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_auth;

	ctx = crypto_instance_ctx(inst);

	/* The auth spawn pins the hash for the instance's lifetime. */
	err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ctx->enc, inst);
	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_auth;

	enc = crypto_skcipher_spawn_alg(&ctx->enc);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "authenc(%s,%s)", auth->cra_name, enc->cra_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_enc;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "authenc(%s,%s)", auth->cra_driver_name,
		     enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_enc;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	/* The combined algorithm is async iff the cipher is async. */
	inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority;
	inst->alg.cra_blocksize = enc->cra_blocksize;
	inst->alg.cra_alignmask = auth->cra_alignmask | enc->cra_alignmask;
	inst->alg.cra_type = &crypto_aead_type;

	inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
	/* Digest size field differs between hash and digest flavours. */
	inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
					 auth->cra_hash.digestsize :
					 auth->cra_digest.dia_digestsize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);

	inst->alg.cra_init = crypto_authenc_init_tfm;
	inst->alg.cra_exit = crypto_authenc_exit_tfm;

	inst->alg.cra_aead.setkey = crypto_authenc_setkey;
	inst->alg.cra_aead.encrypt = crypto_authenc_encrypt;
	inst->alg.cra_aead.decrypt = crypto_authenc_decrypt;
	inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt;

out:
	/* Success and error paths both drop the crypto_attr_alg() ref. */
	crypto_mod_put(auth);
	return inst;

err_drop_enc:
	crypto_drop_skcipher(&ctx->enc);
err_drop_auth:
	crypto_drop_spawn(&ctx->auth);
err_free_inst:
	kfree(inst);
out_put_auth:
	/* Error paths converge here, then fall through to out above. */
	inst = ERR_PTR(err);
	goto out;
}
466
/* Template destructor: drop both spawns and free the instance. */
static void crypto_authenc_free(struct crypto_instance *inst)
{
	struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->enc);
	crypto_drop_spawn(&ctx->auth);
	kfree(inst);
}
475
/* The "authenc" template; instantiated as authenc(<hash>,<cipher>). */
static struct crypto_template crypto_authenc_tmpl = {
	.name = "authenc",
	.alloc = crypto_authenc_alloc,
	.free = crypto_authenc_free,
	.module = THIS_MODULE,
};
482
/* Register the "authenc" template with the crypto API on module load. */
static int __init crypto_authenc_module_init(void)
{
	return crypto_register_template(&crypto_authenc_tmpl);
}
487
/* Unregister the template on module unload. */
static void __exit crypto_authenc_module_exit(void)
{
	crypto_unregister_template(&crypto_authenc_tmpl);
}
492
module_init(crypto_authenc_module_init);
module_exit(crypto_authenc_module_exit);

/* Module metadata. */
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Simple AEAD wrapper for IPsec");
498