/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by XORing it
 * with a salt. This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/geniv.h>
#include <crypto/internal/skcipher.h>
#include <crypto/null.h>
#include <crypto/rng.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/string.h>

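/*
 * Per-instance context for the legacy givcrypt interface.  The
 * flexible salt[] array is filled with random bytes at init time and
 * is as long as the IV of the underlying algorithm.
 */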
struct seqiv_ctx {
	spinlock_t lock;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

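/*
 * Per-instance context for the new AEAD interface.  The null
 * blkcipher is used to copy the associated data and plaintext to the
 * destination when a request is not done in place.
 */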
struct seqiv_aead_ctx {
	/* aead_geniv_ctx must be the first element */
	struct aead_geniv_ctx geniv;
	struct crypto_blkcipher *null;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

static void seqiv_free(struct crypto_instance *inst);

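/*
 * Completion helpers for the case where an unaligned IV buffer forced
 * the request to run with a temporary IV: copy the generated IV back
 * into the caller's request and free the temporary buffer once the
 * child transform has finished.
 */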
static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
{
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	struct crypto_ablkcipher *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = skcipher_givcrypt_reqtfm(req);
	memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));

out:
	kfree(subreq->info);
}

static void seqiv_complete(struct crypto_async_request *base, int err)
{
	struct skcipher_givcrypt_request *req = base->data;

	seqiv_complete2(req, err);
	skcipher_givcrypt_complete(req, err);
}

static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
{
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = aead_givcrypt_reqtfm(req);
	memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kfree(subreq->iv);
}

static void seqiv_aead_complete(struct crypto_async_request *base, int err)
{
	struct aead_givcrypt_request *req = base->data;

	seqiv_aead_complete2(req, err);
	aead_givcrypt_complete(req, err);
}

static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kzfree(subreq->iv);
}

static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
					int err)
{
	struct aead_request *req = base->data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

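/*
 * Construct the IV: write the sequence number big-endian into the
 * low-order bytes (zero-padding anything above 64 bits) and XOR the
 * result with the per-instance salt.
 */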
static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
			unsigned int ivsize)
{
	unsigned int len = ivsize;

	if (ivsize > sizeof(u64)) {
		memset(info, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}
	seq = cpu_to_be64(seq);
	memcpy(info + ivsize - len, &seq, len);
	crypto_xor(info, ctx->salt, ivsize);
}

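/*
 * givencrypt for the legacy ablkcipher geniv interface: generate the
 * IV, copy it to req->giv and encrypt with the underlying cipher,
 * bouncing through an aligned temporary buffer if req->creq.info is
 * misaligned.
 */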
static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

	compl = req->creq.base.complete;
	data = req->creq.base.data;
	info = req->creq.info;

	ivsize = crypto_ablkcipher_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_ablkcipher_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->creq.base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_complete;
		data = req;
	}

	ablkcipher_request_set_callback(subreq, req->creq.base.flags, compl,
					data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, info);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_ablkcipher_encrypt(subreq);
	if (unlikely(info != req->creq.info))
		seqiv_complete2(req, err);
	return err;
}

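/* As seqiv_givencrypt(), but for the legacy AEAD givencrypt interface. */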
static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
{
	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *areq = &req->areq;
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	aead_request_set_tfm(subreq, aead_geniv_base(geniv));

	compl = areq->base.complete;
	data = areq->base.data;
	info = areq->iv;

	ivsize = crypto_aead_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, areq->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_aead_complete;
		data = req;
	}

	aead_request_set_callback(subreq, areq->base.flags, compl, data);
	aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
			       info);
	aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != areq->iv))
		seqiv_aead_complete2(req, err);
	return err;
}

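/*
 * encrypt for the new AEAD interface.  req->iv carries the 64-bit
 * sequence number; it is XORed with the salt, stored in the
 * destination directly after the associated data and used as the IV
 * for the inner AEAD.  Out-of-place requests first have the AD and
 * plaintext copied to the destination via the null cipher.
 */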
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		memcpy(info, req->iv, ivsize);
		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != req->iv))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}

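/*
 * decrypt for the new AEAD interface: fetch the IV stored after the
 * associated data in the source buffer and hand the remainder to the
 * inner AEAD.
 */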
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

	return crypto_aead_decrypt(subreq);
}

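/*
 * Legacy init paths: seed the salt from the default RNG and install
 * the givencrypt handler.
 */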
static int seqiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	int err;

	spin_lock_init(&ctx->lock);

	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

	err = 0;
	if (!crypto_get_default_rng()) {
		crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
		err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
					   crypto_ablkcipher_ivsize(geniv));
		crypto_put_default_rng();
	}

	return err ?: skcipher_geniv_init(tfm);
}

static int seqiv_old_aead_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	int err;

	spin_lock_init(&ctx->lock);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct aead_request));
	err = 0;
	if (!crypto_get_default_rng()) {
		geniv->givencrypt = seqiv_aead_givencrypt;
		err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
					   crypto_aead_ivsize(geniv));
		crypto_put_default_rng();
	}

	return err ?: aead_geniv_init(tfm);
}

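/*
 * Init for the new AEAD interface: seed the salt, grab the default
 * null skcipher for out-of-place copies and set up the child AEAD
 * (the reqsize argument is unused here).
 */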
static int seqiv_aead_init_common(struct crypto_aead *geniv,
				  unsigned int reqsize)
{
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	int err;

	spin_lock_init(&ctx->geniv.lock);

	crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));

	err = crypto_get_default_rng();
	if (err)
		goto out;

	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(geniv));
	crypto_put_default_rng();
	if (err)
		goto out;

	ctx->null = crypto_get_default_null_skcipher();
	err = PTR_ERR(ctx->null);
	if (IS_ERR(ctx->null))
		goto out;

	err = aead_geniv_init(crypto_aead_tfm(geniv));
	if (err)
		goto drop_null;

	ctx->geniv.child = geniv->child;
	geniv->child = geniv;

out:
	return err;

drop_null:
	crypto_put_default_null_skcipher();
	goto out;
}

static int seqiv_aead_init(struct crypto_aead *tfm)
{
	return seqiv_aead_init_common(tfm, sizeof(struct aead_request));
}

static void seqiv_aead_exit(struct crypto_aead *tfm)
{
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->geniv.child);
	crypto_put_default_null_skcipher();
}

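/*
 * Instantiate seqiv on top of the skcipher geniv framework.  The IV
 * must be at least 64 bits wide to hold the sequence number.
 */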
static int seqiv_ablkcipher_create(struct crypto_template *tmpl,
				   struct rtattr **tb)
{
	struct crypto_instance *inst;
	int err;

	inst = skcipher_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.cra_ablkcipher.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_init = seqiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	skcipher_geniv_free(inst);
	goto out;
}

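/* Finish instantiation for AEADs using the old crypto_aead interface. */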
static int seqiv_old_aead_create(struct crypto_template *tmpl,
				 struct aead_instance *aead)
{
	struct crypto_instance *inst = aead_crypto_instance(aead);
	int err = -EINVAL;

	if (inst->alg.cra_aead.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_init = seqiv_old_aead_init;
	inst->alg.cra_exit = aead_geniv_exit;

	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(aead);
	goto out;
}

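/*
 * Instantiate seqiv for an AEAD.  Instances built on the old AEAD
 * interface are finished by seqiv_old_aead_create(); when the
 * underlying algorithm uses the new interface, the seqiv_aead_*
 * handlers are installed and a 64-bit IV is required.
 */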
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	if (inst->alg.base.cra_aead.encrypt)
		return seqiv_old_aead_create(tmpl, inst);

	spawn = aead_instance_ctx(inst);
	alg = crypto_spawn_aead_alg(spawn);

	if (alg->base.cra_aead.encrypt)
		goto done;

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.init = seqiv_aead_init;
	inst->alg.exit = seqiv_aead_exit;

	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

done:
	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
	goto out;
}

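/* Template entry point: dispatch on the requested algorithm type. */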
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		err = seqiv_ablkcipher_create(tmpl, tb);
	else
		err = seqiv_aead_create(tmpl, tb);

	return err;
}

static void seqiv_free(struct crypto_instance *inst)
{
	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		skcipher_geniv_free(inst);
	else
		aead_geniv_free(aead_instance(inst));
}

static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
	return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqiv_tmpl);
}

module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");