/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/glue_helper.h>

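/*
 * The AVX assembler routines below process eight 16-byte blocks in
 * parallel; this constant also serves as the FPU usage threshold in
 * the glue contexts further down.
 */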
#define CAST6_PARALLEL_BLOCKS 8

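/*
 * Prototypes for the 8-way parallel cipher routines implemented in the
 * accompanying AVX assembler file.
 */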
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

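/*
 * Single-block XTS helpers used for the tail blocks that do not fill an
 * 8-way batch; the glue helper XORs the tweak around one generic CAST6
 * call and advances the tweak for the next block.
 */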
static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_decrypt));
}

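/*
 * Single-block CTR helper: encrypt a big-endian copy of the counter and
 * XOR the resulting keystream into the source block, incrementing the
 * counter for the next block.
 */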
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}

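/*
 * Dispatch tables for the glue helper, listed in decreasing num_blocks
 * order: the 8-way AVX routines are used while at least eight blocks
 * remain, and the generic one-block functions handle the tail.
 */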
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
	} }
};

static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
	} }
};

static const struct common_glue_ctx cast6_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
	} }
};

static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
	} }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}

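/*
 * FPU section helpers: kernel FPU usage is only begun when at least
 * CAST6_PARALLEL_BLOCKS blocks worth of data are to be processed, so
 * short requests avoid the xstate save/restore cost.
 */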
static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void cast6_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

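/*
 * Per-request state passed to the LRW callbacks below: the expanded
 * cipher context plus a flag tracking whether the FPU section is open,
 * so it can be closed once at the end of the walk.
 */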
struct crypt_priv {
	struct cast6_ctx *ctx;
	bool fpu_enabled;
};

static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_decrypt(ctx->ctx, srcdst, srcdst);
}

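/*
 * LRW: the last CAST6_BLOCK_SIZE bytes of the supplied key seed the
 * tweak multiplication table, the rest is the CAST6 cipher key.
 */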
struct cast6_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct cast6_ctx cast6_ctx;
};

static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
			     &tfm->crt_flags);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}

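/*
 * XTS uses two independent CAST6 keys: one for encrypting the data and
 * one for encrypting the tweak.
 */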
struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
			      flags);
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&cast6_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__cast6_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&cast6_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__cast6_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

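/*
 * The first five entries are the internal synchronous implementations
 * (CRYPTO_ALG_INTERNAL, priority 0); the last five are the async
 * wrappers actually selected by users, which hand off to the internal
 * versions via the ablk/cryptd helpers when the FPU is not usable in
 * the current context.
 */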
static struct crypto_alg cast6_algs[10] = { {
	.cra_name		= "__ecb-cast6-avx",
	.cra_driver_name	= "__driver-ecb-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-cast6-avx",
	.cra_driver_name	= "__driver-cbc-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-cast6-avx",
	.cra_driver_name	= "__driver-ctr-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-cast6-avx",
	.cra_driver_name	= "__driver-lrw-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= lrw_cast6_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-cast6-avx",
	.cra_driver_name	= "__driver-xts-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE * 2,
			.max_keysize	= CAST6_MAX_KEY_SIZE * 2,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= xts_cast6_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(cast6)",
	.cra_driver_name	= "ecb-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(cast6)",
	.cra_driver_name	= "cbc-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(cast6)",
	.cra_driver_name	= "ctr-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(cast6)",
	.cra_driver_name	= "lrw-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(cast6)",
	.cra_driver_name	= "xts-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE * 2,
			.max_keysize	= CAST6_MAX_KEY_SIZE * 2,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

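/*
 * Register the algorithms only when the CPU and kernel support the SSE
 * and YMM (AVX) xstate features required by the assembler routines.
 */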
static int __init cast6_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

static void __exit cast6_exit(void)
{
	crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");