This source file includes the following definitions:
- atmel_aes_reg_name
- atmel_aes_read
- atmel_aes_write
- atmel_aes_read_n
- atmel_aes_write_n
- atmel_aes_read_block
- atmel_aes_write_block
- atmel_aes_wait_for_data_ready
- atmel_aes_padlen
- atmel_aes_find_dev
- atmel_aes_hw_init
- atmel_aes_get_version
- atmel_aes_hw_version_init
- atmel_aes_set_mode
- atmel_aes_is_encrypt
- atmel_aes_set_iv_as_last_ciphertext_block
- atmel_aes_complete
- atmel_aes_write_ctrl_key
- atmel_aes_write_ctrl
- atmel_aes_cpu_transfer
- atmel_aes_cpu_start
- atmel_aes_check_aligned
- atmel_aes_restore_sg
- atmel_aes_map
- atmel_aes_unmap
- atmel_aes_dma_transfer_start
- atmel_aes_dma_transfer_stop
- atmel_aes_dma_start
- atmel_aes_dma_stop
- atmel_aes_dma_callback
- atmel_aes_handle_queue
- atmel_aes_transfer_complete
- atmel_aes_start
- atmel_aes_ctr_ctx_cast
- atmel_aes_ctr_transfer
- atmel_aes_ctr_start
- atmel_aes_crypt
- atmel_aes_setkey
- atmel_aes_ecb_encrypt
- atmel_aes_ecb_decrypt
- atmel_aes_cbc_encrypt
- atmel_aes_cbc_decrypt
- atmel_aes_ofb_encrypt
- atmel_aes_ofb_decrypt
- atmel_aes_cfb_encrypt
- atmel_aes_cfb_decrypt
- atmel_aes_cfb64_encrypt
- atmel_aes_cfb64_decrypt
- atmel_aes_cfb32_encrypt
- atmel_aes_cfb32_decrypt
- atmel_aes_cfb16_encrypt
- atmel_aes_cfb16_decrypt
- atmel_aes_cfb8_encrypt
- atmel_aes_cfb8_decrypt
- atmel_aes_ctr_encrypt
- atmel_aes_ctr_decrypt
- atmel_aes_cra_init
- atmel_aes_ctr_cra_init
- atmel_aes_gcm_ctx_cast
- atmel_aes_gcm_ghash
- atmel_aes_gcm_ghash_init
- atmel_aes_gcm_ghash_finalize
- atmel_aes_gcm_start
- atmel_aes_gcm_process
- atmel_aes_gcm_length
- atmel_aes_gcm_data
- atmel_aes_gcm_tag_init
- atmel_aes_gcm_tag
- atmel_aes_gcm_finalize
- atmel_aes_gcm_crypt
- atmel_aes_gcm_setkey
- atmel_aes_gcm_setauthsize
- atmel_aes_gcm_encrypt
- atmel_aes_gcm_decrypt
- atmel_aes_gcm_init
- atmel_aes_xts_ctx_cast
- atmel_aes_xts_start
- atmel_aes_xts_process_data
- atmel_aes_xts_setkey
- atmel_aes_xts_encrypt
- atmel_aes_xts_decrypt
- atmel_aes_xts_cra_init
- atmel_aes_authenc_complete
- atmel_aes_authenc_start
- atmel_aes_authenc_init
- atmel_aes_authenc_transfer
- atmel_aes_authenc_digest
- atmel_aes_authenc_final
- atmel_aes_authenc_setkey
- atmel_aes_authenc_init_tfm
- atmel_aes_authenc_hmac_sha1_init_tfm
- atmel_aes_authenc_hmac_sha224_init_tfm
- atmel_aes_authenc_hmac_sha256_init_tfm
- atmel_aes_authenc_hmac_sha384_init_tfm
- atmel_aes_authenc_hmac_sha512_init_tfm
- atmel_aes_authenc_exit_tfm
- atmel_aes_authenc_crypt
- atmel_aes_authenc_cbc_aes_encrypt
- atmel_aes_authenc_cbc_aes_decrypt
- atmel_aes_buff_init
- atmel_aes_buff_cleanup
- atmel_aes_filter
- atmel_aes_dma_init
- atmel_aes_dma_cleanup
- atmel_aes_queue_task
- atmel_aes_done_task
- atmel_aes_irq
- atmel_aes_unregister_algs
- atmel_aes_register_algs
- atmel_aes_get_cap
- atmel_aes_of_init
- atmel_aes_of_init
- atmel_aes_probe
- atmel_aes_remove
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>

#include <linux/device.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <crypto/internal/aead.h>
#include <linux/platform_data/crypto-atmel.h>
#include <dt-bindings/dma/at91.h>
#include "atmel-aes-regs.h"
#include "atmel-authenc.h"

#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define SIZE_IN_WORDS(x)	((x) >> 2)
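
/*
 * AES flags: the mode bits below mirror hardware AES_MR fields, while
 * AES_FLAGS_BUSY, AES_FLAGS_DUMP_REG and AES_FLAGS_OWN_SHA are
 * software-only flags kept in dd->flags alongside them.
 */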
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_OFB		AES_MR_OPMOD_OFB
#define AES_FLAGS_CFB128	(AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
#define AES_FLAGS_CFB64		(AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
#define AES_FLAGS_CFB32		(AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
#define AES_FLAGS_CFB16		(AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
#define AES_FLAGS_CFB8		(AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM
#define AES_FLAGS_XTS		AES_MR_OPMOD_XTS

#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)

#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)
#define AES_FLAGS_OWN_SHA	BIT(5)

#define AES_FLAGS_PERSISTENT	AES_FLAGS_BUSY

#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD	256

struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_cfb64;
	bool	has_gcm;
	bool	has_xts;
	bool	has_authenc;
	u32	max_burst_size;
};

struct atmel_aes_dev;

typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);

struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
	bool			is_aead;
};

struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};

struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	u32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
};

struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	u32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	u32			ghash[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			textlen;

	const u32		*ghash_in;
	u32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};

struct atmel_aes_xts_ctx {
	struct atmel_aes_base_ctx	base;

	u32			key2[AES_KEYSIZE_256 / sizeof(u32)];
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_ctx {
	struct atmel_aes_base_ctx	base;
	struct atmel_sha_authenc_ctx	*auth;
};
#endif

struct atmel_aes_reqctx {
	unsigned long		mode;
	u32			lastc[AES_BLOCK_SIZE / sizeof(u32)];
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
struct atmel_aes_authenc_reqctx {
	struct atmel_aes_reqctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			textlen;
	u32			digest[SHA512_DIGEST_SIZE / sizeof(u32)];

	struct ahash_request	auth_req;
};
#endif

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;

	u32			hw_version;
};

struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};

#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_CR:
		return "CR";

	case AES_MR:
		return "MR";

	case AES_ISR:
		return "ISR";

	case AES_IMR:
		return "IMR";

	case AES_IER:
		return "IER";

	case AES_IDR:
		return "IDR";

	case AES_KEYWR(0):
	case AES_KEYWR(1):
	case AES_KEYWR(2):
	case AES_KEYWR(3):
	case AES_KEYWR(4):
	case AES_KEYWR(5):
	case AES_KEYWR(6):
	case AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0):
	case AES_IDATAR(1):
	case AES_IDATAR(2):
	case AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0):
	case AES_ODATAR(1):
	case AES_ODATAR(2):
	case AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0):
	case AES_IVR(1):
	case AES_IVR(2):
	case AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_AADLENR:
		return "AADLENR";

	case AES_CLENR:
		return "CLENR";

	case AES_GHASHR(0):
	case AES_GHASHR(1):
	case AES_GHASHR(2):
	case AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0):
	case AES_TAGR(1):
	case AES_TAGR(2):
	case AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_CTRR:
		return "CTRR";

	case AES_GCMHR(0):
	case AES_GCMHR(1):
	case AES_GCMHR(2):
	case AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	case AES_EMR:
		return "EMR";

	case AES_TWR(0):
	case AES_TWR(1):
	case AES_TWR(2):
	case AES_TWR(3):
		snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
		break;

	case AES_ALPHAR(0):
	case AES_ALPHAR(1):
	case AES_ALPHAR(2):
	case AES_ALPHAR(3):
		snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
		break;

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */
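
/* Shared functions */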

static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
				   u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					u32 *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const u32 *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}

static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}

static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}

	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}

static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
	atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);

	return 0;
}

static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);
	return 0;
}

static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
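	/* Clear all but persistent flags and set the request mode. */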
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
#endif

static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	if (req->nbytes < ivsize)
		return;

	if (rctx->mode & AES_FLAGS_ENCRYPT) {
		scatterwalk_map_and_copy(req->info, req->dst,
					 req->nbytes - ivsize, ivsize, 0);
	} else {
		if (req->src == req->dst)
			memcpy(req->info, rctx->lastc, ivsize);
		else
			scatterwalk_map_and_copy(req->info, req->src,
						 req->nbytes - ivsize,
						 ivsize, 0);
	}
}

static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
	if (dd->ctx->is_aead)
		atmel_aes_authenc_complete(dd, err);
#endif

	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (!dd->ctx->is_aead)
		atmel_aes_set_iv_as_last_ciphertext_block(dd);

	if (dd->is_async)
		dd->areq->complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}

static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
				     const u32 *iv, const u32 *key, int keylen)
{
	u32 valmr = 0;

	if (keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}

static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
					const u32 *iv)
{
	atmel_aes_write_ctrl_key(dd, use_dma, iv,
				 dd->ctx->key, dd->ctx->keylen);
}

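/* CPU transfer */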

static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}

static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}

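/* DMA transfer */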

static void atmel_aes_dma_callback(void *data);

static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents + 1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}

static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	sg->length += dma->remainder;
}

static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}

static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}

static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
					enum dma_slave_buswidth addr_width,
					enum dma_transfer_direction dir,
					u32 maxburst)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_slave_config config;
	dma_async_tx_callback callback;
	struct atmel_aes_dma *dma;
	int err;

	memset(&config, 0, sizeof(config));
	config.direction = dir;
	config.src_addr_width = addr_width;
	config.dst_addr_width = addr_width;
	config.src_maxburst = maxburst;
	config.dst_maxburst = maxburst;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		callback = NULL;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		callback = atmel_aes_dma_callback;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
		break;

	default:
		return -EINVAL;
	}

	err = dmaengine_slave_config(dma->chan, &config);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;
	desc->callback_param = dd;
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);

	return 0;
}

static void atmel_aes_dma_transfer_stop(struct atmel_aes_dev *dd,
					enum dma_transfer_direction dir)
{
	struct atmel_aes_dma *dma;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		break;

	default:
		return;
	}

	dmaengine_terminate_all(dma->chan);
}

static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	enum dma_slave_buswidth addr_width;
	u32 maxburst;
	int err;

	switch (dd->ctx->block_size) {
	case CFB8_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		maxburst = 1;
		break;

	case CFB16_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		maxburst = 1;
		break;

	case CFB32_BLOCK_SIZE:
	case CFB64_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = 1;
		break;

	case AES_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = dd->caps.max_burst_size;
		break;

	default:
		err = -EINVAL;
		goto exit;
	}

	err = atmel_aes_map(dd, src, dst, len);
	if (err)
		goto exit;

	dd->resume = resume;

	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
					   maxburst);
	if (err)
		goto unmap;

	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
					   maxburst);
	if (err)
		goto output_transfer_stop;

	return -EINPROGRESS;

output_transfer_stop:
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
unmap:
	atmel_aes_unmap(dd);
exit:
	return atmel_aes_complete(dd, err);
}

static void atmel_aes_dma_stop(struct atmel_aes_dev *dd)
{
	atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV);
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
	atmel_aes_unmap(dd);
}

static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	atmel_aes_dma_stop(dd);
	dd->is_async = true;
	(void)dd->resume(dd);
}

static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	struct atmel_aes_base_ctx *ctx;
	unsigned long flags;
	bool start_async;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (areq)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!areq)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	ctx = crypto_tfm_ctx(areq->tfm);

	dd->areq = areq;
	dd->ctx = ctx;
	start_async = (areq != new_areq);
	dd->is_async = start_async;

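	/* WARNING: ctx->start() MAY change dd->is_async. */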
	err = ctx->start(dd);
	return (start_async) ? ret : err;
}

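/* AES async block ciphers */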

static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
{
	return atmel_aes_complete(dd, 0);
}

static int atmel_aes_start(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	atmel_aes_write_ctrl(dd, use_dma, req->info);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
				   atmel_aes_transfer_complete);
}

static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}

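/*
 * CTR mode: atmel_aes_ctr_transfer() splits a request whenever the low
 * 16 bits of the block counter would wrap (presumably because the IP does
 * not propagate the carry beyond 16 bits) and applies the carry to the
 * full IV in software with crypto_inc().
 */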
static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct scatterlist *src, *dst;
	size_t datalen;
	u32 ctr;
	u16 blocks, start, end;
	bool use_dma, fragmented = false;

	ctx->offset += dd->total;
	if (ctx->offset >= req->nbytes)
		return atmel_aes_transfer_complete(dd);

	datalen = req->nbytes - ctx->offset;
	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
	ctr = be32_to_cpu(ctx->iv[3]);

	start = ctr & 0xffff;
	end = start + blocks - 1;

	if (blocks >> 16 || end < start) {
		ctr |= 0xffff;
		datalen = AES_BLOCK_SIZE * (0x10000 - start);
		fragmented = true;
	}

	use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);

	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));

	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
	if (unlikely(fragmented)) {
		ctx->iv[3] = cpu_to_be32(ctr);
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
	}

	if (use_dma)
		return atmel_aes_dma_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);

	return atmel_aes_cpu_start(dd, src, dst, datalen,
				   atmel_aes_ctr_transfer);
}

static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	memcpy(ctx->iv, req->info, AES_BLOCK_SIZE);
	ctx->offset = 0;
	dd->total = 0;
	return atmel_aes_ctr_transfer(dd);
}

static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	switch (mode & AES_FLAGS_OPMODE_MASK) {
	case AES_FLAGS_CFB8:
		ctx->block_size = CFB8_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB16:
		ctx->block_size = CFB16_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB32:
		ctx->block_size = CFB32_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB64:
		ctx->block_size = CFB64_BLOCK_SIZE;
		break;

	default:
		ctx->block_size = AES_BLOCK_SIZE;
		break;
	}
	ctx->is_aead = false;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = ablkcipher_request_ctx(req);
	rctx->mode = mode;

	if (!(mode & AES_FLAGS_ENCRYPT) && (req->src == req->dst)) {
		unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

		if (req->nbytes >= ivsize)
			scatterwalk_map_and_copy(rctx->lastc, req->src,
						 req->nbytes - ivsize,
						 ivsize, 0);
	}

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB);
}

static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB);
}

static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128);
}

static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR);
}

static int atmel_aes_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_start;

	return 0;
}

static int atmel_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_ctr_start;

	return 0;
}

static struct crypto_alg aes_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "atmel-ecb-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ecb_encrypt,
		.decrypt	= atmel_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "atmel-cbc-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cbc_encrypt,
		.decrypt	= atmel_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ofb(aes)",
	.cra_driver_name	= "atmel-ofb-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ofb_encrypt,
		.decrypt	= atmel_aes_ofb_decrypt,
	}
},
{
	.cra_name		= "cfb(aes)",
	.cra_driver_name	= "atmel-cfb-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb_encrypt,
		.decrypt	= atmel_aes_cfb_decrypt,
	}
},
{
	.cra_name		= "cfb32(aes)",
	.cra_driver_name	= "atmel-cfb32-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB32_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x3,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb32_encrypt,
		.decrypt	= atmel_aes_cfb32_decrypt,
	}
},
{
	.cra_name		= "cfb16(aes)",
	.cra_driver_name	= "atmel-cfb16-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB16_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x1,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb16_encrypt,
		.decrypt	= atmel_aes_cfb16_decrypt,
	}
},
{
	.cra_name		= "cfb8(aes)",
	.cra_driver_name	= "atmel-cfb8-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB8_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb8_encrypt,
		.decrypt	= atmel_aes_cfb8_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "atmel-ctr-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctr_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_ctr_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ctr_encrypt,
		.decrypt	= atmel_aes_ctr_decrypt,
	}
},
};

static struct crypto_alg aes_cfb64_alg = {
	.cra_name		= "cfb64(aes)",
	.cra_driver_name	= "atmel-cfb64-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB64_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb64_encrypt,
		.decrypt	= atmel_aes_cfb64_decrypt,
	}
};

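/* GCM aead functions */
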
static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const u32 *ghash_in, u32 *ghash_out,
			       atmel_aes_fn_t resume);
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);

static inline struct atmel_aes_gcm_ctx *
atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
}

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const u32 *ghash_in, u32 *ghash_out,
			       atmel_aes_fn_t resume)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	dd->data = (u32 *)data;
	dd->datalen = datalen;
	ctx->ghash_in = ghash_in;
	ctx->ghash_out = ghash_out;
	ctx->ghash_resume = resume;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
}

static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	atmel_aes_write(dd, AES_AADLENR, dd->total);
	atmel_aes_write(dd, AES_CLENR, 0);

	if (ctx->ghash_in)
		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);

	return atmel_aes_gcm_ghash_finalize(dd);
}

static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	u32 isr;

	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_ghash_finalize;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);

	return ctx->ghash_resume(dd);
}

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
	size_t ivsize = crypto_aead_ivsize(tfm);
	size_t datalen, padlen;
	const void *iv = req->iv;
	u8 *data = dd->buf;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

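	/*
	 * Compute J0 (NIST SP 800-38D): for a 96-bit IV,
	 * J0 = IV || 0^31 || 1; otherwise J0 = GHASH(IV padded to a full
	 * block || 0^64 || [len(IV)]64), computed by the hardware below.
	 */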
	if (likely(ivsize == GCM_AES_IV_SIZE)) {
		memcpy(ctx->j0, iv, ivsize);
		ctx->j0[3] = cpu_to_be32(1);
		return atmel_aes_gcm_process(dd);
	}

	padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
	datalen = ivsize + padlen + AES_BLOCK_SIZE;
	if (datalen > dd->buflen)
		return atmel_aes_complete(dd, -EINVAL);

	memcpy(data, iv, ivsize);
	memset(data + ivsize, 0, padlen + sizeof(u64));
	((u64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
				   NULL, ctx->j0, atmel_aes_gcm_process);
}

static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 authsize;

	authsize = crypto_aead_authsize(tfm);
	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);

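	/*
	 * The hardware Automatic Tag Generation cannot be used when both the
	 * message and the associated data are empty, so only enable it when
	 * there is something to process; the empty case falls back to the
	 * GHASH + CTR path in atmel_aes_gcm_tag_init()/atmel_aes_gcm_tag().
	 */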
	if (likely(req->assoclen != 0 || ctx->textlen != 0))
		dd->flags |= AES_FLAGS_GTAGEN;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
}

static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	u32 j0_lsw, *j0 = ctx->j0;
	size_t padlen;

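	/* Write incr32(J0) into IV. */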
	j0_lsw = j0[3];
	j0[3] = cpu_to_be32(be32_to_cpu(j0[3]) + 1);
	atmel_aes_write_block(dd, AES_IVR(0), j0);
	j0[3] = j0_lsw;

	atmel_aes_write(dd, AES_AADLENR, req->assoclen);
	atmel_aes_write(dd, AES_CLENR, ctx->textlen);

	if (unlikely(req->assoclen == 0)) {
		dd->datalen = 0;
		return atmel_aes_gcm_data(dd);
	}

	padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
	if (unlikely(req->assoclen + padlen > dd->buflen))
		return atmel_aes_complete(dd, -EINVAL);
	sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);

	dd->data = (u32 *)dd->buf;
	dd->datalen = req->assoclen + padlen;
	return atmel_aes_gcm_data(dd);
}

static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
	struct scatterlist *src, *dst;
	u32 isr, mr;

	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_data;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (unlikely(ctx->textlen == 0))
		return atmel_aes_gcm_tag_init(dd);

	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));

	if (use_dma) {
		mr = atmel_aes_read(dd, AES_MR);
		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
		mr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			mr |= AES_MR_DUALBUFF;
		atmel_aes_write(dd, AES_MR, mr);

		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
					   atmel_aes_gcm_tag_init);
	}

	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
				   atmel_aes_gcm_tag_init);
}

static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	u64 *data = dd->buf;

	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
			dd->resume = atmel_aes_gcm_tag_init;
			atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
			return -EINPROGRESS;
		}

		return atmel_aes_gcm_finalize(dd);
	}

	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);

	data[0] = cpu_to_be64(req->assoclen * 8);
	data[1] = cpu_to_be64(ctx->textlen * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
}

static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	unsigned long flags;

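	/*
	 * Change the chaining mode to CTR and use J0 as the IV so that the
	 * final GHASH value is encrypted into the authentication tag
	 * (GCTR(J0, GHASH)).
	 */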
	flags = dd->flags;
	dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
	dd->flags |= AES_FLAGS_CTR;
	atmel_aes_write_ctrl(dd, false, ctx->j0);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
}

static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 offset, authsize, itag[4], *otag = ctx->tag;
	int err;

	if (likely(dd->flags & AES_FLAGS_GTAGEN))
		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
	else
		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);

	offset = req->assoclen + ctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
		err = 0;
	} else {
		scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
		err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
	}

	return atmel_aes_complete(dd, err);
}

static int atmel_aes_gcm_crypt(struct aead_request *req,
			       unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = aead_request_ctx(req);
	rctx->mode = AES_FLAGS_GCM | mode;

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
				     unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int atmel_aes_gcm_encrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
}

static int atmel_aes_gcm_decrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, 0);
}

static int atmel_aes_gcm_init(struct crypto_aead *tfm)
{
	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_gcm_start;

	return 0;
}

static struct aead_alg aes_gcm_alg = {
	.setkey		= atmel_aes_gcm_setkey,
	.setauthsize	= atmel_aes_gcm_setauthsize,
	.encrypt	= atmel_aes_gcm_encrypt,
	.decrypt	= atmel_aes_gcm_decrypt,
	.init		= atmel_aes_gcm_init,
	.ivsize		= GCM_AES_IV_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "atmel-gcm-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct atmel_aes_gcm_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
};

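/* XTS block cipher functions */
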
static inline struct atmel_aes_xts_ctx *
atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_xts_ctx, base);
}

static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);

static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	unsigned long flags;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	flags = dd->flags;
	dd->flags &= ~AES_FLAGS_MODE_MASK;
	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
	atmel_aes_write_ctrl_key(dd, false, NULL,
				 ctx->key2, ctx->base.keylen);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), req->info);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
}

static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD);
	u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
	static const u32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
	u8 *tweak_bytes = (u8 *)tweak;
	int i;

	atmel_aes_read_block(dd, AES_ODATAR(0), tweak);

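	/*
	 * Hardware quirk: the order of the ciphered tweak bytes has to be
	 * reversed before writing them into the TWRx registers.
	 */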
	for (i = 0; i < AES_BLOCK_SIZE / 2; ++i) {
		u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i];

		tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i];
		tweak_bytes[i] = tmp;
	}

	atmel_aes_write_ctrl(dd, use_dma, NULL);
	atmel_aes_write_block(dd, AES_TWR(0), tweak);
	atmel_aes_write_block(dd, AES_ALPHAR(0), one);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
				   atmel_aes_transfer_complete);
}

static int atmel_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_xts_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	int err;

	err = xts_check_key(crypto_ablkcipher_tfm(tfm), key, keylen);
	if (err)
		return err;

	memcpy(ctx->base.key, key, keylen/2);
	memcpy(ctx->key2, key + keylen/2, keylen/2);
	ctx->base.keylen = keylen/2;

	return 0;
}

static int atmel_aes_xts_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_xts_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS);
}

static int atmel_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_xts_start;

	return 0;
}

static struct crypto_alg aes_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "atmel-xts-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_xts_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_xts_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_xts_setkey,
		.encrypt	= atmel_aes_xts_encrypt,
		.decrypt	= atmel_aes_xts_decrypt,
	}
};

#if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)

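/* authenc aead functions */
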
1975
1976 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1977 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1978 bool is_async);
1979 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1980 bool is_async);
1981 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1982 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1983 bool is_async);
1984
1985 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
1986 {
1987 struct aead_request *req = aead_request_cast(dd->areq);
1988 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
1989
1990 if (err && (dd->flags & AES_FLAGS_OWN_SHA))
1991 atmel_sha_authenc_abort(&rctx->auth_req);
1992 dd->flags &= ~AES_FLAGS_OWN_SHA;
1993 }
1994
1995 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd)
1996 {
1997 struct aead_request *req = aead_request_cast(dd->areq);
1998 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
1999 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2000 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2001 int err;
2002
2003 atmel_aes_set_mode(dd, &rctx->base);
2004
2005 err = atmel_aes_hw_init(dd);
2006 if (err)
2007 return atmel_aes_complete(dd, err);
2008
2009 return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
2010 atmel_aes_authenc_init, dd);
2011 }
2012
2013 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
2014 bool is_async)
2015 {
2016 struct aead_request *req = aead_request_cast(dd->areq);
2017 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2018
2019 if (is_async)
2020 dd->is_async = true;
2021 if (err)
2022 return atmel_aes_complete(dd, err);
2023
2024
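/* If here, we've got the ownership of the SHA device. */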
2025 dd->flags |= AES_FLAGS_OWN_SHA;
2026
2027
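/* Configure the SHA device. */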
2028 return atmel_sha_authenc_init(&rctx->auth_req,
2029 req->src, req->assoclen,
2030 rctx->textlen,
2031 atmel_aes_authenc_transfer, dd);
2032 }
2033
2034 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
2035 bool is_async)
2036 {
2037 struct aead_request *req = aead_request_cast(dd->areq);
2038 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2039 bool enc = atmel_aes_is_encrypt(dd);
2040 struct scatterlist *src, *dst;
2041 u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
2042 u32 emr;
2043
2044 if (is_async)
2045 dd->is_async = true;
2046 if (err)
2047 return atmel_aes_complete(dd, err);
2048
2049
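/* Prepare the src and dst scatter-lists to transfer the cipher/plain texts, skipping over the associated data. */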
2050 src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
2051 dst = src;
2052
2053 if (req->src != req->dst)
2054 dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
2055
2056
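/* Configure the AES device. */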
2057 memcpy(iv, req->iv, sizeof(iv));
2058
2059
2060
2061
2062
2063
2064
2065
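/*
 * Note: the use_dma parameter of atmel_aes_write_ctrl() is always true
 * here because the authenc text is always transferred by DMA (see
 * atmel_aes_dma_start() below). AES_EMR_PLIPEN then chains the AES and
 * SHA devices ("Protocol Layer Improved Performance" mode), with
 * AES_EMR_PLIPD selecting the decryption direction.
 */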
2066 atmel_aes_write_ctrl(dd, true, iv);
2067 emr = AES_EMR_PLIPEN;
2068 if (!enc)
2069 emr |= AES_EMR_PLIPD;
2070 atmel_aes_write(dd, AES_EMR, emr);
2071
2072
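/* Transfer the text; in PLIP mode the SHA device hashes the ciphertext on the fly, then atmel_aes_authenc_digest() collects the tag. */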
2073 return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
2074 atmel_aes_authenc_digest);
2075 }
2076
2077 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
2078 {
2079 struct aead_request *req = aead_request_cast(dd->areq);
2080 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2081
2082
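/* atmel_sha_authenc_final() releases the SHA device, so give up our ownership flag before calling it. */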
2083 dd->flags &= ~AES_FLAGS_OWN_SHA;
2084 return atmel_sha_authenc_final(&rctx->auth_req,
2085 rctx->digest, sizeof(rctx->digest),
2086 atmel_aes_authenc_final, dd);
2087 }
2088
2089 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
2090 bool is_async)
2091 {
2092 struct aead_request *req = aead_request_cast(dd->areq);
2093 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2094 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2095 bool enc = atmel_aes_is_encrypt(dd);
2096 u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
2097 u32 offs, authsize;
2098
2099 if (is_async)
2100 dd->is_async = true;
2101 if (err)
2102 goto complete;
2103
2104 offs = req->assoclen + rctx->textlen;
2105 authsize = crypto_aead_authsize(tfm);
2106 if (enc) {
2107 scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
2108 } else {
2109 scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
2110 if (crypto_memneq(idigest, odigest, authsize))
2111 err = -EBADMSG;
2112 }
2113
2114 complete:
2115 return atmel_aes_complete(dd, err);
2116 }
2117
2118 static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
2119 unsigned int keylen)
2120 {
2121 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2122 struct crypto_authenc_keys keys;
2123 u32 flags;
2124 int err;
2125
2126 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
2127 goto badkey;
2128
2129 if (keys.enckeylen > sizeof(ctx->base.key))
2130 goto badkey;
2131
2132
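/* Configure the underlying SHA transform with the authentication key. */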
2133 flags = crypto_aead_get_flags(tfm);
2134 err = atmel_sha_authenc_setkey(ctx->auth,
2135 keys.authkey, keys.authkeylen,
2136 &flags);
2137 crypto_aead_set_flags(tfm, flags & CRYPTO_TFM_RES_MASK);
2138 if (err) {
2139 memzero_explicit(&keys, sizeof(keys));
2140 return err;
2141 }
2142
2143
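/* Save the AES encryption key. */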
2144 ctx->base.keylen = keys.enckeylen;
2145 memcpy(ctx->base.key, keys.enckey, keys.enckeylen);
2146
2147 memzero_explicit(&keys, sizeof(keys));
2148 return 0;
2149
2150 badkey:
2151 crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2152 memzero_explicit(&keys, sizeof(keys));
2153 return -EINVAL;
2154 }
2155
2156 static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
2157 unsigned long auth_mode)
2158 {
2159 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2160 unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();
2161
2162 ctx->auth = atmel_sha_authenc_spawn(auth_mode);
2163 if (IS_ERR(ctx->auth))
2164 return PTR_ERR(ctx->auth);
2165
2166 crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
2167 auth_reqsize));
2168 ctx->base.start = atmel_aes_authenc_start;
2169
2170 return 0;
2171 }
2172
2173 static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
2174 {
2175 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
2176 }
2177
2178 static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
2179 {
2180 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
2181 }
2182
2183 static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
2184 {
2185 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
2186 }
2187
2188 static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
2189 {
2190 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
2191 }
2192
2193 static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
2194 {
2195 return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
2196 }
2197
2198 static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
2199 {
2200 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
2201
2202 atmel_sha_authenc_free(ctx->auth);
2203 }
2204
2205 static int atmel_aes_authenc_crypt(struct aead_request *req,
2206 unsigned long mode)
2207 {
2208 struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
2209 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2210 struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
2211 u32 authsize = crypto_aead_authsize(tfm);
2212 bool enc = (mode & AES_FLAGS_ENCRYPT);
2213 struct atmel_aes_dev *dd;
2214
2215
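/* Compute the text length: on decryption, the trailing authentication tag is not part of the text. */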
2216 if (!enc && req->cryptlen < authsize)
2217 return -EINVAL;
2218 rctx->textlen = req->cryptlen - (enc ? 0 : authsize);
2219
2220
2221
2222
2223
2224
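/*
 * Currently, empty messages are not supported yet:
 * the SHA auto-padding can be used only on non-empty messages.
 */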
2225 if (!rctx->textlen && !req->assoclen)
2226 return -EINVAL;
2227
2228 rctx->base.mode = mode;
2229 ctx->block_size = AES_BLOCK_SIZE;
2230 ctx->is_aead = true;
2231
2232 dd = atmel_aes_find_dev(ctx);
2233 if (!dd)
2234 return -ENODEV;
2235
2236 return atmel_aes_handle_queue(dd, &req->base);
2237 }
2238
2239 static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
2240 {
2241 return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
2242 }
2243
2244 static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
2245 {
2246 return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
2247 }
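/*
 * A minimal usage sketch (an illustration, not part of this driver),
 * assuming the kernel's AEAD API. The key blob passed to setkey must
 * follow the crypto_authenc layout parsed by
 * crypto_authenc_extractkeys() above (an rtattr header carrying
 * enckeylen, followed by the auth key and then the enc key):
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),cbc(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_aead_setauthsize(tfm, SHA256_DIGEST_SIZE);
 *	crypto_aead_setkey(tfm, keyblob, keybloblen);
 */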
2248
2249 static struct aead_alg aes_authenc_algs[] = {
2250 {
2251 .setkey = atmel_aes_authenc_setkey,
2252 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2253 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2254 .init = atmel_aes_authenc_hmac_sha1_init_tfm,
2255 .exit = atmel_aes_authenc_exit_tfm,
2256 .ivsize = AES_BLOCK_SIZE,
2257 .maxauthsize = SHA1_DIGEST_SIZE,
2258
2259 .base = {
2260 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2261 .cra_driver_name = "atmel-authenc-hmac-sha1-cbc-aes",
2262 .cra_priority = ATMEL_AES_PRIORITY,
2263 .cra_flags = CRYPTO_ALG_ASYNC,
2264 .cra_blocksize = AES_BLOCK_SIZE,
2265 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2266 .cra_alignmask = 0xf,
2267 .cra_module = THIS_MODULE,
2268 },
2269 },
2270 {
2271 .setkey = atmel_aes_authenc_setkey,
2272 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2273 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2274 .init = atmel_aes_authenc_hmac_sha224_init_tfm,
2275 .exit = atmel_aes_authenc_exit_tfm,
2276 .ivsize = AES_BLOCK_SIZE,
2277 .maxauthsize = SHA224_DIGEST_SIZE,
2278
2279 .base = {
2280 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2281 .cra_driver_name = "atmel-authenc-hmac-sha224-cbc-aes",
2282 .cra_priority = ATMEL_AES_PRIORITY,
2283 .cra_flags = CRYPTO_ALG_ASYNC,
2284 .cra_blocksize = AES_BLOCK_SIZE,
2285 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2286 .cra_alignmask = 0xf,
2287 .cra_module = THIS_MODULE,
2288 },
2289 },
2290 {
2291 .setkey = atmel_aes_authenc_setkey,
2292 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2293 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2294 .init = atmel_aes_authenc_hmac_sha256_init_tfm,
2295 .exit = atmel_aes_authenc_exit_tfm,
2296 .ivsize = AES_BLOCK_SIZE,
2297 .maxauthsize = SHA256_DIGEST_SIZE,
2298
2299 .base = {
2300 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2301 .cra_driver_name = "atmel-authenc-hmac-sha256-cbc-aes",
2302 .cra_priority = ATMEL_AES_PRIORITY,
2303 .cra_flags = CRYPTO_ALG_ASYNC,
2304 .cra_blocksize = AES_BLOCK_SIZE,
2305 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2306 .cra_alignmask = 0xf,
2307 .cra_module = THIS_MODULE,
2308 },
2309 },
2310 {
2311 .setkey = atmel_aes_authenc_setkey,
2312 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2313 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2314 .init = atmel_aes_authenc_hmac_sha384_init_tfm,
2315 .exit = atmel_aes_authenc_exit_tfm,
2316 .ivsize = AES_BLOCK_SIZE,
2317 .maxauthsize = SHA384_DIGEST_SIZE,
2318
2319 .base = {
2320 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2321 .cra_driver_name = "atmel-authenc-hmac-sha384-cbc-aes",
2322 .cra_priority = ATMEL_AES_PRIORITY,
2323 .cra_flags = CRYPTO_ALG_ASYNC,
2324 .cra_blocksize = AES_BLOCK_SIZE,
2325 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2326 .cra_alignmask = 0xf,
2327 .cra_module = THIS_MODULE,
2328 },
2329 },
2330 {
2331 .setkey = atmel_aes_authenc_setkey,
2332 .encrypt = atmel_aes_authenc_cbc_aes_encrypt,
2333 .decrypt = atmel_aes_authenc_cbc_aes_decrypt,
2334 .init = atmel_aes_authenc_hmac_sha512_init_tfm,
2335 .exit = atmel_aes_authenc_exit_tfm,
2336 .ivsize = AES_BLOCK_SIZE,
2337 .maxauthsize = SHA512_DIGEST_SIZE,
2338
2339 .base = {
2340 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2341 .cra_driver_name = "atmel-authenc-hmac-sha512-cbc-aes",
2342 .cra_priority = ATMEL_AES_PRIORITY,
2343 .cra_flags = CRYPTO_ALG_ASYNC,
2344 .cra_blocksize = AES_BLOCK_SIZE,
2345 .cra_ctxsize = sizeof(struct atmel_aes_authenc_ctx),
2346 .cra_alignmask = 0xf,
2347 .cra_module = THIS_MODULE,
2348 },
2349 },
2350 };
2351 #endif
2352
2353
2354
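/* Probe functions */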
2355 static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
2356 {
2357 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
2358 dd->buflen = ATMEL_AES_BUFFER_SIZE;
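/* round the buffer length down to a multiple of the AES block size */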
2359 dd->buflen &= ~(AES_BLOCK_SIZE - 1);
2360
2361 if (!dd->buf) {
2362 dev_err(dd->dev, "unable to alloc pages.\n");
2363 return -ENOMEM;
2364 }
2365
2366 return 0;
2367 }
2368
2369 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
2370 {
2371 free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
2372 }
2373
2374 static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
2375 {
2376 struct at_dma_slave *sl = slave;
2377
2378 if (sl && sl->dma_dev == chan->device->dev) {
2379 chan->private = sl;
2380 return true;
2381 } else {
2382 return false;
2383 }
2384 }
2385
2386 static int atmel_aes_dma_init(struct atmel_aes_dev *dd,
2387 struct crypto_platform_data *pdata)
2388 {
2389 struct at_dma_slave *slave;
2390 dma_cap_mask_t mask;
2391
2392 dma_cap_zero(mask);
2393 dma_cap_set(DMA_SLAVE, mask);
2394
2395
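/* Try to grab 2 DMA channels */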
2396 slave = &pdata->dma_slave->rxdata;
2397 dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
2398 slave, dd->dev, "tx");
2399 if (!dd->src.chan)
2400 goto err_dma_in;
2401
2402 slave = &pdata->dma_slave->txdata;
2403 dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
2404 slave, dd->dev, "rx");
2405 if (!dd->dst.chan)
2406 goto err_dma_out;
2407
2408 return 0;
2409
2410 err_dma_out:
2411 dma_release_channel(dd->src.chan);
2412 err_dma_in:
2413 dev_warn(dd->dev, "no DMA channel available\n");
2414 return -ENODEV;
2415 }
2416
2417 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
2418 {
2419 dma_release_channel(dd->dst.chan);
2420 dma_release_channel(dd->src.chan);
2421 }
2422
2423 static void atmel_aes_queue_task(unsigned long data)
2424 {
2425 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;
2426
2427 atmel_aes_handle_queue(dd, NULL);
2428 }
2429
2430 static void atmel_aes_done_task(unsigned long data)
2431 {
2432 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;
2433
2434 dd->is_async = true;
2435 (void)dd->resume(dd);
2436 }
2437
2438 static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
2439 {
2440 struct atmel_aes_dev *aes_dd = dev_id;
2441 u32 reg;
2442
2443 reg = atmel_aes_read(aes_dd, AES_ISR);
2444 if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
2445 atmel_aes_write(aes_dd, AES_IDR, reg);
2446 if (AES_FLAGS_BUSY & aes_dd->flags)
2447 tasklet_schedule(&aes_dd->done_task);
2448 else
2449 dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
2450 return IRQ_HANDLED;
2451 }
2452
2453 return IRQ_NONE;
2454 }
2455
2456 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
2457 {
2458 int i;
2459
2460 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
2461 if (dd->caps.has_authenc)
2462 for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
2463 crypto_unregister_aead(&aes_authenc_algs[i]);
2464 #endif
2465
2466 if (dd->caps.has_xts)
2467 crypto_unregister_alg(&aes_xts_alg);
2468
2469 if (dd->caps.has_gcm)
2470 crypto_unregister_aead(&aes_gcm_alg);
2471
2472 if (dd->caps.has_cfb64)
2473 crypto_unregister_alg(&aes_cfb64_alg);
2474
2475 for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
2476 crypto_unregister_alg(&aes_algs[i]);
2477 }
2478
2479 static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
2480 {
2481 int err, i, j;
2482
2483 for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
2484 err = crypto_register_alg(&aes_algs[i]);
2485 if (err)
2486 goto err_aes_algs;
2487 }
2488
2489 if (dd->caps.has_cfb64) {
2490 err = crypto_register_alg(&aes_cfb64_alg);
2491 if (err)
2492 goto err_aes_cfb64_alg;
2493 }
2494
2495 if (dd->caps.has_gcm) {
2496 err = crypto_register_aead(&aes_gcm_alg);
2497 if (err)
2498 goto err_aes_gcm_alg;
2499 }
2500
2501 if (dd->caps.has_xts) {
2502 err = crypto_register_alg(&aes_xts_alg);
2503 if (err)
2504 goto err_aes_xts_alg;
2505 }
2506
2507 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
2508 if (dd->caps.has_authenc) {
2509 for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
2510 err = crypto_register_aead(&aes_authenc_algs[i]);
2511 if (err)
2512 goto err_aes_authenc_alg;
2513 }
2514 }
2515 #endif
2516
2517 return 0;
2518
2519 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
2520
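/*
 * The unwind below falls through on purpose: it relies on the
 * capability hierarchy set in atmel_aes_get_cap() (has_authenc implies
 * has_xts, which implies has_gcm, which implies has_cfb64), so each
 * label only unregisters algorithms that were actually registered.
 */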
2521 err_aes_authenc_alg:
2522 for (j = 0; j < i; j++)
2523 crypto_unregister_aead(&aes_authenc_algs[j]);
2524 crypto_unregister_alg(&aes_xts_alg);
2525 #endif
2526 err_aes_xts_alg:
2527 crypto_unregister_aead(&aes_gcm_alg);
2528 err_aes_gcm_alg:
2529 crypto_unregister_alg(&aes_cfb64_alg);
2530 err_aes_cfb64_alg:
2531 i = ARRAY_SIZE(aes_algs);
2532 err_aes_algs:
2533 for (j = 0; j < i; j++)
2534 crypto_unregister_alg(&aes_algs[j]);
2535
2536 return err;
2537 }
2538
2539 static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
2540 {
2541 dd->caps.has_dualbuff = 0;
2542 dd->caps.has_cfb64 = 0;
2543 dd->caps.has_gcm = 0;
2544 dd->caps.has_xts = 0;
2545 dd->caps.has_authenc = 0;
2546 dd->caps.max_burst_size = 1;
2547
2548
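/* keep only the major and minor version numbers */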
2549 switch (dd->hw_version & 0xff0) {
2550 case 0x500:
2551 dd->caps.has_dualbuff = 1;
2552 dd->caps.has_cfb64 = 1;
2553 dd->caps.has_gcm = 1;
2554 dd->caps.has_xts = 1;
2555 dd->caps.has_authenc = 1;
2556 dd->caps.max_burst_size = 4;
2557 break;
2558 case 0x200:
2559 dd->caps.has_dualbuff = 1;
2560 dd->caps.has_cfb64 = 1;
2561 dd->caps.has_gcm = 1;
2562 dd->caps.max_burst_size = 4;
2563 break;
2564 case 0x130:
2565 dd->caps.has_dualbuff = 1;
2566 dd->caps.has_cfb64 = 1;
2567 dd->caps.max_burst_size = 4;
2568 break;
2569 case 0x120:
2570 break;
2571 default:
2572 dev_warn(dd->dev,
2573 "Unmanaged aes version, set minimum capabilities\n");
2574 break;
2575 }
2576 }
2577
2578 #if defined(CONFIG_OF)
2579 static const struct of_device_id atmel_aes_dt_ids[] = {
2580 { .compatible = "atmel,at91sam9g46-aes" },
2581 { }
2582 };
2583 MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);
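/*
 * An illustrative device-tree node matched by the table above; the
 * address and specifiers below are hypothetical, see the SoC binding
 * for the actual values:
 *
 *	aes@f8038000 {
 *		compatible = "atmel,at91sam9g46-aes";
 *		reg = <0xf8038000 0x100>;
 *		interrupts = <...>;
 *		dmas = <...>, <...>;
 *		dma-names = "tx", "rx";
 *		clocks = <&aes_clk>;
 *		clock-names = "aes_clk";
 *	};
 */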
2584
2585 static struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
2586 {
2587 struct device_node *np = pdev->dev.of_node;
2588 struct crypto_platform_data *pdata;
2589
2590 if (!np) {
2591 dev_err(&pdev->dev, "device node not found\n");
2592 return ERR_PTR(-EINVAL);
2593 }
2594
2595 pdata = devm_kzalloc(&pdev->dev, sizeof(*pdata), GFP_KERNEL);
2596 if (!pdata)
2597 return ERR_PTR(-ENOMEM);
2598
2599 pdata->dma_slave = devm_kzalloc(&pdev->dev,
2600 sizeof(*(pdata->dma_slave)),
2601 GFP_KERNEL);
2602 if (!pdata->dma_slave) {
2603 devm_kfree(&pdev->dev, pdata);
2604 return ERR_PTR(-ENOMEM);
2605 }
2606
2607 return pdata;
2608 }
2609 #else
2610 static inline struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
2611 {
2612 return ERR_PTR(-EINVAL);
2613 }
2614 #endif
2615
2616 static int atmel_aes_probe(struct platform_device *pdev)
2617 {
2618 struct atmel_aes_dev *aes_dd;
2619 struct crypto_platform_data *pdata;
2620 struct device *dev = &pdev->dev;
2621 struct resource *aes_res;
2622 int err;
2623
2624 pdata = pdev->dev.platform_data;
2625 if (!pdata) {
2626 pdata = atmel_aes_of_init(pdev);
2627 if (IS_ERR(pdata)) {
2628 err = PTR_ERR(pdata);
2629 goto aes_dd_err;
2630 }
2631 }
2632
2633 if (!pdata->dma_slave) {
2634 err = -ENXIO;
2635 goto aes_dd_err;
2636 }
2637
2638 aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
2639 if (!aes_dd) {
2640 err = -ENOMEM;
2641 goto aes_dd_err;
2642 }
2643
2644 aes_dd->dev = dev;
2645
2646 platform_set_drvdata(pdev, aes_dd);
2647
2648 INIT_LIST_HEAD(&aes_dd->list);
2649 spin_lock_init(&aes_dd->lock);
2650
2651 tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
2652 (unsigned long)aes_dd);
2653 tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
2654 (unsigned long)aes_dd);
2655
2656 crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);
2657
2658
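/* Get the base address */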
2659 aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
2660 if (!aes_res) {
2661 dev_err(dev, "no MEM resource info\n");
2662 err = -ENODEV;
2663 goto res_err;
2664 }
2665 aes_dd->phys_base = aes_res->start;
2666
2667
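/* Get the IRQ */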
2668 aes_dd->irq = platform_get_irq(pdev, 0);
2669 if (aes_dd->irq < 0) {
2670 err = aes_dd->irq;
2671 goto res_err;
2672 }
2673
2674 err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
2675 IRQF_SHARED, "atmel-aes", aes_dd);
2676 if (err) {
2677 dev_err(dev, "unable to request aes irq.\n");
2678 goto res_err;
2679 }
2680
2681
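/* Get the peripheral clock */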
2682 aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
2683 if (IS_ERR(aes_dd->iclk)) {
2684 dev_err(dev, "clock initialization failed.\n");
2685 err = PTR_ERR(aes_dd->iclk);
2686 goto res_err;
2687 }
2688
2689 aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
2690 if (IS_ERR(aes_dd->io_base)) {
2691 dev_err(dev, "can't ioremap\n");
2692 err = PTR_ERR(aes_dd->io_base);
2693 goto res_err;
2694 }
2695
2696 err = clk_prepare(aes_dd->iclk);
2697 if (err)
2698 goto res_err;
2699
2700 err = atmel_aes_hw_version_init(aes_dd);
2701 if (err)
2702 goto iclk_unprepare;
2703
2704 atmel_aes_get_cap(aes_dd);
2705
2706 #if IS_ENABLED(CONFIG_CRYPTO_DEV_ATMEL_AUTHENC)
2707 if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
2708 err = -EPROBE_DEFER;
2709 goto iclk_unprepare;
2710 }
2711 #endif
2712
2713 err = atmel_aes_buff_init(aes_dd);
2714 if (err)
2715 goto err_aes_buff;
2716
2717 err = atmel_aes_dma_init(aes_dd, pdata);
2718 if (err)
2719 goto err_aes_dma;
2720
2721 spin_lock(&atmel_aes.lock);
2722 list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
2723 spin_unlock(&atmel_aes.lock);
2724
2725 err = atmel_aes_register_algs(aes_dd);
2726 if (err)
2727 goto err_algs;
2728
2729 dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
2730 dma_chan_name(aes_dd->src.chan),
2731 dma_chan_name(aes_dd->dst.chan));
2732
2733 return 0;
2734
2735 err_algs:
2736 spin_lock(&atmel_aes.lock);
2737 list_del(&aes_dd->list);
2738 spin_unlock(&atmel_aes.lock);
2739 atmel_aes_dma_cleanup(aes_dd);
2740 err_aes_dma:
2741 atmel_aes_buff_cleanup(aes_dd);
2742 err_aes_buff:
2743 iclk_unprepare:
2744 clk_unprepare(aes_dd->iclk);
2745 res_err:
2746 tasklet_kill(&aes_dd->done_task);
2747 tasklet_kill(&aes_dd->queue_task);
2748 aes_dd_err:
2749 if (err != -EPROBE_DEFER)
2750 dev_err(dev, "initialization failed.\n");
2751
2752 return err;
2753 }
2754
2755 static int atmel_aes_remove(struct platform_device *pdev)
2756 {
2757 struct atmel_aes_dev *aes_dd;
2758
2759 aes_dd = platform_get_drvdata(pdev);
2760 if (!aes_dd)
2761 return -ENODEV;
2762 spin_lock(&atmel_aes.lock);
2763 list_del(&aes_dd->list);
2764 spin_unlock(&atmel_aes.lock);
2765
2766 atmel_aes_unregister_algs(aes_dd);
2767
2768 tasklet_kill(&aes_dd->done_task);
2769 tasklet_kill(&aes_dd->queue_task);
2770
2771 atmel_aes_dma_cleanup(aes_dd);
2772 atmel_aes_buff_cleanup(aes_dd);
2773
2774 clk_unprepare(aes_dd->iclk);
2775
2776 return 0;
2777 }
2778
2779 static struct platform_driver atmel_aes_driver = {
2780 .probe = atmel_aes_probe,
2781 .remove = atmel_aes_remove,
2782 .driver = {
2783 .name = "atmel_aes",
2784 .of_match_table = of_match_ptr(atmel_aes_dt_ids),
2785 },
2786 };
2787
2788 module_platform_driver(atmel_aes_driver);
2789
2790 MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
2791 MODULE_LICENSE("GPL v2");
2792 MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");